diff --git a/gcc/fortran/trans-intrinsic.c b/gcc/fortran/trans-intrinsic.c
index ac61c096a99e064d90bd3a5009e5502e701e4d3e..0e7c60a906bceeab7f3361950c8dc664e438af66 100644
--- a/gcc/fortran/trans-intrinsic.c
+++ b/gcc/fortran/trans-intrinsic.c
@@ -1,5 +1,5 @@
 /* Intrinsic translation
-   Copyright (C) 2002-2015 Free Software Foundation, Inc.
+   Copyright (C) 2002-2021 Free Software Foundation, Inc.
    Contributed by Paul Brook <paul@nowt.org>
    and Steven Bosscher <s.bosscher@student.tudelft.nl>
 
@@ -24,27 +24,25 @@ along with GCC; see the file COPYING3.  If not see
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
+#include "memmodel.h"
 #include "tm.h"                /* For UNITS_PER_WORD.  */
-#include "alias.h"
 #include "tree.h"
-#include "fold-const.h"
+#include "gfortran.h"
+#include "trans.h"
 #include "stringpool.h"
+#include "fold-const.h"
+#include "internal-fn.h"
 #include "tree-nested.h"
 #include "stor-layout.h"
-#include "gfortran.h"
-#include "diagnostic-core.h"   /* For internal_error.  */
 #include "toplev.h"    /* For rest_of_decl_compilation.  */
-#include "flags.h"
 #include "arith.h"
-#include "intrinsic.h"
-#include "trans.h"
 #include "trans-const.h"
 #include "trans-types.h"
 #include "trans-array.h"
 #include "dependency.h"        /* For CAF array alias analysis.  */
+#include "attribs.h"
+
 /* Only for gfc_trans_assign and gfc_trans_pointer_assign.  */
-#include "trans-stmt.h"
-#include "tree-nested.h"
 
 /* This maps Fortran intrinsic math functions to external library or GCC
    builtin functions.  */
@@ -124,6 +122,9 @@ static GTY(()) gfc_intrinsic_map_t gfc_intrinsic_map[] =
 
   /* Functions in libgfortran.  */
   LIB_FUNCTION (ERFC_SCALED, "erfc_scaled", false),
+  LIB_FUNCTION (SIND, "sind", false),
+  LIB_FUNCTION (COSD, "cosd", false),
+  LIB_FUNCTION (TAND, "tand", false),
 
   /* End the list.  */
   LIB_FUNCTION (NONE, NULL, false)
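
The three new LIB_FUNCTION entries route the degree-valued trigonometric intrinsics SIND, COSD and TAND to libgfortran rather than to GCC builtins.  As a rough, hedged illustration of what such a runtime entry point computes (the actual libgfortran routines are not shown here and may use a more careful argument reduction), a portable C sketch of a degrees-based sine looks like this:

#include <math.h>

/* Sketch only, not libgfortran's real "sind": sine of an angle given
   in degrees, evaluated by scaling the argument into radians.  */
static const double deg_to_rad = 3.14159265358979323846 / 180.0;

static double
sind_sketch (double degrees)
{
  return sin (degrees * deg_to_rad);
}
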
@@ -155,7 +156,7 @@ builtin_decl_for_precision (enum built_in_function base_built_in,
     i = m->double_built_in;
   else if (precision == TYPE_PRECISION (long_double_type_node))
     i = m->long_double_built_in;
-  else if (precision == TYPE_PRECISION (float128_type_node))
+  else if (precision == TYPE_PRECISION (gfc_float128_type_node))
     {
       /* Special treatment, because it is not exactly a built-in, but
         a library function.  */
@@ -363,7 +364,7 @@ build_fixbound_expr (stmtblock_t * pblock, tree arg, tree type, int up)
 
   tmp = convert (argtype, intval);
   cond = fold_build2_loc (input_location, up ? GE_EXPR : LE_EXPR,
-                         boolean_type_node, tmp, arg);
+                         logical_type_node, tmp, arg);
 
   tmp = fold_build2_loc (input_location, up ? PLUS_EXPR : MINUS_EXPR, type,
                         intval, build_int_cst (type, 1));
@@ -396,11 +397,24 @@ build_round_expr (tree arg, tree restype)
     fn = builtin_decl_for_precision (BUILT_IN_LROUND, argprec);
   else if (resprec <= LONG_LONG_TYPE_SIZE)
     fn = builtin_decl_for_precision (BUILT_IN_LLROUND, argprec);
+  else if (resprec >= argprec && resprec == 128)
+    {
+      /* Search for a real kind suitable as temporary for conversion.  */
+      int kind = -1;
+      for (int i = 0; kind < 0 && gfc_real_kinds[i].kind != 0; i++)
+       if (gfc_real_kinds[i].mode_precision >= resprec)
+         kind = gfc_real_kinds[i].kind;
+      if (kind < 0)
+       gfc_internal_error ("Could not find real kind with at least %d bits",
+                           resprec);
+      arg = fold_convert (gfc_float128_type_node, arg);
+      fn = gfc_builtin_decl_for_float_kind (BUILT_IN_ROUND, kind);
+    }
   else
     gcc_unreachable ();
 
-  return fold_convert (restype, build_call_expr_loc (input_location,
-                                                fn, 1, arg));
+  return convert (restype, build_call_expr_loc (input_location,
+                                               fn, 1, arg));
 }
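
The new branch in build_round_expr covers result kinds wider than long long, where no lround/llround builtin applies: the argument is converted to a sufficiently precise real kind, rounded there with BUILT_IN_ROUND, and only then converted to the integer result type.  A minimal stand-alone C sketch of the same idea, assuming GCC's __int128 extension and using roundl as a stand-in for the kind the loop above would select:

#include <math.h>

/* Sketch only: round in floating point first, then convert to the wide
   integer type, mirroring the 128-bit fallback added above.  */
static __int128
round_to_int128 (long double x)
{
  return (__int128) roundl (x);
}
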
 
 
@@ -416,19 +430,15 @@ build_fix_expr (stmtblock_t * pblock, tree arg, tree type,
     {
     case RND_FLOOR:
       return build_fixbound_expr (pblock, arg, type, 0);
-      break;
 
     case RND_CEIL:
       return build_fixbound_expr (pblock, arg, type, 1);
-      break;
 
     case RND_ROUND:
       return build_round_expr (arg, type);
-      break;
 
     case RND_TRUNC:
       return fold_build1_loc (input_location, FIX_TRUNC_EXPR, type, arg);
-      break;
 
     default:
       gcc_unreachable ();
@@ -499,14 +509,14 @@ gfc_conv_intrinsic_aint (gfc_se * se, gfc_expr * expr, enum rounding_mode op)
   n = gfc_validate_kind (BT_INTEGER, kind, false);
   mpfr_set_z (huge, gfc_integer_kinds[n].huge, GFC_RND_MODE);
   tmp = gfc_conv_mpfr_to_tree (huge, kind, 0);
-  cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node, arg[0],
+  cond = fold_build2_loc (input_location, LT_EXPR, logical_type_node, arg[0],
                          tmp);
 
   mpfr_neg (huge, huge, GFC_RND_MODE);
   tmp = gfc_conv_mpfr_to_tree (huge, kind, 0);
-  tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node, arg[0],
+  tmp = fold_build2_loc (input_location, GT_EXPR, logical_type_node, arg[0],
                         tmp);
-  cond = fold_build2_loc (input_location, TRUTH_AND_EXPR, boolean_type_node,
+  cond = fold_build2_loc (input_location, TRUTH_AND_EXPR, logical_type_node,
                          cond, tmp);
   itype = gfc_get_int_type (kind);
 
@@ -605,7 +615,77 @@ define_quad_builtin (const char *name, tree type, bool is_const)
   return fndecl;
 }
 
+/* Add the SIMD attribute to built-in FNDECL if the built-in's
+   name is listed in gfc_vectorized_builtins.  */
+
+static void
+add_simd_flag_for_built_in (tree fndecl)
+{
+  if (gfc_vectorized_builtins == NULL
+      || fndecl == NULL_TREE)
+    return;
+
+  const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
+  int *clauses = gfc_vectorized_builtins->get (name);
+  if (clauses)
+    {
+      for (unsigned i = 0; i < 3; i++)
+       if (*clauses & (1 << i))
+         {
+           gfc_simd_clause simd_type = (gfc_simd_clause)*clauses;
+           tree omp_clause = NULL_TREE;
+           if (simd_type == SIMD_NONE)
+             ; /* No SIMD clause.  */
+           else
+             {
+               omp_clause_code code
+                 = (simd_type == SIMD_INBRANCH
+                    ? OMP_CLAUSE_INBRANCH : OMP_CLAUSE_NOTINBRANCH);
+               omp_clause = build_omp_clause (UNKNOWN_LOCATION, code);
+               omp_clause = build_tree_list (NULL_TREE, omp_clause);
+             }
+
+           DECL_ATTRIBUTES (fndecl)
+             = tree_cons (get_identifier ("omp declare simd"), omp_clause,
+                          DECL_ATTRIBUTES (fndecl));
+         }
+    }
+}
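
The attribute attached here is the internal "omp declare simd" attribute, the same one the C family front ends create for __attribute__ ((simd)) or #pragma omp declare simd; gfc_vectorized_builtins is populated from !GCC$ builtin (...) attributes simd (...) directives seen during parsing.  For comparison, a hedged C declaration that ends up carrying the equivalent attribute with a notinbranch clause:

#include <math.h>

/* C analogue, for illustration only: this redeclaration of sin gets the
   "omp declare simd" attribute with the notinbranch clause, much like
   the tree_cons above attaches it to the gfortran built-in decl.  */
__attribute__ ((simd ("notinbranch"))) double sin (double x);
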
+
+/* Set the SIMD attribute on all built-in functions that are mentioned
+   in the gfc_vectorized_builtins map.  */
+
+void
+gfc_adjust_builtins (void)
+{
+  gfc_intrinsic_map_t *m;
+  for (m = gfc_intrinsic_map;
+       m->id != GFC_ISYM_NONE || m->double_built_in != END_BUILTINS; m++)
+    {
+      add_simd_flag_for_built_in (m->real4_decl);
+      add_simd_flag_for_built_in (m->complex4_decl);
+      add_simd_flag_for_built_in (m->real8_decl);
+      add_simd_flag_for_built_in (m->complex8_decl);
+      add_simd_flag_for_built_in (m->real10_decl);
+      add_simd_flag_for_built_in (m->complex10_decl);
+      add_simd_flag_for_built_in (m->real16_decl);
+      add_simd_flag_for_built_in (m->complex16_decl);
+    }
+
+  /* Release all strings.  */
+  if (gfc_vectorized_builtins != NULL)
+    {
+      for (hash_map<nofree_string_hash, int>::iterator it
+          = gfc_vectorized_builtins->begin ();
+          it != gfc_vectorized_builtins->end (); ++it)
+       free (CONST_CAST (char *, (*it).first));
 
+      delete gfc_vectorized_builtins;
+      gfc_vectorized_builtins = NULL;
+    }
+}
 
 /* Initialize function decls for library functions.  The external functions
    are created as required.  Builtin functions are added here.  */
@@ -627,8 +707,8 @@ gfc_build_intrinsic_lib_fndecls (void)
 
     memset (quad_decls, 0, sizeof(tree) * (END_BUILTINS + 1));
 
-    type = float128_type_node;
-    complex_type = complex_float128_type_node;
+    type = gfc_float128_type_node;
+    complex_type = gfc_complex_float128_type_node;
     /* type (*) (type) */
     func_1 = build_function_type_list (type, type, NULL_TREE);
     /* int (*) (type) */
@@ -873,7 +953,7 @@ gfc_conv_intrinsic_lib_function (gfc_se * se, gfc_expr * expr)
   fndecl = gfc_get_intrinsic_lib_fndecl (m, expr);
   rettype = TREE_TYPE (TREE_TYPE (fndecl));
 
-  fndecl = build_addr (fndecl, current_function_decl);
+  fndecl = build_addr (fndecl);
   se->expr = build_call_array_loc (input_location, rettype, fndecl, num_args, args);
 }
 
@@ -894,7 +974,7 @@ gfc_trans_same_strlen_check (const char* intr_name, locus* where,
     return;
 
   /* Compare the two string lengths.  */
-  cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, a, b);
+  cond = fold_build2_loc (input_location, NE_EXPR, logical_type_node, a, b);
 
   /* Output the runtime-check.  */
   name = gfc_build_cstring_const (intr_name);
@@ -988,7 +1068,7 @@ conv_caf_vector_subscript_elem (stmtblock_t *block, int i, tree desc,
 
   if (vector != NULL_TREE)
     {
-      /* Set dim.lower/upper/stride.  */
+      /* Set vector and kind.  */
       field = gfc_advance_chain (TYPE_FIELDS (type), 0);
       tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
                         desc, field, NULL_TREE);
@@ -1000,7 +1080,7 @@ conv_caf_vector_subscript_elem (stmtblock_t *block, int i, tree desc,
     }
   else
     {
-      /* Set vector and kind.  */
+      /* Set dim.lower/upper/stride.  */
       field = gfc_advance_chain (TYPE_FIELDS (type), 0);
       tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
                             desc, field, NULL_TREE);
@@ -1100,16 +1180,504 @@ conv_caf_vector_subscript (stmtblock_t *block, tree desc, gfc_array_ref *ar)
 }
 
 
+static tree
+compute_component_offset (tree field, tree type)
+{
+  tree tmp;
+  if (DECL_FIELD_BIT_OFFSET (field) != NULL_TREE
+      && !integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
+    {
+      tmp = fold_build2 (TRUNC_DIV_EXPR, type,
+                        DECL_FIELD_BIT_OFFSET (field),
+                        bitsize_unit_node);
+      return fold_build2 (PLUS_EXPR, type, DECL_FIELD_OFFSET (field), tmp);
+    }
+  else
+    return DECL_FIELD_OFFSET (field);
+}
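
compute_component_offset folds a FIELD_DECL's bit offset into its byte offset so the coarray runtime can address the component directly.  Outside of GCC's tree representation the arithmetic is simple; a minimal sketch, assuming 8-bit bytes (BITS_PER_UNIT on common targets):

/* Sketch only: combine a component's byte offset and bit offset into a
   single byte offset, as compute_component_offset does with
   DECL_FIELD_OFFSET and DECL_FIELD_BIT_OFFSET.  */
static long
component_byte_offset (long byte_offset, long bit_offset)
{
  return byte_offset + bit_offset / 8;
}
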
+
+
+static tree
+conv_expr_ref_to_caf_ref (stmtblock_t *block, gfc_expr *expr)
+{
+  gfc_ref *ref = expr->ref, *last_comp_ref;
+  tree caf_ref = NULL_TREE, prev_caf_ref = NULL_TREE, reference_type, tmp, tmp2,
+      field, last_type, inner_struct, mode, mode_rhs, dim_array, dim, dim_type,
+      start, end, stride, vector, nvec;
+  gfc_se se;
+  bool ref_static_array = false;
+  tree last_component_ref_tree = NULL_TREE;
+  int i, last_type_n;
+
+  if (expr->symtree)
+    {
+      last_component_ref_tree = expr->symtree->n.sym->backend_decl;
+      ref_static_array = !expr->symtree->n.sym->attr.allocatable
+         && !expr->symtree->n.sym->attr.pointer;
+    }
+
+  /* Prevent uninit-warning.  */
+  reference_type = NULL_TREE;
+
+  /* Skip refs up to the first coarray-ref.  */
+  last_comp_ref = NULL;
+  while (ref && (ref->type != REF_ARRAY || ref->u.ar.codimen == 0))
+    {
+      /* Remember the type of components skipped.  */
+      if (ref->type == REF_COMPONENT)
+       last_comp_ref = ref;
+      ref = ref->next;
+    }
+  /* When a component was skipped, get the type information of the last
+     component ref, else get the type from the symbol.  */
+  if (last_comp_ref)
+    {
+      last_type = gfc_typenode_for_spec (&last_comp_ref->u.c.component->ts);
+      last_type_n = last_comp_ref->u.c.component->ts.type;
+    }
+  else
+    {
+      last_type = gfc_typenode_for_spec (&expr->symtree->n.sym->ts);
+      last_type_n = expr->symtree->n.sym->ts.type;
+    }
+
+  while (ref)
+    {
+      if (ref->type == REF_ARRAY && ref->u.ar.codimen > 0
+         && ref->u.ar.dimen == 0)
+       {
+         /* Skip pure coindexes.  */
+         ref = ref->next;
+         continue;
+       }
+      tmp = gfc_create_var (gfc_get_caf_reference_type (), "caf_ref");
+      reference_type = TREE_TYPE (tmp);
+
+      if (caf_ref == NULL_TREE)
+       caf_ref = tmp;
+
+      /* Construct the chain of refs.  */
+      if (prev_caf_ref != NULL_TREE)
+       {
+         field = gfc_advance_chain (TYPE_FIELDS (reference_type), 0);
+         tmp2 = fold_build3_loc (input_location, COMPONENT_REF,
+                                 TREE_TYPE (field), prev_caf_ref, field,
+                                 NULL_TREE);
+         gfc_add_modify (block, tmp2, gfc_build_addr_expr (TREE_TYPE (field),
+                                                           tmp));
+       }
+      prev_caf_ref = tmp;
+
+      switch (ref->type)
+       {
+       case REF_COMPONENT:
+         last_type = gfc_typenode_for_spec (&ref->u.c.component->ts);
+         last_type_n = ref->u.c.component->ts.type;
+         /* Set the type of the ref.  */
+         field = gfc_advance_chain (TYPE_FIELDS (reference_type), 1);
+         tmp = fold_build3_loc (input_location, COMPONENT_REF,
+                                TREE_TYPE (field), prev_caf_ref, field,
+                                NULL_TREE);
+         gfc_add_modify (block, tmp, build_int_cst (integer_type_node,
+                                                    GFC_CAF_REF_COMPONENT));
+
+         /* Ref the c in union u.  */
+         field = gfc_advance_chain (TYPE_FIELDS (reference_type), 3);
+         tmp = fold_build3_loc (input_location, COMPONENT_REF,
+                                TREE_TYPE (field), prev_caf_ref, field,
+                                NULL_TREE);
+         field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (field)), 0);
+         inner_struct = fold_build3_loc (input_location, COMPONENT_REF,
+                                      TREE_TYPE (field), tmp, field,
+                                      NULL_TREE);
+
+         /* Set the offset.  */
+         field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (inner_struct)), 0);
+         tmp = fold_build3_loc (input_location, COMPONENT_REF,
+                                TREE_TYPE (field), inner_struct, field,
+                                NULL_TREE);
+         /* Computing the offset is somewhat harder.  The bit_offset has to be
+            taken into account.  When the bit_offset in the field_decl is non-
+            null, divide it by the bitsize_unit and add it to the regular
+            offset.  */
+         tmp2 = compute_component_offset (ref->u.c.component->backend_decl,
+                                          TREE_TYPE (tmp));
+         gfc_add_modify (block, tmp, fold_convert (TREE_TYPE (tmp), tmp2));
+
+         /* Set caf_token_offset.  */
+         field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (inner_struct)), 1);
+         tmp = fold_build3_loc (input_location, COMPONENT_REF,
+                                TREE_TYPE (field), inner_struct, field,
+                                NULL_TREE);
+         if ((ref->u.c.component->attr.allocatable
+              || ref->u.c.component->attr.pointer)
+             && ref->u.c.component->attr.dimension)
+           {
+             tree arr_desc_token_offset;
+             /* Get the token field from the descriptor.  */
+             arr_desc_token_offset = TREE_OPERAND (
+                   gfc_conv_descriptor_token (ref->u.c.component->backend_decl), 1);
+             arr_desc_token_offset
+                 = compute_component_offset (arr_desc_token_offset,
+                                             TREE_TYPE (tmp));
+             tmp2 = fold_build2_loc (input_location, PLUS_EXPR,
+                                     TREE_TYPE (tmp2), tmp2,
+                                     arr_desc_token_offset);
+           }
+         else if (ref->u.c.component->caf_token)
+           tmp2 = compute_component_offset (ref->u.c.component->caf_token,
+                                            TREE_TYPE (tmp));
+         else
+           tmp2 = integer_zero_node;
+         gfc_add_modify (block, tmp, fold_convert (TREE_TYPE (tmp), tmp2));
+
+         /* Remember whether this ref was to a non-allocatable/non-pointer
+            component so the next array ref can be tailored correctly.  */
+         ref_static_array = !ref->u.c.component->attr.allocatable
+             && !ref->u.c.component->attr.pointer;
+         last_component_ref_tree = ref_static_array
+             ? ref->u.c.component->backend_decl : NULL_TREE;
+         break;
+       case REF_ARRAY:
+         if (ref_static_array && ref->u.ar.as->type == AS_DEFERRED)
+           ref_static_array = false;
+         /* Set the type of the ref.  */
+         field = gfc_advance_chain (TYPE_FIELDS (reference_type), 1);
+         tmp = fold_build3_loc (input_location, COMPONENT_REF,
+                                TREE_TYPE (field), prev_caf_ref, field,
+                                NULL_TREE);
+         gfc_add_modify (block, tmp, build_int_cst (integer_type_node,
+                                                    ref_static_array
+                                                    ? GFC_CAF_REF_STATIC_ARRAY
+                                                    : GFC_CAF_REF_ARRAY));
+
+         /* Ref the a in union u.  */
+         field = gfc_advance_chain (TYPE_FIELDS (reference_type), 3);
+         tmp = fold_build3_loc (input_location, COMPONENT_REF,
+                                TREE_TYPE (field), prev_caf_ref, field,
+                                NULL_TREE);
+         field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (field)), 1);
+         inner_struct = fold_build3_loc (input_location, COMPONENT_REF,
+                                      TREE_TYPE (field), tmp, field,
+                                      NULL_TREE);
+
+         /* Set the static_array_type in a for static arrays.  */
+         if (ref_static_array)
+           {
+             field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (inner_struct)),
+                                        1);
+             tmp = fold_build3_loc (input_location, COMPONENT_REF,
+                                    TREE_TYPE (field), inner_struct, field,
+                                    NULL_TREE);
+             gfc_add_modify (block, tmp, build_int_cst (TREE_TYPE (tmp),
+                                                        last_type_n));
+           }
+         /* Ref the mode in the inner_struct.  */
+         field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (inner_struct)), 0);
+         mode = fold_build3_loc (input_location, COMPONENT_REF,
+                                 TREE_TYPE (field), inner_struct, field,
+                                 NULL_TREE);
+         /* Ref the dim in the inner_struct.  */
+         field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (inner_struct)), 2);
+         dim_array = fold_build3_loc (input_location, COMPONENT_REF,
+                                      TREE_TYPE (field), inner_struct, field,
+                                      NULL_TREE);
+         for (i = 0; i < ref->u.ar.dimen; ++i)
+           {
+             /* Ref dim i.  */
+             dim = gfc_build_array_ref (dim_array, gfc_rank_cst[i], NULL_TREE);
+             dim_type = TREE_TYPE (dim);
+             mode_rhs = start = end = stride = NULL_TREE;
+             switch (ref->u.ar.dimen_type[i])
+               {
+               case DIMEN_RANGE:
+                 if (ref->u.ar.end[i])
+                   {
+                     gfc_init_se (&se, NULL);
+                     gfc_conv_expr (&se, ref->u.ar.end[i]);
+                     gfc_add_block_to_block (block, &se.pre);
+                     if (ref_static_array)
+                       {
+                         /* Make the index zero-based, when reffing a static
+                            array.  */
+                         end = se.expr;
+                         gfc_init_se (&se, NULL);
+                         gfc_conv_expr (&se, ref->u.ar.as->lower[i]);
+                         gfc_add_block_to_block (block, &se.pre);
+                         se.expr = fold_build2 (MINUS_EXPR,
+                                                gfc_array_index_type,
+                                                end, fold_convert (
+                                                  gfc_array_index_type,
+                                                  se.expr));
+                       }
+                     end = gfc_evaluate_now (fold_convert (
+                                               gfc_array_index_type,
+                                               se.expr),
+                                             block);
+                   }
+                 else if (ref_static_array)
+                   end = fold_build2 (MINUS_EXPR,
+                                      gfc_array_index_type,
+                                      gfc_conv_array_ubound (
+                                        last_component_ref_tree, i),
+                                      gfc_conv_array_lbound (
+                                        last_component_ref_tree, i));
+                 else
+                   {
+                     end = NULL_TREE;
+                     mode_rhs = build_int_cst (unsigned_char_type_node,
+                                               GFC_CAF_ARR_REF_OPEN_END);
+                   }
+                 if (ref->u.ar.stride[i])
+                   {
+                     gfc_init_se (&se, NULL);
+                     gfc_conv_expr (&se, ref->u.ar.stride[i]);
+                     gfc_add_block_to_block (block, &se.pre);
+                     stride = gfc_evaluate_now (fold_convert (
+                                                  gfc_array_index_type,
+                                                  se.expr),
+                                                block);
+                     if (ref_static_array)
+                       {
+                         /* Make the index zero-based, when reffing a static
+                            array.  */
+                         stride = fold_build2 (MULT_EXPR,
+                                               gfc_array_index_type,
+                                               gfc_conv_array_stride (
+                                                 last_component_ref_tree,
+                                                 i),
+                                               stride);
+                         gcc_assert (end != NULL_TREE);
+                         /* Multiply by the product of the array's stride and
+                            the step of the ref to get a virtual upper bound.
+                            We cannot compute the actual upper bound here or
+                            the caflib would compute the extent
+                            incorrectly.  */
+                         end = fold_build2 (MULT_EXPR, gfc_array_index_type,
+                                            end, gfc_conv_array_stride (
+                                              last_component_ref_tree,
+                                              i));
+                         end = gfc_evaluate_now (end, block);
+                         stride = gfc_evaluate_now (stride, block);
+                       }
+                   }
+                 else if (ref_static_array)
+                   {
+                     stride = gfc_conv_array_stride (last_component_ref_tree,
+                                                     i);
+                     end = fold_build2 (MULT_EXPR, gfc_array_index_type,
+                                        end, stride);
+                     end = gfc_evaluate_now (end, block);
+                   }
+                 else
+                   /* Always set a ref stride of one to make caflib's
+                      handling easier.  */
+                   stride = gfc_index_one_node;
+
+                 /* Fall through.  */
+               case DIMEN_ELEMENT:
+                 if (ref->u.ar.start[i])
+                   {
+                     gfc_init_se (&se, NULL);
+                     gfc_conv_expr (&se, ref->u.ar.start[i]);
+                     gfc_add_block_to_block (block, &se.pre);
+                     if (ref_static_array)
+                       {
+                         /* Make the index zero-based, when reffing a static
+                            array.  */
+                         start = fold_convert (gfc_array_index_type, se.expr);
+                         gfc_init_se (&se, NULL);
+                         gfc_conv_expr (&se, ref->u.ar.as->lower[i]);
+                         gfc_add_block_to_block (block, &se.pre);
+                         se.expr = fold_build2 (MINUS_EXPR,
+                                                gfc_array_index_type,
+                                                start, fold_convert (
+                                                  gfc_array_index_type,
+                                                  se.expr));
+                         /* Multiply with the stride.  */
+                         se.expr = fold_build2 (MULT_EXPR,
+                                                gfc_array_index_type,
+                                                se.expr,
+                                                gfc_conv_array_stride (
+                                                  last_component_ref_tree,
+                                                  i));
+                       }
+                     start = gfc_evaluate_now (fold_convert (
+                                                 gfc_array_index_type,
+                                                 se.expr),
+                                               block);
+                     if (mode_rhs == NULL_TREE)
+                       mode_rhs = build_int_cst (unsigned_char_type_node,
+                                                 ref->u.ar.dimen_type[i]
+                                                 == DIMEN_ELEMENT
+                                                 ? GFC_CAF_ARR_REF_SINGLE
+                                                 : GFC_CAF_ARR_REF_RANGE);
+                   }
+                 else if (ref_static_array)
+                   {
+                     start = integer_zero_node;
+                     mode_rhs = build_int_cst (unsigned_char_type_node,
+                                               ref->u.ar.start[i] == NULL
+                                               ? GFC_CAF_ARR_REF_FULL
+                                               : GFC_CAF_ARR_REF_RANGE);
+                   }
+                 else if (end == NULL_TREE)
+                   mode_rhs = build_int_cst (unsigned_char_type_node,
+                                             GFC_CAF_ARR_REF_FULL);
+                 else
+                   mode_rhs = build_int_cst (unsigned_char_type_node,
+                                             GFC_CAF_ARR_REF_OPEN_START);
+
+                 /* Ref the s in dim.  */
+                 field = gfc_advance_chain (TYPE_FIELDS (dim_type), 0);
+                 tmp = fold_build3_loc (input_location, COMPONENT_REF,
+                                        TREE_TYPE (field), dim, field,
+                                        NULL_TREE);
+
+                 /* Set start in s.  */
+                 if (start != NULL_TREE)
+                   {
+                     field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (tmp)),
+                                                0);
+                     tmp2 = fold_build3_loc (input_location, COMPONENT_REF,
+                                             TREE_TYPE (field), tmp, field,
+                                             NULL_TREE);
+                     gfc_add_modify (block, tmp2,
+                                     fold_convert (TREE_TYPE (tmp2), start));
+                   }
+
+                 /* Set end in s.  */
+                 if (end != NULL_TREE)
+                   {
+                     field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (tmp)),
+                                                1);
+                     tmp2 = fold_build3_loc (input_location, COMPONENT_REF,
+                                             TREE_TYPE (field), tmp, field,
+                                             NULL_TREE);
+                     gfc_add_modify (block, tmp2,
+                                     fold_convert (TREE_TYPE (tmp2), end));
+                   }
+
+                 /* Set stride in s.  */
+                 if (stride != NULL_TREE)
+                   {
+                     field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (tmp)),
+                                                2);
+                     tmp2 = fold_build3_loc (input_location, COMPONENT_REF,
+                                             TREE_TYPE (field), tmp, field,
+                                             NULL_TREE);
+                     gfc_add_modify (block, tmp2,
+                                     fold_convert (TREE_TYPE (tmp2), stride));
+                   }
+                 break;
+               case DIMEN_VECTOR:
+                 /* TODO: Handle vector subscripts of static arrays.  */
+                 gcc_assert (!ref_static_array);
+                 mode_rhs = build_int_cst (unsigned_char_type_node,
+                                           GFC_CAF_ARR_REF_VECTOR);
+                 gfc_init_se (&se, NULL);
+                 se.descriptor_only = 1;
+                 gfc_conv_expr_descriptor (&se, ref->u.ar.start[i]);
+                 gfc_add_block_to_block (block, &se.pre);
+                 vector = se.expr;
+                 tmp = gfc_conv_descriptor_lbound_get (vector,
+                                                       gfc_rank_cst[0]);
+                 tmp2 = gfc_conv_descriptor_ubound_get (vector,
+                                                        gfc_rank_cst[0]);
+                 nvec = gfc_conv_array_extent_dim (tmp, tmp2, NULL);
+                 tmp = gfc_conv_descriptor_stride_get (vector,
+                                                       gfc_rank_cst[0]);
+                 nvec = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
+                                         TREE_TYPE (nvec), nvec, tmp);
+                 vector = gfc_conv_descriptor_data_get (vector);
+
+                 /* Ref the v in dim.  */
+                 field = gfc_advance_chain (TYPE_FIELDS (dim_type), 1);
+                 tmp = fold_build3_loc (input_location, COMPONENT_REF,
+                                        TREE_TYPE (field), dim, field,
+                                        NULL_TREE);
+
+                 /* Set vector in v.  */
+                 field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (tmp)), 0);
+                 tmp2 = fold_build3_loc (input_location, COMPONENT_REF,
+                                         TREE_TYPE (field), tmp, field,
+                                         NULL_TREE);
+                 gfc_add_modify (block, tmp2, fold_convert (TREE_TYPE (tmp2),
+                                                            vector));
+
+                 /* Set nvec in v.  */
+                 field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (tmp)), 1);
+                 tmp2 = fold_build3_loc (input_location, COMPONENT_REF,
+                                         TREE_TYPE (field), tmp, field,
+                                         NULL_TREE);
+                 gfc_add_modify (block, tmp2, fold_convert (TREE_TYPE (tmp2),
+                                                            nvec));
+
+                 /* Set kind in v.  */
+                 field = gfc_advance_chain (TYPE_FIELDS (TREE_TYPE (tmp)), 2);
+                 tmp2 = fold_build3_loc (input_location, COMPONENT_REF,
+                                         TREE_TYPE (field), tmp, field,
+                                         NULL_TREE);
+                 gfc_add_modify (block, tmp2, build_int_cst (integer_type_node,
+                                                 ref->u.ar.start[i]->ts.kind));
+                 break;
+               default:
+                 gcc_unreachable ();
+               }
+             /* Set the mode for dim i.  */
+             tmp = gfc_build_array_ref (mode, gfc_rank_cst[i], NULL_TREE);
+             gfc_add_modify (block, tmp, fold_convert (TREE_TYPE (tmp),
+                                                       mode_rhs));
+           }
+
+         /* Set the mode for the next dim to GFC_CAF_ARR_REF_NONE.  */
+         if (i < GFC_MAX_DIMENSIONS)
+           {
+             tmp = gfc_build_array_ref (mode, gfc_rank_cst[i], NULL_TREE);
+             gfc_add_modify (block, tmp,
+                             build_int_cst (unsigned_char_type_node,
+                                            GFC_CAF_ARR_REF_NONE));
+           }
+         break;
+       default:
+         gcc_unreachable ();
+       }
+
+      /* Set the size of the current type.  */
+      field = gfc_advance_chain (TYPE_FIELDS (reference_type), 2);
+      tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
+                            prev_caf_ref, field, NULL_TREE);
+      gfc_add_modify (block, tmp, fold_convert (TREE_TYPE (field),
+                                               TYPE_SIZE_UNIT (last_type)));
+
+      ref = ref->next;
+    }
+
+  if (prev_caf_ref != NULL_TREE)
+    {
+      field = gfc_advance_chain (TYPE_FIELDS (reference_type), 0);
+      tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
+                            prev_caf_ref, field, NULL_TREE);
+      gfc_add_modify (block, tmp, fold_convert (TREE_TYPE (field),
+                                                 null_pointer_node));
+    }
+  return caf_ref != NULL_TREE ? gfc_build_addr_expr (NULL_TREE, caf_ref)
+                             : NULL_TREE;
+}
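
conv_expr_ref_to_caf_ref lowers a Fortran reference such as x%comp(1:n:2) into a linked chain of reference records that the coarray runtime walks on the remote image, one record per component or array ref, each carrying its offset or per-dimension start/end/stride (or vector) information.  The real layout is caf_reference_t in libgfortran's libcaf.h; the struct below is a deliberately simplified, hypothetical mirror meant only to show the shape of the data being filled in above:

/* Hypothetical, simplified sketch of the reference chain built above;
   the real caf_reference_t has unions, a static-array variant and
   vector-subscript support that are omitted here.  */
enum ref_kind_sketch { REF_COMPONENT_SKETCH, REF_ARRAY_SKETCH };

struct caf_ref_sketch
{
  struct caf_ref_sketch *next;       /* Next ref in the chain, or NULL.  */
  enum ref_kind_sketch kind;         /* Component ref or array ref.  */
  long item_size;                    /* Size of the referenced type.  */
  long component_offset;             /* Used for component refs.  */
  long start[7], end[7], stride[7];  /* Used for array refs; 7 dims is
                                        just a sketch-level choice.  */
};
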
+
 /* Get data from a remote coarray.  */
 
 static void
 gfc_conv_intrinsic_caf_get (gfc_se *se, gfc_expr *expr, tree lhs, tree lhs_kind,
-                           tree may_require_tmp)
+                           tree may_require_tmp, bool may_realloc,
+                           symbol_attribute *caf_attr)
 {
-  gfc_expr *array_expr;
+  gfc_expr *array_expr, *tmp_stat;
   gfc_se argse;
   tree caf_decl, token, offset, image_index, tmp;
-  tree res_var, dst_var, type, kind, vec;
+  tree res_var, dst_var, type, kind, vec, stat;
+  tree caf_reference;
+  symbol_attribute caf_attr_store;
 
   gcc_assert (flag_coarray == GFC_FCOARRAY_LIB);
 
@@ -1124,10 +1692,132 @@ gfc_conv_intrinsic_caf_get (gfc_se *se, gfc_expr *expr, tree lhs, tree lhs_kind,
   array_expr = (lhs == NULL_TREE) ? expr->value.function.actual->expr : expr;
   type = gfc_typenode_for_spec (&array_expr->ts);
 
+  if (caf_attr == NULL)
+    {
+      caf_attr_store = gfc_caf_attr (array_expr);
+      caf_attr = &caf_attr_store;
+    }
+
   res_var = lhs;
   dst_var = lhs;
 
   vec = null_pointer_node;
+  tmp_stat = gfc_find_stat_co (expr);
+
+  if (tmp_stat)
+    {
+      gfc_se stat_se;
+      gfc_init_se (&stat_se, NULL);
+      gfc_conv_expr_reference (&stat_se, tmp_stat);
+      stat = stat_se.expr;
+      gfc_add_block_to_block (&se->pre, &stat_se.pre);
+      gfc_add_block_to_block (&se->post, &stat_se.post);
+    }
+  else
+    stat = null_pointer_node;
+
+  /* Only use the new caf_get_by_ref () where it is necessary, i.e. when the
+     lhs is reallocatable or the right-hand side has allocatable or pointer
+     components.  */
+  if (caf_attr->alloc_comp || caf_attr->pointer_comp || may_realloc)
+    {
+      /* Get using caf_get_by_ref.  */
+      caf_reference = conv_expr_ref_to_caf_ref (&se->pre, array_expr);
+
+      if (caf_reference != NULL_TREE)
+       {
+         if (lhs == NULL_TREE)
+           {
+             if (array_expr->ts.type == BT_CHARACTER)
+               gfc_init_se (&argse, NULL);
+             if (array_expr->rank == 0)
+               {
+                 symbol_attribute attr;
+                 gfc_clear_attr (&attr);
+                 if (array_expr->ts.type == BT_CHARACTER)
+                   {
+                     res_var = gfc_conv_string_tmp (se,
+                                                    build_pointer_type (type),
+                                            array_expr->ts.u.cl->backend_decl);
+                     argse.string_length = array_expr->ts.u.cl->backend_decl;
+                   }
+                 else
+                   res_var = gfc_create_var (type, "caf_res");
+                 dst_var = gfc_conv_scalar_to_descriptor (se, res_var, attr);
+                 dst_var = gfc_build_addr_expr (NULL_TREE, dst_var);
+               }
+             else
+               {
+                 /* Create temporary.  */
+                 if (array_expr->ts.type == BT_CHARACTER)
+                   gfc_conv_expr_descriptor (&argse, array_expr);
+                 may_realloc = gfc_trans_create_temp_array (&se->pre,
+                                                            &se->post,
+                                                            se->ss, type,
+                                                            NULL_TREE, false,
+                                                            false, false,
+                                                            &array_expr->where)
+                     == NULL_TREE;
+                 res_var = se->ss->info->data.array.descriptor;
+                 dst_var = gfc_build_addr_expr (NULL_TREE, res_var);
+                 if (may_realloc)
+                   {
+                     tmp = gfc_conv_descriptor_data_get (res_var);
+                     tmp = gfc_deallocate_with_status (tmp, NULL_TREE,
+                                                       NULL_TREE, NULL_TREE,
+                                                       NULL_TREE, true,
+                                                       NULL,
+                                                    GFC_CAF_COARRAY_NOCOARRAY);
+                     gfc_add_expr_to_block (&se->post, tmp);
+                   }
+               }
+           }
+
+         kind = build_int_cst (integer_type_node, expr->ts.kind);
+         if (lhs_kind == NULL_TREE)
+           lhs_kind = kind;
+
+         caf_decl = gfc_get_tree_for_caf_expr (array_expr);
+         if (TREE_CODE (TREE_TYPE (caf_decl)) == REFERENCE_TYPE)
+           caf_decl = build_fold_indirect_ref_loc (input_location, caf_decl);
+         image_index = gfc_caf_get_image_index (&se->pre, array_expr,
+                                                caf_decl);
+         gfc_get_caf_token_offset (se, &token, NULL, caf_decl, NULL,
+                                   array_expr);
+
+         /* No overlap possible as we have generated a temporary.  */
+         if (lhs == NULL_TREE)
+           may_require_tmp = boolean_false_node;
+
+         /* An asm volatile with a "memory" clobber guarantees memory
+            consistency within the same segment.  */
+         tmp = gfc_build_string_const (strlen ("memory") + 1, "memory");
+         tmp = build5_loc (input_location, ASM_EXPR, void_type_node,
+                           gfc_build_string_const (1, ""), NULL_TREE,
+                           NULL_TREE, tree_cons (NULL_TREE, tmp, NULL_TREE),
+                           NULL_TREE);
+         ASM_VOLATILE_P (tmp) = 1;
+         gfc_add_expr_to_block (&se->pre, tmp);
+
+         tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_get_by_ref,
+                                    10, token, image_index, dst_var,
+                                    caf_reference, lhs_kind, kind,
+                                    may_require_tmp,
+                                    may_realloc ? boolean_true_node :
+                                                  boolean_false_node,
+                                    stat, build_int_cst (integer_type_node,
+                                                         array_expr->ts.type));
+
+         gfc_add_expr_to_block (&se->pre, tmp);
+
+         if (se->ss)
+           gfc_advance_se_ss_chain (se);
+
+         se->expr = res_var;
+         if (array_expr->ts.type == BT_CHARACTER)
+           se->string_length = argse.string_length;
+
+         return;
+       }
+    }
 
   gfc_init_se (&argse, NULL);
   if (array_expr->rank == 0)
@@ -1167,11 +1857,12 @@ gfc_conv_intrinsic_caf_get (gfc_se *se, gfc_expr *expr, tree lhs, tree lhs_kind,
          ar->as = ar2.as;
          ar->type = AR_FULL;
        }
+      // TODO: Check whether argse.want_coarray = 1 can help with the below.
       gfc_conv_expr_descriptor (&argse, array_expr);
       /* Using gfc_conv_expr_descriptor, we only get the descriptor, but that
-         has the wrong type if component references are done.  */
+        has the wrong type if component references are done.  */
       gfc_add_modify (&argse.pre, gfc_conv_descriptor_dtype (argse.expr),
-                      gfc_get_dtype_rank_type (has_vector ? ar2.dimen
+                     gfc_get_dtype_rank_type (has_vector ? ar2.dimen
                                                          : array_expr->rank,
                                               type));
       if (has_vector)
@@ -1186,10 +1877,10 @@ gfc_conv_intrinsic_caf_get (gfc_se *se, gfc_expr *expr, tree lhs, tree lhs_kind,
          for (int n = 0; n < se->ss->loop->dimen; n++)
            if (se->loop->to[n] == NULL_TREE)
              {
-               se->loop->from[n] =
-                       gfc_conv_descriptor_lbound_get (argse.expr, gfc_rank_cst[n]);
-               se->loop->to[n] =
-                       gfc_conv_descriptor_ubound_get (argse.expr, gfc_rank_cst[n]);
+               se->loop->from[n] = gfc_conv_descriptor_lbound_get (argse.expr,
+                                                              gfc_rank_cst[n]);
+               se->loop->to[n] = gfc_conv_descriptor_ubound_get (argse.expr,
+                                                              gfc_rank_cst[n]);
              }
          gfc_trans_create_temp_array (&argse.pre, &argse.post, se->ss, type,
                                       NULL_TREE, false, true, false,
@@ -1211,15 +1902,25 @@ gfc_conv_intrinsic_caf_get (gfc_se *se, gfc_expr *expr, tree lhs, tree lhs_kind,
   if (TREE_CODE (TREE_TYPE (caf_decl)) == REFERENCE_TYPE)
     caf_decl = build_fold_indirect_ref_loc (input_location, caf_decl);
   image_index = gfc_caf_get_image_index (&se->pre, array_expr, caf_decl);
-  gfc_get_caf_token_offset (&token, &offset, caf_decl, argse.expr, array_expr);
+  gfc_get_caf_token_offset (se, &token, &offset, caf_decl, argse.expr,
+                           array_expr);
 
   /* No overlap possible as we have generated a temporary.  */
   if (lhs == NULL_TREE)
     may_require_tmp = boolean_false_node;
 
-  tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_get, 9,
+  /* An asm volatile with a "memory" clobber guarantees memory consistency
+     within the same segment.  */
+  tmp = gfc_build_string_const (strlen ("memory") + 1, "memory");
+  tmp = build5_loc (input_location, ASM_EXPR, void_type_node,
+                   gfc_build_string_const (1, ""), NULL_TREE, NULL_TREE,
+                   tree_cons (NULL_TREE, tmp, NULL_TREE), NULL_TREE);
+  ASM_VOLATILE_P (tmp) = 1;
+  gfc_add_expr_to_block (&se->pre, tmp);
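
The ASM_EXPR built just above (and likewise in the get_by_ref path earlier) is an empty volatile asm whose only operand is a "memory" clobber, i.e. a pure compiler barrier that keeps loads and stores from being reordered across the communication call within a segment.  Written as C source, the generated construct is simply:

/* Compiler-only memory barrier equivalent to the generated ASM_EXPR:
   it emits no instructions, but the optimizers may not move memory
   accesses across it.  */
static inline void
compiler_memory_barrier (void)
{
  __asm__ __volatile__ ("" : : : "memory");
}
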
+
+  tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_get, 10,
                             token, offset, image_index, argse.expr, vec,
-                            dst_var, kind, lhs_kind, may_require_tmp);
+                            dst_var, kind, lhs_kind, may_require_tmp, stat);
+
   gfc_add_expr_to_block (&se->pre, tmp);
 
   if (se->ss)
@@ -1231,94 +1932,251 @@ gfc_conv_intrinsic_caf_get (gfc_se *se, gfc_expr *expr, tree lhs, tree lhs_kind,
 }
 
 
-/* Send data to a remove coarray.  */
+/* Send data to a remote coarray.  */
 
 static tree
 conv_caf_send (gfc_code *code) {
-  gfc_expr *lhs_expr, *rhs_expr;
+  gfc_expr *lhs_expr, *rhs_expr, *tmp_stat, *tmp_team;
   gfc_se lhs_se, rhs_se;
   stmtblock_t block;
   tree caf_decl, token, offset, image_index, tmp, lhs_kind, rhs_kind;
-  tree may_require_tmp;
+  tree may_require_tmp, src_stat, dst_stat, dst_team;
   tree lhs_type = NULL_TREE;
   tree vec = null_pointer_node, rhs_vec = null_pointer_node;
+  symbol_attribute lhs_caf_attr, rhs_caf_attr;
 
   gcc_assert (flag_coarray == GFC_FCOARRAY_LIB);
 
   lhs_expr = code->ext.actual->expr;
   rhs_expr = code->ext.actual->next->expr;
-  may_require_tmp = gfc_check_dependency (lhs_expr, rhs_expr, false) == 0
+  may_require_tmp = gfc_check_dependency (lhs_expr, rhs_expr, true) == 0
                    ? boolean_false_node : boolean_true_node;
   gfc_init_block (&block);
 
+  lhs_caf_attr = gfc_caf_attr (lhs_expr);
+  rhs_caf_attr = gfc_caf_attr (rhs_expr);
+  src_stat = dst_stat = null_pointer_node;
+  dst_team = null_pointer_node;
+
   /* LHS.  */
   gfc_init_se (&lhs_se, NULL);
   if (lhs_expr->rank == 0)
     {
-      symbol_attribute attr;
-      gfc_clear_attr (&attr);
-      gfc_conv_expr (&lhs_se, lhs_expr);
-      lhs_type = TREE_TYPE (lhs_se.expr);
-      lhs_se.expr = gfc_conv_scalar_to_descriptor (&lhs_se, lhs_se.expr, attr);
-      lhs_se.expr = gfc_build_addr_expr (NULL_TREE, lhs_se.expr);
-    }
-  else
-    {
-      /* If has_vector, pass descriptor for whole array and the
-         vector bounds separately.  */
-      gfc_array_ref *ar, ar2;
-      bool has_vector = false;
-
-      if (gfc_is_coindexed (lhs_expr) && gfc_has_vector_subscript (lhs_expr))
+      if (lhs_expr->ts.type == BT_CHARACTER && lhs_expr->ts.deferred)
        {
-          has_vector = true;
-          ar = gfc_find_array_ref (lhs_expr);
-         ar2 = *ar;
-         memset (ar, '\0', sizeof (*ar));
-         ar->as = ar2.as;
-         ar->type = AR_FULL;
+         lhs_se.expr = gfc_get_tree_for_caf_expr (lhs_expr);
+         lhs_se.expr = gfc_build_addr_expr (NULL_TREE, lhs_se.expr);
+       }
+      else
+       {
+         symbol_attribute attr;
+         gfc_clear_attr (&attr);
+         gfc_conv_expr (&lhs_se, lhs_expr);
+         lhs_type = TREE_TYPE (lhs_se.expr);
+         lhs_se.expr = gfc_conv_scalar_to_descriptor (&lhs_se, lhs_se.expr,
+                                                      attr);
+         lhs_se.expr = gfc_build_addr_expr (NULL_TREE, lhs_se.expr);
        }
+    }
+  else if ((lhs_caf_attr.alloc_comp || lhs_caf_attr.pointer_comp)
+          && lhs_caf_attr.codimension)
+    {
       lhs_se.want_pointer = 1;
       gfc_conv_expr_descriptor (&lhs_se, lhs_expr);
       /* Using gfc_conv_expr_descriptor, we only get the descriptor, but that
-         has the wrong type if component references are done.  */
+        has the wrong type if component references are done.  */
       lhs_type = gfc_typenode_for_spec (&lhs_expr->ts);
       tmp = build_fold_indirect_ref_loc (input_location, lhs_se.expr);
       gfc_add_modify (&lhs_se.pre, gfc_conv_descriptor_dtype (tmp),
-                      gfc_get_dtype_rank_type (has_vector ? ar2.dimen
-                                                         : lhs_expr->rank,
+                     gfc_get_dtype_rank_type (
+                       gfc_has_vector_subscript (lhs_expr)
+                       ? gfc_find_array_ref (lhs_expr)->dimen
+                       : lhs_expr->rank,
                      lhs_type));
-      if (has_vector)
+    }
+  else
+    {
+      bool has_vector = gfc_has_vector_subscript (lhs_expr);
+
+      if (gfc_is_coindexed (lhs_expr) || !has_vector)
        {
-         vec = conv_caf_vector_subscript (&block, lhs_se.expr, &ar2);
-         *ar = ar2;
+         /* If has_vector, pass descriptor for whole array and the
+            vector bounds separately.  */
+         gfc_array_ref *ar, ar2;
+         bool has_tmp_lhs_array = false;
+         if (has_vector)
+           {
+             has_tmp_lhs_array = true;
+             ar = gfc_find_array_ref (lhs_expr);
+             ar2 = *ar;
+             memset (ar, '\0', sizeof (*ar));
+             ar->as = ar2.as;
+             ar->type = AR_FULL;
+           }
+         lhs_se.want_pointer = 1;
+         gfc_conv_expr_descriptor (&lhs_se, lhs_expr);
+         /* Using gfc_conv_expr_descriptor, we only get the descriptor, but
+            that has the wrong type if component references are done.  */
+         lhs_type = gfc_typenode_for_spec (&lhs_expr->ts);
+         tmp = build_fold_indirect_ref_loc (input_location, lhs_se.expr);
+         gfc_add_modify (&lhs_se.pre, gfc_conv_descriptor_dtype (tmp),
+                         gfc_get_dtype_rank_type (has_vector ? ar2.dimen
+                                                             : lhs_expr->rank,
+                                                  lhs_type));
+         if (has_tmp_lhs_array)
+           {
+             vec = conv_caf_vector_subscript (&block, lhs_se.expr, &ar2);
+             *ar = ar2;
+           }
+       }
+      else
+       {
+         /* Special casing for arr1 ([...]) = arr2[...], i.e. caf_get to
+            indexed array expression.  This is rewritten to:
+
+            tmp_array = arr2[...]
+            arr1 ([...]) = tmp_array
+
+            because using the standard gfc_conv_expr (lhs_expr) would do the
+            assignment with lhs and rhs exchanged.  */
+
+         gfc_ss *lss_for_tmparray, *lss_real;
+         gfc_loopinfo loop;
+         gfc_se se;
+         stmtblock_t body;
+         tree tmparr_desc, src;
+         tree index = gfc_index_zero_node;
+         tree stride = gfc_index_zero_node;
+         int n;
+
+         /* Walk the lhs expression twice: the first walk provides the shape
+            needed to create the temporary array.  */
+         lss_for_tmparray = gfc_walk_expr (lhs_expr);
+         /* And a second time to be able to create an assignment of the
+            temporary to the lhs_expr.  gfc_trans_create_temp_array replaces
+            the tree in the descriptor with the one for the temporary
+            array.  */
+         lss_real = gfc_walk_expr (lhs_expr);
+         gfc_init_loopinfo (&loop);
+         gfc_add_ss_to_loop (&loop, lss_for_tmparray);
+         gfc_add_ss_to_loop (&loop, lss_real);
+         gfc_conv_ss_startstride (&loop);
+         gfc_conv_loop_setup (&loop, &lhs_expr->where);
+         lhs_type = gfc_typenode_for_spec (&lhs_expr->ts);
+         gfc_trans_create_temp_array (&lhs_se.pre, &lhs_se.post,
+                                      lss_for_tmparray, lhs_type, NULL_TREE,
+                                      false, true, false,
+                                      &lhs_expr->where);
+         tmparr_desc = lss_for_tmparray->info->data.array.descriptor;
+         gfc_start_scalarized_body (&loop, &body);
+         gfc_init_se (&se, NULL);
+         gfc_copy_loopinfo_to_se (&se, &loop);
+         se.ss = lss_real;
+         gfc_conv_expr (&se, lhs_expr);
+         gfc_add_block_to_block (&body, &se.pre);
+
+         /* Walk over all indexes of the loop.  */
+         for (n = loop.dimen - 1; n > 0; --n)
+           {
+             tmp = loop.loopvar[n];
+             tmp = fold_build2_loc (input_location, MINUS_EXPR,
+                                    gfc_array_index_type, tmp, loop.from[n]);
+             tmp = fold_build2_loc (input_location, PLUS_EXPR,
+                                    gfc_array_index_type, tmp, index);
+
+             stride = fold_build2_loc (input_location, MINUS_EXPR,
+                                       gfc_array_index_type,
+                                       loop.to[n - 1], loop.from[n - 1]);
+             stride = fold_build2_loc (input_location, PLUS_EXPR,
+                                       gfc_array_index_type,
+                                       stride, gfc_index_one_node);
+
+             index = fold_build2_loc (input_location, MULT_EXPR,
+                                      gfc_array_index_type, tmp, stride);
+           }
+
+         index = fold_build2_loc (input_location, MINUS_EXPR,
+                                  gfc_array_index_type,
+                                  index, loop.from[0]);
+
+         index = fold_build2_loc (input_location, PLUS_EXPR,
+                                  gfc_array_index_type,
+                                  loop.loopvar[0], index);
+
+         src = build_fold_indirect_ref (gfc_conv_array_data (tmparr_desc));
+         src = gfc_build_array_ref (src, index, NULL);
+         /* Now create the assignment of lhs_expr = tmp_array.  */
+         gfc_add_modify (&body, se.expr, src);
+         gfc_add_block_to_block (&body, &se.post);
+         lhs_se.expr = gfc_build_addr_expr (NULL_TREE, tmparr_desc);
+         gfc_trans_scalarizing_loops (&loop, &body);
+         gfc_add_block_to_block (&loop.pre, &loop.post);
+         gfc_add_expr_to_block (&lhs_se.post, gfc_finish_block (&loop.pre));
+         gfc_free_ss (lss_for_tmparray);
+         gfc_free_ss (lss_real);
        }
     }
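
The Horner-style loop in the rewrite above collapses the section's loop variables into a single linear index into the contiguous temporary, innermost dimension varying fastest.  A small stand-alone C sketch of the same computation, assuming zero-based per-dimension indices idx[] and extents extent[] (both hypothetical names):

/* Sketch only: column-major linearization matching the tree expression
   built above; idx[n] is already relative to dimension n's lower bound
   and extent[n] is (to[n] - from[n] + 1).  */
static long
linear_index (const long *idx, const long *extent, int rank)
{
  long linear = 0;
  for (int n = rank - 1; n > 0; --n)
    linear = (linear + idx[n]) * extent[n - 1];
  return linear + idx[0];
}
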
 
   lhs_kind = build_int_cst (integer_type_node, lhs_expr->ts.kind);
-  gfc_add_block_to_block (&block, &lhs_se.pre);
 
   /* Special case: RHS is a coarray but LHS is not; this code path avoids a
      temporary and a loop.  */
-  if (!gfc_is_coindexed (lhs_expr))
+  if (!gfc_is_coindexed (lhs_expr)
+      && (!lhs_caf_attr.codimension
+         || !(lhs_expr->rank > 0
+              && (lhs_caf_attr.allocatable || lhs_caf_attr.pointer))))
     {
+      bool lhs_may_realloc = lhs_expr->rank > 0 && lhs_caf_attr.allocatable;
       gcc_assert (gfc_is_coindexed (rhs_expr));
       gfc_init_se (&rhs_se, NULL);
+      if (lhs_expr->rank == 0 && lhs_caf_attr.allocatable)
+       {
+         gfc_se scal_se;
+         gfc_init_se (&scal_se, NULL);
+         scal_se.want_pointer = 1;
+         gfc_conv_expr (&scal_se, lhs_expr);
+         /* Ensure scalar on lhs is allocated.  */
+         gfc_add_block_to_block (&block, &scal_se.pre);
+
+         gfc_allocate_using_malloc (&scal_se.pre, scal_se.expr,
+                                   TYPE_SIZE_UNIT (
+                                      gfc_typenode_for_spec (&lhs_expr->ts)),
+                                   NULL_TREE);
+         tmp = fold_build2 (EQ_EXPR, logical_type_node, scal_se.expr,
+                            null_pointer_node);
+         tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
+                                tmp, gfc_finish_block (&scal_se.pre),
+                                build_empty_stmt (input_location));
+         gfc_add_expr_to_block (&block, tmp);
+       }
+      else
+       lhs_may_realloc = lhs_may_realloc
+           && gfc_full_array_ref_p (lhs_expr->ref, NULL);
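
For a scalar allocatable lhs the branch above emits a guarded allocation: the malloc-based allocation placed in scal_se.pre only runs when the lhs data pointer is still NULL.  Stripped of the tree building, the generated logic amounts to the following sketch (function and parameter names are hypothetical):

#include <stdlib.h>

/* Sketch of the guarded allocation generated for a scalar allocatable
   lhs: allocate only when it has not been allocated yet.  */
static void
ensure_allocated (void **data, size_t elem_size)
{
  if (*data == NULL)
    *data = malloc (elem_size);
}
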
+      gfc_add_block_to_block (&block, &lhs_se.pre);
       gfc_conv_intrinsic_caf_get (&rhs_se, rhs_expr, lhs_se.expr, lhs_kind,
-                                 may_require_tmp);
+                                 may_require_tmp, lhs_may_realloc,
+                                 &rhs_caf_attr);
       gfc_add_block_to_block (&block, &rhs_se.pre);
       gfc_add_block_to_block (&block, &rhs_se.post);
       gfc_add_block_to_block (&block, &lhs_se.post);
       return gfc_finish_block (&block);
     }
 
-  /* Obtain token, offset and image index for the LHS.  */
+  gfc_add_block_to_block (&block, &lhs_se.pre);
 
+  /* Obtain token, offset and image index for the LHS.  */
   caf_decl = gfc_get_tree_for_caf_expr (lhs_expr);
   if (TREE_CODE (TREE_TYPE (caf_decl)) == REFERENCE_TYPE)
     caf_decl = build_fold_indirect_ref_loc (input_location, caf_decl);
   image_index = gfc_caf_get_image_index (&block, lhs_expr, caf_decl);
-  gfc_get_caf_token_offset (&token, &offset, caf_decl, lhs_se.expr, lhs_expr);
+  tmp = lhs_se.expr;
+  if (lhs_caf_attr.alloc_comp)
+    gfc_get_caf_token_offset (&lhs_se, &token, NULL, caf_decl, NULL_TREE,
+                             NULL);
+  else
+    gfc_get_caf_token_offset (&lhs_se, &token, &offset, caf_decl, tmp,
+                             lhs_expr);
+  lhs_se.expr = tmp;
 
   /* RHS.  */
   gfc_init_se (&rhs_se, NULL);
@@ -1330,11 +2188,26 @@ conv_caf_send (gfc_code *code) {
       symbol_attribute attr;
       gfc_clear_attr (&attr);
       gfc_conv_expr (&rhs_se, rhs_expr);
-      if (!gfc_is_coindexed (rhs_expr) && rhs_expr->ts.type != BT_CHARACTER)
-        rhs_se.expr = fold_convert (lhs_type , rhs_se.expr);
       rhs_se.expr = gfc_conv_scalar_to_descriptor (&rhs_se, rhs_se.expr, attr);
       rhs_se.expr = gfc_build_addr_expr (NULL_TREE, rhs_se.expr);
     }
+  else if ((rhs_caf_attr.alloc_comp || rhs_caf_attr.pointer_comp)
+          && rhs_caf_attr.codimension)
+    {
+      tree tmp2;
+      rhs_se.want_pointer = 1;
+      gfc_conv_expr_descriptor (&rhs_se, rhs_expr);
+      /* Using gfc_conv_expr_descriptor, we only get the descriptor, but that
+        has the wrong type if component references are done.  */
+      tmp2 = gfc_typenode_for_spec (&rhs_expr->ts);
+      tmp = build_fold_indirect_ref_loc (input_location, rhs_se.expr);
+      gfc_add_modify (&rhs_se.pre, gfc_conv_descriptor_dtype (tmp),
+                     gfc_get_dtype_rank_type (
+                       gfc_has_vector_subscript (rhs_expr)
+                       ? gfc_find_array_ref (rhs_expr)->dimen
+                       : rhs_expr->rank,
+                     tmp2));
+    }
   else
     {
       /* If has_vector, pass descriptor for whole array and the
@@ -1373,29 +2246,122 @@ conv_caf_send (gfc_code *code) {
 
   rhs_kind = build_int_cst (integer_type_node, rhs_expr->ts.kind);
 
+  tmp_stat = gfc_find_stat_co (lhs_expr);
+
+  if (tmp_stat)
+    {
+      gfc_se stat_se;
+      gfc_init_se (&stat_se, NULL);
+      gfc_conv_expr_reference (&stat_se, tmp_stat);
+      dst_stat = stat_se.expr;
+      gfc_add_block_to_block (&block, &stat_se.pre);
+      gfc_add_block_to_block (&block, &stat_se.post);
+    }
+
+  tmp_team = gfc_find_team_co (lhs_expr);
+
+  if (tmp_team)
+    {
+      gfc_se team_se;
+      gfc_init_se (&team_se, NULL);
+      gfc_conv_expr_reference (&team_se, tmp_team);
+      dst_team = team_se.expr;
+      gfc_add_block_to_block (&block, &team_se.pre);
+      gfc_add_block_to_block (&block, &team_se.post);
+    }
+
   if (!gfc_is_coindexed (rhs_expr))
-    tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_send, 9, token,
-                            offset, image_index, lhs_se.expr, vec,
-                            rhs_se.expr, lhs_kind, rhs_kind, may_require_tmp);
+    {
+      if (lhs_caf_attr.alloc_comp || lhs_caf_attr.pointer_comp)
+       {
+         tree reference, dst_realloc;
+         reference = conv_expr_ref_to_caf_ref (&block, lhs_expr);
+         dst_realloc = lhs_caf_attr.allocatable ? boolean_true_node
+                                            : boolean_false_node;
+         tmp = build_call_expr_loc (input_location,
+                                    gfor_fndecl_caf_send_by_ref,
+                                    10, token, image_index, rhs_se.expr,
+                                    reference, lhs_kind, rhs_kind,
+                                    may_require_tmp, dst_realloc, src_stat,
+                                    build_int_cst (integer_type_node,
+                                                   lhs_expr->ts.type));
+         }
+      else
+       tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_send, 11,
+                                  token, offset, image_index, lhs_se.expr, vec,
+                                  rhs_se.expr, lhs_kind, rhs_kind,
+                                  may_require_tmp, src_stat, dst_team);
+    }
   else
     {
       tree rhs_token, rhs_offset, rhs_image_index;
 
+      /* The asm guarantees memory consistency within the same segment.  */
+      tmp = gfc_build_string_const (strlen ("memory") + 1, "memory");
+      tmp = build5_loc (input_location, ASM_EXPR, void_type_node,
+                         gfc_build_string_const (1, ""), NULL_TREE, NULL_TREE,
+                         tree_cons (NULL_TREE, tmp, NULL_TREE), NULL_TREE);
+      ASM_VOLATILE_P (tmp) = 1;
+      gfc_add_expr_to_block (&block, tmp);
+
       caf_decl = gfc_get_tree_for_caf_expr (rhs_expr);
       if (TREE_CODE (TREE_TYPE (caf_decl)) == REFERENCE_TYPE)
        caf_decl = build_fold_indirect_ref_loc (input_location, caf_decl);
       rhs_image_index = gfc_caf_get_image_index (&block, rhs_expr, caf_decl);
-      gfc_get_caf_token_offset (&rhs_token, &rhs_offset, caf_decl, rhs_se.expr,
-                               rhs_expr);
-      tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sendget, 13,
-                                token, offset, image_index, lhs_se.expr, vec,
-                                rhs_token, rhs_offset, rhs_image_index,
-                                rhs_se.expr, rhs_vec, lhs_kind, rhs_kind,
-                                may_require_tmp);
+      tmp = rhs_se.expr;
+      if (rhs_caf_attr.alloc_comp || rhs_caf_attr.pointer_comp)
+       {
+         tmp_stat = gfc_find_stat_co (lhs_expr);
+
+         if (tmp_stat)
+           {
+             gfc_se stat_se;
+             gfc_init_se (&stat_se, NULL);
+             gfc_conv_expr_reference (&stat_se, tmp_stat);
+             src_stat = stat_se.expr;
+             gfc_add_block_to_block (&block, &stat_se.pre);
+             gfc_add_block_to_block (&block, &stat_se.post);
+           }
+
+         gfc_get_caf_token_offset (&rhs_se, &rhs_token, NULL, caf_decl,
+                                   NULL_TREE, NULL);
+         tree lhs_reference, rhs_reference;
+         lhs_reference = conv_expr_ref_to_caf_ref (&block, lhs_expr);
+         rhs_reference = conv_expr_ref_to_caf_ref (&block, rhs_expr);
+         tmp = build_call_expr_loc (input_location,
+                                    gfor_fndecl_caf_sendget_by_ref, 13,
+                                    token, image_index, lhs_reference,
+                                    rhs_token, rhs_image_index, rhs_reference,
+                                    lhs_kind, rhs_kind, may_require_tmp,
+                                    dst_stat, src_stat,
+                                    build_int_cst (integer_type_node,
+                                                   lhs_expr->ts.type),
+                                    build_int_cst (integer_type_node,
+                                                   rhs_expr->ts.type));
+       }
+      else
+       {
+         gfc_get_caf_token_offset (&rhs_se, &rhs_token, &rhs_offset, caf_decl,
+                                   tmp, rhs_expr);
+         tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sendget,
+                                    14, token, offset, image_index,
+                                    lhs_se.expr, vec, rhs_token, rhs_offset,
+                                    rhs_image_index, tmp, rhs_vec, lhs_kind,
+                                    rhs_kind, may_require_tmp, src_stat);
+       }
     }
   gfc_add_expr_to_block (&block, tmp);
   gfc_add_block_to_block (&block, &lhs_se.post);
   gfc_add_block_to_block (&block, &rhs_se.post);
+
+  /* The asm guarantees memory consistency within the same segment.  */
+  tmp = gfc_build_string_const (strlen ("memory") + 1, "memory");
+  tmp = build5_loc (input_location, ASM_EXPR, void_type_node,
+                   gfc_build_string_const (1, ""), NULL_TREE, NULL_TREE,
+                   tree_cons (NULL_TREE, tmp, NULL_TREE), NULL_TREE);
+  ASM_VOLATILE_P (tmp) = 1;
+  gfc_add_expr_to_block (&block, tmp);
+
   return gfc_finish_block (&block);
 }
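
For reference, the ASM_EXPR with a "memory" clobber emitted around the send is
the tree-level form of the usual GCC extended-asm compiler barrier.  A minimal
standalone sketch of the same construct in plain C (illustrative only, not tied
to the libcaf interface):

    #include <stdio.h>

    /* Empty volatile asm with a "memory" clobber: the compiler must not
       reorder or cache memory accesses across this point.  */
    #define MEMORY_BARRIER() __asm__ __volatile__ ("" ::: "memory")

    int main (void)
    {
      volatile int flag = 0;
      int data = 42;            /* Should be visible before flag is set.  */
      MEMORY_BARRIER ();
      flag = 1;
      printf ("%d %d\n", data, flag);
      return 0;
    }
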
 
@@ -1477,8 +2443,9 @@ trans_this_image (gfc_se * se, gfc_expr *expr)
 
       if (INTEGER_CST_P (dim_arg))
        {
-         if (wi::ltu_p (dim_arg, 1)
-             || wi::gtu_p (dim_arg, GFC_TYPE_ARRAY_CORANK (TREE_TYPE (desc))))
+         if (wi::ltu_p (wi::to_wide (dim_arg), 1)
+             || wi::gtu_p (wi::to_wide (dim_arg),
+                           GFC_TYPE_ARRAY_CORANK (TREE_TYPE (desc))))
            gfc_error ("%<dim%> argument of %s intrinsic at %L is not a valid "
                       "dimension index", expr->value.function.isym->name,
                       &expr->where);
@@ -1486,14 +2453,14 @@ trans_this_image (gfc_se * se, gfc_expr *expr)
      else if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
        {
          dim_arg = gfc_evaluate_now (dim_arg, &se->pre);
-         cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
+         cond = fold_build2_loc (input_location, LT_EXPR, logical_type_node,
                                  dim_arg,
                                  build_int_cst (TREE_TYPE (dim_arg), 1));
          tmp = gfc_rank_cst[GFC_TYPE_ARRAY_CORANK (TREE_TYPE (desc))];
-         tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
+         tmp = fold_build2_loc (input_location, GT_EXPR, logical_type_node,
                                 dim_arg, tmp);
          cond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
-                                 boolean_type_node, cond, tmp);
+                                 logical_type_node, cond, tmp);
          gfc_trans_runtime_check (true, false, cond, &se->pre, &expr->where,
                                   gfc_msg_fault);
        }
@@ -1584,7 +2551,7 @@ trans_this_image (gfc_se * se, gfc_expr *expr)
                          m, extent));
 
   /* Exit condition:  if (i >= min_var) goto exit_label.  */
-  cond = fold_build2_loc (input_location, GE_EXPR, boolean_type_node, loop_var,
+  cond = fold_build2_loc (input_location, GE_EXPR, logical_type_node, loop_var,
                  min_var);
   tmp = build1_v (GOTO_EXPR, exit_label);
   tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond, tmp,
@@ -1609,7 +2576,7 @@ trans_this_image (gfc_se * se, gfc_expr *expr)
   /*  sub(co_dim) = (co_dim < corank) ? ml - m*extent + lcobound(dim_arg)
                                      : m + lcobound(corank) */
 
-  cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node, dim_arg,
+  cond = fold_build2_loc (input_location, LT_EXPR, logical_type_node, dim_arg,
                          build_int_cst (TREE_TYPE (dim_arg), corank));
 
   lbound = gfc_conv_descriptor_lbound_get (desc,
@@ -1629,28 +2596,106 @@ trans_this_image (gfc_se * se, gfc_expr *expr)
 }
 
 
+/* Convert a call to image_status.  */
+
 static void
-trans_image_index (gfc_se * se, gfc_expr *expr)
+conv_intrinsic_image_status (gfc_se *se, gfc_expr *expr)
 {
-  tree num_images, cond, coindex, type, lbound, ubound, desc, subdesc,
-       tmp, invalid_bound;
-  gfc_se argse, subse;
-  int rank, corank, codim;
-
-  type = gfc_get_int_type (gfc_default_integer_kind);
-  corank = gfc_get_corank (expr->value.function.actual->expr);
-  rank = expr->value.function.actual->expr->rank;
+  unsigned int num_args;
+  tree *args, tmp;
 
-  /* Obtain the descriptor of the COARRAY.  */
-  gfc_init_se (&argse, NULL);
-  argse.want_coarray = 1;
-  gfc_conv_expr_descriptor (&argse, expr->value.function.actual->expr);
-  gfc_add_block_to_block (&se->pre, &argse.pre);
-  gfc_add_block_to_block (&se->post, &argse.post);
-  desc = argse.expr;
+  num_args = gfc_intrinsic_argument_list_length (expr);
+  args = XALLOCAVEC (tree, num_args);
+  gfc_conv_intrinsic_function_args (se, expr, args, num_args);
+  /* args[0] holds the number of the image whose status is requested.  */
 
-  /* Obtain a handle to the SUB argument.  */
-  gfc_init_se (&subse, NULL);
+  if (flag_coarray == GFC_FCOARRAY_SINGLE)
+    {
+      tree arg;
+      arg = gfc_evaluate_now (args[0], &se->pre);
+      tmp = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
+                            fold_convert (integer_type_node, arg),
+                            integer_one_node);
+      tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node,
+                            tmp, integer_zero_node,
+                            build_int_cst (integer_type_node,
+                                           GFC_STAT_STOPPED_IMAGE));
+    }
+  else if (flag_coarray == GFC_FCOARRAY_LIB)
+    tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_image_status, 2,
+                              args[0], build_int_cst (integer_type_node, -1));
+  else
+    gcc_unreachable ();
+
+  se->expr = tmp;
+}
+
+static void
+conv_intrinsic_team_number (gfc_se *se, gfc_expr *expr)
+{
+  unsigned int num_args;
+
+  tree *args, tmp;
+
+  num_args = gfc_intrinsic_argument_list_length (expr);
+  args = XALLOCAVEC (tree, num_args);
+  gfc_conv_intrinsic_function_args (se, expr, args, num_args);
+
+  if (flag_coarray == GFC_FCOARRAY_SINGLE
+      && expr->value.function.actual->expr)
+    {
+      tree arg;
+
+      arg = gfc_evaluate_now (args[0], &se->pre);
+      tmp = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
+                            fold_convert (integer_type_node, arg),
+                            integer_one_node);
+      tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node,
+                            tmp, integer_zero_node,
+                            build_int_cst (integer_type_node,
+                                           GFC_STAT_STOPPED_IMAGE));
+    }
+  else if (flag_coarray == GFC_FCOARRAY_SINGLE)
+    {
+      /* The value -1 represents that no team has been created yet.  */
+      tmp = build_int_cst (integer_type_node, -1);
+    }
+  else if (flag_coarray == GFC_FCOARRAY_LIB && expr->value.function.actual->expr)
+    tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_team_number, 1,
+                              args[0], build_int_cst (integer_type_node, -1));
+  else if (flag_coarray == GFC_FCOARRAY_LIB)
+    tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_team_number, 1,
+               integer_zero_node, build_int_cst (integer_type_node, -1));
+  else
+    gcc_unreachable ();
+
+  se->expr = tmp;
+}
+
+
+static void
+trans_image_index (gfc_se * se, gfc_expr *expr)
+{
+  tree num_images, cond, coindex, type, lbound, ubound, desc, subdesc,
+       tmp, invalid_bound;
+  gfc_se argse, subse;
+  int rank, corank, codim;
+
+  type = gfc_get_int_type (gfc_default_integer_kind);
+  corank = gfc_get_corank (expr->value.function.actual->expr);
+  rank = expr->value.function.actual->expr->rank;
+
+  /* Obtain the descriptor of the COARRAY.  */
+  gfc_init_se (&argse, NULL);
+  argse.want_coarray = 1;
+  gfc_conv_expr_descriptor (&argse, expr->value.function.actual->expr);
+  gfc_add_block_to_block (&se->pre, &argse.pre);
+  gfc_add_block_to_block (&se->post, &argse.post);
+  desc = argse.expr;
+
+  /* Obtain a handle to the SUB argument.  */
+  gfc_init_se (&subse, NULL);
   gfc_conv_expr_descriptor (&subse, expr->value.function.actual->next->expr);
   gfc_add_block_to_block (&se->pre, &subse.pre);
   gfc_add_block_to_block (&se->post, &subse.post);
@@ -1662,7 +2707,7 @@ trans_image_index (gfc_se * se, gfc_expr *expr)
 
   lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[rank+corank-1]);
   tmp = gfc_build_array_ref (subdesc, gfc_rank_cst[corank-1], NULL);
-  invalid_bound = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
+  invalid_bound = fold_build2_loc (input_location, LT_EXPR, logical_type_node,
                                 fold_convert (gfc_array_index_type, tmp),
                                 lbound);
 
@@ -1671,16 +2716,16 @@ trans_image_index (gfc_se * se, gfc_expr *expr)
       lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[codim]);
       ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[codim]);
       tmp = gfc_build_array_ref (subdesc, gfc_rank_cst[codim-rank], NULL);
-      cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
+      cond = fold_build2_loc (input_location, LT_EXPR, logical_type_node,
                              fold_convert (gfc_array_index_type, tmp),
                              lbound);
       invalid_bound = fold_build2_loc (input_location, TRUTH_OR_EXPR,
-                                      boolean_type_node, invalid_bound, cond);
-      cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
+                                      logical_type_node, invalid_bound, cond);
+      cond = fold_build2_loc (input_location, GT_EXPR, logical_type_node,
                              fold_convert (gfc_array_index_type, tmp),
                              ubound);
       invalid_bound = fold_build2_loc (input_location, TRUTH_OR_EXPR,
-                                      boolean_type_node, invalid_bound, cond);
+                                      logical_type_node, invalid_bound, cond);
     }
 
   invalid_bound = gfc_unlikely (invalid_bound, PRED_FORTRAN_INVALID_BOUND);
@@ -1740,16 +2785,15 @@ trans_image_index (gfc_se * se, gfc_expr *expr)
   tmp = gfc_create_var (type, NULL);
   gfc_add_modify (&se->pre, tmp, coindex);
 
-  cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node, tmp,
+  cond = fold_build2_loc (input_location, GT_EXPR, logical_type_node, tmp,
                          num_images);
-  cond = fold_build2_loc (input_location, TRUTH_OR_EXPR, boolean_type_node,
+  cond = fold_build2_loc (input_location, TRUTH_OR_EXPR, logical_type_node,
                          cond,
-                         fold_convert (boolean_type_node, invalid_bound));
+                         fold_convert (logical_type_node, invalid_bound));
   se->expr = fold_build3_loc (input_location, COND_EXPR, type, cond,
                              build_int_cst (type, 0), tmp);
 }
 
-
 static void
 trans_num_images (gfc_se * se, gfc_expr *expr)
 {
@@ -1777,7 +2821,6 @@ trans_num_images (gfc_se * se, gfc_expr *expr)
     }
   else
     failed = build_int_cst (integer_type_node, -1);
-
   tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_num_images, 2,
                             distance, failed);
   se->expr = fold_convert (gfc_get_int_type (gfc_default_integer_kind), tmp);
@@ -1798,6 +2841,90 @@ gfc_conv_intrinsic_rank (gfc_se *se, gfc_expr *expr)
   gfc_add_block_to_block (&se->post, &argse.post);
 
   se->expr = gfc_conv_descriptor_rank (argse.expr);
+  se->expr = fold_convert (gfc_get_int_type (gfc_default_integer_kind),
+                          se->expr);
+}
+
+
+static void
+gfc_conv_intrinsic_is_contiguous (gfc_se * se, gfc_expr * expr)
+{
+  gfc_expr *arg;
+  arg = expr->value.function.actual->expr;
+  gfc_conv_is_contiguous_expr (se, arg);
+  se->expr = fold_convert (gfc_typenode_for_spec (&expr->ts), se->expr);
+}
+
+/* This function does the work for gfc_conv_intrinsic_is_contiguous,
+   and it can also be called directly.  */
+
+void
+gfc_conv_is_contiguous_expr (gfc_se *se, gfc_expr *arg)
+{
+  gfc_ss *ss;
+  gfc_se argse;
+  tree desc, tmp, stride, extent, cond;
+  int i;
+  tree fncall0;
+  gfc_array_spec *as;
+
+  if (arg->ts.type == BT_CLASS)
+    gfc_add_class_array_ref (arg);
+
+  ss = gfc_walk_expr (arg);
+  gcc_assert (ss != gfc_ss_terminator);
+  gfc_init_se (&argse, NULL);
+  argse.data_not_needed = 1;
+  gfc_conv_expr_descriptor (&argse, arg);
+
+  as = gfc_get_full_arrayspec_from_expr (arg);
+
+  /* Create:  stride[0] == 1 && stride[1] == extent[0]*stride[0] && ...
+     Note in addition that zero-sized arrays don't count as contiguous.  */
+
+  if (as && as->type == AS_ASSUMED_RANK)
+    {
+      /* Build the call to is_contiguous0.  */
+      argse.want_pointer = 1;
+      gfc_conv_expr_descriptor (&argse, arg);
+      gfc_add_block_to_block (&se->pre, &argse.pre);
+      gfc_add_block_to_block (&se->post, &argse.post);
+      desc = gfc_evaluate_now (argse.expr, &se->pre);
+      fncall0 = build_call_expr_loc (input_location,
+                                    gfor_fndecl_is_contiguous0, 1, desc);
+      se->expr = fncall0;
+      se->expr = convert (logical_type_node, se->expr);
+    }
+  else
+    {
+      gfc_add_block_to_block (&se->pre, &argse.pre);
+      gfc_add_block_to_block (&se->post, &argse.post);
+      desc = gfc_evaluate_now (argse.expr, &se->pre);
+
+      stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[0]);
+      cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+                             stride, build_int_cst (TREE_TYPE (stride), 1));
+
+      for (i = 0; i < arg->rank - 1; i++)
+       {
+         tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[i]);
+         extent = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[i]);
+         extent = fold_build2_loc (input_location, MINUS_EXPR,
+                                   gfc_array_index_type, extent, tmp);
+         extent = fold_build2_loc (input_location, PLUS_EXPR,
+                                   gfc_array_index_type, extent,
+                                   gfc_index_one_node);
+         tmp = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[i]);
+         tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
+                                tmp, extent);
+         stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[i+1]);
+         tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+                                stride, tmp);
+         cond = fold_build2_loc (input_location, TRUTH_AND_EXPR,
+                                 boolean_type_node, cond, tmp);
+       }
+      se->expr = cond;
+    }
 }
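
The stride test built above can be read in isolation: an array is contiguous
iff stride[0] == 1 and stride[i+1] == stride[i] * extent[i] for every
dimension.  A minimal sketch with hypothetical flat lbound/ubound/stride
arrays (not the gfortran descriptor API):

    #include <stdbool.h>
    #include <stdio.h>

    static bool
    is_contiguous (int rank, const long *lbound, const long *ubound,
                   const long *stride)
    {
      bool cond = stride[0] == 1;
      for (int i = 0; i < rank - 1; i++)
        {
          long extent = ubound[i] - lbound[i] + 1;
          cond = cond && stride[i + 1] == stride[i] * extent;
        }
      return cond;
    }

    int main (void)
    {
      long lb[2] = { 1, 1 }, ub[2] = { 4, 5 };
      long full[2] = { 1, 4 };  /* Full column-major array: contiguous.  */
      long sect[2] = { 2, 8 };  /* Strided section: not contiguous.  */
      printf ("%d %d\n", is_contiguous (2, lb, ub, full),
              is_contiguous (2, lb, ub, sect));
      return 0;
    }
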
 
 
@@ -1863,8 +2990,9 @@ gfc_conv_intrinsic_bound (gfc_se * se, gfc_expr * expr, int upper)
   if (INTEGER_CST_P (bound))
     {
       if (((!as || as->type != AS_ASSUMED_RANK)
-          && wi::geu_p (bound, GFC_TYPE_ARRAY_RANK (TREE_TYPE (desc))))
-         || wi::gtu_p (bound, GFC_MAX_DIMENSIONS))
+          && wi::geu_p (wi::to_wide (bound),
+                        GFC_TYPE_ARRAY_RANK (TREE_TYPE (desc))))
+         || wi::gtu_p (wi::to_wide (bound), GFC_MAX_DIMENSIONS))
        gfc_error ("%<dim%> argument of %s intrinsic at %L is not a valid "
                   "dimension index", upper ? "UBOUND" : "LBOUND",
                   &expr->where);
@@ -1875,16 +3003,16 @@ gfc_conv_intrinsic_bound (gfc_se * se, gfc_expr * expr, int upper)
       if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
         {
           bound = gfc_evaluate_now (bound, &se->pre);
-          cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
+          cond = fold_build2_loc (input_location, LT_EXPR, logical_type_node,
                                  bound, build_int_cst (TREE_TYPE (bound), 0));
          if (as && as->type == AS_ASSUMED_RANK)
            tmp = gfc_conv_descriptor_rank (desc);
          else
            tmp = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (TREE_TYPE (desc))];
-          tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
+          tmp = fold_build2_loc (input_location, GE_EXPR, logical_type_node,
                                 bound, fold_convert(TREE_TYPE (bound), tmp));
           cond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
-                                 boolean_type_node, cond, tmp);
+                                 logical_type_node, cond, tmp);
           gfc_trans_runtime_check (true, false, cond, &se->pre, &expr->where,
                                   gfc_msg_fault);
         }
@@ -1930,27 +3058,27 @@ gfc_conv_intrinsic_bound (gfc_se * se, gfc_expr * expr, int upper)
     {
       tree stride = gfc_conv_descriptor_stride_get (desc, bound);
 
-      cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
+      cond1 = fold_build2_loc (input_location, GE_EXPR, logical_type_node,
                               ubound, lbound);
-      cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
+      cond3 = fold_build2_loc (input_location, GE_EXPR, logical_type_node,
                               stride, gfc_index_zero_node);
       cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
-                              boolean_type_node, cond3, cond1);
-      cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
+                              logical_type_node, cond3, cond1);
+      cond4 = fold_build2_loc (input_location, LT_EXPR, logical_type_node,
                               stride, gfc_index_zero_node);
 
       if (upper)
        {
          tree cond5;
          cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
-                                 boolean_type_node, cond3, cond4);
-         cond5 = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+                                 logical_type_node, cond3, cond4);
+         cond5 = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
                                   gfc_index_one_node, lbound);
          cond5 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
-                                  boolean_type_node, cond4, cond5);
+                                  logical_type_node, cond4, cond5);
 
          cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
-                                 boolean_type_node, cond, cond5);
+                                 logical_type_node, cond, cond5);
 
          if (assumed_rank_lb_one)
            {
@@ -1969,16 +3097,16 @@ gfc_conv_intrinsic_bound (gfc_se * se, gfc_expr * expr, int upper)
       else
        {
          if (as->type == AS_ASSUMED_SIZE)
-           cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+           cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
                                    bound, build_int_cst (TREE_TYPE (bound),
                                                          arg->expr->rank - 1));
          else
-           cond = boolean_false_node;
+           cond = logical_false_node;
 
          cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
-                                  boolean_type_node, cond3, cond4);
+                                  logical_type_node, cond3, cond4);
          cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
-                                 boolean_type_node, cond, cond1);
+                                 logical_type_node, cond, cond1);
 
          se->expr = fold_build3_loc (input_location, COND_EXPR,
                                      gfc_array_index_type, cond,
@@ -2002,6 +3130,29 @@ gfc_conv_intrinsic_bound (gfc_se * se, gfc_expr * expr, int upper)
        se->expr = gfc_index_one_node;
     }
 
+  /* According to F2018 16.9.172, para 5, an assumed-rank object that is
+     argument associated with an assumed-size array has the ubound of the
+     final dimension set to -1, and UBOUND must return this.  */
+  if (upper && as && as->type == AS_ASSUMED_RANK)
+    {
+      tree minus_one = build_int_cst (gfc_array_index_type, -1);
+      tree rank = fold_convert (gfc_array_index_type,
+                               gfc_conv_descriptor_rank (desc));
+      rank = fold_build2_loc (input_location, PLUS_EXPR,
+                             gfc_array_index_type, rank, minus_one);
+      /* Fix the expression to stop it from becoming even more complicated.  */
+      se->expr = gfc_evaluate_now (se->expr, &se->pre);
+      cond = fold_build2_loc (input_location, NE_EXPR,
+                            logical_type_node, bound, rank);
+      cond1 = fold_build2_loc (input_location, NE_EXPR,
+                              logical_type_node, ubound, minus_one);
+      cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
+                             logical_type_node, cond, cond1);
+      se->expr = fold_build3_loc (input_location, COND_EXPR,
+                                 gfc_array_index_type, cond,
+                                 se->expr, minus_one);
+    }
+
   type = gfc_typenode_for_spec (&expr->ts);
   se->expr = convert (type, se->expr);
 }
@@ -2059,8 +3210,9 @@ conv_intrinsic_cobound (gfc_se * se, gfc_expr * expr)
 
       if (INTEGER_CST_P (bound))
        {
-         if (wi::ltu_p (bound, 1)
-             || wi::gtu_p (bound, GFC_TYPE_ARRAY_CORANK (TREE_TYPE (desc))))
+         if (wi::ltu_p (wi::to_wide (bound), 1)
+             || wi::gtu_p (wi::to_wide (bound),
+                           GFC_TYPE_ARRAY_CORANK (TREE_TYPE (desc))))
            gfc_error ("%<dim%> argument of %s intrinsic at %L is not a valid "
                       "dimension index", expr->value.function.isym->name,
                       &expr->where);
@@ -2068,13 +3220,13 @@ conv_intrinsic_cobound (gfc_se * se, gfc_expr * expr)
       else if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
         {
          bound = gfc_evaluate_now (bound, &se->pre);
-         cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
+         cond = fold_build2_loc (input_location, LT_EXPR, logical_type_node,
                                  bound, build_int_cst (TREE_TYPE (bound), 1));
          tmp = gfc_rank_cst[GFC_TYPE_ARRAY_CORANK (TREE_TYPE (desc))];
-         tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
+         tmp = fold_build2_loc (input_location, GT_EXPR, logical_type_node,
                                 bound, tmp);
          cond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
-                                 boolean_type_node, cond, tmp);
+                                 logical_type_node, cond, tmp);
          gfc_trans_runtime_check (true, false, cond, &se->pre, &expr->where,
                                   gfc_msg_fault);
        }
@@ -2143,7 +3295,7 @@ conv_intrinsic_cobound (gfc_se * se, gfc_expr * expr)
 
       if (corank > 1)
        {
-         cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+         cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
                                  bound,
                                  build_int_cst (TREE_TYPE (bound),
                                                 arg->expr->rank + corank - 1));
@@ -2192,7 +3344,6 @@ conv_intrinsic_stride (gfc_se * se, gfc_expr * expr)
   se->expr = gfc_conv_descriptor_stride_get (desc, tmp);
 }
 
-
 static void
 gfc_conv_intrinsic_abs (gfc_se * se, gfc_expr * expr)
 {
@@ -2294,7 +3445,7 @@ gfc_conv_intrinsic_mod (gfc_se * se, gfc_expr * expr, int modulo)
       /* The builtin should always be available.  */
       gcc_assert (fmod != NULL_TREE);
 
-      tmp = build_addr (fmod, current_function_decl);
+      tmp = build_addr (fmod);
       se->expr = build_call_array_loc (input_location,
                                       TREE_TYPE (TREE_TYPE (fmod)),
                                        tmp, 2, args);
@@ -2332,16 +3483,16 @@ gfc_conv_intrinsic_mod (gfc_se * se, gfc_expr * expr, int modulo)
       tmp = gfc_evaluate_now (se->expr, &se->pre);
       if (!flag_signed_zeros)
        {
-         test = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
+         test = fold_build2_loc (input_location, LT_EXPR, logical_type_node,
                                  args[0], zero);
-         test2 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
+         test2 = fold_build2_loc (input_location, LT_EXPR, logical_type_node,
                                   args[1], zero);
          test2 = fold_build2_loc (input_location, TRUTH_XOR_EXPR,
-                                  boolean_type_node, test, test2);
-         test = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+                                  logical_type_node, test, test2);
+         test = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
                                  tmp, zero);
          test = fold_build2_loc (input_location, TRUTH_AND_EXPR,
-                                 boolean_type_node, test, test2);
+                                 logical_type_node, test, test2);
          test = gfc_evaluate_now (test, &se->pre);
          se->expr = fold_build3_loc (input_location, COND_EXPR, type, test,
                                      fold_build2_loc (input_location,
@@ -2354,18 +3505,18 @@ gfc_conv_intrinsic_mod (gfc_se * se, gfc_expr * expr, int modulo)
          tree expr1, copysign, cscall;
          copysign = gfc_builtin_decl_for_float_kind (BUILT_IN_COPYSIGN,
                                                      expr->ts.kind);
-         test = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
+         test = fold_build2_loc (input_location, LT_EXPR, logical_type_node,
                                  args[0], zero);
-         test2 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
+         test2 = fold_build2_loc (input_location, LT_EXPR, logical_type_node,
                                   args[1], zero);
          test2 = fold_build2_loc (input_location, TRUTH_XOR_EXPR,
-                                  boolean_type_node, test, test2);
+                                  logical_type_node, test, test2);
          expr1 = fold_build3_loc (input_location, COND_EXPR, type, test2,
                                   fold_build2_loc (input_location,
                                                    PLUS_EXPR,
                                                    type, tmp, args[1]),
                                   tmp);
-         test = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+         test = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
                                  tmp, zero);
          cscall = build_call_expr_loc (input_location, copysign, 2, zero,
                                        args[1]);
@@ -2421,12 +3572,12 @@ gfc_conv_intrinsic_dshift (gfc_se * se, gfc_expr * expr, bool dshiftl)
   res = fold_build2_loc (input_location, BIT_IOR_EXPR, type, left, right);
 
   /* Special cases.  */
-  cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, shift,
+  cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, shift,
                          build_int_cst (stype, 0));
   res = fold_build3_loc (input_location, COND_EXPR, type, cond,
                         dshiftl ? arg1 : arg2, res);
 
-  cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, shift,
+  cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, shift,
                          build_int_cst (stype, bitsize));
   res = fold_build3_loc (input_location, COND_EXPR, type, cond,
                         dshiftl ? arg2 : arg1, res);
@@ -2453,7 +3604,7 @@ gfc_conv_intrinsic_dim (gfc_se * se, gfc_expr * expr)
   val = gfc_evaluate_now (val, &se->pre);
 
   zero = gfc_build_const (type, integer_zero_node);
-  tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, val, zero);
+  tmp = fold_build2_loc (input_location, LE_EXPR, logical_type_node, val, zero);
   se->expr = fold_build3_loc (input_location, COND_EXPR, type, tmp, zero, val);
 }
 
@@ -2486,7 +3637,7 @@ gfc_conv_intrinsic_sign (gfc_se * se, gfc_expr * expr)
        {
          tree cond, zero;
          zero = build_real_from_int_cst (TREE_TYPE (args[1]), integer_zero_node);
-         cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+         cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
                                  args[1], zero);
          se->expr = fold_build3_loc (input_location, COND_EXPR,
                                  TREE_TYPE (args[0]), cond,
@@ -2600,14 +3751,14 @@ gfc_conv_intrinsic_ctime (gfc_se * se, gfc_expr * expr)
   args[0] = gfc_build_addr_expr (NULL_TREE, var);
   args[1] = gfc_build_addr_expr (NULL_TREE, len);
 
-  fndecl = build_addr (gfor_fndecl_ctime, current_function_decl);
+  fndecl = build_addr (gfor_fndecl_ctime);
   tmp = build_call_array_loc (input_location,
                          TREE_TYPE (TREE_TYPE (gfor_fndecl_ctime)),
                          fndecl, num_args, args);
   gfc_add_expr_to_block (&se->pre, tmp);
 
   /* Free the temporary afterwards, if necessary.  */
-  cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
+  cond = fold_build2_loc (input_location, GT_EXPR, logical_type_node,
                          len, build_int_cst (TREE_TYPE (len), 0));
   tmp = gfc_call_free (var);
   tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
@@ -2639,14 +3790,14 @@ gfc_conv_intrinsic_fdate (gfc_se * se, gfc_expr * expr)
   args[0] = gfc_build_addr_expr (NULL_TREE, var);
   args[1] = gfc_build_addr_expr (NULL_TREE, len);
 
-  fndecl = build_addr (gfor_fndecl_fdate, current_function_decl);
+  fndecl = build_addr (gfor_fndecl_fdate);
   tmp = build_call_array_loc (input_location,
                          TREE_TYPE (TREE_TYPE (gfor_fndecl_fdate)),
                          fndecl, num_args, args);
   gfc_add_expr_to_block (&se->pre, tmp);
 
   /* Free the temporary afterwards, if necessary.  */
-  cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
+  cond = fold_build2_loc (input_location, GT_EXPR, logical_type_node,
                          len, build_int_cst (TREE_TYPE (len), 0));
   tmp = gfc_call_free (var);
   tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
@@ -2678,6 +3829,52 @@ conv_intrinsic_free (gfc_code *code)
 }
 
 
+/* Call the RANDOM_INIT library subroutine with a hidden argument for
+   handling seeding on coarray images.  */
+
+static tree
+conv_intrinsic_random_init (gfc_code *code)
+{
+  stmtblock_t block;
+  gfc_se se;
+  tree arg1, arg2, arg3, tmp;
+  tree logical4_type_node = gfc_get_logical_type (4);
+
+  /* Make the function call.  */
+  gfc_init_block (&block);
+  gfc_init_se (&se, NULL);
+
+  /* Convert REPEATABLE to a LOGICAL(4) entity.  */
+  gfc_conv_expr (&se, code->ext.actual->expr);
+  gfc_add_block_to_block (&block, &se.pre);
+  arg1 = fold_convert (logical4_type_node, gfc_evaluate_now (se.expr, &block));
+  gfc_add_block_to_block (&block, &se.post);
+
+  /* Convert IMAGE_DISTINCT to a LOGICAL(4) entity.  */
+  gfc_conv_expr (&se, code->ext.actual->next->expr);
+  gfc_add_block_to_block (&block, &se.pre);
+  arg2 = fold_convert (logical4_type_node, gfc_evaluate_now (se.expr, &block));
+  gfc_add_block_to_block (&block, &se.post);
+
+  /* Create the hidden argument.  For non-coarray codes and -fcoarray=single,
+     simply set this to 0.  For -fcoarray=lib, generate a call to
+     THIS_IMAGE() without arguments.  */
+  arg3 = build_int_cst (gfc_get_int_type (4), 0);
+  if (flag_coarray == GFC_FCOARRAY_LIB)
+    {
+      arg3 = build_call_expr_loc (input_location, gfor_fndecl_caf_this_image,
+                                 1, arg3);
+      se.expr = fold_convert (gfc_get_int_type (4), arg3);
+    }
+
+  tmp = build_call_expr_loc (input_location, gfor_fndecl_random_init, 3,
+                            arg1, arg2, arg3);
+  gfc_add_expr_to_block (&block, tmp);
+
+  return gfc_finish_block (&block);
+}
+
+
 /* Call the SYSTEM_CLOCK library functions, handling the type and kind
    conversions.  */
 
@@ -2849,14 +4046,14 @@ gfc_conv_intrinsic_ttynam (gfc_se * se, gfc_expr * expr)
   args[0] = gfc_build_addr_expr (NULL_TREE, var);
   args[1] = gfc_build_addr_expr (NULL_TREE, len);
 
-  fndecl = build_addr (gfor_fndecl_ttynam, current_function_decl);
+  fndecl = build_addr (gfor_fndecl_ttynam);
   tmp = build_call_array_loc (input_location,
                          TREE_TYPE (TREE_TYPE (gfor_fndecl_ttynam)),
                          fndecl, num_args, args);
   gfc_add_expr_to_block (&se->pre, tmp);
 
   /* Free the temporary afterwards, if necessary.  */
-  cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
+  cond = fold_build2_loc (input_location, GT_EXPR, logical_type_node,
                          len, build_int_cst (TREE_TYPE (len), 0));
   tmp = gfc_call_free (var);
   tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
@@ -2871,14 +4068,15 @@ gfc_conv_intrinsic_ttynam (gfc_se * se, gfc_expr * expr)
     minmax (a1, a2, a3, ...)
     {
       mvar = a1;
-      if (a2 .op. mvar || isnan (mvar))
-        mvar = a2;
-      if (a3 .op. mvar || isnan (mvar))
-        mvar = a3;
+      mvar = COMP (mvar, a2);
+      mvar = COMP (mvar, a3);
       ...
-      return mvar
+      return mvar;
     }
- */
+    Where COMP is MIN/MAX_EXPR for integral types or when we don't
+    care about NaNs, or IFN_FMIN/MAX when the target has support for
+    fast NaN-honouring min/max.  When neither holds, expand a sequence
+    of explicit comparisons.  */
 
 /* TODO: Mismatching types can occur when specific names are used.
    These should be handled during resolution.  */
@@ -2888,9 +4086,9 @@ gfc_conv_intrinsic_minmax (gfc_se * se, gfc_expr * expr, enum tree_code op)
   tree tmp;
   tree mvar;
   tree val;
-  tree thencase;
   tree *args;
   tree type;
+  tree argtype;
   gfc_actual_arglist *argexpr;
   unsigned int i, nargs;
 
@@ -2900,66 +4098,64 @@ gfc_conv_intrinsic_minmax (gfc_se * se, gfc_expr * expr, enum tree_code op)
   gfc_conv_intrinsic_function_args (se, expr, args, nargs);
   type = gfc_typenode_for_spec (&expr->ts);
 
-  argexpr = expr->value.function.actual;
-  if (TREE_TYPE (args[0]) != type)
-    args[0] = convert (type, args[0]);
   /* Only evaluate the argument once.  */
-  if (TREE_CODE (args[0]) != VAR_DECL && !TREE_CONSTANT (args[0]))
+  if (!VAR_P (args[0]) && !TREE_CONSTANT (args[0]))
     args[0] = gfc_evaluate_now (args[0], &se->pre);
 
-  mvar = gfc_create_var (type, "M");
-  gfc_add_modify (&se->pre, mvar, args[0]);
-  for (i = 1, argexpr = argexpr->next; i < nargs; i++)
+  /* Determine suitable type of temporary, as a GNU extension allows
+     different argument kinds.  */
+  argtype = TREE_TYPE (args[0]);
+  argexpr = expr->value.function.actual;
+  for (i = 1, argexpr = argexpr->next; i < nargs; i++, argexpr = argexpr->next)
     {
-      tree cond, isnan;
+      tree tmptype = TREE_TYPE (args[i]);
+      if (TYPE_PRECISION (tmptype) > TYPE_PRECISION (argtype))
+       argtype = tmptype;
+    }
+  mvar = gfc_create_var (argtype, "M");
+  gfc_add_modify (&se->pre, mvar, convert (argtype, args[0]));
 
+  argexpr = expr->value.function.actual;
+  for (i = 1, argexpr = argexpr->next; i < nargs; i++, argexpr = argexpr->next)
+    {
+      tree cond = NULL_TREE;
       val = args[i];
 
       /* Handle absent optional arguments by ignoring the comparison.  */
       if (argexpr->expr->expr_type == EXPR_VARIABLE
          && argexpr->expr->symtree->n.sym->attr.optional
          && TREE_CODE (val) == INDIRECT_REF)
-       cond = fold_build2_loc (input_location,
-                               NE_EXPR, boolean_type_node,
+       {
+         cond = fold_build2_loc (input_location,
+                               NE_EXPR, logical_type_node,
                                TREE_OPERAND (val, 0),
                        build_int_cst (TREE_TYPE (TREE_OPERAND (val, 0)), 0));
-      else
-      {
-       cond = NULL_TREE;
-
-       /* Only evaluate the argument once.  */
-       if (TREE_CODE (val) != VAR_DECL && !TREE_CONSTANT (val))
-         val = gfc_evaluate_now (val, &se->pre);
-      }
-
-      thencase = build2_v (MODIFY_EXPR, mvar, convert (type, val));
-
-      tmp = fold_build2_loc (input_location, op, boolean_type_node,
-                            convert (type, val), mvar);
-
-      /* FIXME: When the IEEE_ARITHMETIC module is implemented, the call to
-        __builtin_isnan might be made dependent on that module being loaded,
-        to help performance of programs that don't rely on IEEE semantics.  */
-      if (FLOAT_TYPE_P (TREE_TYPE (mvar)))
-       {
-         isnan = build_call_expr_loc (input_location,
-                                      builtin_decl_explicit (BUILT_IN_ISNAN),
-                                      1, mvar);
-         tmp = fold_build2_loc (input_location, TRUTH_OR_EXPR,
-                                boolean_type_node, tmp,
-                                fold_convert (boolean_type_node, isnan));
        }
-      tmp = build3_v (COND_EXPR, tmp, thencase,
-                     build_empty_stmt (input_location));
+      else if (!VAR_P (val) && !TREE_CONSTANT (val))
+       /* Only evaluate the argument once.  */
+       val = gfc_evaluate_now (val, &se->pre);
+
+      tree calc;
+      /* For floating point types, the question is what MAX(a, NaN) or
+        MIN(a, NaN) should return (where "a" is a normal number).
+        There are valid use cases for returning either one, but the
+        Fortran standard doesn't specify which one should be chosen.
+        Also, there is no consensus among other tested compilers.  In
+        short, it's a mess.  So let's just do whatever is fastest.  */
+      tree_code code = op == GT_EXPR ? MAX_EXPR : MIN_EXPR;
+      calc = fold_build2_loc (input_location, code, argtype,
+                             convert (argtype, val), mvar);
+      tmp = build2_v (MODIFY_EXPR, mvar, calc);
 
       if (cond != NULL_TREE)
        tmp = build3_v (COND_EXPR, cond, tmp,
                        build_empty_stmt (input_location));
-
       gfc_add_expr_to_block (&se->pre, tmp);
-      argexpr = argexpr->next;
     }
-  se->expr = mvar;
+  if (TREE_CODE (type) == INTEGER_TYPE)
+    se->expr = fold_build1_loc (input_location, FIX_TRUNC_EXPR, type, mvar);
+  else
+    se->expr = convert (type, mvar);
 }
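
Because the arguments may have different kinds (a GNU extension), the
temporary M is created in the widest argument type and only converted to the
result type at the end.  A small sketch of the same widening idea, with plain
C integer types standing in for Fortran kinds (hypothetical values):

    #include <stdio.h>

    /* Accumulate in the widest type (long long), then convert back to
       the result type, mirroring argtype vs. type above.  */
    static int
    max_mixed (int a1, long long a2, int a3)
    {
      long long m = a1;          /* mvar in the widest argument type.  */
      m = m > a2 ? m : a2;       /* MAX_EXPR step.  */
      m = m > a3 ? m : a3;
      return (int) m;            /* Convert to the result type.  */
    }

    int main (void)
    {
      printf ("%d\n", max_mixed (3, 7LL, 5));
      return 0;
    }
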
 
 
@@ -2992,14 +4188,14 @@ gfc_conv_intrinsic_minmax_char (gfc_se * se, gfc_expr * expr, int op)
     gcc_unreachable ();
 
   /* Make the function call.  */
-  fndecl = build_addr (function, current_function_decl);
+  fndecl = build_addr (function);
   tmp = build_call_array_loc (input_location,
                          TREE_TYPE (TREE_TYPE (function)), fndecl,
                          nargs + 4, args);
   gfc_add_expr_to_block (&se->pre, tmp);
 
   /* Free the temporary afterwards, if necessary.  */
-  cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
+  cond = fold_build2_loc (input_location, GT_EXPR, logical_type_node,
                          len, build_int_cst (TREE_TYPE (len), 0));
   tmp = gfc_call_free (var);
   tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
@@ -3044,12 +4240,124 @@ gfc_get_symbol_for_expr (gfc_expr * expr, bool ignore_optional)
   return sym;
 }
 
-/* Generate a call to an external intrinsic function.  */
+/* Remove empty actual arguments.  */
+
+static void
+remove_empty_actual_arguments (gfc_actual_arglist **ap)
+{
+  while (*ap)
+    {
+      if ((*ap)->expr == NULL)
+       {
+         gfc_actual_arglist *r = *ap;
+         *ap = r->next;
+         r->next = NULL;
+         gfc_free_actual_arglist (r);
+       }
+      else
+       ap = &((*ap)->next);
+    }
+}
+
+#define MAX_SPEC_ARG 12
+
+/* Make up an fn spec that's right for intrinsic functions that we
+   want to call.  */
+
+static char *
+intrinsic_fnspec (gfc_expr *expr)
+{
+  static char fnspec_buf[MAX_SPEC_ARG*2+1];
+  char *fp;
+  int i;
+  int num_char_args;
+
+#define ADD_CHAR(c) do { *fp++ = c; *fp++ = ' '; } while(0)
+
+  /* Build the fn spec string.  */
+  fp = fnspec_buf;
+  /* Function return value.  FIXME: Check if the second letter could
+     be something other than a space, for further optimization.  */
+  ADD_CHAR ('.');
+  if (expr->rank == 0)
+    {
+      if (expr->ts.type == BT_CHARACTER)
+       {
+         ADD_CHAR ('w');  /* Address of character.  */
+         ADD_CHAR ('.');  /* Length of character.  */
+       }
+    }
+  else
+    ADD_CHAR ('w');  /* Return value is a descriptor.  */
+
+  num_char_args = 0;
+  for (gfc_actual_arglist *a = expr->value.function.actual; a; a = a->next)
+    {
+      if (a->expr == NULL)
+       continue;
+
+      if (a->name && strcmp (a->name,"%VAL") == 0)
+       ADD_CHAR ('.');
+      else
+       {
+         if (a->expr->rank > 0)
+           ADD_CHAR ('r');
+         else
+           ADD_CHAR ('R');
+       }
+      num_char_args += a->expr->ts.type == BT_CHARACTER;
+      gcc_assert (fp - fnspec_buf + num_char_args <= MAX_SPEC_ARG*2);
+    }
+
+  for (i = 0; i < num_char_args; i++)
+    ADD_CHAR ('.');
+
+  *fp = '\0';
+  return fnspec_buf;
+}
+
+#undef MAX_SPEC_ARG
+#undef ADD_CHAR
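
To illustrate the shape of the resulting spec string, a small sketch that
builds the same two-characters-per-slot buffer for a hypothetical
scalar-valued intrinsic taking one array and one scalar argument (the letters
follow the ADD_CHAR calls above; their meaning is defined by GCC's fn-spec
machinery):

    #include <stdio.h>
    #include <string.h>

    int main (void)
    {
      char buf[32];
      char *fp = buf;
      *fp++ = '.'; *fp++ = ' ';   /* Function return value slot.  */
      *fp++ = 'r'; *fp++ = ' ';   /* Array argument (rank > 0).  */
      *fp++ = 'R'; *fp++ = ' ';   /* Scalar argument (rank == 0).  */
      *fp = '\0';
      printf ("\"%s\" (length %zu)\n", buf, strlen (buf));
      return 0;
    }
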
+
+/* Generate the right symbol for the specific intrinsic function and
+   modify the expr accordingly.  This assumes that absent optional
+   arguments should be removed.  */
+
+gfc_symbol *
+specific_intrinsic_symbol (gfc_expr *expr)
+{
+  gfc_symbol *sym;
+
+  sym = gfc_find_intrinsic_symbol (expr);
+  if (sym == NULL)
+    {
+      sym = gfc_get_intrinsic_function_symbol (expr);
+      sym->ts = expr->ts;
+      if (sym->ts.type == BT_CHARACTER && sym->ts.u.cl)
+       sym->ts.u.cl = gfc_new_charlen (sym->ns, NULL);
+
+      gfc_copy_formal_args_intr (sym, expr->value.function.isym,
+                                expr->value.function.actual, true);
+      sym->backend_decl
+       = gfc_get_extern_function_decl (sym, expr->value.function.actual,
+                                       intrinsic_fnspec (expr));
+    }
+
+  remove_empty_actual_arguments (&(expr->value.function.actual));
+
+  return sym;
+}
+
+/* Generate a call to an external intrinsic function.  FIXME: So far,
+   this only works for functions which are called with well-defined
+   types; CSHIFT and friends will come later.  */
+
 static void
 gfc_conv_intrinsic_funcall (gfc_se * se, gfc_expr * expr)
 {
   gfc_symbol *sym;
   vec<tree, va_gc> *append_args;
+  bool specific_symbol;
 
   gcc_assert (!se->ss || se->ss->info->expr == expr);
 
@@ -3058,12 +4366,39 @@ gfc_conv_intrinsic_funcall (gfc_se * se, gfc_expr * expr)
   else
     gcc_assert (expr->rank == 0);
 
-  sym = gfc_get_symbol_for_expr (expr, se->ignore_optional);
+  switch (expr->value.function.isym->id)
+    {
+    case GFC_ISYM_ANY:
+    case GFC_ISYM_ALL:
+    case GFC_ISYM_FINDLOC:
+    case GFC_ISYM_MAXLOC:
+    case GFC_ISYM_MINLOC:
+    case GFC_ISYM_MAXVAL:
+    case GFC_ISYM_MINVAL:
+    case GFC_ISYM_NORM2:
+    case GFC_ISYM_PRODUCT:
+    case GFC_ISYM_SUM:
+      specific_symbol = true;
+      break;
+    default:
+      specific_symbol = false;
+    }
+
+  if (specific_symbol)
+    {
+      /* Need to copy here because specific_intrinsic_symbol modifies
+        expr to omit the absent optional arguments.  */
+      expr = gfc_copy_expr (expr);
+      sym = specific_intrinsic_symbol (expr);
+    }
+  else
+    sym = gfc_get_symbol_for_expr (expr, se->ignore_optional);
 
   /* Calls to libgfortran_matmul need to have special arguments appended,
      to be able to call the BLAS ?gemm functions if required and possible.  */
   append_args = NULL;
   if (expr->value.function.isym->id == GFC_ISYM_MATMUL
+      && !expr->external_blas
       && sym->ts.type != BT_LOGICAL)
     {
       tree cint = gfc_get_int_type (gfc_c_int_kind);
@@ -3107,7 +4442,11 @@ gfc_conv_intrinsic_funcall (gfc_se * se, gfc_expr * expr)
 
   gfc_conv_procedure_call (se, sym, expr->value.function.actual, expr,
                          append_args);
-  gfc_free_symbol (sym);
+
+  if (specific_symbol)
+    gfc_free_expr (expr);
+  else
+    gfc_free_symbol (sym);
 }
 
 /* ANY and ALL intrinsics. ANY->op == NE_EXPR, ALL->op == EQ_EXPR.
@@ -3199,7 +4538,7 @@ gfc_conv_intrinsic_anyall (gfc_se * se, gfc_expr * expr, enum tree_code op)
   gfc_conv_expr_val (&arrayse, actual->expr);
 
   gfc_add_block_to_block (&body, &arrayse.pre);
-  tmp = fold_build2_loc (input_location, op, boolean_type_node, arrayse.expr,
+  tmp = fold_build2_loc (input_location, op, logical_type_node, arrayse.expr,
                         build_int_cst (TREE_TYPE (arrayse.expr), 0));
   tmp = build3_v (COND_EXPR, tmp, found, build_empty_stmt (input_location));
   gfc_add_expr_to_block (&body, tmp);
@@ -3218,6 +4557,181 @@ gfc_conv_intrinsic_anyall (gfc_se * se, gfc_expr * expr, enum tree_code op)
   se->expr = resvar;
 }
 
+
+/* Generate the constant 180 / pi, which is used in the conversion
+   of acosd(), asind(), atand(), atan2d().  */
+
+static tree
+rad2deg (int kind)
+{
+  tree retval;
+  mpfr_t pi, t0;
+
+  gfc_set_model_kind (kind);
+  mpfr_init (pi);
+  mpfr_init (t0);
+  mpfr_set_si (t0, 180, GFC_RND_MODE);
+  mpfr_const_pi (pi, GFC_RND_MODE);
+  mpfr_div (t0, t0, pi, GFC_RND_MODE);
+  retval = gfc_conv_mpfr_to_tree (t0, kind, 0);
+  mpfr_clear (t0);
+  mpfr_clear (pi);
+  return retval;
+}
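
The constant is computed with MPFR at the precision of the requested kind.  A
standalone sketch of the same 180 / pi evaluation using libmpfr directly
(113-bit precision chosen arbitrarily; link with -lmpfr -lgmp):

    #include <stdio.h>
    #include <mpfr.h>

    int main (void)
    {
      mpfr_t pi, t0;
      mpfr_init2 (pi, 113);
      mpfr_init2 (t0, 113);
      mpfr_set_si (t0, 180, MPFR_RNDN);
      mpfr_const_pi (pi, MPFR_RNDN);
      mpfr_div (t0, t0, pi, MPFR_RNDN);   /* t0 = 180 / pi.  */
      mpfr_printf ("%.30Rf\n", t0);       /* ~57.29577951308232...  */
      mpfr_clear (t0);
      mpfr_clear (pi);
      return 0;
    }
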
+
+
+/* ACOSD(x) is translated into ACOS(x) * 180 / pi.
+   ASIND(x) is translated into ASIN(x) * 180 / pi.
+   ATAND(x) is translated into ATAN(x) * 180 / pi.  */
+
+static void
+gfc_conv_intrinsic_atrigd (gfc_se * se, gfc_expr * expr, gfc_isym_id id)
+{
+  tree arg;
+  tree atrigd;
+  tree type;
+
+  type = gfc_typenode_for_spec (&expr->ts);
+
+  gfc_conv_intrinsic_function_args (se, expr, &arg, 1);
+
+  if (id == GFC_ISYM_ACOSD)
+    atrigd = gfc_builtin_decl_for_float_kind (BUILT_IN_ACOS, expr->ts.kind);
+  else if (id == GFC_ISYM_ASIND)
+    atrigd = gfc_builtin_decl_for_float_kind (BUILT_IN_ASIN, expr->ts.kind);
+  else if (id == GFC_ISYM_ATAND)
+    atrigd = gfc_builtin_decl_for_float_kind (BUILT_IN_ATAN, expr->ts.kind);
+  else
+    gcc_unreachable ();
+
+  atrigd = build_call_expr_loc (input_location, atrigd, 1, arg);
+
+  se->expr = fold_build2_loc (input_location, MULT_EXPR, type, atrigd,
+                             fold_convert (type, rad2deg (expr->ts.kind)));
+}
+
+
+/* COTAN(X) is translated into -TAN(X+PI/2) for a REAL argument and
+   COS(X) / SIN(X) for a COMPLEX argument.  */
+
+static void
+gfc_conv_intrinsic_cotan (gfc_se *se, gfc_expr *expr)
+{
+  gfc_intrinsic_map_t *m;
+  tree arg;
+  tree type;
+
+  type = gfc_typenode_for_spec (&expr->ts);
+  gfc_conv_intrinsic_function_args (se, expr, &arg, 1);
+
+  if (expr->ts.type == BT_REAL)
+    {
+      tree tan;
+      tree tmp;
+      mpfr_t pio2;
+
+      /* Create pi/2.  */
+      gfc_set_model_kind (expr->ts.kind);
+      mpfr_init (pio2);
+      mpfr_const_pi (pio2, GFC_RND_MODE);
+      mpfr_div_ui (pio2, pio2, 2, GFC_RND_MODE);
+      tmp = gfc_conv_mpfr_to_tree (pio2, expr->ts.kind, 0);
+      mpfr_clear (pio2);
+
+      /* Find tan builtin function.  */
+      m = gfc_intrinsic_map;
+      for (; m->id != GFC_ISYM_NONE || m->double_built_in != END_BUILTINS; m++)
+       if (GFC_ISYM_TAN == m->id)
+         break;
+
+      tmp = fold_build2_loc (input_location, PLUS_EXPR, type, arg, tmp);
+      tan = gfc_get_intrinsic_lib_fndecl (m, expr);
+      tan = build_call_expr_loc (input_location, tan, 1, tmp);
+      se->expr = fold_build1_loc (input_location, NEGATE_EXPR, type, tan);
+    }
+  else
+    {
+      tree sin;
+      tree cos;
+
+      /* Find cos builtin function.  */
+      m = gfc_intrinsic_map;
+      for (; m->id != GFC_ISYM_NONE || m->double_built_in != END_BUILTINS; m++)
+       if (GFC_ISYM_COS == m->id)
+         break;
+
+      cos = gfc_get_intrinsic_lib_fndecl (m, expr);
+      cos = build_call_expr_loc (input_location, cos, 1, arg);
+
+      /* Find sin builtin function.  */
+      m = gfc_intrinsic_map;
+      for (; m->id != GFC_ISYM_NONE || m->double_built_in != END_BUILTINS; m++)
+       if (GFC_ISYM_SIN == m->id)
+         break;
+
+      sin = gfc_get_intrinsic_lib_fndecl (m, expr);
+      sin = build_call_expr_loc (input_location, sin, 1, arg);
+
+      /* Divide cos by sin.  */
+      se->expr = fold_build2_loc (input_location, RDIV_EXPR, type, cos, sin);
+    }
+}
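
The REAL branch relies on the identity cot(x) = -tan(x + pi/2).  A quick
numerical check in plain C against the cos/sin definition (libm; link with
-lm):

    #include <math.h>
    #include <stdio.h>

    int main (void)
    {
      double x = 0.7;
      double pi = acos (-1.0);
      double by_ratio = cos (x) / sin (x);       /* cot(x) by definition.  */
      double by_shift = -tan (x + pi / 2.0);     /* -TAN(X + PI/2).  */
      printf ("%.15g %.15g\n", by_ratio, by_shift);
      return 0;
    }
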
+
+
+/* COTAND(X) is translated into -TAND(X+90) for a REAL argument.  */
+
+static void
+gfc_conv_intrinsic_cotand (gfc_se *se, gfc_expr *expr)
+{
+  tree arg;
+  tree type;
+  tree ninety_tree;
+  mpfr_t ninety;
+
+  type = gfc_typenode_for_spec (&expr->ts);
+  gfc_conv_intrinsic_function_args (se, expr, &arg, 1);
+
+  gfc_set_model_kind (expr->ts.kind);
+
+  /* Build the tree for x + 90.  */
+  mpfr_init_set_ui (ninety, 90, GFC_RND_MODE);
+  ninety_tree = gfc_conv_mpfr_to_tree (ninety, expr->ts.kind, 0);
+  arg = fold_build2_loc (input_location, PLUS_EXPR, type, arg, ninety_tree);
+  mpfr_clear (ninety);
+
+  /* Find tand.  */
+  gfc_intrinsic_map_t *m = gfc_intrinsic_map;
+  for (; m->id != GFC_ISYM_NONE || m->double_built_in != END_BUILTINS; m++)
+    if (GFC_ISYM_TAND == m->id)
+      break;
+
+  tree tand = gfc_get_intrinsic_lib_fndecl (m, expr);
+  tand = build_call_expr_loc (input_location, tand, 1, arg);
+
+  se->expr = fold_build1_loc (input_location, NEGATE_EXPR, type, tand);
+}
+
+
+/* ATAN2D(Y,X) is translated into ATAN2(Y,X) * 180 / PI. */
+
+static void
+gfc_conv_intrinsic_atan2d (gfc_se *se, gfc_expr *expr)
+{
+  tree args[2];
+  tree atan2d;
+  tree type;
+
+  gfc_conv_intrinsic_function_args (se, expr, args, 2);
+  type = TREE_TYPE (args[0]);
+
+  atan2d = gfc_builtin_decl_for_float_kind (BUILT_IN_ATAN2, expr->ts.kind);
+  atan2d = build_call_expr_loc (input_location, atan2d, 2, args[0], args[1]);
+
+  se->expr = fold_build2_loc (input_location, MULT_EXPR, type, atan2d,
+                             rad2deg (expr->ts.kind));
+}
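
A quick sanity check of the degree scaling in plain C: ATAN2D(1, 1) should
come out as 45 degrees (libm; link with -lm):

    #include <math.h>
    #include <stdio.h>

    int main (void)
    {
      double rad2deg = 180.0 / acos (-1.0);
      printf ("%.15g\n", atan2 (1.0, 1.0) * rad2deg);   /* Expect 45.  */
      return 0;
    }
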
+
+
 /* COUNT(A) = Number of true elements in A.  */
 static void
 gfc_conv_intrinsic_count (gfc_se * se, gfc_expr * expr)
@@ -3297,6 +4811,28 @@ enter_nested_loop (gfc_se *se)
   return se->ss->loop;
 }
 
+/* Build the condition for a mask, which may be optional.  */
+
+static tree
+conv_mask_condition (gfc_se *maskse, gfc_expr *maskexpr,
+                        bool optional_mask)
+{
+  tree present;
+  tree type;
+
+  if (optional_mask)
+    {
+      type = TREE_TYPE (maskse->expr);
+      present = gfc_conv_expr_present (maskexpr->symtree->n.sym);
+      present = convert (type, present);
+      present = fold_build1_loc (input_location, TRUTH_NOT_EXPR, type,
+                                present);
+      return fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                             type, present, maskse->expr);
+    }
+  else
+    return maskse->expr;
+}
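
The generated guard treats an absent optional MASK as all-true, i.e.
IF (.NOT. PRESENT(MASK) .OR. MASK(I)).  The same logic in a minimal C form,
with a null pointer modelling the absent dummy argument:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    static bool
    mask_test (const bool *mask, int i)
    {
      return mask == NULL || mask[i];   /* Absent mask counts as true.  */
    }

    int main (void)
    {
      bool m[3] = { true, false, true };
      printf ("%d %d\n", mask_test (m, 1), mask_test (NULL, 1));
      return 0;
    }
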
 
 /* Inline implementation of the sum and product intrinsics.  */
 static void
@@ -3318,6 +4854,7 @@ gfc_conv_intrinsic_arith (gfc_se * se, gfc_expr * expr, enum tree_code op,
   gfc_se *parent_se;
   gfc_expr *arrayexpr;
   gfc_expr *maskexpr;
+  bool optional_mask;
 
   if (expr->rank > 0)
     {
@@ -3357,13 +4894,19 @@ gfc_conv_intrinsic_arith (gfc_se * se, gfc_expr * expr, enum tree_code op,
   arrayexpr = arg_array->expr;
 
   if (op == NE_EXPR || norm2)
-    /* PARITY and NORM2.  */
-    maskexpr = NULL;
+    {
+      /* PARITY and NORM2.  */
+      maskexpr = NULL;
+      optional_mask = false;
+    }
   else
     {
       arg_mask  = arg_array->next->next;
       gcc_assert (arg_mask != NULL);
       maskexpr = arg_mask->expr;
+      optional_mask = maskexpr && maskexpr->expr_type == EXPR_VARIABLE
+       && maskexpr->symtree->n.sym->attr.dummy
+       && maskexpr->symtree->n.sym->attr.optional;
     }
 
   if (expr->rank == 0)
@@ -3382,17 +4925,22 @@ gfc_conv_intrinsic_arith (gfc_se * se, gfc_expr * expr, enum tree_code op,
 
       /* Initialize the scalarizer.  */
       gfc_init_loopinfo (&loop);
-      gfc_add_ss_to_loop (&loop, arrayss);
+
+      /* We add the mask first because the number of iterations is
+        taken from the last ss, and this breaks if an absent
+        optional argument is used for mask.  */
+
       if (maskexpr && maskexpr->rank > 0)
        gfc_add_ss_to_loop (&loop, maskss);
+      gfc_add_ss_to_loop (&loop, arrayss);
 
       /* Initialize the loop.  */
       gfc_conv_ss_startstride (&loop);
       gfc_conv_loop_setup (&loop, &expr->where);
 
-      gfc_mark_ss_chain_used (arrayss, 1);
       if (maskexpr && maskexpr->rank > 0)
        gfc_mark_ss_chain_used (maskss, 1);
+      gfc_mark_ss_chain_used (arrayss, 1);
 
       ploop = &loop;
     }
@@ -3478,13 +5026,13 @@ gfc_conv_intrinsic_arith (gfc_se * se, gfc_expr * expr, enum tree_code op,
       gfc_add_modify (&ifblock3, resvar, res2);
       res2 = gfc_finish_block (&ifblock3);
 
-      cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
+      cond = fold_build2_loc (input_location, GT_EXPR, logical_type_node,
                              absX, scale);
       tmp = build3_v (COND_EXPR, cond, res1, res2);
       gfc_add_expr_to_block (&ifblock1, tmp);
       tmp = gfc_finish_block (&ifblock1);
 
-      cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+      cond = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
                              arrayse.expr,
                              gfc_build_const (type, integer_zero_node));
 
@@ -3501,10 +5049,13 @@ gfc_conv_intrinsic_arith (gfc_se * se, gfc_expr * expr, enum tree_code op,
 
   if (maskexpr && maskexpr->rank > 0)
     {
-      /* We enclose the above in if (mask) {...} .  */
-
+      /* We enclose the above in if (mask) {...} .  If the mask is an
+        optional argument, generate
+        IF (.NOT. PRESENT(MASK) .OR. MASK(I)).  */
+      tree ifmask;
       tmp = gfc_finish_block (&block);
-      tmp = build3_v (COND_EXPR, maskse.expr, tmp,
+      ifmask = conv_mask_condition (&maskse, maskexpr, optional_mask);
+      tmp = build3_v (COND_EXPR, ifmask, tmp,
                      build_empty_stmt (input_location));
     }
   else
@@ -3529,10 +5080,13 @@ gfc_conv_intrinsic_arith (gfc_se * se, gfc_expr * expr, enum tree_code op,
        }
       else
        {
+         tree ifmask;
+
          gcc_assert (expr->rank == 0);
          gfc_init_se (&maskse, NULL);
          gfc_conv_expr_val (&maskse, maskexpr);
-         tmp = build3_v (COND_EXPR, maskse.expr, tmp,
+         ifmask = conv_mask_condition (&maskse, maskexpr, optional_mask);
+         tmp = build3_v (COND_EXPR, ifmask, tmp,
                          build_empty_stmt (input_location));
        }
 
@@ -3663,6 +5217,22 @@ gfc_conv_intrinsic_dot_product (gfc_se * se, gfc_expr * expr)
 }
 
 
+/* Remove unneeded kind= argument from actual argument list when the
+   result conversion is dealt with in a different place.  */
+
+static void
+strip_kind_from_actual (gfc_actual_arglist * actual)
+{
+  for (gfc_actual_arglist *a = actual; a; a = a->next)
+    {
+      if (a->name && strcmp (a->name, "kind") == 0)
+       {
+         gfc_free_expr (a->expr);
+         a->expr = NULL;
+       }
+    }
+}
+
 /* Emit code for minloc or maxloc intrinsic.  There are many different cases
    we need to handle.  For performance reasons we sometimes create two
    loops instead of one, where the second one is much simpler.
@@ -3727,7 +5297,20 @@ gfc_conv_intrinsic_dot_product (gfc_se * se, gfc_expr * expr)
        S++;
       }
    For 3) and 5), if mask is scalar, this all goes into a conditional,
-   setting pos = 0; in the else branch.  */
+   setting pos = 0; in the else branch.
+
+   Since we now also support the BACK argument, instead of using
+   if (a[S] < limit), we now use
+
+   if (back)
+     cond = a[S] <= limit;
+   else
+     cond = a[S] < limit;
+   if (cond) {
+     ....
+
+     The optimizer is smart enough to move the condition out of the loop.
+     The conditions are now marked as unlikely for a further speedup.  */
 
 static void
 gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
@@ -3745,6 +5328,7 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
   tree offset;
   tree nonempty;
   tree lab1, lab2;
+  tree b_if, b_else;
   gfc_loopinfo loop;
   gfc_actual_arglist *actual;
   gfc_ss *arrayss;
@@ -3753,8 +5337,21 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
   gfc_se maskse;
   gfc_expr *arrayexpr;
   gfc_expr *maskexpr;
+  gfc_expr *backexpr;
+  gfc_se backse;
   tree pos;
   int n;
+  bool optional_mask;
+
+  actual = expr->value.function.actual;
+
+  /* The last argument, BACK, is passed by value.  Ensure this by
+     setting its name to %VAL.  */
+  for (gfc_actual_arglist *a = actual; a; a = a->next)
+    {
+      if (a->next == NULL)
+       a->name = "%VAL";
+    }
 
   if (se->ss)
     {
@@ -3762,20 +5359,45 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
       return;
     }
 
+  arrayexpr = actual->expr;
+
+  /* Special case for character maxloc.  Remove unneeded actual
+     arguments, then call a library function.  */
+
+  if (arrayexpr->ts.type == BT_CHARACTER)
+    {
+      gfc_actual_arglist *a;
+      a = actual;
+      strip_kind_from_actual (a);
+      while (a)
+       {
+         if (a->name && strcmp (a->name, "dim") == 0)
+           {
+             gfc_free_expr (a->expr);
+             a->expr = NULL;
+           }
+         a = a->next;
+       }
+      gfc_conv_intrinsic_funcall (se, expr);
+      return;
+    }
+
   /* Initialize the result.  */
   pos = gfc_create_var (gfc_array_index_type, "pos");
   offset = gfc_create_var (gfc_array_index_type, "offset");
   type = gfc_typenode_for_spec (&expr->ts);
 
   /* Walk the arguments.  */
-  actual = expr->value.function.actual;
-  arrayexpr = actual->expr;
   arrayss = gfc_walk_expr (arrayexpr);
   gcc_assert (arrayss != gfc_ss_terminator);
 
   actual = actual->next->next;
   gcc_assert (actual);
   maskexpr = actual->expr;
+  optional_mask = maskexpr && maskexpr->expr_type == EXPR_VARIABLE
+    && maskexpr->symtree->n.sym->attr.dummy
+    && maskexpr->symtree->n.sym->attr.optional;
+  backexpr = actual->next->next->expr;
   nonempty = NULL;
   if (maskexpr && maskexpr->rank != 0)
     {
@@ -3790,7 +5412,7 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
          nonempty = gfc_conv_mpz_to_tree (asize, gfc_index_integer_kind);
          mpz_clear (asize);
          nonempty = fold_build2_loc (input_location, GT_EXPR,
-                                     boolean_type_node, nonempty,
+                                     logical_type_node, nonempty,
                                      gfc_index_zero_node);
        }
       maskss = NULL;
@@ -3827,10 +5449,16 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
 
   /* Initialize the scalarizer.  */
   gfc_init_loopinfo (&loop);
-  gfc_add_ss_to_loop (&loop, arrayss);
+
+  /* We add the mask first because the number of iterations is taken
+     from the last ss, and this breaks if an absent optional argument
+     is used for mask.  */
+
   if (maskss)
     gfc_add_ss_to_loop (&loop, maskss);
 
+  gfc_add_ss_to_loop (&loop, arrayss);
+
   /* Initialize the loop.  */
   gfc_conv_ss_startstride (&loop);
 
@@ -3854,7 +5482,7 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
 
   gcc_assert (loop.dimen == 1);
   if (nonempty == NULL && maskss == NULL && loop.from[0] && loop.to[0])
-    nonempty = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
+    nonempty = fold_build2_loc (input_location, LE_EXPR, logical_type_node,
                                loop.from[0], loop.to[0]);
 
   lab1 = NULL;
@@ -3914,6 +5542,10 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
   gfc_conv_expr_val (&arrayse, arrayexpr);
   gfc_add_block_to_block (&block, &arrayse.pre);
 
+  gfc_init_se (&backse, NULL);
+  gfc_conv_expr_val (&backse, backexpr);
+  gfc_add_block_to_block (&block, &backse.pre);
+
   /* We do the following if this is a more extreme value.  */
   gfc_start_block (&ifblock);
 
@@ -3930,7 +5562,7 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
                             loop.loopvar[0], offset);
       gfc_add_modify (&ifblock2, pos, tmp);
       ifbody2 = gfc_finish_block (&ifblock2);
-      cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, pos,
+      cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, pos,
                              gfc_index_zero_node);
       tmp = build3_v (COND_EXPR, cond, ifbody2,
                      build_empty_stmt (input_location));
@@ -3951,11 +5583,35 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
       if (lab1)
        cond = fold_build2_loc (input_location,
                                op == GT_EXPR ? GE_EXPR : LE_EXPR,
-                               boolean_type_node, arrayse.expr, limit);
+                               logical_type_node, arrayse.expr, limit);
       else
-       cond = fold_build2_loc (input_location, op, boolean_type_node,
-                               arrayse.expr, limit);
+       {
+         tree ifbody2, elsebody2;
+
+         /* We switch to > or >= depending on the value of the BACK argument. */
+         cond = gfc_create_var (logical_type_node, "cond");
+
+         gfc_start_block (&ifblock);
+         b_if = fold_build2_loc (input_location, op == GT_EXPR ? GE_EXPR : LE_EXPR,
+                                 logical_type_node, arrayse.expr, limit);
+
+         gfc_add_modify (&ifblock, cond, b_if);
+         ifbody2 = gfc_finish_block (&ifblock);
+
+         gfc_start_block (&elseblock);
+         b_else = fold_build2_loc (input_location, op, logical_type_node,
+                                   arrayse.expr, limit);
 
+         gfc_add_modify (&elseblock, cond, b_else);
+         elsebody2 = gfc_finish_block (&elseblock);
+
+         tmp = fold_build3_loc (input_location, COND_EXPR, logical_type_node,
+                                backse.expr, ifbody2, elsebody2);
+
+         gfc_add_expr_to_block (&block, tmp);
+       }
+
+      cond = gfc_unlikely (cond, PRED_BUILTIN_EXPECT);
       ifbody = build3_v (COND_EXPR, cond, ifbody,
                         build_empty_stmt (input_location));
     }
@@ -3963,10 +5619,14 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
 
   if (maskss)
     {
-      /* We enclose the above in if (mask) {...}.  */
-      tmp = gfc_finish_block (&block);
+      /* We enclose the above in if (mask) {...}.  If the mask is an
+        optional argument, generate IF (.NOT. PRESENT(MASK)
+        .OR. MASK(I)). */
 
-      tmp = build3_v (COND_EXPR, maskse.expr, tmp,
+      tree ifmask;
+      ifmask = conv_mask_condition (&maskse, maskexpr, optional_mask);
+      tmp = gfc_finish_block (&block);
+      tmp = build3_v (COND_EXPR, ifmask, tmp,
                      build_empty_stmt (input_location));
     }
   else
@@ -4024,19 +5684,47 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
 
       ifbody = gfc_finish_block (&ifblock);
 
-      cond = fold_build2_loc (input_location, op, boolean_type_node,
-                             arrayse.expr, limit);
+      /* We switch to > or >= depending on the value of the BACK argument. */
+      {
+       tree ifbody2, elsebody2;
+
+       cond = gfc_create_var (logical_type_node, "cond");
+
+       gfc_start_block (&ifblock);
+       b_if = fold_build2_loc (input_location, op == GT_EXPR ? GE_EXPR : LE_EXPR,
+                               logical_type_node, arrayse.expr, limit);
+
+       gfc_add_modify (&ifblock, cond, b_if);
+       ifbody2 = gfc_finish_block (&ifblock);
+
+       gfc_start_block (&elseblock);
+       b_else = fold_build2_loc (input_location, op, logical_type_node,
+                                 arrayse.expr, limit);
 
+       gfc_add_modify (&elseblock, cond, b_else);
+       elsebody2 = gfc_finish_block (&elseblock);
+
+       tmp = fold_build3_loc (input_location, COND_EXPR, logical_type_node,
+                              backse.expr, ifbody2, elsebody2);
+      }
+
+      gfc_add_expr_to_block (&block, tmp);
+      cond = gfc_unlikely (cond, PRED_BUILTIN_EXPECT);
       tmp = build3_v (COND_EXPR, cond, ifbody,
                      build_empty_stmt (input_location));
+
       gfc_add_expr_to_block (&block, tmp);
 
       if (maskss)
        {
-         /* We enclose the above in if (mask) {...}.  */
-         tmp = gfc_finish_block (&block);
+         /* We enclose the above in if (mask) {...}.  If the mask is
+            an optional argument, generate IF (.NOT. PRESENT(MASK)
+            .OR. MASK(I)).  */
 
-         tmp = build3_v (COND_EXPR, maskse.expr, tmp,
+         tree ifmask;
+         ifmask = conv_mask_condition (&maskse, maskexpr, optional_mask);
+         tmp = gfc_finish_block (&block);
+         tmp = build3_v (COND_EXPR, ifmask, tmp,
                          build_empty_stmt (input_location));
        }
       else
@@ -4055,6 +5743,8 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
   /* For a scalar mask, enclose the loop in an if statement.  */
   if (maskexpr && maskss == NULL)
     {
+      tree ifmask;
+
       gfc_init_se (&maskse, NULL);
       gfc_conv_expr_val (&maskse, maskexpr);
       gfc_init_block (&block);
@@ -4068,8 +5758,8 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
       gfc_init_block (&elseblock);
       gfc_add_modify (&elseblock, pos, gfc_index_zero_node);
       elsetmp = gfc_finish_block (&elseblock);
-
-      tmp = build3_v (COND_EXPR, maskse.expr, tmp, elsetmp);
+      ifmask = conv_mask_condition (&maskse, maskexpr, optional_mask);
+      tmp = build3_v (COND_EXPR, ifmask, tmp, elsetmp);
       gfc_add_expr_to_block (&block, tmp);
       gfc_add_block_to_block (&se->pre, &block);
     }
@@ -4083,6 +5773,239 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, enum tree_code op)
   se->expr = convert (type, pos);
 }
 
+/* Emit code for findloc.  */
+
+static void
+gfc_conv_intrinsic_findloc (gfc_se *se, gfc_expr *expr)
+{
+  gfc_actual_arglist *array_arg, *value_arg, *dim_arg, *mask_arg,
+    *kind_arg, *back_arg;
+  gfc_expr *value_expr;
+  int ikind;
+  tree resvar;
+  stmtblock_t block;
+  stmtblock_t body;
+  stmtblock_t loopblock;
+  tree type;
+  tree tmp;
+  tree found;
+  tree forward_branch = NULL_TREE;
+  tree back_branch;
+  gfc_loopinfo loop;
+  gfc_ss *arrayss;
+  gfc_ss *maskss;
+  gfc_se arrayse;
+  gfc_se valuese;
+  gfc_se maskse;
+  gfc_se backse;
+  tree exit_label;
+  gfc_expr *maskexpr;
+  tree offset;
+  int i;
+  bool optional_mask;
+
+  array_arg = expr->value.function.actual;
+  value_arg = array_arg->next;
+  dim_arg   = value_arg->next;
+  mask_arg  = dim_arg->next;
+  kind_arg  = mask_arg->next;
+  back_arg  = kind_arg->next;
+
+  /* Remove kind and set ikind.  */
+  if (kind_arg->expr)
+    {
+      ikind = mpz_get_si (kind_arg->expr->value.integer);
+      gfc_free_expr (kind_arg->expr);
+      kind_arg->expr = NULL;
+    }
+  else
+    ikind = gfc_default_integer_kind;
+
+  value_expr = value_arg->expr;
+
+  /* Unless it's a string, pass VALUE by value.  */
+  if (value_expr->ts.type != BT_CHARACTER)
+    value_arg->name = "%VAL";
+
+  /* Pass BACK argument by value.  */
+  back_arg->name = "%VAL";
+
+  /* Call the library if we have a character function or if
+     rank > 0.  */
+  if (se->ss || array_arg->expr->ts.type == BT_CHARACTER)
+    {
+      se->ignore_optional = 1;
+      if (expr->rank == 0)
+       {
+         /* Remove dim argument.  */
+         gfc_free_expr (dim_arg->expr);
+         dim_arg->expr = NULL;
+       }
+      gfc_conv_intrinsic_funcall (se, expr);
+      return;
+    }
+
+  type = gfc_get_int_type (ikind);
+
+  /* Initialize the result.  */
+  resvar = gfc_create_var (gfc_array_index_type, "pos");
+  gfc_add_modify (&se->pre, resvar, build_int_cst (gfc_array_index_type, 0));
+  offset = gfc_create_var (gfc_array_index_type, "offset");
+
+  maskexpr = mask_arg->expr;
+  optional_mask = maskexpr && maskexpr->expr_type == EXPR_VARIABLE
+    && maskexpr->symtree->n.sym->attr.dummy
+    && maskexpr->symtree->n.sym->attr.optional;
+
+  /* Generate two loops, one for BACK=.true. and one for BACK=.false.  */
+
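+  /* Roughly, the generated code has the shape (a sketch, not the literal
+     output):
+
+       pos = 0;
+       if (back)
+         search i = n, n-1, ..., 1
+       else
+         search i = 1, 2, ..., n
+       where each step does
+         if (mask (i) && array (i) == value) { pos = i; break; }
+
+     with the PRESENT(MASK) handling of conv_mask_condition when MASK is an
+     optional dummy argument.  */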
+  for (i = 0; i < 2; i++)
+    {
+      /* Walk the arguments.  */
+      arrayss = gfc_walk_expr (array_arg->expr);
+      gcc_assert (arrayss != gfc_ss_terminator);
+
+      if (maskexpr && maskexpr->rank != 0)
+       {
+         maskss = gfc_walk_expr (maskexpr);
+         gcc_assert (maskss != gfc_ss_terminator);
+       }
+      else
+       maskss = NULL;
+
+      /* Initialize the scalarizer.  */
+      gfc_init_loopinfo (&loop);
+      exit_label = gfc_build_label_decl (NULL_TREE);
+      TREE_USED (exit_label) = 1;
+
+      /* We add the mask first because the number of iterations is
+        taken from the last ss, and this breaks if an absent
+        optional argument is used for mask.  */
+
+      if (maskss)
+       gfc_add_ss_to_loop (&loop, maskss);
+      gfc_add_ss_to_loop (&loop, arrayss);
+
+      /* Initialize the loop.  */
+      gfc_conv_ss_startstride (&loop);
+      gfc_conv_loop_setup (&loop, &expr->where);
+
+      /* Calculate the offset.  */
+      tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
+                            gfc_index_one_node, loop.from[0]);
+      gfc_add_modify (&loop.pre, offset, tmp);
+
+      gfc_mark_ss_chain_used (arrayss, 1);
+      if (maskss)
+       gfc_mark_ss_chain_used (maskss, 1);
+
+      /* The first loop is for BACK=.true.  */
+      if (i == 0)
+       loop.reverse[0] = GFC_REVERSE_SET;
+
+      /* Generate the loop body.  */
+      gfc_start_scalarized_body (&loop, &body);
+
+      /* If we have an array mask, only add the element if it is
+        set.  */
+      if (maskss)
+       {
+         gfc_init_se (&maskse, NULL);
+         gfc_copy_loopinfo_to_se (&maskse, &loop);
+         maskse.ss = maskss;
+         gfc_conv_expr_val (&maskse, maskexpr);
+         gfc_add_block_to_block (&body, &maskse.pre);
+       }
+
+      /* If the condition matches then set the return value.  */
+      gfc_start_block (&block);
+
+      /* Add the offset.  */
+      tmp = fold_build2_loc (input_location, PLUS_EXPR,
+                            TREE_TYPE (resvar),
+                            loop.loopvar[0], offset);
+      gfc_add_modify (&block, resvar, tmp);
+      /* And break out of the loop.  */
+      tmp = build1_v (GOTO_EXPR, exit_label);
+      gfc_add_expr_to_block (&block, tmp);
+
+      found = gfc_finish_block (&block);
+
+      /* Check this element.  */
+      gfc_init_se (&arrayse, NULL);
+      gfc_copy_loopinfo_to_se (&arrayse, &loop);
+      arrayse.ss = arrayss;
+      gfc_conv_expr_val (&arrayse, array_arg->expr);
+      gfc_add_block_to_block (&body, &arrayse.pre);
+
+      gfc_init_se (&valuese, NULL);
+      gfc_conv_expr_val (&valuese, value_arg->expr);
+      gfc_add_block_to_block (&body, &valuese.pre);
+
+      tmp = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
+                            arrayse.expr, valuese.expr);
+
+      tmp = build3_v (COND_EXPR, tmp, found, build_empty_stmt (input_location));
+      if (maskss)
+       {
+         /* We enclose the above in if (mask) {...}.  If the mask is
+            an optional argument, generate IF (.NOT. PRESENT(MASK)
+            .OR. MASK(I)). */
+
+         tree ifmask;
+         ifmask = conv_mask_condition (&maskse, maskexpr, optional_mask);
+         tmp = build3_v (COND_EXPR, ifmask, tmp,
+                         build_empty_stmt (input_location));
+       }
+
+      gfc_add_expr_to_block (&body, tmp);
+      gfc_add_block_to_block (&body, &arrayse.post);
+
+      gfc_trans_scalarizing_loops (&loop, &body);
+
+      /* Add the exit label.  */
+      tmp = build1_v (LABEL_EXPR, exit_label);
+      gfc_add_expr_to_block (&loop.pre, tmp);
+      gfc_start_block (&loopblock);
+      gfc_add_block_to_block (&loopblock, &loop.pre);
+      gfc_add_block_to_block (&loopblock, &loop.post);
+      if (i == 0)
+       forward_branch = gfc_finish_block (&loopblock);
+      else
+       back_branch = gfc_finish_block (&loopblock);
+
+      gfc_cleanup_loop (&loop);
+    }
+
+  /* Enclose the two loops in an IF statement; the reversed loop, built on
+     the first pass, is the one taken when BACK is true.  */
+
+  gfc_init_se (&backse, NULL);
+  gfc_conv_expr_val (&backse, back_arg->expr);
+  gfc_add_block_to_block (&se->pre, &backse.pre);
+  tmp = build3_v (COND_EXPR, backse.expr, forward_branch, back_branch);
+
+  /* For a scalar mask, enclose the loop in an if statement.  */
+  if (maskexpr && maskss == NULL)
+    {
+      tree ifmask;
+      tree if_stmt;
+
+      gfc_init_se (&maskse, NULL);
+      gfc_conv_expr_val (&maskse, maskexpr);
+      gfc_init_block (&block);
+      gfc_add_expr_to_block (&block, maskse.expr);
+      ifmask = conv_mask_condition (&maskse, maskexpr, optional_mask);
+      if_stmt = build3_v (COND_EXPR, ifmask, tmp,
+                         build_empty_stmt (input_location));
+      gfc_add_expr_to_block (&block, if_stmt);
+      tmp = gfc_finish_block (&block);
+    }
+
+  gfc_add_expr_to_block (&se->pre, tmp);
+  se->expr = convert (type, resvar);
+}
+
 /* Emit code for minval or maxval intrinsic.  There are many different cases
    we need to handle.  For performance reasons we sometimes create two
    loops instead of one, where the second one is much simpler.
@@ -4199,6 +6122,7 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
   gfc_expr *arrayexpr;
   gfc_expr *maskexpr;
   int n;
+  bool optional_mask;
 
   if (se->ss)
     {
@@ -4206,6 +6130,21 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
       return;
     }
 
+  actual = expr->value.function.actual;
+  arrayexpr = actual->expr;
+
+  if (arrayexpr->ts.type == BT_CHARACTER)
+    {
+      gfc_actual_arglist *dim = actual->next;
+      if (expr->rank == 0 && dim->expr != NULL)
+       {
+         gfc_free_expr (dim->expr);
+         dim->expr = NULL;
+       }
+      gfc_conv_intrinsic_funcall (se, expr);
+      return;
+    }
+
   type = gfc_typenode_for_spec (&expr->ts);
   /* Initialize the result.  */
   limit = gfc_create_var (type, "limit");
@@ -4254,14 +6193,15 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
   gfc_add_modify (&se->pre, limit, tmp);
 
   /* Walk the arguments.  */
-  actual = expr->value.function.actual;
-  arrayexpr = actual->expr;
   arrayss = gfc_walk_expr (arrayexpr);
   gcc_assert (arrayss != gfc_ss_terminator);
 
   actual = actual->next->next;
   gcc_assert (actual);
   maskexpr = actual->expr;
+  optional_mask = maskexpr && maskexpr->expr_type == EXPR_VARIABLE
+    && maskexpr->symtree->n.sym->attr.dummy
+    && maskexpr->symtree->n.sym->attr.optional;
   nonempty = NULL;
   if (maskexpr && maskexpr->rank != 0)
     {
@@ -4276,7 +6216,7 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
          nonempty = gfc_conv_mpz_to_tree (asize, gfc_index_integer_kind);
          mpz_clear (asize);
          nonempty = fold_build2_loc (input_location, GT_EXPR,
-                                     boolean_type_node, nonempty,
+                                     logical_type_node, nonempty,
                                      gfc_index_zero_node);
        }
       maskss = NULL;
@@ -4284,9 +6224,14 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
 
   /* Initialize the scalarizer.  */
   gfc_init_loopinfo (&loop);
-  gfc_add_ss_to_loop (&loop, arrayss);
+
+  /* We add the mask first because the number of iterations is taken
+     from the last ss, and this breaks if an absent optional argument
+     is used for mask.  */
+
   if (maskss)
     gfc_add_ss_to_loop (&loop, maskss);
+  gfc_add_ss_to_loop (&loop, arrayss);
 
   /* Initialize the loop.  */
   gfc_conv_ss_startstride (&loop);
@@ -4310,15 +6255,15 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
 
   if (nonempty == NULL && maskss == NULL
       && loop.dimen == 1 && loop.from[0] && loop.to[0])
-    nonempty = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
+    nonempty = fold_build2_loc (input_location, LE_EXPR, logical_type_node,
                                loop.from[0], loop.to[0]);
   nonempty_var = NULL;
   if (nonempty == NULL
       && (HONOR_INFINITIES (DECL_MODE (limit))
          || HONOR_NANS (DECL_MODE (limit))))
     {
-      nonempty_var = gfc_create_var (boolean_type_node, "nonempty");
-      gfc_add_modify (&se->pre, nonempty_var, boolean_false_node);
+      nonempty_var = gfc_create_var (logical_type_node, "nonempty");
+      gfc_add_modify (&se->pre, nonempty_var, logical_false_node);
       nonempty = nonempty_var;
     }
   lab = NULL;
@@ -4332,8 +6277,8 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
        }
       else
        {
-         fast = gfc_create_var (boolean_type_node, "fast");
-         gfc_add_modify (&se->pre, fast, boolean_false_node);
+         fast = gfc_create_var (logical_type_node, "fast");
+         gfc_add_modify (&se->pre, fast, logical_false_node);
        }
     }
 
@@ -4367,12 +6312,12 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
   gfc_init_block (&block2);
 
   if (nonempty_var)
-    gfc_add_modify (&block2, nonempty_var, boolean_true_node);
+    gfc_add_modify (&block2, nonempty_var, logical_true_node);
 
   if (HONOR_NANS (DECL_MODE (limit)))
     {
       tmp = fold_build2_loc (input_location, op == GT_EXPR ? GE_EXPR : LE_EXPR,
-                            boolean_type_node, arrayse.expr, limit);
+                            logical_type_node, arrayse.expr, limit);
       if (lab)
        ifbody = build1_v (GOTO_EXPR, lab);
       else
@@ -4381,7 +6326,7 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
 
          gfc_init_block (&ifblock);
          gfc_add_modify (&ifblock, limit, arrayse.expr);
-         gfc_add_modify (&ifblock, fast, boolean_true_node);
+         gfc_add_modify (&ifblock, fast, logical_true_node);
          ifbody = gfc_finish_block (&ifblock);
        }
       tmp = build3_v (COND_EXPR, tmp, ifbody,
@@ -4392,22 +6337,10 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
     {
       /* MIN_EXPR/MAX_EXPR has unspecified behavior with NaNs or
         signed zeros.  */
-      if (HONOR_SIGNED_ZEROS (DECL_MODE (limit)))
-       {
-         tmp = fold_build2_loc (input_location, op, boolean_type_node,
-                                arrayse.expr, limit);
-         ifbody = build2_v (MODIFY_EXPR, limit, arrayse.expr);
-         tmp = build3_v (COND_EXPR, tmp, ifbody,
-                         build_empty_stmt (input_location));
-         gfc_add_expr_to_block (&block2, tmp);
-       }
-      else
-       {
-         tmp = fold_build2_loc (input_location,
-                                op == GT_EXPR ? MAX_EXPR : MIN_EXPR,
-                                type, arrayse.expr, limit);
-         gfc_add_modify (&block2, limit, tmp);
-       }
+      tmp = fold_build2_loc (input_location,
+                            op == GT_EXPR ? MAX_EXPR : MIN_EXPR,
+                            type, arrayse.expr, limit);
+      gfc_add_modify (&block2, limit, tmp);
     }
 
   if (fast)
@@ -4416,10 +6349,9 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
 
       /* MIN_EXPR/MAX_EXPR has unspecified behavior with NaNs or
         signed zeros.  */
-      if (HONOR_NANS (DECL_MODE (limit))
-         || HONOR_SIGNED_ZEROS (DECL_MODE (limit)))
+      if (HONOR_NANS (DECL_MODE (limit)))
        {
-         tmp = fold_build2_loc (input_location, op, boolean_type_node,
+         tmp = fold_build2_loc (input_location, op, logical_type_node,
                                 arrayse.expr, limit);
          ifbody = build2_v (MODIFY_EXPR, limit, arrayse.expr);
          ifbody = build3_v (COND_EXPR, tmp, ifbody,
@@ -4442,9 +6374,15 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
 
   tmp = gfc_finish_block (&block);
   if (maskss)
-    /* We enclose the above in if (mask) {...}.  */
-    tmp = build3_v (COND_EXPR, maskse.expr, tmp,
-                   build_empty_stmt (input_location));
+    {
+      /* We enclose the above in if (mask) {...}.  If the mask is an
+        optional argument, generate IF (.NOT. PRESENT(MASK)
+        .OR. MASK(I)).  */
+      tree ifmask;
+      ifmask = conv_mask_condition (&maskse, maskexpr, optional_mask);
+      tmp = build3_v (COND_EXPR, ifmask, tmp,
+                     build_empty_stmt (input_location));
+    }
   gfc_add_expr_to_block (&body, tmp);
 
   if (lab)
@@ -4479,10 +6417,9 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
 
       /* MIN_EXPR/MAX_EXPR has unspecified behavior with NaNs or
         signed zeros.  */
-      if (HONOR_NANS (DECL_MODE (limit))
-         || HONOR_SIGNED_ZEROS (DECL_MODE (limit)))
+      if (HONOR_NANS (DECL_MODE (limit)))
        {
-         tmp = fold_build2_loc (input_location, op, boolean_type_node,
+         tmp = fold_build2_loc (input_location, op, logical_type_node,
                                 arrayse.expr, limit);
          ifbody = build2_v (MODIFY_EXPR, limit, arrayse.expr);
          tmp = build3_v (COND_EXPR, tmp, ifbody,
@@ -4502,8 +6439,13 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
       tmp = gfc_finish_block (&block);
       if (maskss)
        /* We enclose the above in if (mask) {...}.  */
-       tmp = build3_v (COND_EXPR, maskse.expr, tmp,
-                       build_empty_stmt (input_location));
+       {
+         tree ifmask;
+         ifmask = conv_mask_condition (&maskse, maskexpr, optional_mask);
+         tmp = build3_v (COND_EXPR, ifmask, tmp,
+                         build_empty_stmt (input_location));
+       }
+
       gfc_add_expr_to_block (&body, tmp);
       /* Avoid initializing loopvar[0] again, it should be left where
         it finished by the first loop.  */
@@ -4531,6 +6473,7 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
   if (maskexpr && maskss == NULL)
     {
       tree else_stmt;
+      tree ifmask;
 
       gfc_init_se (&maskse, NULL);
       gfc_conv_expr_val (&maskse, maskexpr);
@@ -4543,7 +6486,9 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, enum tree_code op)
        else_stmt = build2_v (MODIFY_EXPR, limit, huge_cst);
       else
        else_stmt = build_empty_stmt (input_location);
-      tmp = build3_v (COND_EXPR, maskse.expr, tmp, else_stmt);
+
+      ifmask = conv_mask_condition (&maskse, maskexpr, optional_mask);
+      tmp = build3_v (COND_EXPR, ifmask, tmp, else_stmt);
       gfc_add_expr_to_block (&block, tmp);
       gfc_add_block_to_block (&se->pre, &block);
     }
@@ -4569,10 +6514,28 @@ gfc_conv_intrinsic_btest (gfc_se * se, gfc_expr * expr)
   gfc_conv_intrinsic_function_args (se, expr, args, 2);
   type = TREE_TYPE (args[0]);
 
+  /* Optionally generate code for runtime argument check.  */
+  if (gfc_option.rtcheck & GFC_RTCHECK_BITS)
+    {
+      tree below = fold_build2_loc (input_location, LT_EXPR,
+                                   logical_type_node, args[1],
+                                   build_int_cst (TREE_TYPE (args[1]), 0));
+      tree nbits = build_int_cst (TREE_TYPE (args[1]), TYPE_PRECISION (type));
+      tree above = fold_build2_loc (input_location, GE_EXPR,
+                                   logical_type_node, args[1], nbits);
+      tree scond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                                   logical_type_node, below, above);
+      gfc_trans_runtime_check (true, false, scond, &se->pre, &expr->where,
+                              "POS argument (%ld) out of range 0:%ld "
+                              "in intrinsic BTEST",
+                              fold_convert (long_integer_type_node, args[1]),
+                              fold_convert (long_integer_type_node, nbits));
+    }
+
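+  /* The result itself is simply (I & (1 << POS)) != 0, converted to the
+     requested logical kind.  */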
   tmp = fold_build2_loc (input_location, LSHIFT_EXPR, type,
                         build_int_cst (type, 1), args[1]);
   tmp = fold_build2_loc (input_location, BIT_AND_EXPR, type, args[0], tmp);
-  tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
+  tmp = fold_build2_loc (input_location, NE_EXPR, logical_type_node, tmp,
                         build_int_cst (type, 0));
   type = gfc_typenode_for_spec (&expr->ts);
   se->expr = convert (type, tmp);
@@ -4600,7 +6563,7 @@ gfc_conv_intrinsic_bitcomp (gfc_se * se, gfc_expr * expr, enum tree_code op)
     args[0] = fold_convert (TREE_TYPE (args[1]), args[0]);
 
   /* Now, we compare them.  */
-  se->expr = fold_build2_loc (input_location, op, boolean_type_node,
+  se->expr = fold_build2_loc (input_location, op, logical_type_node,
                              args[0], args[1]);
 }
 
@@ -4639,6 +6602,32 @@ gfc_conv_intrinsic_singlebitop (gfc_se * se, gfc_expr * expr, int set)
   gfc_conv_intrinsic_function_args (se, expr, args, 2);
   type = TREE_TYPE (args[0]);
 
+  /* Optionally generate code for runtime argument check.  */
+  if (gfc_option.rtcheck & GFC_RTCHECK_BITS)
+    {
+      tree below = fold_build2_loc (input_location, LT_EXPR,
+                                   logical_type_node, args[1],
+                                   build_int_cst (TREE_TYPE (args[1]), 0));
+      tree nbits = build_int_cst (TREE_TYPE (args[1]), TYPE_PRECISION (type));
+      tree above = fold_build2_loc (input_location, GE_EXPR,
+                                   logical_type_node, args[1], nbits);
+      tree scond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                                   logical_type_node, below, above);
+      size_t len_name = strlen (expr->value.function.isym->name);
+      char *name = XALLOCAVEC (char, len_name + 1);
+      for (size_t i = 0; i < len_name; i++)
+       name[i] = TOUPPER (expr->value.function.isym->name[i]);
+      name[len_name] = '\0';
+      tree iname = gfc_build_addr_expr (pchar_type_node,
+                                       gfc_build_cstring_const (name));
+      gfc_trans_runtime_check (true, false, scond, &se->pre, &expr->where,
+                              "POS argument (%ld) out of range 0:%ld "
+                              "in intrinsic %s",
+                              fold_convert (long_integer_type_node, args[1]),
+                              fold_convert (long_integer_type_node, nbits),
+                              iname);
+    }
+
   tmp = fold_build2_loc (input_location, LSHIFT_EXPR, type,
                         build_int_cst (type, 1), args[1]);
   if (set)
@@ -4664,6 +6653,42 @@ gfc_conv_intrinsic_ibits (gfc_se * se, gfc_expr * expr)
   gfc_conv_intrinsic_function_args (se, expr, args, 3);
   type = TREE_TYPE (args[0]);
 
+  /* Optionally generate code for runtime argument check.  */
+  if (gfc_option.rtcheck & GFC_RTCHECK_BITS)
+    {
+      tree tmp1 = fold_convert (long_integer_type_node, args[1]);
+      tree tmp2 = fold_convert (long_integer_type_node, args[2]);
+      tree nbits = build_int_cst (long_integer_type_node,
+                                 TYPE_PRECISION (type));
+      tree below = fold_build2_loc (input_location, LT_EXPR,
+                                   logical_type_node, args[1],
+                                   build_int_cst (TREE_TYPE (args[1]), 0));
+      tree above = fold_build2_loc (input_location, GT_EXPR,
+                                   logical_type_node, tmp1, nbits);
+      tree scond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                                   logical_type_node, below, above);
+      gfc_trans_runtime_check (true, false, scond, &se->pre, &expr->where,
+                              "POS argument (%ld) out of range 0:%ld "
+                              "in intrinsic IBITS", tmp1, nbits);
+      below = fold_build2_loc (input_location, LT_EXPR,
+                              logical_type_node, args[2],
+                              build_int_cst (TREE_TYPE (args[2]), 0));
+      above = fold_build2_loc (input_location, GT_EXPR,
+                              logical_type_node, tmp2, nbits);
+      scond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                              logical_type_node, below, above);
+      gfc_trans_runtime_check (true, false, scond, &se->pre, &expr->where,
+                              "LEN argument (%ld) out of range 0:%ld "
+                              "in intrinsic IBITS", tmp2, nbits);
+      above = fold_build2_loc (input_location, PLUS_EXPR,
+                              long_integer_type_node, tmp1, tmp2);
+      scond = fold_build2_loc (input_location, GT_EXPR,
+                              logical_type_node, above, nbits);
+      gfc_trans_runtime_check (true, false, scond, &se->pre, &expr->where,
+                              "POS(%ld)+LEN(%ld)>BIT_SIZE(%ld) "
+                              "in intrinsic IBITS", tmp1, tmp2, nbits);
+    }
+
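+  /* The extraction itself is (I >> POS) & ~((-1) << LEN): build the mask
+     from LEN first, then shift I right by POS and apply the mask.  */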
   mask = build_int_cst (type, -1);
   mask = fold_build2_loc (input_location, LSHIFT_EXPR, type, mask, args[2]);
   mask = fold_build1_loc (input_location, BIT_NOT_EXPR, type, mask);
@@ -4673,11 +6698,83 @@ gfc_conv_intrinsic_ibits (gfc_se * se, gfc_expr * expr)
   se->expr = fold_build2_loc (input_location, BIT_AND_EXPR, type, tmp, mask);
 }
 
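+/* SHAPE intrinsic: drop the KIND argument, call the library function and,
+   for an assumed-rank argument that is associated with an assumed-size
+   array, set the last element of the result to -1 as required by
+   F2018 16.9.172.  */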
+static void
+gfc_conv_intrinsic_shape (gfc_se *se, gfc_expr *expr)
+{
+  gfc_actual_arglist *s, *k;
+  gfc_expr *e;
+  gfc_array_spec *as;
+  gfc_ss *ss;
+
+  /* Remove the KIND argument, if present. */
+  s = expr->value.function.actual;
+  k = s->next;
+  e = k->expr;
+  gfc_free_expr (e);
+  k->expr = NULL;
+
+  gfc_conv_intrinsic_funcall (se, expr);
+
+  as = gfc_get_full_arrayspec_from_expr (s->expr);
+  ss = gfc_walk_expr (s->expr);
+
+  /* According to F2018 16.9.172, para 5, an assumed-rank entity that is
+     argument associated with an assumed-size array has the ubound of its
+     final dimension set to -1, and SHAPE must return this.  */
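+
+  /* A hypothetical illustration: for
+
+       subroutine check (x)
+         integer :: x(..)
+       end subroutine check
+
+     called with an assumed-size actual argument, SHAPE (x) then has -1 as
+     its last element.  */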
+  if (as && as->type == AS_ASSUMED_RANK
+      && se->expr && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr))
+      && ss && ss->info->type == GFC_SS_SECTION)
+    {
+      tree desc, rank, minus_one, cond, ubound, tmp;
+      stmtblock_t block;
+      gfc_se ase;
+
+      minus_one = build_int_cst (gfc_array_index_type, -1);
+
+      /* Recover the descriptor for the array.  */
+      gfc_init_se (&ase, NULL);
+      ase.descriptor_only = 1;
+      gfc_conv_expr_lhs (&ase, ss->info->expr);
+
+      /* Obtain rank-1 so that we can address both descriptors.  */
+      rank = gfc_conv_descriptor_rank (ase.expr);
+      rank = fold_convert (gfc_array_index_type, rank);
+      rank = fold_build2_loc (input_location, PLUS_EXPR,
+                             gfc_array_index_type,
+                             rank, minus_one);
+      rank = gfc_evaluate_now (rank, &se->pre);
+
+      /* The ubound for the final dimension will be tested for being -1.  */
+      ubound = gfc_conv_descriptor_ubound_get (ase.expr, rank);
+      ubound = gfc_evaluate_now (ubound, &se->pre);
+      cond = fold_build2_loc (input_location, EQ_EXPR,
+                            logical_type_node,
+                            ubound, minus_one);
+
+      /* Obtain the last element of the result from the library shape
+        intrinsic and set it to -1 if that is the value of ubound.  */
+      desc = se->expr;
+      tmp = gfc_conv_array_data (desc);
+      tmp = build_fold_indirect_ref_loc (input_location, tmp);
+      tmp = gfc_build_array_ref (tmp, rank, NULL, NULL);
+
+      gfc_init_block (&block);
+      gfc_add_modify (&block, tmp, build_int_cst (TREE_TYPE (tmp), -1));
+
+      cond = build3_v (COND_EXPR, cond,
+                      gfc_finish_block (&block),
+                      build_empty_stmt (input_location));
+      gfc_add_expr_to_block (&se->pre, cond);
+    }
+}
+
 static void
 gfc_conv_intrinsic_shift (gfc_se * se, gfc_expr * expr, bool right_shift,
                          bool arithmetic)
 {
   tree args[2], type, num_bits, cond;
+  tree bigshift;
 
   gfc_conv_intrinsic_function_args (se, expr, args, 2);
 
@@ -4697,15 +6794,53 @@ gfc_conv_intrinsic_shift (gfc_se * se, gfc_expr * expr, bool right_shift,
   if (!arithmetic)
     se->expr = fold_convert (type, se->expr);
 
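+  /* Precompute the value to use once the shift count reaches BIT_SIZE (I):
+     zero for the logical shifts, and for the arithmetic right shift the
+     sign fill, i.e. 0 for nonnegative I and -1 for negative I.  */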
+  if (!arithmetic)
+    bigshift = build_int_cst (type, 0);
+  else
+    {
+      tree nonneg = fold_build2_loc (input_location, GE_EXPR,
+                                    logical_type_node, args[0],
+                                    build_int_cst (TREE_TYPE (args[0]), 0));
+      bigshift = fold_build3_loc (input_location, COND_EXPR, type, nonneg,
+                                 build_int_cst (type, 0),
+                                 build_int_cst (type, -1));
+    }
+
   /* The Fortran standard allows shift widths <= BIT_SIZE(I), whereas
      gcc requires a shift width < BIT_SIZE(I), so we have to catch this
      special case.  */
   num_bits = build_int_cst (TREE_TYPE (args[1]), TYPE_PRECISION (type));
-  cond = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
+
+  /* Optionally generate code for runtime argument check.  */
+  if (gfc_option.rtcheck & GFC_RTCHECK_BITS)
+    {
+      tree below = fold_build2_loc (input_location, LT_EXPR,
+                                   logical_type_node, args[1],
+                                   build_int_cst (TREE_TYPE (args[1]), 0));
+      tree above = fold_build2_loc (input_location, GT_EXPR,
+                                   logical_type_node, args[1], num_bits);
+      tree scond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                                   logical_type_node, below, above);
+      size_t len_name = strlen (expr->value.function.isym->name);
+      char *name = XALLOCAVEC (char, len_name + 1);
+      for (size_t i = 0; i < len_name; i++)
+       name[i] = TOUPPER (expr->value.function.isym->name[i]);
+      name[len_name] = '\0';
+      tree iname = gfc_build_addr_expr (pchar_type_node,
+                                       gfc_build_cstring_const (name));
+      gfc_trans_runtime_check (true, false, scond, &se->pre, &expr->where,
+                              "SHIFT argument (%ld) out of range 0:%ld "
+                              "in intrinsic %s",
+                              fold_convert (long_integer_type_node, args[1]),
+                              fold_convert (long_integer_type_node, num_bits),
+                              iname);
+    }
+
+  cond = fold_build2_loc (input_location, GE_EXPR, logical_type_node,
                          args[1], num_bits);
 
   se->expr = fold_build3_loc (input_location, COND_EXPR, type, cond,
-                             build_int_cst (type, 0), se->expr);
+                             bigshift, se->expr);
 }
 
 /* ISHFT (I, SHIFT) = (abs (shift) >= BIT_SIZE (i))
@@ -4747,7 +6882,7 @@ gfc_conv_intrinsic_ishft (gfc_se * se, gfc_expr * expr)
   rshift = fold_convert (type, fold_build2_loc (input_location, RSHIFT_EXPR,
                                    utype, convert (utype, args[0]), width));
 
-  tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node, args[1],
+  tmp = fold_build2_loc (input_location, GE_EXPR, logical_type_node, args[1],
                         build_int_cst (TREE_TYPE (args[1]), 0));
   tmp = fold_build3_loc (input_location, COND_EXPR, type, tmp, lshift, rshift);
 
@@ -4755,7 +6890,21 @@ gfc_conv_intrinsic_ishft (gfc_se * se, gfc_expr * expr)
      gcc requires a shift width < BIT_SIZE(I), so we have to catch this
      special case.  */
   num_bits = build_int_cst (TREE_TYPE (args[1]), TYPE_PRECISION (type));
-  cond = fold_build2_loc (input_location, GE_EXPR, boolean_type_node, width,
+
+  /* Optionally generate code for runtime argument check.  */
+  if (gfc_option.rtcheck & GFC_RTCHECK_BITS)
+    {
+      tree outside = fold_build2_loc (input_location, GT_EXPR,
+                                   logical_type_node, width, num_bits);
+      gfc_trans_runtime_check (true, false, outside, &se->pre, &expr->where,
+                              "SHIFT argument (%ld) out of range -%ld:%ld "
+                              "in intrinsic ISHFT",
+                              fold_convert (long_integer_type_node, args[1]),
+                              fold_convert (long_integer_type_node, num_bits),
+                              fold_convert (long_integer_type_node, num_bits));
+    }
+
+  cond = fold_build2_loc (input_location, GE_EXPR, logical_type_node, width,
                          num_bits);
   se->expr = fold_build3_loc (input_location, COND_EXPR, type, cond,
                              build_int_cst (type, 0), tmp);
@@ -4773,6 +6922,7 @@ gfc_conv_intrinsic_ishftc (gfc_se * se, gfc_expr * expr)
   tree lrot;
   tree rrot;
   tree zero;
+  tree nbits;
   unsigned int num_args;
 
   num_args = gfc_intrinsic_argument_list_length (expr);
@@ -4780,12 +6930,14 @@ gfc_conv_intrinsic_ishftc (gfc_se * se, gfc_expr * expr)
 
   gfc_conv_intrinsic_function_args (se, expr, args, num_args);
 
+  type = TREE_TYPE (args[0]);
+  nbits = build_int_cst (long_integer_type_node, TYPE_PRECISION (type));
+
   if (num_args == 3)
     {
       /* Use a library function for the 3 parameter version.  */
       tree int4type = gfc_get_int_type (4);
 
-      type = TREE_TYPE (args[0]);
       /* We convert the first argument to at least 4 bytes, and
         convert back afterwards.  This removes the need for library
         functions for all argument sizes, and function will be
@@ -4799,6 +6951,32 @@ gfc_conv_intrinsic_ishftc (gfc_se * se, gfc_expr * expr)
       args[1] = convert (int4type, args[1]);
       args[2] = convert (int4type, args[2]);
 
+      /* Optionally generate code for runtime argument check.  */
+      if (gfc_option.rtcheck & GFC_RTCHECK_BITS)
+       {
+         tree size = fold_convert (long_integer_type_node, args[2]);
+         tree below = fold_build2_loc (input_location, LE_EXPR,
+                                       logical_type_node, size,
+                                       build_int_cst (TREE_TYPE (args[1]), 0));
+         tree above = fold_build2_loc (input_location, GT_EXPR,
+                                       logical_type_node, size, nbits);
+         tree scond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                                       logical_type_node, below, above);
+         gfc_trans_runtime_check (true, false, scond, &se->pre, &expr->where,
+                                  "SIZE argument (%ld) out of range 1:%ld "
+                                  "in intrinsic ISHFTC", size, nbits);
+         tree width = fold_convert (long_integer_type_node, args[1]);
+         width = fold_build1_loc (input_location, ABS_EXPR,
+                                  long_integer_type_node, width);
+         scond = fold_build2_loc (input_location, GT_EXPR,
+                                  logical_type_node, width, size);
+         gfc_trans_runtime_check (true, false, scond, &se->pre, &expr->where,
+                                  "SHIFT argument (%ld) out of range -%ld:%ld "
+                                  "in intrinsic ISHFTC",
+                                  fold_convert (long_integer_type_node, args[1]),
+                                  size, size);
+       }
+
       switch (expr->ts.kind)
        {
        case 1:
@@ -4824,12 +7002,26 @@ gfc_conv_intrinsic_ishftc (gfc_se * se, gfc_expr * expr)
 
       return;
     }
-  type = TREE_TYPE (args[0]);
 
   /* Evaluate arguments only once.  */
   args[0] = gfc_evaluate_now (args[0], &se->pre);
   args[1] = gfc_evaluate_now (args[1], &se->pre);
 
+  /* Optionally generate code for runtime argument check.  */
+  if (gfc_option.rtcheck & GFC_RTCHECK_BITS)
+    {
+      tree width = fold_convert (long_integer_type_node, args[1]);
+      width = fold_build1_loc (input_location, ABS_EXPR,
+                              long_integer_type_node, width);
+      tree outside = fold_build2_loc (input_location, GT_EXPR,
+                                     logical_type_node, width, nbits);
+      gfc_trans_runtime_check (true, false, outside, &se->pre, &expr->where,
+                              "SHIFT argument (%ld) out of range -%ld:%ld "
+                              "in intrinsic ISHFTC",
+                              fold_convert (long_integer_type_node, args[1]),
+                              nbits, nbits);
+    }
+
   /* Rotate left if positive.  */
   lrot = fold_build2_loc (input_location, LROTATE_EXPR, type, args[0], args[1]);
 
@@ -4839,12 +7031,12 @@ gfc_conv_intrinsic_ishftc (gfc_se * se, gfc_expr * expr)
   rrot = fold_build2_loc (input_location,RROTATE_EXPR, type, args[0], tmp);
 
   zero = build_int_cst (TREE_TYPE (args[1]), 0);
-  tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node, args[1],
+  tmp = fold_build2_loc (input_location, GT_EXPR, logical_type_node, args[1],
                         zero);
   rrot = fold_build3_loc (input_location, COND_EXPR, type, tmp, lrot, rrot);
 
   /* Do nothing if shift == 0.  */
-  tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, args[1],
+  tmp = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, args[1],
                         zero);
   se->expr = fold_build3_loc (input_location, COND_EXPR, type, tmp, args[0],
                              rrot);
@@ -4942,7 +7134,7 @@ gfc_conv_intrinsic_leadz (gfc_se * se, gfc_expr * expr)
                              fold_convert (arg_type, ullmax), ullsize);
       cond = fold_build2_loc (input_location, BIT_AND_EXPR, arg_type,
                              arg, cond);
-      cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+      cond = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
                              cond, build_int_cst (arg_type, 0));
 
       tmp1 = fold_build2_loc (input_location, RSHIFT_EXPR, arg_type,
@@ -4966,7 +7158,7 @@ gfc_conv_intrinsic_leadz (gfc_se * se, gfc_expr * expr)
   /* Build BIT_SIZE.  */
   bit_size = build_int_cst (result_type, argsize);
 
-  cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+  cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
                          arg, build_int_cst (arg_type, 0));
   se->expr = fold_build3_loc (input_location, COND_EXPR, result_type, cond,
                              bit_size, leadz);
@@ -5051,7 +7243,7 @@ gfc_conv_intrinsic_trailz (gfc_se * se, gfc_expr *expr)
 
       cond = fold_build2_loc (input_location, BIT_AND_EXPR, arg_type, arg,
                              fold_convert (arg_type, ullmax));
-      cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, cond,
+      cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, cond,
                              build_int_cst (arg_type, 0));
 
       tmp1 = fold_build2_loc (input_location, RSHIFT_EXPR, arg_type,
@@ -5075,7 +7267,7 @@ gfc_conv_intrinsic_trailz (gfc_se * se, gfc_expr *expr)
   /* Build BIT_SIZE.  */
   bit_size = build_int_cst (result_type, argsize);
 
-  cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+  cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
                          arg, build_int_cst (arg_type, 0));
   se->expr = fold_build3_loc (input_location, COND_EXPR, result_type, cond,
                              bit_size, trailz);
@@ -5232,7 +7424,6 @@ conv_generic_with_optional_char_arg (gfc_se* se, gfc_expr* expr,
   gfc_free_symbol (sym);
 }
 
-
 /* The length of a character string.  */
 static void
 gfc_conv_intrinsic_len (gfc_se * se, gfc_expr * expr)
@@ -5280,10 +7471,9 @@ gfc_conv_intrinsic_len (gfc_se * se, gfc_expr * expr)
          break;
        }
 
-      /* Otherwise fall through.  */
+      /* Fall through.  */
 
     default:
-      /* Anybody stupid enough to do this deserves inefficient code.  */
       gfc_init_se (&argse, se);
       if (arg->rank == 0)
        gfc_conv_expr (&argse, arg);
@@ -5350,7 +7540,7 @@ gfc_conv_intrinsic_index_scan_verify (gfc_se * se, gfc_expr * expr,
   else
     args[4] = convert (logical4_type_node, args[4]);
 
-  fndecl = build_addr (function, current_function_decl);
+  fndecl = build_addr (function);
   se->expr = build_call_array_loc (input_location,
                               TREE_TYPE (TREE_TYPE (function)), fndecl,
                               5, args);
@@ -5508,7 +7698,7 @@ gfc_conv_intrinsic_mask (gfc_se * se, gfc_expr * expr, int left)
 
       /* Special case arg == 0, because SHIFT_EXPR wants a shift strictly
         smaller than type width.  */
-      cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, arg,
+      cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, arg,
                              build_int_cst (TREE_TYPE (arg), 0));
       res = fold_build3_loc (input_location, COND_EXPR, utype, cond,
                             build_int_cst (utype, 0), res);
@@ -5522,7 +7712,7 @@ gfc_conv_intrinsic_mask (gfc_se * se, gfc_expr * expr, int left)
 
       /* Special case arg == bit_size, because SHIFT_EXPR wants a shift
         strictly smaller than type width.  */
-      cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+      cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
                              arg, bitsize);
       res = fold_build3_loc (input_location, COND_EXPR, utype,
                             cond, allones, res);
@@ -5643,7 +7833,7 @@ gfc_conv_intrinsic_spacing (gfc_se * se, gfc_expr * expr)
   gfc_add_modify (&block, res, tmp);
 
   /* Finish by building the IF statement for value zero.  */
-  cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, arg,
+  cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, arg,
                          build_real_from_int_cst (type, integer_zero_node));
   tmp = build3_v (COND_EXPR, cond, build2_v (MODIFY_EXPR, res, tiny),
                  gfc_finish_block (&block));
@@ -5714,7 +7904,7 @@ gfc_conv_intrinsic_rrspacing (gfc_se * se, gfc_expr * expr)
   stmt = gfc_finish_block (&block);
 
   /* if (x != 0) */
-  cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, x,
+  cond = fold_build2_loc (input_location, NE_EXPR, logical_type_node, x,
                          build_real_from_int_cst (type, integer_zero_node));
   tmp = build3_v (COND_EXPR, cond, stmt, build_empty_stmt (input_location));
 
@@ -5789,6 +7979,8 @@ gfc_conv_intrinsic_size (gfc_se * se, gfc_expr * expr)
   tree fncall0;
   tree fncall1;
   gfc_se argse;
+  gfc_expr *e;
+  gfc_symbol *sym = NULL;
 
   gfc_init_se (&argse, NULL);
   actual = expr->value.function.actual;
@@ -5796,9 +7988,68 @@ gfc_conv_intrinsic_size (gfc_se * se, gfc_expr * expr)
   if (actual->expr->ts.type == BT_CLASS)
     gfc_add_class_array_ref (actual->expr);
 
-  argse.want_pointer = 1;
+  e = actual->expr;
+
+  /* These emerge from the interface mapping when a class-valued function
+     appears as the rhs of a realloc-on-assignment statement and the size
+     of the result is that of one of the actual arguments.  */
+  if (e->expr_type == EXPR_VARIABLE
+      && e->symtree->n.sym->ns == NULL /* This is distinctive!  */
+      && e->symtree->n.sym->ts.type == BT_CLASS
+      && e->ref && e->ref->type == REF_COMPONENT
+      && strcmp (e->ref->u.c.component->name, "_data") == 0)
+    sym = e->symtree->n.sym;
+
+  if ((gfc_option.rtcheck & GFC_RTCHECK_POINTER)
+      && e
+      && (e->expr_type == EXPR_VARIABLE || e->expr_type == EXPR_FUNCTION))
+    {
+      symbol_attribute attr;
+      char *msg;
+
+      attr = gfc_expr_attr (e);
+      if (attr.allocatable)
+       msg = xasprintf ("Allocatable argument '%s' is not allocated",
+                        e->symtree->n.sym->name);
+      else if (attr.pointer)
+       msg = xasprintf ("Pointer argument '%s' is not associated",
+                        e->symtree->n.sym->name);
+      else
+       goto end_arg_check;
+
+      argse.descriptor_only = 1;
+      gfc_conv_expr_descriptor (&argse, actual->expr);
+      tree temp = gfc_conv_descriptor_data_get (argse.expr);
+      tree cond = fold_build2_loc (input_location, EQ_EXPR,
+                                  logical_type_node, temp,
+                                  fold_convert (TREE_TYPE (temp),
+                                                null_pointer_node));
+      gfc_trans_runtime_check (true, false, cond, &argse.pre, &e->where, msg);
+      free (msg);
+    }
+ end_arg_check:
+
   argse.data_not_needed = 1;
-  gfc_conv_expr_descriptor (&argse, actual->expr);
+  if (gfc_is_class_array_function (e))
+    {
+      /* For functions that return a class array, conv_expr_descriptor is not
+        able to get the descriptor right; hence this special case.  */
+      gfc_conv_expr_reference (&argse, e);
+      argse.expr = gfc_build_addr_expr (NULL_TREE,
+                                       gfc_class_data_get (argse.expr));
+    }
+  else if (sym && sym->backend_decl)
+    {
+      gcc_assert (GFC_CLASS_TYPE_P (TREE_TYPE (sym->backend_decl)));
+      argse.expr = sym->backend_decl;
+      argse.expr = gfc_build_addr_expr (NULL_TREE,
+                                       gfc_class_data_get (argse.expr));
+    }
+  else
+    {
+      argse.want_pointer = 1;
+      gfc_conv_expr_descriptor (&argse, actual->expr);
+    }
   gfc_add_block_to_block (&se->pre, &argse.pre);
   gfc_add_block_to_block (&se->post, &argse.post);
   arg1 = gfc_evaluate_now (argse.expr, &se->pre);
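The -fcheck=pointer guard added in this hunk boils down to a null test on the descriptor's data pointer before SIZE is evaluated. A minimal C sketch of that check, where struct descriptor and base_addr are simplified stand-ins for gfortran's real array descriptor:

/* Minimal sketch only; "descriptor" and "base_addr" are simplified
   stand-ins for gfortran's real array descriptor.  */
#include <stdio.h>
#include <stdlib.h>

struct descriptor { void *base_addr; /* bounds, dtype, ... */ };

static void
check_arg_present (const struct descriptor *d, int is_pointer, const char *name)
{
  if (d->base_addr == NULL)
    {
      if (is_pointer)
        fprintf (stderr, "Pointer argument '%s' is not associated\n", name);
      else
        fprintf (stderr, "Allocatable argument '%s' is not allocated\n", name);
      abort ();  /* mirrors the gfc_trans_runtime_check error path */
    }
}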
@@ -5833,7 +8084,7 @@ gfc_conv_intrinsic_size (gfc_se * se, gfc_expr * expr)
          argse.data_not_needed = 1;
          gfc_conv_expr (&argse, actual->expr);
          gfc_add_block_to_block (&se->pre, &argse.pre);
-         tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+         tmp = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
                                 argse.expr, null_pointer_node);
          tmp = gfc_evaluate_now (tmp, &se->pre);
          se->expr = fold_build3_loc (input_location, COND_EXPR,
@@ -5907,6 +8158,7 @@ gfc_conv_intrinsic_sizeof (gfc_se *se, gfc_expr *expr)
   tree lower;
   tree upper;
   tree byte_size;
+  tree field;
   int n;
 
   gfc_init_se (&argse, NULL);
@@ -5929,10 +8181,13 @@ gfc_conv_intrinsic_sizeof (gfc_se *se, gfc_expr *expr)
            ? GFC_DECL_SAVED_DESCRIPTOR (tmp) : tmp;
       if (POINTER_TYPE_P (TREE_TYPE (tmp)))
        tmp = build_fold_indirect_ref_loc (input_location, tmp);
-      tmp = fold_convert (size_type_node, gfc_conv_descriptor_dtype (tmp));
-      tmp = fold_build2_loc (input_location, RSHIFT_EXPR, TREE_TYPE (tmp), tmp,
-                            build_int_cst (TREE_TYPE (tmp),
-                                           GFC_DTYPE_SIZE_SHIFT));
+
+      tmp = gfc_conv_descriptor_dtype (tmp);
+      field = gfc_advance_chain (TYPE_FIELDS (get_dtype_type_node ()),
+                                GFC_DTYPE_ELEM_LEN);
+      tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
+                            tmp, field, NULL_TREE);
+
       byte_size = fold_convert (gfc_array_index_type, tmp);
     }
   else if (arg->ts.type == BT_CLASS)
@@ -5948,7 +8203,9 @@ gfc_conv_intrinsic_sizeof (gfc_se *se, gfc_expr *expr)
                                        TREE_OPERAND (argse.expr, 0), 0)))
                  || GFC_DECL_CLASS (TREE_OPERAND (argse.expr, 0)))))
        byte_size = gfc_class_vtab_size_get (TREE_OPERAND (argse.expr, 0));
-      else if (arg->rank > 0)
+      else if (arg->rank > 0
+              || (arg->rank == 0
+                  && arg->ref && arg->ref->type == REF_COMPONENT))
        /* The scalarizer added an additional temp.  To get the class' vptr
           one has to look at the original backend_decl.  */
        byte_size = gfc_class_vtab_size_get (
@@ -6000,7 +8257,7 @@ gfc_conv_intrinsic_sizeof (gfc_se *se, gfc_expr *expr)
                }
              exit:  */
          gfc_start_block (&body);
-         cond = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
+         cond = fold_build2_loc (input_location, GE_EXPR, logical_type_node,
                                  loop_var, tmp);
          tmp = build1_v (GOTO_EXPR, exit_label);
          tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
@@ -6183,13 +8440,14 @@ gfc_conv_intrinsic_transfer (gfc_se * se, gfc_expr * expr)
   tree upper;
   tree lower;
   tree stmt;
+  tree class_ref = NULL_TREE;
   gfc_actual_arglist *arg;
   gfc_se argse;
   gfc_array_info *info;
   stmtblock_t block;
   int n;
   bool scalar_mold;
-  gfc_expr *source_expr, *mold_expr;
+  gfc_expr *source_expr, *mold_expr, *class_expr;
 
   info = NULL;
   if (se->loop)
@@ -6220,7 +8478,24 @@ gfc_conv_intrinsic_transfer (gfc_se * se, gfc_expr * expr)
     {
       gfc_conv_expr_reference (&argse, arg->expr);
       if (arg->expr->ts.type == BT_CLASS)
-       source = gfc_class_data_get (argse.expr);
+       {
+         tmp = build_fold_indirect_ref_loc (input_location, argse.expr);
+         if (GFC_CLASS_TYPE_P (TREE_TYPE (tmp)))
+           source = gfc_class_data_get (tmp);
+         else
+           {
+             /* Array elements are evaluated as a reference to the data.
+                To obtain the vptr for the element size, the argument
+                expression must be stripped to the class reference and
+                re-evaluated. The pre and post blocks are not needed.  */
+             gcc_assert (arg->expr->expr_type == EXPR_VARIABLE);
+             source = argse.expr;
+             class_expr = gfc_find_and_cut_at_last_class_ref (arg->expr);
+             gfc_init_se (&argse, NULL);
+             gfc_conv_expr (&argse, class_expr);
+             class_ref = argse.expr;
+           }
+       }
       else
        source = argse.expr;
 
@@ -6232,7 +8507,10 @@ gfc_conv_intrinsic_transfer (gfc_se * se, gfc_expr * expr)
                                         argse.string_length);
          break;
        case BT_CLASS:
-         tmp = gfc_class_vtab_size_get (argse.expr);
+         if (class_ref != NULL_TREE)
+           tmp = gfc_class_vtab_size_get (class_ref);
+         else
+           tmp = gfc_class_vtab_size_get (argse.expr);
          break;
        default:
          source_type = TREE_TYPE (build_fold_indirect_ref_loc (input_location,
@@ -6250,7 +8528,7 @@ gfc_conv_intrinsic_transfer (gfc_se * se, gfc_expr * expr)
       source_type = gfc_get_element_type (TREE_TYPE (argse.expr));
 
       /* Repack the source if not simply contiguous.  */
-      if (!gfc_is_simply_contiguous (arg->expr, false))
+      if (!gfc_is_simply_contiguous (arg->expr, false, true))
        {
          tmp = gfc_build_addr_expr (NULL_TREE, argse.expr);
 
@@ -6271,7 +8549,7 @@ gfc_conv_intrinsic_transfer (gfc_se * se, gfc_expr * expr)
          /* Clean up if it was repacked.  */
          gfc_init_block (&block);
          tmp = gfc_conv_array_data (argse.expr);
-         tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+         tmp = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
                                 source, tmp);
          tmp = build3_v (COND_EXPR, tmp, stmt,
                          build_empty_stmt (input_location));
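The repack logic shown here follows the usual pattern: copy a non-contiguous SOURCE into a packed temporary, let TRANSFER read from it, and free the temporary only if the packing call actually allocated one. A hedged C sketch of that control flow; pack_source stands in for the library packing helper the generated code calls:

/* Sketch of the pack-use-free pattern; pack_source stands in for the
   library packing helper actually called by the generated code.  */
#include <stdlib.h>
#include <string.h>

static void *
pack_source (void *data, size_t nbytes, int contiguous)
{
  if (contiguous)
    return data;                /* no temporary needed */
  void *tmp = malloc (nbytes);
  memcpy (tmp, data, nbytes);   /* stand-in for the strided copy */
  return tmp;
}

static void
transfer_from (void *data, size_t nbytes, int contiguous)
{
  void *source = pack_source (data, nbytes, contiguous);
  /* ... TRANSFER reads NBYTES bytes from SOURCE here ... */
  if (source != data)           /* the NE_EXPR cleanup guard above */
    free (source);
}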
@@ -6496,14 +8774,14 @@ scalar_transfer:
       indirect = gfc_finish_block (&block);
 
       /* Wrap it up with the condition.  */
-      tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
+      tmp = fold_build2_loc (input_location, LE_EXPR, logical_type_node,
                             dest_word_len, source_bytes);
       tmp = build3_v (COND_EXPR, tmp, direct, indirect);
       gfc_add_expr_to_block (&se->pre, tmp);
 
       /* Free the temporary string, if necessary.  */
       free = gfc_call_free (tmpdecl);
-      tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
+      tmp = fold_build2_loc (input_location, GT_EXPR, logical_type_node,
                             dest_word_len, source_bytes);
       tmp = build3_v (COND_EXPR, tmp, free, build_empty_stmt (input_location));
       gfc_add_expr_to_block (&se->post, tmp);
@@ -6556,6 +8834,42 @@ scalar_transfer:
 }
 
 
+/* Generate a call to caf_is_present.  */
+
+static tree
+trans_caf_is_present (gfc_se *se, gfc_expr *expr)
+{
+  tree caf_reference, caf_decl, token, image_index;
+
+  /* Compile the reference chain.  */
+  caf_reference = conv_expr_ref_to_caf_ref (&se->pre, expr);
+  gcc_assert (caf_reference != NULL_TREE);
+
+  caf_decl = gfc_get_tree_for_caf_expr (expr);
+  if (TREE_CODE (TREE_TYPE (caf_decl)) == REFERENCE_TYPE)
+    caf_decl = build_fold_indirect_ref_loc (input_location, caf_decl);
+  image_index = gfc_caf_get_image_index (&se->pre, expr, caf_decl);
+  gfc_get_caf_token_offset (se, &token, NULL, caf_decl, NULL,
+                           expr);
+
+  return build_call_expr_loc (input_location, gfor_fndecl_caf_is_present,
+                             3, token, image_index, caf_reference);
+}
+
+
+/* Test whether this ref-chain refs this image only.  */
+
+static bool
+caf_this_image_ref (gfc_ref *ref)
+{
+  for ( ; ref; ref = ref->next)
+    if (ref->type == REF_ARRAY && ref->u.ar.codimen)
+      return ref->u.ar.dimen_type[ref->u.ar.dimen] == DIMEN_THIS_IMAGE;
+
+  return false;
+}
+
+
 /* Generate code for the ALLOCATED intrinsic.
    Generate inline code that directly checks the address of the argument.  */
 
@@ -6565,6 +8879,7 @@ gfc_conv_allocated (gfc_se *se, gfc_expr *expr)
   gfc_actual_arglist *arg1;
   gfc_se arg1se;
   tree tmp;
+  symbol_attribute caf_attr;
 
   gfc_init_se (&arg1se, NULL);
   arg1 = expr->value.function.actual;
@@ -6580,23 +8895,42 @@ gfc_conv_allocated (gfc_se *se, gfc_expr *expr)
        gfc_add_data_component (arg1->expr);
     }
 
-  if (arg1->expr->rank == 0)
-    {
-      /* Allocatable scalar.  */
-      arg1se.want_pointer = 1;
-      gfc_conv_expr (&arg1se, arg1->expr);
-      tmp = arg1se.expr;
-    }
+  /* When arg1 references an allocatable component in a coarray, call the
+     caf-library function caf_is_present ().  */
+  if (flag_coarray == GFC_FCOARRAY_LIB && arg1->expr->expr_type == EXPR_FUNCTION
+      && arg1->expr->value.function.isym
+      && arg1->expr->value.function.isym->id == GFC_ISYM_CAF_GET)
+    caf_attr = gfc_caf_attr (arg1->expr->value.function.actual->expr);
+  else
+    gfc_clear_attr (&caf_attr);
+  if (flag_coarray == GFC_FCOARRAY_LIB && caf_attr.codimension
+      && !caf_this_image_ref (arg1->expr->value.function.actual->expr->ref))
+    tmp = trans_caf_is_present (se, arg1->expr->value.function.actual->expr);
   else
     {
-      /* Allocatable array.  */
-      arg1se.descriptor_only = 1;
-      gfc_conv_expr_descriptor (&arg1se, arg1->expr);
-      tmp = gfc_conv_descriptor_data_get (arg1se.expr);
+      if (arg1->expr->rank == 0)
+       {
+         /* Allocatable scalar.  */
+         arg1se.want_pointer = 1;
+         gfc_conv_expr (&arg1se, arg1->expr);
+         tmp = arg1se.expr;
+       }
+      else
+       {
+         /* Allocatable array.  */
+         arg1se.descriptor_only = 1;
+         gfc_conv_expr_descriptor (&arg1se, arg1->expr);
+         tmp = gfc_conv_descriptor_data_get (arg1se.expr);
+       }
+
+      tmp = fold_build2_loc (input_location, NE_EXPR, logical_type_node, tmp,
+                            fold_convert (TREE_TYPE (tmp), null_pointer_node));
     }
 
-  tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
-                        fold_convert (TREE_TYPE (tmp), null_pointer_node));
+  /* Components of pointer array references sometimes come back with a pre block.  */
+  if (arg1se.pre.head)
+    gfc_add_block_to_block (&se->pre, &arg1se.pre);
+
   se->expr = convert (gfc_typenode_for_spec (&expr->ts), tmp);
 }
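Outside the coarray branch, ALLOCATED reduces to a pointer comparison: the scalar case tests the object's data pointer directly, the array case tests the descriptor's data slot. A minimal illustration, with struct descriptor again a simplified stand-in for the real descriptor type:

/* Illustration only; the generated test is a NE_EXPR against
   null_pointer_node on the corresponding data pointer.  */
#include <stdbool.h>
#include <stddef.h>

struct descriptor { void *base_addr; /* bounds, dtype, ... */ };

static bool
allocated_scalar (const void *data)
{
  return data != NULL;
}

static bool
allocated_array (const struct descriptor *d)
{
  return d->base_addr != NULL;
}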
 
@@ -6616,7 +8950,6 @@ gfc_conv_associated (gfc_se *se, gfc_expr *expr)
   gfc_se arg2se;
   tree tmp2;
   tree tmp;
-  tree nonzero_charlen;
   tree nonzero_arraylen;
   gfc_ss *ss;
   bool scalar;
@@ -6662,7 +8995,7 @@ gfc_conv_associated (gfc_se *se, gfc_expr *expr)
         }
       gfc_add_block_to_block (&se->pre, &arg1se.pre);
       gfc_add_block_to_block (&se->post, &arg1se.post);
-      tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp2,
+      tmp = fold_build2_loc (input_location, NE_EXPR, logical_type_node, tmp2,
                             fold_convert (TREE_TYPE (tmp2), null_pointer_node));
       se->expr = tmp;
     }
@@ -6672,12 +9005,6 @@ gfc_conv_associated (gfc_se *se, gfc_expr *expr)
       if (arg2->expr->ts.type == BT_CLASS)
        gfc_add_data_component (arg2->expr);
 
-      nonzero_charlen = NULL_TREE;
-      if (arg1->expr->ts.type == BT_CHARACTER)
-       nonzero_charlen = fold_build2_loc (input_location, NE_EXPR,
-                                          boolean_type_node,
-                                          arg1->expr->ts.u.cl->backend_decl,
-                                          integer_zero_node);
       if (scalar)
         {
          /* A pointer to a scalar.  */
@@ -6700,12 +9027,12 @@ gfc_conv_associated (gfc_se *se, gfc_expr *expr)
          gfc_add_block_to_block (&se->post, &arg1se.post);
          gfc_add_block_to_block (&se->pre, &arg2se.pre);
          gfc_add_block_to_block (&se->post, &arg2se.post);
-          tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+          tmp = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
                                 arg1se.expr, arg2se.expr);
-          tmp2 = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+          tmp2 = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
                                  arg1se.expr, null_pointer_node);
           se->expr = fold_build2_loc (input_location, TRUTH_AND_EXPR,
-                                     boolean_type_node, tmp, tmp2);
+                                     logical_type_node, tmp, tmp2);
         }
       else
         {
@@ -6723,32 +9050,39 @@ gfc_conv_associated (gfc_se *se, gfc_expr *expr)
            tmp = gfc_rank_cst[arg1->expr->rank - 1];
          tmp = gfc_conv_descriptor_stride_get (arg1se.expr, tmp);
          nonzero_arraylen = fold_build2_loc (input_location, NE_EXPR,
-                                             boolean_type_node, tmp,
+                                             logical_type_node, tmp,
                                              build_int_cst (TREE_TYPE (tmp), 0));
 
-          /* A pointer to an array, call library function _gfor_associated.  */
-          arg1se.want_pointer = 1;
-          gfc_conv_expr_descriptor (&arg1se, arg1->expr);
+         /* A pointer to an array, call library function _gfor_associated.  */
+         arg1se.want_pointer = 1;
+         gfc_conv_expr_descriptor (&arg1se, arg1->expr);
+         gfc_add_block_to_block (&se->pre, &arg1se.pre);
+         gfc_add_block_to_block (&se->post, &arg1se.post);
 
-          arg2se.want_pointer = 1;
-          gfc_conv_expr_descriptor (&arg2se, arg2->expr);
-          gfc_add_block_to_block (&se->pre, &arg2se.pre);
-          gfc_add_block_to_block (&se->post, &arg2se.post);
-          se->expr = build_call_expr_loc (input_location,
+         arg2se.want_pointer = 1;
+         gfc_conv_expr_descriptor (&arg2se, arg2->expr);
+         gfc_add_block_to_block (&se->pre, &arg2se.pre);
+         gfc_add_block_to_block (&se->post, &arg2se.post);
+         se->expr = build_call_expr_loc (input_location,
                                      gfor_fndecl_associated, 2,
                                      arg1se.expr, arg2se.expr);
-         se->expr = convert (boolean_type_node, se->expr);
+         se->expr = convert (logical_type_node, se->expr);
          se->expr = fold_build2_loc (input_location, TRUTH_AND_EXPR,
-                                     boolean_type_node, se->expr,
+                                     logical_type_node, se->expr,
                                      nonzero_arraylen);
         }
 
       /* If TARGET is present, character pointers of zero length cannot
         be associated.  */
-      if (nonzero_charlen != NULL_TREE)
-       se->expr = fold_build2_loc (input_location, TRUTH_AND_EXPR,
-                                   boolean_type_node,
-                                   se->expr, nonzero_charlen);
+      if (arg1->expr->ts.type == BT_CHARACTER)
+       {
+         tmp = arg1se.string_length;
+         tmp = fold_build2_loc (input_location, NE_EXPR,
+                                logical_type_node, tmp,
+                                build_zero_cst (TREE_TYPE (tmp)));
+         se->expr = fold_build2_loc (input_location, TRUTH_AND_EXPR,
+                                     logical_type_node, se->expr, tmp);
+       }
     }
 
   se->expr = convert (gfc_typenode_for_spec (&expr->ts), se->expr);
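For the two-argument scalar case, the test assembled above is simply "the pointer equals the target's address and is not null"; the array case additionally requires a nonzero extent and, for character pointers, a nonzero string length. A small sketch of the scalar form only:

/* Scalar POINTER/TARGET case only; the EQ_EXPR and NE_EXPR built
   above, combined with TRUTH_AND_EXPR.  */
#include <stdbool.h>
#include <stddef.h>

static bool
associated_scalar (const void *ptr, const void *target)
{
  return ptr == target && ptr != NULL;
}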
@@ -6775,14 +9109,14 @@ gfc_conv_same_type_as (gfc_se *se, gfc_expr *expr)
   if (UNLIMITED_POLY (a))
     {
       tmp = gfc_class_vptr_get (a->symtree->n.sym->backend_decl);
-      conda = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+      conda = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
                               tmp, build_int_cst (TREE_TYPE (tmp), 0));
     }
 
   if (UNLIMITED_POLY (b))
     {
       tmp = gfc_class_vptr_get (b->symtree->n.sym->backend_decl);
-      condb = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+      condb = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
                               tmp, build_int_cst (TREE_TYPE (tmp), 0));
     }
 
@@ -6808,16 +9142,16 @@ gfc_conv_same_type_as (gfc_se *se, gfc_expr *expr)
   gfc_conv_expr (&se2, b);
 
   tmp = fold_build2_loc (input_location, EQ_EXPR,
-                        boolean_type_node, se1.expr,
+                        logical_type_node, se1.expr,
                         fold_convert (TREE_TYPE (se1.expr), se2.expr));
 
   if (conda)
     tmp = fold_build2_loc (input_location, TRUTH_ANDIF_EXPR,
-                          boolean_type_node, conda, tmp);
+                          logical_type_node, conda, tmp);
 
   if (condb)
     tmp = fold_build2_loc (input_location, TRUTH_ANDIF_EXPR,
-                          boolean_type_node, condb, tmp);
+                          logical_type_node, condb, tmp);
 
   se->expr = convert (gfc_typenode_for_spec (&expr->ts), tmp);
 }
@@ -6936,14 +9270,14 @@ gfc_conv_intrinsic_trim (gfc_se * se, gfc_expr * expr)
   else
     gcc_unreachable ();
 
-  fndecl = build_addr (function, current_function_decl);
+  fndecl = build_addr (function);
   tmp = build_call_array_loc (input_location,
                          TREE_TYPE (TREE_TYPE (function)), fndecl,
                          num_args, args);
   gfc_add_expr_to_block (&se->pre, tmp);
 
   /* Free the temporary afterwards, if necessary.  */
-  cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
+  cond = fold_build2_loc (input_location, GT_EXPR, logical_type_node,
                          len, build_int_cst (TREE_TYPE (len), 0));
   tmp = gfc_call_free (var);
   tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
@@ -6967,17 +9301,17 @@ gfc_conv_intrinsic_repeat (gfc_se * se, gfc_expr * expr)
 
   /* We store in SIZE the size in bytes of a single character.  */
   i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
-  size = build_int_cst (size_type_node, gfc_character_kinds[i].bit_size / 8);
+  size = build_int_cst (sizetype, gfc_character_kinds[i].bit_size / 8);
 
   /* Get the arguments.  */
   gfc_conv_intrinsic_function_args (se, expr, args, 3);
-  slen = fold_convert (size_type_node, gfc_evaluate_now (args[0], &se->pre));
+  slen = fold_convert (sizetype, gfc_evaluate_now (args[0], &se->pre));
   src = args[1];
   ncopies = gfc_evaluate_now (args[2], &se->pre);
   ncopies_type = TREE_TYPE (ncopies);
 
   /* Check that NCOPIES is not negative.  */
-  cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node, ncopies,
+  cond = fold_build2_loc (input_location, LT_EXPR, logical_type_node, ncopies,
                          build_int_cst (ncopies_type, 0));
   gfc_trans_runtime_check (true, false, cond, &se->pre, &expr->where,
                           "Argument NCOPIES of REPEAT intrinsic is negative "
@@ -6987,8 +9321,8 @@ gfc_conv_intrinsic_repeat (gfc_se * se, gfc_expr * expr)
   /* If the source length is zero, any non-negative value of NCOPIES
      is valid, and nothing happens.  */
   n = gfc_create_var (ncopies_type, "ncopies");
-  cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, slen,
-                         build_int_cst (size_type_node, 0));
+  cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, slen,
+                         size_zero_node);
   tmp = fold_build3_loc (input_location, COND_EXPR, ncopies_type, cond,
                         build_int_cst (ncopies_type, 0), ncopies);
   gfc_add_modify (&se->pre, n, tmp);
@@ -6998,19 +9332,19 @@ gfc_conv_intrinsic_repeat (gfc_se * se, gfc_expr * expr)
      (or equal to) MAX / slen, where MAX is the maximal integer of
      the gfc_charlen_type_node type.  If slen == 0, we need a special
      case to avoid the division by zero.  */
-  i = gfc_validate_kind (BT_INTEGER, gfc_charlen_int_kind, false);
-  max = gfc_conv_mpz_to_tree (gfc_integer_kinds[i].huge, gfc_charlen_int_kind);
-  max = fold_build2_loc (input_location, TRUNC_DIV_EXPR, size_type_node,
-                         fold_convert (size_type_node, max), slen);
-  largest = TYPE_PRECISION (size_type_node) > TYPE_PRECISION (ncopies_type)
-             ? size_type_node : ncopies_type;
-  cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
+  max = fold_build2_loc (input_location, TRUNC_DIV_EXPR, sizetype,
+                        fold_convert (sizetype,
+                                      TYPE_MAX_VALUE (gfc_charlen_type_node)),
+                        slen);
+  largest = TYPE_PRECISION (sizetype) > TYPE_PRECISION (ncopies_type)
+             ? sizetype : ncopies_type;
+  cond = fold_build2_loc (input_location, GT_EXPR, logical_type_node,
                          fold_convert (largest, ncopies),
                          fold_convert (largest, max));
-  tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, slen,
-                        build_int_cst (size_type_node, 0));
-  cond = fold_build3_loc (input_location, COND_EXPR, boolean_type_node, tmp,
-                         boolean_false_node, cond);
+  tmp = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, slen,
+                        size_zero_node);
+  cond = fold_build3_loc (input_location, COND_EXPR, logical_type_node, tmp,
+                         logical_false_node, cond);
   gfc_trans_runtime_check (true, false, cond, &se->pre, &expr->where,
                           "Argument NCOPIES of REPEAT intrinsic is too large");
 
@@ -7025,16 +9359,16 @@ gfc_conv_intrinsic_repeat (gfc_se * se, gfc_expr * expr)
        for (i = 0; i < ncopies; i++)
          memmove (dest + (i * slen * size), src, slen*size);  */
   gfc_start_block (&block);
-  count = gfc_create_var (ncopies_type, "count");
-  gfc_add_modify (&block, count, build_int_cst (ncopies_type, 0));
+  count = gfc_create_var (sizetype, "count");
+  gfc_add_modify (&block, count, size_zero_node);
   exit_label = gfc_build_label_decl (NULL_TREE);
 
   /* Start the loop body.  */
   gfc_start_block (&body);
 
   /* Exit the loop if count >= ncopies.  */
-  cond = fold_build2_loc (input_location, GE_EXPR, boolean_type_node, count,
-                         ncopies);
+  cond = fold_build2_loc (input_location, GE_EXPR, logical_type_node, count,
+                         fold_convert (sizetype, ncopies));
   tmp = build1_v (GOTO_EXPR, exit_label);
   TREE_USED (exit_label) = 1;
   tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond, tmp,
@@ -7042,25 +9376,22 @@ gfc_conv_intrinsic_repeat (gfc_se * se, gfc_expr * expr)
   gfc_add_expr_to_block (&body, tmp);
 
   /* Call memmove (dest + (i*slen*size), src, slen*size).  */
-  tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_charlen_type_node,
-                        fold_convert (gfc_charlen_type_node, slen),
-                        fold_convert (gfc_charlen_type_node, count));
-  tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_charlen_type_node,
-                        tmp, fold_convert (gfc_charlen_type_node, size));
+  tmp = fold_build2_loc (input_location, MULT_EXPR, sizetype, slen,
+                        count);
+  tmp = fold_build2_loc (input_location, MULT_EXPR, sizetype, tmp,
+                        size);
   tmp = fold_build_pointer_plus_loc (input_location,
                                     fold_convert (pvoid_type_node, dest), tmp);
   tmp = build_call_expr_loc (input_location,
                             builtin_decl_explicit (BUILT_IN_MEMMOVE),
                             3, tmp, src,
                             fold_build2_loc (input_location, MULT_EXPR,
-                                             size_type_node, slen,
-                                             fold_convert (size_type_node,
-                                                           size)));
+                                             size_type_node, slen, size));
   gfc_add_expr_to_block (&body, tmp);
 
   /* Increment count.  */
-  tmp = fold_build2_loc (input_location, PLUS_EXPR, ncopies_type,
-                        count, build_int_cst (TREE_TYPE (count), 1));
+  tmp = fold_build2_loc (input_location, PLUS_EXPR, sizetype,
+                        count, size_one_node);
   gfc_add_modify (&body, count, tmp);
 
   /* Build the loop.  */
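Stepping back from the copy loop, the NCOPIES validation earlier in this function enforces 0 <= NCOPIES <= MAX / SLEN, short-circuiting the division when SLEN is zero. A plain C sketch of the same guard, with SIZE_MAX standing in for the huge() value of the character-length type:

/* Sketch of the NCOPIES checks; SIZE_MAX stands in for the huge()
   value of the character-length type used by the generated code.  */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

static void
check_ncopies (size_t slen, int64_t ncopies)
{
  if (ncopies < 0)
    {
      fprintf (stderr, "Argument NCOPIES of REPEAT intrinsic is negative\n");
      abort ();
    }
  /* SLEN == 0 makes any non-negative NCOPIES valid and must be handled
     first, to avoid the division by zero.  */
  if (slen != 0 && (uint64_t) ncopies > SIZE_MAX / slen)
    {
      fprintf (stderr, "Argument NCOPIES of REPEAT intrinsic is too large\n");
      abort ();
    }
}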
@@ -7103,6 +9434,85 @@ gfc_conv_intrinsic_iargc (gfc_se * se, gfc_expr * expr)
 }
 
 
+/* Generate code for the KILL intrinsic.  */
+
+static void
+conv_intrinsic_kill (gfc_se *se, gfc_expr *expr)
+{
+  tree *args;
+  tree int4_type_node = gfc_get_int_type (4);
+  tree pid;
+  tree sig;
+  tree tmp;
+  unsigned int num_args;
+
+  num_args = gfc_intrinsic_argument_list_length (expr);
+  args = XALLOCAVEC (tree, num_args);
+  gfc_conv_intrinsic_function_args (se, expr, args, num_args);
+
+  /* Convert PID to an INTEGER(4) entity.  */
+  pid = convert (int4_type_node, args[0]);
+
+  /* Convert SIG to an INTEGER(4) entity.  */
+  sig = convert (int4_type_node, args[1]);
+
+  tmp = build_call_expr_loc (input_location, gfor_fndecl_kill, 2, pid, sig);
+
+  se->expr = fold_convert (TREE_TYPE (args[0]), tmp);
+}
+
+
+/* Generate code for the KILL subroutine, with an optional STATUS argument.  */
+
+static tree
+conv_intrinsic_kill_sub (gfc_code *code)
+{
+  stmtblock_t block;
+  gfc_se se, se_stat;
+  tree int4_type_node = gfc_get_int_type (4);
+  tree pid;
+  tree sig;
+  tree statp;
+  tree tmp;
+
+  /* Make the function call.  */
+  gfc_init_block (&block);
+  gfc_init_se (&se, NULL);
+
+  /* Convert PID to an INTEGER(4) entity.  */
+  gfc_conv_expr (&se, code->ext.actual->expr);
+  gfc_add_block_to_block (&block, &se.pre);
+  pid = fold_convert (int4_type_node, gfc_evaluate_now (se.expr, &block));
+  gfc_add_block_to_block (&block, &se.post);
+
+  /* Convert SIG to an INTEGER(4) entity.  */
+  gfc_conv_expr (&se, code->ext.actual->next->expr);
+  gfc_add_block_to_block (&block, &se.pre);
+  sig = fold_convert (int4_type_node, gfc_evaluate_now (se.expr, &block));
+  gfc_add_block_to_block (&block, &se.post);
+
+  /* Deal with an optional STATUS.  */
+  if (code->ext.actual->next->next->expr)
+    {
+      gfc_init_se (&se_stat, NULL);
+      gfc_conv_expr (&se_stat, code->ext.actual->next->next->expr);
+      statp = gfc_create_var (gfc_get_int_type (4), "_statp");
+    }
+  else
+    statp = NULL_TREE;
+
+  tmp = build_call_expr_loc (input_location, gfor_fndecl_kill_sub, 3, pid, sig,
+       statp ? gfc_build_addr_expr (NULL_TREE, statp) : null_pointer_node);
+
+  gfc_add_expr_to_block (&block, tmp);
+
+  if (statp && statp != se_stat.expr)
+    gfc_add_modify (&block, se_stat.expr,
+                   fold_convert (TREE_TYPE (se_stat.expr), statp));
+
+  return gfc_finish_block (&block);
+}
+
+
+
 /* The loc intrinsic returns the address of its argument as
    gfc_index_integer_kind integer.  */
 
@@ -7118,7 +9528,7 @@ gfc_conv_intrinsic_loc (gfc_se * se, gfc_expr * expr)
   if (arg_expr->rank == 0)
     {
       if (arg_expr->ts.type == BT_CLASS)
-       gfc_add_component_ref (arg_expr, "_data");
+       gfc_add_data_component (arg_expr);
       gfc_conv_expr_reference (se, arg_expr);
     }
   else
@@ -7148,7 +9558,7 @@ conv_isocbinding_function (gfc_se *se, gfc_expr *expr)
     {
       if (arg->expr->rank == 0)
        gfc_conv_expr_reference (se, arg->expr);
-      else if (gfc_is_simply_contiguous (arg->expr, false))
+      else if (gfc_is_simply_contiguous (arg->expr, false, false))
        gfc_conv_array_parameter (se, arg->expr, true, NULL, NULL, NULL);
       else
        {
@@ -7182,7 +9592,7 @@ conv_isocbinding_function (gfc_se *se, gfc_expr *expr)
       if (arg->next->expr == NULL)
        /* Only given one arg so generate a null and do a
           not-equal comparison against the first arg.  */
-       se->expr = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+       se->expr = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
                                    arg1se.expr,
                                    fold_convert (TREE_TYPE (arg1se.expr),
                                                  null_pointer_node));
@@ -7198,17 +9608,17 @@ conv_isocbinding_function (gfc_se *se, gfc_expr *expr)
          gfc_add_block_to_block (&se->post, &arg2se.post);
 
          /* Generate test to compare that the two args are equal.  */
-         eq_expr = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+         eq_expr = fold_build2_loc (input_location, EQ_EXPR, logical_type_node,
                                     arg1se.expr, arg2se.expr);
          /* Generate test to ensure that the first arg is not null.  */
          not_null_expr = fold_build2_loc (input_location, NE_EXPR,
-                                          boolean_type_node,
+                                          logical_type_node,
                                           arg1se.expr, null_pointer_node);
 
          /* Finally, the generated test must check both that arg1 is not
             NULL and that it is equal to the second arg.  */
          se->expr = fold_build2_loc (input_location, TRUTH_AND_EXPR,
-                                     boolean_type_node,
+                                     logical_type_node,
                                      not_null_expr, eq_expr);
        }
     }
@@ -7270,6 +9680,11 @@ conv_isocbinding_subroutine (gfc_code *code)
   gfc_add_block_to_block (&block, &fptrse.pre);
   desc = fptrse.expr;
 
+  /* Set the span field.  */
+  tmp = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
+  tmp = fold_convert (gfc_array_index_type, tmp);
+  gfc_conv_descriptor_span_set (&block, desc, tmp);
+
   /* Set data value, dtype, and offset.  */
   tmp = GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc));
   gfc_conv_descriptor_data_set (&block, desc, fold_convert (tmp, cptrse.expr));
@@ -7433,11 +9848,11 @@ conv_intrinsic_ieee_is_normal (gfc_se * se, gfc_expr * expr)
   isnormal = build_call_expr_loc (input_location,
                                  builtin_decl_explicit (BUILT_IN_ISNORMAL),
                                  1, arg);
-  iszero = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, arg,
+  iszero = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, arg,
                            build_real_from_int_cst (TREE_TYPE (arg),
                                                     integer_zero_node));
   se->expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
-                             boolean_type_node, isnormal, iszero);
+                             logical_type_node, isnormal, iszero);
   se->expr = fold_convert (gfc_typenode_for_spec (&expr->ts), se->expr);
 }
 
@@ -7462,11 +9877,11 @@ conv_intrinsic_ieee_is_negative (gfc_se * se, gfc_expr * expr)
   signbit = build_call_expr_loc (input_location,
                                 builtin_decl_explicit (BUILT_IN_SIGNBIT),
                                 1, arg);
-  signbit = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+  signbit = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
                             signbit, integer_zero_node);
 
   se->expr = fold_build2_loc (input_location, TRUTH_AND_EXPR,
-                             boolean_type_node, signbit,
+                             logical_type_node, signbit,
                              fold_build1_loc (input_location, TRUTH_NOT_EXPR,
                                               TREE_TYPE(isnan), isnan));
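The two expansions above are compact enough to restate directly: IEEE_IS_NORMAL(X) becomes isnormal(x) || x == 0, and IEEE_IS_NEGATIVE(X) becomes signbit(x) != 0 && !isnan(x). An illustrative C restatement (not the generated GENERIC):

/* The same predicates via the C99 classification macros behind the
   GCC builtins used above.  */
#include <math.h>
#include <stdbool.h>

static bool
ieee_is_normal (double x)
{
  return isnormal (x) || x == 0.0;
}

static bool
ieee_is_negative (double x)
{
  return signbit (x) != 0 && !isnan (x);
}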
 
@@ -7612,7 +10027,7 @@ conv_intrinsic_ieee_copy_sign (gfc_se * se, gfc_expr * expr)
   sign = build_call_expr_loc (input_location,
                              builtin_decl_explicit (BUILT_IN_SIGNBIT),
                              1, args[1]);
-  sign = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+  sign = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
                          sign, integer_zero_node);
 
   /* Create a value of one, with the right sign.  */
@@ -7639,37 +10054,33 @@ gfc_conv_ieee_arithmetic_function (gfc_se * se, gfc_expr * expr)
 {
   const char *name = expr->value.function.name;
 
-#define STARTS_WITH(A,B) (strncmp((A), (B), strlen(B)) == 0)
-
-  if (STARTS_WITH (name, "_gfortran_ieee_is_nan"))
+  if (gfc_str_startswith (name, "_gfortran_ieee_is_nan"))
     conv_intrinsic_ieee_builtin (se, expr, BUILT_IN_ISNAN, 1);
-  else if (STARTS_WITH (name, "_gfortran_ieee_is_finite"))
+  else if (gfc_str_startswith (name, "_gfortran_ieee_is_finite"))
     conv_intrinsic_ieee_builtin (se, expr, BUILT_IN_ISFINITE, 1);
-  else if (STARTS_WITH (name, "_gfortran_ieee_unordered"))
+  else if (gfc_str_startswith (name, "_gfortran_ieee_unordered"))
     conv_intrinsic_ieee_builtin (se, expr, BUILT_IN_ISUNORDERED, 2);
-  else if (STARTS_WITH (name, "_gfortran_ieee_is_normal"))
+  else if (gfc_str_startswith (name, "_gfortran_ieee_is_normal"))
     conv_intrinsic_ieee_is_normal (se, expr);
-  else if (STARTS_WITH (name, "_gfortran_ieee_is_negative"))
+  else if (gfc_str_startswith (name, "_gfortran_ieee_is_negative"))
     conv_intrinsic_ieee_is_negative (se, expr);
-  else if (STARTS_WITH (name, "_gfortran_ieee_copy_sign"))
+  else if (gfc_str_startswith (name, "_gfortran_ieee_copy_sign"))
     conv_intrinsic_ieee_copy_sign (se, expr);
-  else if (STARTS_WITH (name, "_gfortran_ieee_scalb"))
+  else if (gfc_str_startswith (name, "_gfortran_ieee_scalb"))
     conv_intrinsic_ieee_scalb (se, expr);
-  else if (STARTS_WITH (name, "_gfortran_ieee_next_after"))
+  else if (gfc_str_startswith (name, "_gfortran_ieee_next_after"))
     conv_intrinsic_ieee_next_after (se, expr);
-  else if (STARTS_WITH (name, "_gfortran_ieee_rem"))
+  else if (gfc_str_startswith (name, "_gfortran_ieee_rem"))
     conv_intrinsic_ieee_rem (se, expr);
-  else if (STARTS_WITH (name, "_gfortran_ieee_logb"))
+  else if (gfc_str_startswith (name, "_gfortran_ieee_logb"))
     conv_intrinsic_ieee_logb_rint (se, expr, BUILT_IN_LOGB);
-  else if (STARTS_WITH (name, "_gfortran_ieee_rint"))
+  else if (gfc_str_startswith (name, "_gfortran_ieee_rint"))
     conv_intrinsic_ieee_logb_rint (se, expr, BUILT_IN_RINT);
   else
     /* It is not among the functions we translate directly.  We return
        false, so a library function call is emitted.  */
     return false;
 
-#undef STARTS_WITH
-
   return true;
 }
 
@@ -7721,6 +10132,22 @@ gfc_conv_intrinsic_function (gfc_se * se, gfc_expr * expr)
              conv_generic_with_optional_char_arg (se, expr, 1, 3);
              break;
 
+           case GFC_ISYM_FINDLOC:
+             gfc_conv_intrinsic_findloc (se, expr);
+             break;
+
+           case GFC_ISYM_MINLOC:
+             gfc_conv_intrinsic_minmaxloc (se, expr, LT_EXPR);
+             break;
+
+           case GFC_ISYM_MAXLOC:
+             gfc_conv_intrinsic_minmaxloc (se, expr, GT_EXPR);
+             break;
+
+           case GFC_ISYM_SHAPE:
+             gfc_conv_intrinsic_shape (se, expr);
+             break;
+
            default:
              gfc_conv_intrinsic_funcall (se, expr);
              break;
@@ -7845,6 +10272,24 @@ gfc_conv_intrinsic_function (gfc_se * se, gfc_expr * expr)
       gfc_conv_intrinsic_anyall (se, expr, NE_EXPR);
       break;
 
+    case GFC_ISYM_ACOSD:
+    case GFC_ISYM_ASIND:
+    case GFC_ISYM_ATAND:
+      gfc_conv_intrinsic_atrigd (se, expr, expr->value.function.isym->id);
+      break;
+
+    case GFC_ISYM_COTAN:
+      gfc_conv_intrinsic_cotan (se, expr);
+      break;
+
+    case GFC_ISYM_COTAND:
+      gfc_conv_intrinsic_cotand (se, expr);
+      break;
+
+    case GFC_ISYM_ATAN2D:
+      gfc_conv_intrinsic_atan2d (se, expr);
+      break;
+
     case GFC_ISYM_BTEST:
       gfc_conv_intrinsic_btest (se, expr);
       break;
@@ -7877,9 +10322,13 @@ gfc_conv_intrinsic_function (gfc_se * se, gfc_expr * expr)
       break;
 
     case GFC_ISYM_CONVERSION:
-    case GFC_ISYM_REAL:
-    case GFC_ISYM_LOGICAL:
     case GFC_ISYM_DBLE:
+    case GFC_ISYM_DFLOAT:
+    case GFC_ISYM_FLOAT:
+    case GFC_ISYM_LOGICAL:
+    case GFC_ISYM_REAL:
+    case GFC_ISYM_REALPART:
+    case GFC_ISYM_SNGL:
       gfc_conv_intrinsic_conversion (se, expr);
       break;
 
@@ -7913,7 +10362,8 @@ gfc_conv_intrinsic_function (gfc_se * se, gfc_expr * expr)
       break;
 
     case GFC_ISYM_CAF_GET:
-      gfc_conv_intrinsic_caf_get (se, expr, NULL_TREE, NULL_TREE, NULL_TREE);
+      gfc_conv_intrinsic_caf_get (se, expr, NULL_TREE, NULL_TREE, NULL_TREE,
+                                 false, NULL);
       break;
 
     case GFC_ISYM_CMPLX:
@@ -8034,10 +10484,18 @@ gfc_conv_intrinsic_function (gfc_se * se, gfc_expr * expr)
       gfc_conv_has_intvalue (se, expr, LIBERROR_EOR);
       break;
 
+    case GFC_ISYM_IS_CONTIGUOUS:
+      gfc_conv_intrinsic_is_contiguous (se, expr);
+      break;
+
     case GFC_ISYM_ISNAN:
       gfc_conv_intrinsic_isnan (se, expr);
       break;
 
+    case GFC_ISYM_KILL:
+      conv_intrinsic_kill (se, expr);
+      break;
+
     case GFC_ISYM_LSHIFT:
       gfc_conv_intrinsic_shift (se, expr, false, false);
       break;
@@ -8143,6 +10601,10 @@ gfc_conv_intrinsic_function (gfc_se * se, gfc_expr * expr)
       gfc_conv_intrinsic_minmaxloc (se, expr, GT_EXPR);
       break;
 
+    case GFC_ISYM_FINDLOC:
+      gfc_conv_intrinsic_findloc (se, expr);
+      break;
+
     case GFC_ISYM_MAXVAL:
       gfc_conv_intrinsic_minmaxval (se, expr, GT_EXPR);
       break;
@@ -8243,6 +10705,10 @@ gfc_conv_intrinsic_function (gfc_se * se, gfc_expr * expr)
       gfc_conv_intrinsic_arith (se, expr, PLUS_EXPR, false);
       break;
 
+    case GFC_ISYM_TEAM_NUMBER:
+      conv_intrinsic_team_number (se, expr);
+      break;
+
     case GFC_ISYM_TRANSFER:
       if (se->ss && se->ss->info->useflags)
        /* Access the previously obtained result.  */
@@ -8284,6 +10750,10 @@ gfc_conv_intrinsic_function (gfc_se * se, gfc_expr * expr)
       trans_image_index (se, expr);
       break;
 
+    case GFC_ISYM_IMAGE_STATUS:
+      conv_intrinsic_image_status (se, expr);
+      break;
+
     case GFC_ISYM_NUM_IMAGES:
       trans_num_images (se, expr);
       break;
@@ -8306,7 +10776,6 @@ gfc_conv_intrinsic_function (gfc_se * se, gfc_expr * expr)
     case GFC_ISYM_GETPID:
     case GFC_ISYM_GETUID:
     case GFC_ISYM_HOSTNM:
-    case GFC_ISYM_KILL:
     case GFC_ISYM_IERRNO:
     case GFC_ISYM_IRAND:
     case GFC_ISYM_ISATTY:
@@ -8568,7 +11037,8 @@ gfc_walk_intrinsic_libfunc (gfc_ss * ss, gfc_expr * expr)
 bool
 gfc_inline_intrinsic_function_p (gfc_expr *expr)
 {
-  gfc_actual_arglist *args;
+  gfc_actual_arglist *args, *dim_arg, *mask_arg;
+  gfc_expr *maskexpr;
 
   if (!expr->value.function.isym)
     return false;
@@ -8582,8 +11052,23 @@ gfc_inline_intrinsic_function_p (gfc_expr *expr)
        return false;
 
       args = expr->value.function.actual;
+      dim_arg = args->next;
+
       /* We need to be able to subset the SUM argument at compile-time.  */
-      if (args->next->expr && args->next->expr->expr_type != EXPR_CONSTANT)
+      if (dim_arg->expr && dim_arg->expr->expr_type != EXPR_CONSTANT)
+       return false;
+
+      /* FIXME: If MASK is optional for a more than two-dimensional
+        argument, the scalarizer gets confused if the mask is
+        absent.  See PR 82995.  For now, fall back to the library
+        function.  */
+
+      mask_arg = dim_arg->next;
+      maskexpr = mask_arg->expr;
+
+      if (expr->rank > 0 && maskexpr && maskexpr->expr_type == EXPR_VARIABLE
+         && maskexpr->symtree->n.sym->attr.dummy
+         && maskexpr->symtree->n.sym->attr.optional)
        return false;
 
       return true;
@@ -8615,6 +11100,7 @@ gfc_is_intrinsic_libcall (gfc_expr * expr)
     case GFC_ISYM_ALL:
     case GFC_ISYM_ANY:
     case GFC_ISYM_COUNT:
+    case GFC_ISYM_FINDLOC:
     case GFC_ISYM_JN2:
     case GFC_ISYM_IANY:
     case GFC_ISYM_IALL:
@@ -8634,10 +11120,13 @@ gfc_is_intrinsic_libcall (gfc_expr * expr)
       /* Ignore absent optional parameters.  */
       return 1;
 
-    case GFC_ISYM_RESHAPE:
     case GFC_ISYM_CSHIFT:
     case GFC_ISYM_EOSHIFT:
+    case GFC_ISYM_GET_TEAM:
+    case GFC_ISYM_FAILED_IMAGES:
+    case GFC_ISYM_STOPPED_IMAGES:
     case GFC_ISYM_PACK:
+    case GFC_ISYM_RESHAPE:
     case GFC_ISYM_UNPACK:
       /* Pass absent optional parameters.  */
       return 2;
@@ -8689,13 +11178,12 @@ gfc_walk_intrinsic_function (gfc_ss * ss, gfc_expr * expr,
     }
 }
 
-
 static tree
 conv_co_collective (gfc_code *code)
 {
   gfc_se argse;
   stmtblock_t block, post_block;
-  tree fndecl, array, strlen, image_index, stat, errmsg, errmsg_len;
+  tree fndecl, array = NULL_TREE, strlen, image_index, stat, errmsg, errmsg_len;
   gfc_expr *image_idx_expr, *stat_expr, *errmsg_expr, *opr_expr;
 
   gfc_start_block (&block);
@@ -8760,6 +11248,7 @@ conv_co_collective (gfc_code *code)
       gfc_conv_expr_descriptor (&argse, code->ext.actual->expr);
       array = argse.expr;
     }
+
   gfc_add_block_to_block (&block, &argse.pre);
   gfc_add_block_to_block (&post_block, &argse.post);
 
@@ -8788,12 +11277,12 @@ conv_co_collective (gfc_code *code)
       gfc_add_block_to_block (&block, &argse.pre);
       gfc_add_block_to_block (&post_block, &argse.post);
       errmsg = argse.expr;
-      errmsg_len = fold_convert (integer_type_node, argse.string_length);
+      errmsg_len = fold_convert (size_type_node, argse.string_length);
     }
   else
     {
       errmsg = null_pointer_node;
-      errmsg_len = integer_zero_node;
+      errmsg_len = build_zero_cst (size_type_node);
     }
 
   /* Generate the function call.  */
@@ -8818,46 +11307,64 @@ conv_co_collective (gfc_code *code)
       gcc_unreachable ();
     }
 
-  if (code->resolved_isym->id == GFC_ISYM_CO_SUM
-      || code->resolved_isym->id == GFC_ISYM_CO_BROADCAST)
-    fndecl = build_call_expr_loc (input_location, fndecl, 5, array,
-                                 image_index, stat, errmsg, errmsg_len);
-  else if (code->resolved_isym->id != GFC_ISYM_CO_REDUCE)
-    fndecl = build_call_expr_loc (input_location, fndecl, 6, array, image_index,
-                                 stat, errmsg, strlen, errmsg_len);
+  gfc_symbol *derived = code->ext.actual->expr->ts.type == BT_DERIVED
+    ? code->ext.actual->expr->ts.u.derived : NULL;
+
+  if (derived && derived->attr.alloc_comp
+      && code->resolved_isym->id == GFC_ISYM_CO_BROADCAST)
+    /* The derived type has the attribute 'alloc_comp'.  */
+    {
+      tree tmp = gfc_bcast_alloc_comp (derived, code->ext.actual->expr,
+                                      code->ext.actual->expr->rank,
+                                      image_index, stat, errmsg, errmsg_len);
+      gfc_add_expr_to_block (&block, tmp);
+    }
   else
     {
-      tree opr, opr_flags;
-
-      // FIXME: Handle TS29113's bind(C) strings with descriptor.
-      int opr_flag_int;
-      if (gfc_is_proc_ptr_comp (opr_expr))
-       {
-         gfc_symbol *sym = gfc_get_proc_ptr_comp (opr_expr)->ts.interface;
-         opr_flag_int = sym->attr.dimension
-                        || (sym->ts.type == BT_CHARACTER
-                            && !sym->attr.is_bind_c)
-                        ? GFC_CAF_BYREF : 0;
-         opr_flag_int |= opr_expr->ts.type == BT_CHARACTER
-                         && !sym->attr.is_bind_c
-                         ? GFC_CAF_HIDDENLEN : 0;
-         opr_flag_int |= sym->formal->sym->attr.value ? GFC_CAF_ARG_VALUE : 0;
-       }
+      if (code->resolved_isym->id == GFC_ISYM_CO_SUM
+         || code->resolved_isym->id == GFC_ISYM_CO_BROADCAST)
+       fndecl = build_call_expr_loc (input_location, fndecl, 5, array,
+                                     image_index, stat, errmsg, errmsg_len);
+      else if (code->resolved_isym->id != GFC_ISYM_CO_REDUCE)
+       fndecl = build_call_expr_loc (input_location, fndecl, 6, array,
+                                     image_index, stat, errmsg,
+                                     strlen, errmsg_len);
       else
        {
-         opr_flag_int = gfc_return_by_reference (opr_expr->symtree->n.sym)
-                        ? GFC_CAF_BYREF : 0;
-         opr_flag_int |= opr_expr->ts.type == BT_CHARACTER
-                         && !opr_expr->symtree->n.sym->attr.is_bind_c
-                         ? GFC_CAF_HIDDENLEN : 0;
-         opr_flag_int |= opr_expr->symtree->n.sym->formal->sym->attr.value
-                         ? GFC_CAF_ARG_VALUE : 0;
+         tree opr, opr_flags;
+
+         // FIXME: Handle TS29113's bind(C) strings with descriptor.
+         int opr_flag_int;
+         if (gfc_is_proc_ptr_comp (opr_expr))
+           {
+             gfc_symbol *sym = gfc_get_proc_ptr_comp (opr_expr)->ts.interface;
+             opr_flag_int = sym->attr.dimension
+               || (sym->ts.type == BT_CHARACTER
+                   && !sym->attr.is_bind_c)
+               ? GFC_CAF_BYREF : 0;
+             opr_flag_int |= opr_expr->ts.type == BT_CHARACTER
+               && !sym->attr.is_bind_c
+               ? GFC_CAF_HIDDENLEN : 0;
+             opr_flag_int |= sym->formal->sym->attr.value
+               ? GFC_CAF_ARG_VALUE : 0;
+           }
+         else
+           {
+             opr_flag_int = gfc_return_by_reference (opr_expr->symtree->n.sym)
+               ? GFC_CAF_BYREF : 0;
+             opr_flag_int |= opr_expr->ts.type == BT_CHARACTER
+               && !opr_expr->symtree->n.sym->attr.is_bind_c
+               ? GFC_CAF_HIDDENLEN : 0;
+             opr_flag_int |= opr_expr->symtree->n.sym->formal->sym->attr.value
+               ? GFC_CAF_ARG_VALUE : 0;
+           }
+         opr_flags = build_int_cst (integer_type_node, opr_flag_int);
+         gfc_conv_expr (&argse, opr_expr);
+         opr = argse.expr;
+         fndecl = build_call_expr_loc (input_location, fndecl, 8, array, opr,
+                                       opr_flags, image_index, stat, errmsg,
+                                       strlen, errmsg_len);
        }
-      opr_flags = build_int_cst (integer_type_node, opr_flag_int);
-      gfc_conv_expr (&argse, opr_expr);
-      opr = argse.expr;
-      fndecl = build_call_expr_loc (input_location, fndecl, 8, array, opr, opr_flags,
-                                   image_index, stat, errmsg, strlen, errmsg_len);
     }
 
   gfc_add_expr_to_block (&block, fndecl);
@@ -8984,8 +11491,11 @@ conv_intrinsic_atomic_op (gfc_code *code)
           value = gfc_build_addr_expr (NULL_TREE, tmp);
        }
 
-      gfc_get_caf_token_offset (&token, &offset, caf_decl, atom, atom_expr);
+      gfc_init_se (&argse, NULL);
+      gfc_get_caf_token_offset (&argse, &token, &offset, caf_decl, atom,
+                               atom_expr);
 
+      gfc_add_block_to_block (&block, &argse.pre);
       if (code->resolved_isym->id == GFC_ISYM_ATOMIC_DEF)
        tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_atomic_def, 7,
                                   token, offset, image_index, value, stat,
@@ -9003,6 +11513,7 @@ conv_intrinsic_atomic_op (gfc_code *code)
                                                  (int) atom_expr->ts.kind));
 
       gfc_add_expr_to_block (&block, tmp);
+      gfc_add_block_to_block (&block, &argse.post);
       gfc_add_block_to_block (&block, &post_block);
       return gfc_finish_block (&block);
     }
@@ -9037,7 +11548,6 @@ conv_intrinsic_atomic_op (gfc_code *code)
   fn = (built_in_function) ((int) fn
                            + exact_log2 (tree_to_uhwi (TYPE_SIZE_UNIT (tmp)))
                            + 1);
-  tmp = builtin_decl_explicit (fn);
   tree itype = TREE_TYPE (TREE_TYPE (atom));
   tmp = builtin_decl_explicit (fn);
 
@@ -9130,7 +11640,10 @@ conv_intrinsic_atomic_ref (gfc_code *code)
       else
        image_index = integer_zero_node;
 
-      gfc_get_caf_token_offset (&token, &offset, caf_decl, atom, atom_expr);
+      gfc_init_se (&argse, NULL);
+      gfc_get_caf_token_offset (&argse, &token, &offset, caf_decl, atom,
+                               atom_expr);
+      gfc_add_block_to_block (&block, &argse.pre);
 
       /* Different type, need type conversion.  */
       if (!POINTER_TYPE_P (TREE_TYPE (value)))
@@ -9150,6 +11663,7 @@ conv_intrinsic_atomic_ref (gfc_code *code)
       if (vardecl != NULL_TREE)
        gfc_add_modify (&block, orig_value,
                        fold_convert (TREE_TYPE (orig_value), vardecl));
+      gfc_add_block_to_block (&block, &argse.post);
       gfc_add_block_to_block (&block, &post_block);
       return gfc_finish_block (&block);
     }
@@ -9263,7 +11777,10 @@ conv_intrinsic_atomic_cas (gfc_code *code)
           comp = gfc_build_addr_expr (NULL_TREE, tmp);
        }
 
-      gfc_get_caf_token_offset (&token, &offset, caf_decl, atom, atom_expr);
+      gfc_init_se (&argse, NULL);
+      gfc_get_caf_token_offset (&argse, &token, &offset, caf_decl, atom,
+                               atom_expr);
+      gfc_add_block_to_block (&block, &argse.pre);
 
       tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_atomic_cas, 9,
                                 token, offset, image_index, old, comp, new_val,
@@ -9272,6 +11789,7 @@ conv_intrinsic_atomic_cas (gfc_code *code)
                                 build_int_cst (integer_type_node,
                                                (int) atom_expr->ts.kind));
       gfc_add_expr_to_block (&block, tmp);
+      gfc_add_block_to_block (&block, &argse.post);
       gfc_add_block_to_block (&block, &post_block);
       return gfc_finish_block (&block);
     }
@@ -9297,6 +11815,317 @@ conv_intrinsic_atomic_cas (gfc_code *code)
   return gfc_finish_block (&block);
 }
 
+/* Generate code for the EVENT QUERY statement.  */
+
+static tree
+conv_intrinsic_event_query (gfc_code *code)
+{
+  gfc_se se, argse;
+  tree stat = NULL_TREE, stat2 = NULL_TREE;
+  tree count = NULL_TREE, count2 = NULL_TREE;
+
+  gfc_expr *event_expr = code->ext.actual->expr;
+
+  if (code->ext.actual->next->next->expr)
+    {
+      gcc_assert (code->ext.actual->next->next->expr->expr_type
+                 == EXPR_VARIABLE);
+      gfc_init_se (&argse, NULL);
+      gfc_conv_expr_val (&argse, code->ext.actual->next->next->expr);
+      stat = argse.expr;
+    }
+  else if (flag_coarray == GFC_FCOARRAY_LIB)
+    stat = null_pointer_node;
+
+  if (code->ext.actual->next->expr)
+    {
+      gcc_assert (code->ext.actual->next->expr->expr_type == EXPR_VARIABLE);
+      gfc_init_se (&argse, NULL);
+      gfc_conv_expr_val (&argse, code->ext.actual->next->expr);
+      count = argse.expr;
+    }
+
+  gfc_start_block (&se.pre);
+  if (flag_coarray == GFC_FCOARRAY_LIB)
+    {
+      tree tmp, token, image_index;
+      tree index = build_zero_cst (gfc_array_index_type);
+
+      if (event_expr->expr_type == EXPR_FUNCTION
+         && event_expr->value.function.isym
+         && event_expr->value.function.isym->id == GFC_ISYM_CAF_GET)
+       event_expr = event_expr->value.function.actual->expr;
+
+      tree caf_decl = gfc_get_tree_for_caf_expr (event_expr);
+
+      if (event_expr->symtree->n.sym->ts.type != BT_DERIVED
+         || event_expr->symtree->n.sym->ts.u.derived->from_intmod
+            != INTMOD_ISO_FORTRAN_ENV
+         || event_expr->symtree->n.sym->ts.u.derived->intmod_sym_id
+            != ISOFORTRAN_EVENT_TYPE)
+       {
+         gfc_error ("Sorry, the event component of derived type at %L is not "
+                    "yet supported", &event_expr->where);
+         return NULL_TREE;
+       }
+
+      if (gfc_is_coindexed (event_expr))
+       {
+         gfc_error ("The event variable at %L shall not be coindexed",
+                    &event_expr->where);
+          return NULL_TREE;
+       }
+
+      image_index = integer_zero_node;
+
+      gfc_get_caf_token_offset (&se, &token, NULL, caf_decl, NULL_TREE,
+                               event_expr);
+
+      /* For arrays, obtain the array index.  */
+      if (gfc_expr_attr (event_expr).dimension)
+       {
+         tree desc, tmp, extent, lbound, ubound;
+          gfc_array_ref *ar, ar2;
+          int i;
+
+         /* TODO: Extend this, once DT components are supported.  */
+         ar = &event_expr->ref->u.ar;
+         ar2 = *ar;
+         memset (ar, '\0', sizeof (*ar));
+         ar->as = ar2.as;
+         ar->type = AR_FULL;
+
+         gfc_init_se (&argse, NULL);
+         argse.descriptor_only = 1;
+         gfc_conv_expr_descriptor (&argse, event_expr);
+         gfc_add_block_to_block (&se.pre, &argse.pre);
+         desc = argse.expr;
+         *ar = ar2;
+
+         extent = build_one_cst (gfc_array_index_type);
+         for (i = 0; i < ar->dimen; i++)
+           {
+             gfc_init_se (&argse, NULL);
+             gfc_conv_expr_type (&argse, ar->start[i], gfc_array_index_type);
+             gfc_add_block_to_block (&se.pre, &argse.pre);
+             lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[i]);
+             tmp = fold_build2_loc (input_location, MINUS_EXPR,
+                                    TREE_TYPE (lbound), argse.expr, lbound);
+             tmp = fold_build2_loc (input_location, MULT_EXPR,
+                                    TREE_TYPE (tmp), extent, tmp);
+             index = fold_build2_loc (input_location, PLUS_EXPR,
+                                      TREE_TYPE (tmp), index, tmp);
+             if (i < ar->dimen - 1)
+               {
+                 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[i]);
+                 tmp = gfc_conv_array_extent_dim (lbound, ubound, NULL);
+                 extent = fold_build2_loc (input_location, MULT_EXPR,
+                                           TREE_TYPE (tmp), extent, tmp);
+               }
+           }
+       }
+
+      if (count != null_pointer_node && TREE_TYPE (count) != integer_type_node)
+       {
+         count2 = count;
+         count = gfc_create_var (integer_type_node, "count");
+       }
+
+      if (stat != null_pointer_node && TREE_TYPE (stat) != integer_type_node)
+       {
+         stat2 = stat;
+         stat = gfc_create_var (integer_type_node, "stat");
+       }
+
+      index = fold_convert (size_type_node, index);
+      tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_event_query, 5,
+                                   token, index, image_index, count
+                                  ? gfc_build_addr_expr (NULL, count) : count,
+                                  stat != null_pointer_node
+                                  ? gfc_build_addr_expr (NULL, stat) : stat);
+      gfc_add_expr_to_block (&se.pre, tmp);
+
+      if (count2 != NULL_TREE)
+       gfc_add_modify (&se.pre, count2,
+                       fold_convert (TREE_TYPE (count2), count));
+
+      if (stat2 != NULL_TREE)
+       gfc_add_modify (&se.pre, stat2,
+                       fold_convert (TREE_TYPE (stat2), stat));
+
+      return gfc_finish_block (&se.pre);
+    }
+
+  gfc_init_se (&argse, NULL);
+  gfc_conv_expr_val (&argse, code->ext.actual->expr);
+  gfc_add_modify (&se.pre, count, fold_convert (TREE_TYPE (count), argse.expr));
+
+  if (stat != NULL_TREE)
+    gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
+
+  return gfc_finish_block (&se.pre);
+}
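For a dimensioned event variable, the loop above linearizes the array reference into a single zero-based element index in Fortran column-major order: each (subscript - lbound) is scaled by the product of the extents of the lower dimensions. A minimal stand-alone sketch of the same arithmetic (plain C, hypothetical helper name, not part of this patch):

#include <stddef.h>

/* Column-major linearization, mirroring the loop above:
   index = sum_i (sub[i] - lb[i]) * prod_{j<i} (ub[j] - lb[j] + 1).  */
size_t
linear_index (int rank, const long sub[], const long lb[], const long ub[])
{
  size_t index = 0, extent = 1;
  for (int i = 0; i < rank; i++)
    {
      index += (size_t) (sub[i] - lb[i]) * extent;
      extent *= (size_t) (ub[i] - lb[i] + 1);  /* fold in extent of dim i */
    }
  return index;
}

For an event variable declared as ev(2:4,5), the reference ev(3,2) gives (3-2) + (2-1)*3 = 4, which is the index converted to size_type_node and passed to the event query call.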
+
+
+/* MVBITS is a peculiar case because of the need to do dependency checking.
+   It is called via trans-stmt.c (gfc_trans_call), where it is picked out as
+   a special case and this function is called instead of
+   gfc_conv_procedure_call.  */
+void
+gfc_conv_intrinsic_mvbits (gfc_se *se, gfc_actual_arglist *actual_args,
+                          gfc_loopinfo *loop)
+{
+  gfc_actual_arglist *actual;
+  gfc_se argse[5];
+  gfc_expr *arg[5];
+  gfc_ss *lss;
+  int n;
+
+  tree from, frompos, len, to, topos;
+  tree lenmask, oldbits, newbits, bitsize;
+  tree type, utype, above, mask1, mask2;
+
+  if (loop)
+    lss = loop->ss;
+  else
+    lss = gfc_ss_terminator;
+
+  actual = actual_args;
+  for (n = 0; n < 5; n++, actual = actual->next)
+    {
+      arg[n] = actual->expr;
+      gfc_init_se (&argse[n], NULL);
+
+      if (lss != gfc_ss_terminator)
+       {
+         gfc_copy_loopinfo_to_se (&argse[n], loop);
+         /* Find the ss for the expression if it is there.  */
+         argse[n].ss = lss;
+         gfc_mark_ss_chain_used (lss, 1);
+       }
+
+      gfc_conv_expr (&argse[n], arg[n]);
+
+      if (loop)
+       lss = argse[n].ss;
+    }
+
+  from    = argse[0].expr;
+  frompos = argse[1].expr;
+  len     = argse[2].expr;
+  to      = argse[3].expr;
+  topos   = argse[4].expr;
+
+  /* The type of the result (TO).  */
+  type    = TREE_TYPE (to);
+  bitsize = build_int_cst (integer_type_node, TYPE_PRECISION (type));
+
+  /* Optionally generate code for runtime argument check.  */
+  if (gfc_option.rtcheck & GFC_RTCHECK_BITS)
+    {
+      tree nbits, below, ccond;
+      tree fp = fold_convert (long_integer_type_node, frompos);
+      tree ln = fold_convert (long_integer_type_node, len);
+      tree tp = fold_convert (long_integer_type_node, topos);
+      below = fold_build2_loc (input_location, LT_EXPR,
+                              logical_type_node, frompos,
+                              build_int_cst (TREE_TYPE (frompos), 0));
+      above = fold_build2_loc (input_location, GT_EXPR,
+                              logical_type_node, frompos,
+                              fold_convert (TREE_TYPE (frompos), bitsize));
+      ccond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                              logical_type_node, below, above);
+      gfc_trans_runtime_check (true, false, ccond, &argse[1].pre,
+                              &arg[1]->where,
+                              "FROMPOS argument (%ld) out of range 0:%d "
+                              "in intrinsic MVBITS", fp, bitsize);
+      below = fold_build2_loc (input_location, LT_EXPR,
+                              logical_type_node, len,
+                              build_int_cst (TREE_TYPE (len), 0));
+      above = fold_build2_loc (input_location, GT_EXPR,
+                              logical_type_node, len,
+                              fold_convert (TREE_TYPE (len), bitsize));
+      ccond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                              logical_type_node, below, above);
+      gfc_trans_runtime_check (true, false, ccond, &argse[2].pre,
+                              &arg[2]->where,
+                              "LEN argument (%ld) out of range 0:%d "
+                              "in intrinsic MVBITS", ln, bitsize);
+      below = fold_build2_loc (input_location, LT_EXPR,
+                              logical_type_node, topos,
+                              build_int_cst (TREE_TYPE (topos), 0));
+      above = fold_build2_loc (input_location, GT_EXPR,
+                              logical_type_node, topos,
+                              fold_convert (TREE_TYPE (topos), bitsize));
+      ccond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                              logical_type_node, below, above);
+      gfc_trans_runtime_check (true, false, ccond, &argse[4].pre,
+                              &arg[4]->where,
+                              "TOPOS argument (%ld) out of range 0:%d "
+                              "in intrinsic MVBITS", tp, bitsize);
+
+      /* The checks above ensure that FROMPOS, LEN and TOPOS are small
+         non-negative values, so the additions below cannot overflow.  */
+      nbits = fold_convert (long_integer_type_node, bitsize);
+      above = fold_build2_loc (input_location, PLUS_EXPR,
+                              long_integer_type_node, fp, ln);
+      ccond = fold_build2_loc (input_location, GT_EXPR,
+                              logical_type_node, above, nbits);
+      gfc_trans_runtime_check (true, false, ccond, &argse[1].pre,
+                              &arg[1]->where,
+                              "FROMPOS(%ld)+LEN(%ld)>BIT_SIZE(%d) "
+                              "in intrinsic MVBITS", fp, ln, bitsize);
+      above = fold_build2_loc (input_location, PLUS_EXPR,
+                              long_integer_type_node, tp, ln);
+      ccond = fold_build2_loc (input_location, GT_EXPR,
+                              logical_type_node, above, nbits);
+      gfc_trans_runtime_check (true, false, ccond, &argse[4].pre,
+                              &arg[4]->where,
+                              "TOPOS(%ld)+LEN(%ld)>BIT_SIZE(%d) "
+                              "in intrinsic MVBITS", tp, ln, bitsize);
+    }
+
+  for (n = 0; n < 5; n++)
+    {
+      gfc_add_block_to_block (&se->pre, &argse[n].pre);
+      gfc_add_block_to_block (&se->post, &argse[n].post);
+    }
+
+  /* lenmask = (LEN >= bit_size (TYPE)) ? ~(TYPE)0 : ((TYPE)1 << LEN) - 1.  */
+  above = fold_build2_loc (input_location, GE_EXPR, logical_type_node,
+                          len, fold_convert (TREE_TYPE (len), bitsize));
+  mask1 = build_int_cst (type, -1);
+  mask2 = fold_build2_loc (input_location, LSHIFT_EXPR, type,
+                          build_int_cst (type, 1), len);
+  mask2 = fold_build2_loc (input_location, MINUS_EXPR, type,
+                          mask2, build_int_cst (type, 1));
+  lenmask = fold_build3_loc (input_location, COND_EXPR, type,
+                            above, mask1, mask2);
+
+  /* newbits = (((UTYPE)(FROM) >> FROMPOS) & lenmask) << TOPOS.
+     For valid frompos+len <= bit_size(FROM) the conversion to unsigned is
+     not strictly necessary; artificial bits from rshift will be masked.  */
+  utype = unsigned_type_for (type);
+  newbits = fold_build2_loc (input_location, RSHIFT_EXPR, utype,
+                            fold_convert (utype, from), frompos);
+  newbits = fold_build2_loc (input_location, BIT_AND_EXPR, type,
+                            fold_convert (type, newbits), lenmask);
+  newbits = fold_build2_loc (input_location, LSHIFT_EXPR, type,
+                            newbits, topos);
+
+  /* oldbits = TO & (~(lenmask << TOPOS)).  */
+  oldbits = fold_build2_loc (input_location, LSHIFT_EXPR, type,
+                            lenmask, topos);
+  oldbits = fold_build1_loc (input_location, BIT_NOT_EXPR, type, oldbits);
+  oldbits = fold_build2_loc (input_location, BIT_AND_EXPR, type, oldbits, to);
+
+  /* TO = newbits | oldbits.  */
+  se->expr = fold_build2_loc (input_location, BIT_IOR_EXPR, type,
+                             oldbits, newbits);
+
+  /* Return the assignment.  */
+  se->expr = fold_build2_loc (input_location, MODIFY_EXPR,
+                             void_type_node, to, se->expr);
+}
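Taken together, the expansion above computes TO = (TO & ~(lenmask << TOPOS)) | (((FROM >> FROMPOS) & lenmask) << TOPOS). A minimal C sketch of the same bit arithmetic for a fixed 32-bit operand width (hypothetical helper, not part of this patch; LEN equal to the bit size is covered by the all-ones mask, just as the COND_EXPR above covers it):

#include <stdint.h>

/* Reference semantics of MVBITS for 32-bit operands: copy LEN bits of FROM,
   starting at bit FROMPOS, into TO starting at bit TOPOS.  */
uint32_t
mvbits32 (uint32_t from, int frompos, int len, uint32_t to, int topos)
{
  /* lenmask = LEN >= 32 ? ~0 : (1 << LEN) - 1.  */
  uint32_t lenmask = len >= 32 ? ~UINT32_C (0) : (UINT32_C (1) << len) - 1;
  uint32_t newbits = ((from >> frompos) & lenmask) << topos;  /* bits moved in */
  uint32_t oldbits = to & ~(lenmask << topos);                /* bits kept */
  return oldbits | newbits;
}

For example, mvbits32 (0xff, 0, 4, 0, 8) returns 0x0f00: the low four bits of FROM placed at bit 8 of TO, with all other bits of TO preserved.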
+
 
 static tree
 conv_intrinsic_move_alloc (gfc_code *code)
@@ -9346,8 +12175,8 @@ conv_intrinsic_move_alloc (gfc_code *code)
       gfc_add_block_to_block (&block, &to_se.pre);
 
       /* Deallocate "to".  */
-      tmp = gfc_deallocate_scalar_with_status (to_se.expr, NULL_TREE, true,
-                                              to_expr, to_expr->ts);
+      tmp = gfc_deallocate_scalar_with_status (to_se.expr, NULL_TREE, NULL_TREE,
+                                              true, to_expr, to_expr->ts);
       gfc_add_expr_to_block (&block, tmp);
 
       /* Assign (_data) pointers.  */
@@ -9414,6 +12243,16 @@ conv_intrinsic_move_alloc (gfc_code *code)
            }
        }
 
+      if (to_expr->ts.type == BT_CHARACTER && to_expr->ts.deferred)
+       {
+         gfc_add_modify_loc (input_location, &block, to_se.string_length,
+                             fold_convert (TREE_TYPE (to_se.string_length),
+                                           from_se.string_length));
+         if (from_expr->ts.deferred)
+           gfc_add_modify_loc (input_location, &block, from_se.string_length,
+                       build_int_cst (TREE_TYPE (from_se.string_length), 0));
+       }
+
       return gfc_finish_block (&block);
     }
 
@@ -9495,12 +12334,12 @@ conv_intrinsic_move_alloc (gfc_code *code)
 
       tmp = gfc_deallocate_with_status (to_se.expr, NULL_TREE, NULL_TREE,
                                        NULL_TREE, NULL_TREE, true, to_expr,
-                                       true);
+                                       GFC_CAF_COARRAY_DEALLOCATE_ONLY);
       gfc_add_expr_to_block (&block, tmp);
 
       tmp = gfc_conv_descriptor_data_get (to_se.expr);
       cond = fold_build2_loc (input_location, EQ_EXPR,
-                             boolean_type_node, tmp,
+                             logical_type_node, tmp,
                              fold_convert (TREE_TYPE (tmp),
                                            null_pointer_node));
       tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
@@ -9513,9 +12352,18 @@ conv_intrinsic_move_alloc (gfc_code *code)
     }
   else
     {
+      if (to_expr->ts.type == BT_DERIVED
+         && to_expr->ts.u.derived->attr.alloc_comp)
+       {
+         tmp = gfc_deallocate_alloc_comp (to_expr->ts.u.derived,
+                                          to_se.expr, to_expr->rank);
+         gfc_add_expr_to_block (&block, tmp);
+       }
+
       tmp = gfc_conv_descriptor_data_get (to_se.expr);
       tmp = gfc_deallocate_with_status (tmp, NULL_TREE, NULL_TREE, NULL_TREE,
-                                       NULL_TREE, true, to_expr, false);
+                                       NULL_TREE, true, to_expr,
+                                       GFC_CAF_COARRAY_NOCOARRAY);
       gfc_add_expr_to_block (&block, tmp);
     }
 
@@ -9527,6 +12375,17 @@ conv_intrinsic_move_alloc (gfc_code *code)
   gfc_add_modify_loc (input_location, &block, tmp,
                      fold_convert (TREE_TYPE (tmp), null_pointer_node));
 
+  if (to_expr->ts.type == BT_CHARACTER && to_expr->ts.deferred)
+    {
+      gfc_add_modify_loc (input_location, &block, to_se.string_length,
+                         fold_convert (TREE_TYPE (to_se.string_length),
+                                       from_se.string_length));
+      if (from_expr->ts.deferred)
+        gfc_add_modify_loc (input_location, &block, from_se.string_length,
+                       build_int_cst (TREE_TYPE (from_se.string_length), 0));
+    }
+
   return gfc_finish_block (&block);
 }
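As for the two deferred-length character hunks added above: once the allocation has moved, TO takes over FROM's string length, and a deferred-length FROM is left with length zero to match its nullified data pointer. A rough C sketch of the net effect on a scalar (hypothetical struct, not the real gfortran descriptor layout; deallocation of TO's previous payload is omitted):

#include <stddef.h>

/* Hypothetical deferred-length character scalar: payload plus length.  */
struct def_char { char *data; size_t len; };

/* Net effect of MOVE_ALLOC for deferred-length characters: TO takes over
   the payload and the length, FROM ends up deallocated with length zero.  */
void
move_alloc_char (struct def_char *from, struct def_char *to)
{
  to->data = from->data;   /* allocation is transferred, not copied */
  to->len = from->len;     /* added hunks: copy the string length to TO */
  from->data = NULL;       /* existing code: nullify FROM's data pointer */
  from->len = 0;           /* added hunks: reset FROM's deferred length */
}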
 
@@ -9564,6 +12423,10 @@ gfc_conv_intrinsic_subroutine (gfc_code *code)
       res = conv_intrinsic_atomic_ref (code);
       break;
 
+    case GFC_ISYM_EVENT_QUERY:
+      res = conv_intrinsic_event_query (code);
+      break;
+
     case GFC_ISYM_C_F_POINTER:
     case GFC_ISYM_C_F_PROCPOINTER:
       res = conv_isocbinding_subroutine (code);
@@ -9585,6 +12448,18 @@ gfc_conv_intrinsic_subroutine (gfc_code *code)
       res = conv_intrinsic_free (code);
       break;
 
+    case GFC_ISYM_RANDOM_INIT:
+      res = conv_intrinsic_random_init (code);
+      break;
+
+    case GFC_ISYM_KILL:
+      res = conv_intrinsic_kill_sub (code);
+      break;
+
+    case GFC_ISYM_MVBITS:
+      /* Handled in gfc_trans_call via gfc_conv_intrinsic_mvbits; returning
+         NULL_TREE defers to that special case.  */
+      res = NULL_TREE;
+      break;
+
     case GFC_ISYM_SYSTEM_CLOCK:
       res = conv_intrinsic_system_clock (code);
       break;