/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005-2020 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
28 #include "gimple-expr.h"
30 #include "stringpool.h"
31 #include "fold-const.h"
32 #include "gimplify.h" /* For create_tmp_var_raw. */
33 #include "trans-stmt.h"
34 #include "trans-types.h"
35 #include "trans-array.h"
36 #include "trans-const.h"
38 #include "gomp-constants.h"
39 #include "omp-general.h"
42 #define GCC_DIAG_STYLE __gcc_tdiag__
43 #include "diagnostic-core.h"
45 #define GCC_DIAG_STYLE __gcc_gfc__
50 /* True if OpenMP should regard this DECL as being a scalar which has Fortran's
51 allocatable or pointer attribute. */
54 gfc_omp_is_allocatable_or_ptr (const_tree decl
)
57 && (GFC_DECL_GET_SCALAR_POINTER (decl
)
58 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)));
61 /* True if the argument is an optional argument; except that false is also
62 returned for arguments with the value attribute (nonpointers) and for
63 assumed-shape variables (decl is a local variable containing arg->data).
64 Note that pvoid_type_node is for 'type(c_ptr), value. */
67 gfc_omp_is_optional_argument (const_tree decl
)
69 return (TREE_CODE (decl
) == PARM_DECL
70 && DECL_LANG_SPECIFIC (decl
)
71 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
72 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
)))
73 && GFC_DECL_OPTIONAL_ARGUMENT (decl
));
76 /* Check whether this DECL belongs to a Fortran optional argument.
77 With 'for_present_check' set to false, decls which are optional parameters
78 themselve are returned as tree - or a NULL_TREE otherwise. Those decls are
79 always pointers. With 'for_present_check' set to true, the decl for checking
80 whether an argument is present is returned; for arguments with value
81 attribute this is the hidden argument and of BOOLEAN_TYPE. If the decl is
82 unrelated to optional arguments, NULL_TREE is returned. */
85 gfc_omp_check_optional_argument (tree decl
, bool for_present_check
)
87 if (!for_present_check
)
88 return gfc_omp_is_optional_argument (decl
) ? decl
: NULL_TREE
;
90 if (!DECL_LANG_SPECIFIC (decl
))
93 /* For assumed-shape arrays, a local decl with arg->data is used. */
94 if (TREE_CODE (decl
) != PARM_DECL
95 && (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
))
96 || GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))))
97 decl
= GFC_DECL_SAVED_DESCRIPTOR (decl
);
99 if (TREE_CODE (decl
) != PARM_DECL
100 || !DECL_LANG_SPECIFIC (decl
)
101 || !GFC_DECL_OPTIONAL_ARGUMENT (decl
))
104 /* Scalars with VALUE attribute which are passed by value use a hidden
105 argument to denote the present status. They are passed as nonpointer type
106 with one exception: 'type(c_ptr), value' as 'void*'. */
107 /* Cf. trans-expr.c's gfc_conv_expr_present. */
108 if (TREE_CODE (TREE_TYPE (decl
)) != POINTER_TYPE
109 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
111 char name
[GFC_MAX_SYMBOL_LEN
+ 2];
115 strcpy (&name
[1], IDENTIFIER_POINTER (DECL_NAME (decl
)));
116 tree_name
= get_identifier (name
);
118 /* Walk function argument list to find the hidden arg. */
119 decl
= DECL_ARGUMENTS (DECL_CONTEXT (decl
));
120 for ( ; decl
!= NULL_TREE
; decl
= TREE_CHAIN (decl
))
121 if (DECL_NAME (decl
) == tree_name
122 && DECL_ARTIFICIAL (decl
))
133 /* Returns tree with NULL if it is not an array descriptor and with the tree to
134 access the 'data' component otherwise. With type_only = true, it returns the
135 TREE_TYPE without creating a new tree. */
138 gfc_omp_array_data (tree decl
, bool type_only
)
140 tree type
= TREE_TYPE (decl
);
142 if (POINTER_TYPE_P (type
))
143 type
= TREE_TYPE (type
);
145 if (!GFC_DESCRIPTOR_TYPE_P (type
))
149 return GFC_TYPE_ARRAY_DATAPTR_TYPE (type
);
151 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
152 decl
= build_fold_indirect_ref (decl
);
154 decl
= gfc_conv_descriptor_data_get (decl
);
159 /* True if OpenMP should privatize what this DECL points to rather
160 than the DECL itself. */
163 gfc_omp_privatize_by_reference (const_tree decl
)
165 tree type
= TREE_TYPE (decl
);
167 if (TREE_CODE (type
) == REFERENCE_TYPE
168 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
171 if (TREE_CODE (type
) == POINTER_TYPE
172 && gfc_omp_is_optional_argument (decl
))
175 if (TREE_CODE (type
) == POINTER_TYPE
)
177 while (TREE_CODE (decl
) == COMPONENT_REF
)
178 decl
= TREE_OPERAND (decl
, 1);
180 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
181 that have POINTER_TYPE type and aren't scalar pointers, scalar
182 allocatables, Cray pointees or C pointers are supposed to be
183 privatized by reference. */
184 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
185 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
186 || GFC_DECL_CRAY_POINTEE (decl
)
187 || GFC_DECL_ASSOCIATE_VAR_P (decl
)
188 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
191 if (!DECL_ARTIFICIAL (decl
)
192 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
195 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
197 if (DECL_LANG_SPECIFIC (decl
)
198 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
205 /* True if OpenMP sharing attribute of DECL is predetermined. */
207 enum omp_clause_default_kind
208 gfc_omp_predetermined_sharing (tree decl
)
210 /* Associate names preserve the association established during ASSOCIATE.
211 As they are implemented either as pointers to the selector or array
212 descriptor and shouldn't really change in the ASSOCIATE region,
213 this decl can be either shared or firstprivate. If it is a pointer,
214 use firstprivate, as it is cheaper that way, otherwise make it shared. */
215 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
217 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
218 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
220 return OMP_CLAUSE_DEFAULT_SHARED
;
223 if (DECL_ARTIFICIAL (decl
)
224 && ! GFC_DECL_RESULT (decl
)
225 && ! (DECL_LANG_SPECIFIC (decl
)
226 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
227 return OMP_CLAUSE_DEFAULT_SHARED
;
229 /* Cray pointees shouldn't be listed in any clauses and should be
230 gimplified to dereference of the corresponding Cray pointer.
231 Make them all private, so that they are emitted in the debug
233 if (GFC_DECL_CRAY_POINTEE (decl
))
234 return OMP_CLAUSE_DEFAULT_PRIVATE
;
236 /* Assumed-size arrays are predetermined shared. */
237 if (TREE_CODE (decl
) == PARM_DECL
238 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
239 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
240 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
241 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
243 return OMP_CLAUSE_DEFAULT_SHARED
;
245 /* Dummy procedures aren't considered variables by OpenMP, thus are
246 disallowed in OpenMP clauses. They are represented as PARM_DECLs
247 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
248 to avoid complaining about their uses with default(none). */
249 if (TREE_CODE (decl
) == PARM_DECL
250 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
251 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
252 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
254 /* COMMON and EQUIVALENCE decls are shared. They
255 are only referenced through DECL_VALUE_EXPR of the variables
256 contained in them. If those are privatized, they will not be
257 gimplified to the COMMON or EQUIVALENCE decls. */
258 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
259 return OMP_CLAUSE_DEFAULT_SHARED
;
261 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
262 return OMP_CLAUSE_DEFAULT_SHARED
;
264 /* These are either array or derived parameters, or vtables.
265 In the former cases, the OpenMP standard doesn't consider them to be
266 variables at all (they can't be redefined), but they can nevertheless appear
267 in parallel/task regions and for default(none) purposes treat them as shared.
268 For vtables likely the same handling is desirable. */
269 if (VAR_P (decl
) && TREE_READONLY (decl
)
270 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
271 return OMP_CLAUSE_DEFAULT_SHARED
;
273 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
276 /* Return decl that should be used when reporting DEFAULT(NONE)
280 gfc_omp_report_decl (tree decl
)
282 if (DECL_ARTIFICIAL (decl
)
283 && DECL_LANG_SPECIFIC (decl
)
284 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
285 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
290 /* Return true if TYPE has any allocatable components. */
293 gfc_has_alloc_comps (tree type
, tree decl
)
297 if (POINTER_TYPE_P (type
))
299 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
300 type
= TREE_TYPE (type
);
301 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
305 if (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
306 type
= gfc_get_element_type (type
);
308 if (TREE_CODE (type
) != RECORD_TYPE
)
311 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
313 ftype
= TREE_TYPE (field
);
314 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
316 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
317 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
319 if (gfc_has_alloc_comps (ftype
, field
))
325 /* Return true if DECL in private clause needs
326 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
328 gfc_omp_private_outer_ref (tree decl
)
330 tree type
= TREE_TYPE (decl
);
332 if (gfc_omp_privatize_by_reference (decl
))
333 type
= TREE_TYPE (type
);
335 if (GFC_DESCRIPTOR_TYPE_P (type
)
336 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
339 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
342 if (gfc_has_alloc_comps (type
, decl
))
348 /* Callback for gfc_omp_unshare_expr. */
351 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
354 enum tree_code code
= TREE_CODE (t
);
356 /* Stop at types, decls, constants like copy_tree_r. */
357 if (TREE_CODE_CLASS (code
) == tcc_type
358 || TREE_CODE_CLASS (code
) == tcc_declaration
359 || TREE_CODE_CLASS (code
) == tcc_constant
362 else if (handled_component_p (t
)
363 || TREE_CODE (t
) == MEM_REF
)
365 *tp
= unshare_expr (t
);
372 /* Unshare in expr anything that the FE which normally doesn't
373 care much about tree sharing (because during gimplification
374 everything is unshared) could cause problems with tree sharing
375 at omp-low.c time. */
378 gfc_omp_unshare_expr (tree expr
)
380 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* Kinds of processing gfc_walk_alloc_comps performs on the allocatable
   components of a derived type.  */

enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,
  WALK_ALLOC_COMPS_DEFAULT_CTOR,
  WALK_ALLOC_COMPS_COPY_CTOR
};
391 /* Handle allocatable components in OpenMP clauses. */
394 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
395 enum walk_alloc_comps kind
)
397 stmtblock_t block
, tmpblock
;
398 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
399 gfc_init_block (&block
);
401 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
403 if (GFC_DESCRIPTOR_TYPE_P (type
))
405 gfc_init_block (&tmpblock
);
406 tem
= gfc_full_array_size (&tmpblock
, decl
,
407 GFC_TYPE_ARRAY_RANK (type
));
408 then_b
= gfc_finish_block (&tmpblock
);
409 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
410 tem
= gfc_omp_unshare_expr (tem
);
411 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
412 gfc_array_index_type
, tem
,
417 bool compute_nelts
= false;
418 if (!TYPE_DOMAIN (type
)
419 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
420 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
421 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
422 compute_nelts
= true;
423 else if (VAR_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
425 tree a
= DECL_ATTRIBUTES (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)));
426 if (lookup_attribute ("omp dummy var", a
))
427 compute_nelts
= true;
431 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
432 TYPE_SIZE_UNIT (type
),
433 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
434 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
437 tem
= array_type_nelts (type
);
438 tem
= fold_convert (gfc_array_index_type
, tem
);
441 tree nelems
= gfc_evaluate_now (tem
, &block
);
442 tree index
= gfc_create_var (gfc_array_index_type
, "S");
444 gfc_init_block (&tmpblock
);
445 tem
= gfc_conv_array_data (decl
);
446 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
447 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
448 tree destvar
, destvref
= NULL_TREE
;
451 tem
= gfc_conv_array_data (dest
);
452 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
453 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
455 gfc_add_expr_to_block (&tmpblock
,
456 gfc_walk_alloc_comps (declvref
, destvref
,
460 gfc_init_loopinfo (&loop
);
462 loop
.from
[0] = gfc_index_zero_node
;
463 loop
.loopvar
[0] = index
;
465 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
466 gfc_add_block_to_block (&block
, &loop
.pre
);
467 return gfc_finish_block (&block
);
469 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
471 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
473 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
474 type
= TREE_TYPE (decl
);
477 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
478 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
480 tree ftype
= TREE_TYPE (field
);
481 tree declf
, destf
= NULL_TREE
;
482 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
483 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
484 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
485 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
488 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
489 decl
, field
, NULL_TREE
);
491 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
492 dest
, field
, NULL_TREE
);
497 case WALK_ALLOC_COMPS_DTOR
:
499 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
500 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
501 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
503 gfc_add_modify (&block
, unshare_expr (destf
),
504 unshare_expr (declf
));
505 tem
= gfc_duplicate_allocatable_nocopy
506 (destf
, declf
, ftype
,
507 GFC_TYPE_ARRAY_RANK (ftype
));
509 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
510 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
512 case WALK_ALLOC_COMPS_COPY_CTOR
:
513 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
514 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
515 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
516 GFC_TYPE_ARRAY_RANK (ftype
),
518 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
519 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
524 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
527 gfc_init_block (&tmpblock
);
528 gfc_add_expr_to_block (&tmpblock
,
529 gfc_walk_alloc_comps (declf
, destf
,
531 then_b
= gfc_finish_block (&tmpblock
);
532 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
533 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
534 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
535 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
536 tem
= unshare_expr (declf
);
541 tem
= fold_convert (pvoid_type_node
, tem
);
542 tem
= fold_build2_loc (input_location
, NE_EXPR
,
543 logical_type_node
, tem
,
545 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
547 build_empty_stmt (input_location
));
549 gfc_add_expr_to_block (&block
, then_b
);
551 if (kind
== WALK_ALLOC_COMPS_DTOR
)
553 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
554 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
556 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
557 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
,
558 NULL_TREE
, NULL_TREE
, true,
560 GFC_CAF_COARRAY_NOCOARRAY
);
561 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
563 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
565 tem
= gfc_call_free (unshare_expr (declf
));
566 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
571 return gfc_finish_block (&block
);
574 /* Return code to initialize DECL with its default constructor, or
575 NULL if there's nothing to do. */
578 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
580 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
581 stmtblock_t block
, cond_block
;
583 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
584 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
585 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
586 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_REDUCTION
);
588 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
589 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
590 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
591 || !POINTER_TYPE_P (type
)))
593 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
596 gfc_start_block (&block
);
597 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
598 OMP_CLAUSE_DECL (clause
),
599 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
600 gfc_add_expr_to_block (&block
, tem
);
601 return gfc_finish_block (&block
);
606 gcc_assert (outer
!= NULL_TREE
);
608 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
609 "not currently allocated" allocation status if outer
610 array is "not currently allocated", otherwise should be allocated. */
611 gfc_start_block (&block
);
613 gfc_init_block (&cond_block
);
615 if (GFC_DESCRIPTOR_TYPE_P (type
))
617 gfc_add_modify (&cond_block
, decl
, outer
);
618 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
619 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
620 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
622 gfc_conv_descriptor_lbound_get (decl
, rank
));
623 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
624 size
, gfc_index_one_node
);
625 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
626 size
= fold_build2_loc (input_location
, MULT_EXPR
,
627 gfc_array_index_type
, size
,
628 gfc_conv_descriptor_stride_get (decl
, rank
));
629 tree esize
= fold_convert (gfc_array_index_type
,
630 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
631 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
633 size
= unshare_expr (size
);
634 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
638 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
639 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
640 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
641 if (GFC_DESCRIPTOR_TYPE_P (type
))
642 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
644 gfc_add_modify (&cond_block
, unshare_expr (decl
),
645 fold_convert (TREE_TYPE (decl
), ptr
));
646 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
648 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
649 OMP_CLAUSE_DECL (clause
),
650 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
651 gfc_add_expr_to_block (&cond_block
, tem
);
653 then_b
= gfc_finish_block (&cond_block
);
655 /* Reduction clause requires allocated ALLOCATABLE. */
656 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
)
658 gfc_init_block (&cond_block
);
659 if (GFC_DESCRIPTOR_TYPE_P (type
))
660 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
663 gfc_add_modify (&cond_block
, unshare_expr (decl
),
664 build_zero_cst (TREE_TYPE (decl
)));
665 else_b
= gfc_finish_block (&cond_block
);
667 tree tem
= fold_convert (pvoid_type_node
,
668 GFC_DESCRIPTOR_TYPE_P (type
)
669 ? gfc_conv_descriptor_data_get (outer
) : outer
);
670 tem
= unshare_expr (tem
);
671 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
672 tem
, null_pointer_node
);
673 gfc_add_expr_to_block (&block
,
674 build3_loc (input_location
, COND_EXPR
,
675 void_type_node
, cond
, then_b
,
677 /* Avoid -W*uninitialized warnings. */
679 TREE_NO_WARNING (decl
) = 1;
682 gfc_add_expr_to_block (&block
, then_b
);
684 return gfc_finish_block (&block
);
687 /* Build and return code for a copy constructor from SRC to DEST. */
690 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
692 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
693 tree cond
, then_b
, else_b
;
694 stmtblock_t block
, cond_block
;
696 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
697 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
699 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
700 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
701 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
702 || !POINTER_TYPE_P (type
)))
704 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
706 gfc_start_block (&block
);
707 gfc_add_modify (&block
, dest
, src
);
708 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
709 WALK_ALLOC_COMPS_COPY_CTOR
);
710 gfc_add_expr_to_block (&block
, tem
);
711 return gfc_finish_block (&block
);
714 return build2_v (MODIFY_EXPR
, dest
, src
);
717 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
718 and copied from SRC. */
719 gfc_start_block (&block
);
721 gfc_init_block (&cond_block
);
723 gfc_add_modify (&cond_block
, dest
, src
);
724 if (GFC_DESCRIPTOR_TYPE_P (type
))
726 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
727 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
728 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
730 gfc_conv_descriptor_lbound_get (dest
, rank
));
731 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
732 size
, gfc_index_one_node
);
733 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
734 size
= fold_build2_loc (input_location
, MULT_EXPR
,
735 gfc_array_index_type
, size
,
736 gfc_conv_descriptor_stride_get (dest
, rank
));
737 tree esize
= fold_convert (gfc_array_index_type
,
738 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
739 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
741 size
= unshare_expr (size
);
742 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
746 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
747 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
748 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
749 if (GFC_DESCRIPTOR_TYPE_P (type
))
750 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
752 gfc_add_modify (&cond_block
, unshare_expr (dest
),
753 fold_convert (TREE_TYPE (dest
), ptr
));
755 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
756 ? gfc_conv_descriptor_data_get (src
) : src
;
757 srcptr
= unshare_expr (srcptr
);
758 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
759 call
= build_call_expr_loc (input_location
,
760 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
762 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
763 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
765 tree tem
= gfc_walk_alloc_comps (src
, dest
,
766 OMP_CLAUSE_DECL (clause
),
767 WALK_ALLOC_COMPS_COPY_CTOR
);
768 gfc_add_expr_to_block (&cond_block
, tem
);
770 then_b
= gfc_finish_block (&cond_block
);
772 gfc_init_block (&cond_block
);
773 if (GFC_DESCRIPTOR_TYPE_P (type
))
774 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
777 gfc_add_modify (&cond_block
, unshare_expr (dest
),
778 build_zero_cst (TREE_TYPE (dest
)));
779 else_b
= gfc_finish_block (&cond_block
);
781 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
782 unshare_expr (srcptr
), null_pointer_node
);
783 gfc_add_expr_to_block (&block
,
784 build3_loc (input_location
, COND_EXPR
,
785 void_type_node
, cond
, then_b
, else_b
));
786 /* Avoid -W*uninitialized warnings. */
788 TREE_NO_WARNING (dest
) = 1;
790 return gfc_finish_block (&block
);
793 /* Similarly, except use an intrinsic or pointer assignment operator
797 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
799 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
800 tree cond
, then_b
, else_b
;
801 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
803 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
804 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
805 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
806 || !POINTER_TYPE_P (type
)))
808 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
810 gfc_start_block (&block
);
811 /* First dealloc any allocatable components in DEST. */
812 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
813 OMP_CLAUSE_DECL (clause
),
814 WALK_ALLOC_COMPS_DTOR
);
815 gfc_add_expr_to_block (&block
, tem
);
816 /* Then copy over toplevel data. */
817 gfc_add_modify (&block
, dest
, src
);
818 /* Finally allocate any allocatable components and copy. */
819 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
820 WALK_ALLOC_COMPS_COPY_CTOR
);
821 gfc_add_expr_to_block (&block
, tem
);
822 return gfc_finish_block (&block
);
825 return build2_v (MODIFY_EXPR
, dest
, src
);
828 gfc_start_block (&block
);
830 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
832 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
833 WALK_ALLOC_COMPS_DTOR
);
834 tree tem
= fold_convert (pvoid_type_node
,
835 GFC_DESCRIPTOR_TYPE_P (type
)
836 ? gfc_conv_descriptor_data_get (dest
) : dest
);
837 tem
= unshare_expr (tem
);
838 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
839 tem
, null_pointer_node
);
840 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
841 then_b
, build_empty_stmt (input_location
));
842 gfc_add_expr_to_block (&block
, tem
);
845 gfc_init_block (&cond_block
);
847 if (GFC_DESCRIPTOR_TYPE_P (type
))
849 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
850 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
851 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
853 gfc_conv_descriptor_lbound_get (src
, rank
));
854 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
855 size
, gfc_index_one_node
);
856 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
857 size
= fold_build2_loc (input_location
, MULT_EXPR
,
858 gfc_array_index_type
, size
,
859 gfc_conv_descriptor_stride_get (src
, rank
));
860 tree esize
= fold_convert (gfc_array_index_type
,
861 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
862 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
864 size
= unshare_expr (size
);
865 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
869 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
870 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
872 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
873 ? gfc_conv_descriptor_data_get (dest
) : dest
;
874 destptr
= unshare_expr (destptr
);
875 destptr
= fold_convert (pvoid_type_node
, destptr
);
876 gfc_add_modify (&cond_block
, ptr
, destptr
);
878 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, logical_type_node
,
879 destptr
, null_pointer_node
);
881 if (GFC_DESCRIPTOR_TYPE_P (type
))
884 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
886 tree rank
= gfc_rank_cst
[i
];
887 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
888 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
889 gfc_array_index_type
, tem
,
890 gfc_conv_descriptor_lbound_get (src
, rank
));
891 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
892 gfc_array_index_type
, tem
,
893 gfc_conv_descriptor_lbound_get (dest
, rank
));
894 tem
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
895 tem
, gfc_conv_descriptor_ubound_get (dest
,
897 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
898 logical_type_node
, cond
, tem
);
902 gfc_init_block (&cond_block2
);
904 if (GFC_DESCRIPTOR_TYPE_P (type
))
906 gfc_init_block (&inner_block
);
907 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
908 then_b
= gfc_finish_block (&inner_block
);
910 gfc_init_block (&inner_block
);
911 gfc_add_modify (&inner_block
, ptr
,
912 gfc_call_realloc (&inner_block
, ptr
, size
));
913 else_b
= gfc_finish_block (&inner_block
);
915 gfc_add_expr_to_block (&cond_block2
,
916 build3_loc (input_location
, COND_EXPR
,
918 unshare_expr (nonalloc
),
920 gfc_add_modify (&cond_block2
, dest
, src
);
921 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
925 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
926 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
927 fold_convert (type
, ptr
));
929 then_b
= gfc_finish_block (&cond_block2
);
930 else_b
= build_empty_stmt (input_location
);
932 gfc_add_expr_to_block (&cond_block
,
933 build3_loc (input_location
, COND_EXPR
,
934 void_type_node
, unshare_expr (cond
),
937 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
938 ? gfc_conv_descriptor_data_get (src
) : src
;
939 srcptr
= unshare_expr (srcptr
);
940 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
941 call
= build_call_expr_loc (input_location
,
942 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
944 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
945 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
947 tree tem
= gfc_walk_alloc_comps (src
, dest
,
948 OMP_CLAUSE_DECL (clause
),
949 WALK_ALLOC_COMPS_COPY_CTOR
);
950 gfc_add_expr_to_block (&cond_block
, tem
);
952 then_b
= gfc_finish_block (&cond_block
);
954 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
956 gfc_init_block (&cond_block
);
957 if (GFC_DESCRIPTOR_TYPE_P (type
))
959 tree tmp
= gfc_conv_descriptor_data_get (unshare_expr (dest
));
960 tmp
= gfc_deallocate_with_status (tmp
, NULL_TREE
, NULL_TREE
,
961 NULL_TREE
, NULL_TREE
, true, NULL
,
962 GFC_CAF_COARRAY_NOCOARRAY
);
963 gfc_add_expr_to_block (&cond_block
, tmp
);
967 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
968 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
969 gfc_add_modify (&cond_block
, unshare_expr (dest
),
970 build_zero_cst (TREE_TYPE (dest
)));
972 else_b
= gfc_finish_block (&cond_block
);
974 cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
975 unshare_expr (srcptr
), null_pointer_node
);
976 gfc_add_expr_to_block (&block
,
977 build3_loc (input_location
, COND_EXPR
,
978 void_type_node
, cond
,
982 gfc_add_expr_to_block (&block
, then_b
);
984 return gfc_finish_block (&block
);
988 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
989 tree add
, tree nelems
)
991 stmtblock_t tmpblock
;
992 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
993 nelems
= gfc_evaluate_now (nelems
, block
);
995 gfc_init_block (&tmpblock
);
996 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
998 desta
= gfc_build_array_ref (dest
, index
, NULL
);
999 srca
= gfc_build_array_ref (src
, index
, NULL
);
1003 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
1004 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
1005 fold_convert (sizetype
, index
),
1006 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
1007 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
1008 TREE_TYPE (dest
), dest
,
1010 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
1011 TREE_TYPE (src
), src
,
1014 gfc_add_modify (&tmpblock
, desta
,
1015 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
1019 gfc_init_loopinfo (&loop
);
1021 loop
.from
[0] = gfc_index_zero_node
;
1022 loop
.loopvar
[0] = index
;
1023 loop
.to
[0] = nelems
;
1024 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
1025 gfc_add_block_to_block (block
, &loop
.pre
);
1028 /* Build and return code for a constructor of DEST that initializes
1029 it to SRC plus ADD (ADD is scalar integer). */
1032 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
1034 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
1037 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
1039 gfc_start_block (&block
);
1040 add
= gfc_evaluate_now (add
, &block
);
1042 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
1043 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
1044 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
1045 || !POINTER_TYPE_P (type
)))
1047 bool compute_nelts
= false;
1048 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
1049 if (!TYPE_DOMAIN (type
)
1050 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
1051 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
1052 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
1053 compute_nelts
= true;
1054 else if (VAR_P (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
1056 tree a
= DECL_ATTRIBUTES (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)));
1057 if (lookup_attribute ("omp dummy var", a
))
1058 compute_nelts
= true;
1062 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
1063 TYPE_SIZE_UNIT (type
),
1064 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1065 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
1068 nelems
= array_type_nelts (type
);
1069 nelems
= fold_convert (gfc_array_index_type
, nelems
);
1071 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
1072 return gfc_finish_block (&block
);
1075 /* Allocatable arrays in LINEAR clauses need to be allocated
1076 and copied from SRC. */
1077 gfc_add_modify (&block
, dest
, src
);
1078 if (GFC_DESCRIPTOR_TYPE_P (type
))
1080 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
1081 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
1082 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
1084 gfc_conv_descriptor_lbound_get (dest
, rank
));
1085 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
1086 size
, gfc_index_one_node
);
1087 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
1088 size
= fold_build2_loc (input_location
, MULT_EXPR
,
1089 gfc_array_index_type
, size
,
1090 gfc_conv_descriptor_stride_get (dest
, rank
));
1091 tree esize
= fold_convert (gfc_array_index_type
,
1092 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
1093 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
1094 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
1095 nelems
, unshare_expr (esize
));
1096 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
1098 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
1099 gfc_array_index_type
, nelems
,
1100 gfc_index_one_node
);
1103 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
1104 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
1105 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
1106 if (GFC_DESCRIPTOR_TYPE_P (type
))
1108 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
1109 tree etype
= gfc_get_element_type (type
);
1110 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
1111 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
1112 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
1113 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
1117 gfc_add_modify (&block
, unshare_expr (dest
),
1118 fold_convert (TREE_TYPE (dest
), ptr
));
1119 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
1120 tree dstm
= build_fold_indirect_ref (ptr
);
1121 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
1122 gfc_add_modify (&block
, dstm
,
1123 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
1125 return gfc_finish_block (&block
);
1128 /* Build and return code destructing DECL. Return NULL if nothing
1132 gfc_omp_clause_dtor (tree clause
, tree decl
)
1134 tree type
= TREE_TYPE (decl
), tem
;
1136 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
1137 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
1138 && (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
))
1139 || !POINTER_TYPE_P (type
)))
1141 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1142 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
1143 OMP_CLAUSE_DECL (clause
),
1144 WALK_ALLOC_COMPS_DTOR
);
1148 if (GFC_DESCRIPTOR_TYPE_P (type
))
1150 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
1151 to be deallocated if they were allocated. */
1152 tem
= gfc_conv_descriptor_data_get (decl
);
1153 tem
= gfc_deallocate_with_status (tem
, NULL_TREE
, NULL_TREE
, NULL_TREE
,
1154 NULL_TREE
, true, NULL
,
1155 GFC_CAF_COARRAY_NOCOARRAY
);
1158 tem
= gfc_call_free (decl
);
1159 tem
= gfc_omp_unshare_expr (tem
);
1161 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1166 gfc_init_block (&block
);
1167 gfc_add_expr_to_block (&block
,
1168 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1169 OMP_CLAUSE_DECL (clause
),
1170 WALK_ALLOC_COMPS_DTOR
));
1171 gfc_add_expr_to_block (&block
, tem
);
1172 then_b
= gfc_finish_block (&block
);
1174 tem
= fold_convert (pvoid_type_node
,
1175 GFC_DESCRIPTOR_TYPE_P (type
)
1176 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1177 tem
= unshare_expr (tem
);
1178 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, logical_type_node
,
1179 tem
, null_pointer_node
);
1180 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1181 then_b
, build_empty_stmt (input_location
));
1186 /* Build a conditional expression in BLOCK. If COND_VAL is not
1187 null, then the block THEN_B is executed, otherwise ELSE_VAL
1188 is assigned to VAL. */
1191 gfc_build_cond_assign (stmtblock_t
*block
, tree val
, tree cond_val
,
1192 tree then_b
, tree else_val
)
1194 stmtblock_t cond_block
;
1195 tree cond
, else_b
= NULL_TREE
;
1196 tree val_ty
= TREE_TYPE (val
);
1200 gfc_init_block (&cond_block
);
1201 gfc_add_modify (&cond_block
, val
, fold_convert (val_ty
, else_val
));
1202 else_b
= gfc_finish_block (&cond_block
);
1204 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1206 cond_val
, null_pointer_node
);
1207 gfc_add_expr_to_block (block
,
1208 build3_loc (input_location
,
1215 /* Build a conditional expression in BLOCK, returning a temporary
1216 variable containing the result. If COND_VAL is not null, then
1217 THEN_VAL will be assigned to the variable, otherwise ELSE_VAL
1222 gfc_build_cond_assign_expr (stmtblock_t
*block
, tree cond_val
,
1223 tree then_val
, tree else_val
)
1226 tree val_ty
= TREE_TYPE (then_val
);
1227 stmtblock_t cond_block
;
1229 val
= create_tmp_var (val_ty
);
1231 gfc_init_block (&cond_block
);
1232 gfc_add_modify (&cond_block
, val
, then_val
);
1233 tree then_b
= gfc_finish_block (&cond_block
);
1235 gfc_build_cond_assign (block
, val
, cond_val
, then_b
, else_val
);
1241 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
)
1243 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1246 tree decl
= OMP_CLAUSE_DECL (c
);
1248 /* Assumed-size arrays can't be mapped implicitly, they have to be
1249 mapped explicitly using array sections. */
1250 if (TREE_CODE (decl
) == PARM_DECL
1251 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
1252 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
1253 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
1254 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
1257 error_at (OMP_CLAUSE_LOCATION (c
),
1258 "implicit mapping of assumed size array %qD", decl
);
1262 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1263 tree present
= (gfc_omp_is_optional_argument (decl
)
1264 ? gfc_omp_check_optional_argument (decl
, true) : NULL_TREE
);
1265 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1267 if (!gfc_omp_privatize_by_reference (decl
)
1268 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1269 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1270 && !GFC_DECL_CRAY_POINTEE (decl
)
1271 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1273 tree orig_decl
= decl
;
1274 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1275 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1276 OMP_CLAUSE_DECL (c4
) = decl
;
1277 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1278 decl
= build_fold_indirect_ref (decl
);
1280 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1281 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1283 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1284 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_POINTER
);
1285 OMP_CLAUSE_DECL (c2
) = decl
;
1286 OMP_CLAUSE_SIZE (c2
) = size_int (0);
1289 gfc_start_block (&block
);
1291 ptr
= gfc_build_cond_assign_expr (&block
, present
, decl
,
1293 gimplify_and_add (gfc_finish_block (&block
), pre_p
);
1294 ptr
= build_fold_indirect_ref (ptr
);
1295 OMP_CLAUSE_DECL (c
) = ptr
;
1296 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
1300 OMP_CLAUSE_DECL (c
) = decl
;
1301 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1303 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1304 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1305 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1307 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1308 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1309 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1310 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1311 decl
= build_fold_indirect_ref (decl
);
1312 OMP_CLAUSE_DECL (c
) = decl
;
1315 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1318 gfc_start_block (&block
);
1319 tree type
= TREE_TYPE (decl
);
1320 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1323 ptr
= gfc_build_cond_assign_expr (&block
, present
, ptr
,
1325 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
1326 ptr
= build_fold_indirect_ref (ptr
);
1327 OMP_CLAUSE_DECL (c
) = ptr
;
1328 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1329 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1332 ptr
= create_tmp_var (TREE_TYPE (TREE_OPERAND (decl
, 0)));
1333 gfc_add_modify (&block
, ptr
, TREE_OPERAND (decl
, 0));
1335 OMP_CLAUSE_DECL (c2
) = build_fold_indirect_ref (ptr
);
1338 OMP_CLAUSE_DECL (c2
) = decl
;
1339 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1340 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1341 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1344 ptr
= gfc_conv_descriptor_data_get (decl
);
1345 ptr
= gfc_build_addr_expr (NULL
, ptr
);
1346 ptr
= gfc_build_cond_assign_expr (&block
, present
,
1347 ptr
, null_pointer_node
);
1348 ptr
= build_fold_indirect_ref (ptr
);
1349 OMP_CLAUSE_DECL (c3
) = ptr
;
1352 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1353 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1354 tree size
= create_tmp_var (gfc_array_index_type
);
1355 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1356 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1357 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1358 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1360 stmtblock_t cond_block
;
1361 tree tem
, then_b
, else_b
, zero
, cond
;
1363 gfc_init_block (&cond_block
);
1364 tem
= gfc_full_array_size (&cond_block
, decl
,
1365 GFC_TYPE_ARRAY_RANK (type
));
1366 gfc_add_modify (&cond_block
, size
, tem
);
1367 gfc_add_modify (&cond_block
, size
,
1368 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1370 then_b
= gfc_finish_block (&cond_block
);
1371 gfc_init_block (&cond_block
);
1372 zero
= build_int_cst (gfc_array_index_type
, 0);
1373 gfc_add_modify (&cond_block
, size
, zero
);
1374 else_b
= gfc_finish_block (&cond_block
);
1375 tem
= gfc_conv_descriptor_data_get (decl
);
1376 tem
= fold_convert (pvoid_type_node
, tem
);
1377 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1378 boolean_type_node
, tem
, null_pointer_node
);
1381 tem
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1382 present
, null_pointer_node
);
1383 cond
= fold_build2_loc (input_location
, TRUTH_ANDIF_EXPR
,
1384 boolean_type_node
, tem
, cond
);
1386 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1387 void_type_node
, cond
,
1392 stmtblock_t cond_block
;
1395 gfc_init_block (&cond_block
);
1396 gfc_add_modify (&cond_block
, size
,
1397 gfc_full_array_size (&cond_block
, decl
,
1398 GFC_TYPE_ARRAY_RANK (type
)));
1399 gfc_add_modify (&cond_block
, size
,
1400 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1402 then_b
= gfc_finish_block (&cond_block
);
1404 gfc_build_cond_assign (&block
, size
, present
, then_b
,
1405 build_int_cst (gfc_array_index_type
, 0));
1409 gfc_add_modify (&block
, size
,
1410 gfc_full_array_size (&block
, decl
,
1411 GFC_TYPE_ARRAY_RANK (type
)));
1412 gfc_add_modify (&block
, size
,
1413 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1416 OMP_CLAUSE_SIZE (c
) = size
;
1417 tree stmt
= gfc_finish_block (&block
);
1418 gimplify_and_add (stmt
, pre_p
);
1421 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1423 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1424 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1427 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1428 OMP_CLAUSE_CHAIN (last
) = c2
;
1433 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1434 OMP_CLAUSE_CHAIN (last
) = c3
;
1439 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1440 OMP_CLAUSE_CHAIN (last
) = c4
;
1445 /* Return true if DECL is a scalar variable (for the purpose of
1446 implicit firstprivatization). */
1449 gfc_omp_scalar_p (tree decl
)
1451 tree type
= TREE_TYPE (decl
);
1452 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1453 type
= TREE_TYPE (type
);
1454 if (TREE_CODE (type
) == POINTER_TYPE
)
1456 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1457 || GFC_DECL_GET_SCALAR_POINTER (decl
))
1458 type
= TREE_TYPE (type
);
1459 if (GFC_ARRAY_TYPE_P (type
)
1460 || GFC_CLASS_TYPE_P (type
))
1463 if ((TREE_CODE (type
) == ARRAY_TYPE
|| TREE_CODE (type
) == INTEGER_TYPE
)
1464 && TYPE_STRING_FLAG (type
))
1466 if (INTEGRAL_TYPE_P (type
)
1467 || SCALAR_FLOAT_TYPE_P (type
)
1468 || COMPLEX_FLOAT_TYPE_P (type
))
1474 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1475 disregarded in OpenMP construct, because it is going to be
1476 remapped during OpenMP lowering. SHARED is true if DECL
1477 is going to be shared, false if it is going to be privatized. */
1480 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1482 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1483 && DECL_HAS_VALUE_EXPR_P (decl
))
1485 tree value
= DECL_VALUE_EXPR (decl
);
1487 if (TREE_CODE (value
) == COMPONENT_REF
1488 && VAR_P (TREE_OPERAND (value
, 0))
1489 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1491 /* If variable in COMMON or EQUIVALENCE is privatized, return
1492 true, as just that variable is supposed to be privatized,
1493 not the whole COMMON or whole EQUIVALENCE.
1494 For shared variables in COMMON or EQUIVALENCE, let them be
1495 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1496 from the same COMMON or EQUIVALENCE just one sharing of the
1497 whole COMMON or EQUIVALENCE is enough. */
1502 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1508 /* Return true if DECL that is shared iff SHARED is true should
1509 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1513 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1515 if (GFC_DECL_CRAY_POINTEE (decl
))
1518 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1519 && DECL_HAS_VALUE_EXPR_P (decl
))
1521 tree value
= DECL_VALUE_EXPR (decl
);
1523 if (TREE_CODE (value
) == COMPONENT_REF
1524 && VAR_P (TREE_OPERAND (value
, 0))
1525 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1532 /* Register language specific type size variables as potentially OpenMP
1533 firstprivate variables. */
1536 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1538 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1542 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1543 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1545 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1546 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1547 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1549 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1550 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1556 gfc_trans_add_clause (tree node
, tree tail
)
1558 OMP_CLAUSE_CHAIN (node
) = tail
;
1563 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1568 gfc_symbol
*proc_sym
;
1569 gfc_formal_arglist
*f
;
1571 gcc_assert (sym
->attr
.dummy
);
1572 proc_sym
= sym
->ns
->proc_name
;
1573 if (proc_sym
->attr
.entry_master
)
1575 if (gfc_return_by_reference (proc_sym
))
1578 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1581 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1587 return build_int_cst (integer_type_node
, cnt
);
1590 tree t
= gfc_get_symbol_decl (sym
);
1594 bool alternate_entry
;
1597 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1598 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1599 && sym
->result
== sym
;
1600 entry_master
= sym
->attr
.result
1601 && sym
->ns
->proc_name
->attr
.entry_master
1602 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1603 parent_decl
= current_function_decl
1604 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1606 if ((t
== parent_decl
&& return_value
)
1607 || (sym
->ns
&& sym
->ns
->proc_name
1608 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1609 && (alternate_entry
|| entry_master
)))
1614 /* Special case for assigning the return value of a function.
1615 Self recursive functions must have an explicit return value. */
1616 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1617 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1619 /* Similarly for alternate entry points. */
1620 else if (alternate_entry
1621 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1624 gfc_entry_list
*el
= NULL
;
1626 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1629 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1634 else if (entry_master
1635 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1637 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1643 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1644 gfc_omp_namelist
*namelist
, tree list
,
1647 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1648 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1650 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1651 if (t
!= error_mark_node
)
1653 tree node
= build_omp_clause (input_location
, code
);
1654 OMP_CLAUSE_DECL (node
) = t
;
1655 list
= gfc_trans_add_clause (node
, list
);
1661 struct omp_udr_find_orig_data
1663 gfc_omp_udr
*omp_udr
;
1668 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1671 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1672 if ((*e
)->expr_type
== EXPR_VARIABLE
1673 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1674 cd
->omp_orig_seen
= true;
1680 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
1682 gfc_symbol
*sym
= n
->sym
;
1683 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1684 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1685 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1686 gfc_symbol omp_var_copy
[4];
1687 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1689 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1690 locus old_loc
= gfc_current_locus
;
1693 gfc_omp_udr
*udr
= n
->udr
? n
->udr
->udr
: NULL
;
1695 decl
= OMP_CLAUSE_DECL (c
);
1696 gfc_current_locus
= where
;
1697 type
= TREE_TYPE (decl
);
1698 outer_decl
= create_tmp_var_raw (type
);
1699 if (TREE_CODE (decl
) == PARM_DECL
1700 && TREE_CODE (type
) == REFERENCE_TYPE
1701 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1702 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1704 decl
= build_fold_indirect_ref (decl
);
1705 type
= TREE_TYPE (type
);
1708 /* Create a fake symbol for init value. */
1709 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1710 init_val_sym
.ns
= sym
->ns
;
1711 init_val_sym
.name
= sym
->name
;
1712 init_val_sym
.ts
= sym
->ts
;
1713 init_val_sym
.attr
.referenced
= 1;
1714 init_val_sym
.declared_at
= where
;
1715 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1716 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1717 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1718 else if (udr
->initializer_ns
)
1719 backend_decl
= NULL
;
1721 switch (sym
->ts
.type
)
1727 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1730 backend_decl
= NULL_TREE
;
1733 init_val_sym
.backend_decl
= backend_decl
;
1735 /* Create a fake symbol for the outer array reference. */
1738 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
1739 outer_sym
.attr
.dummy
= 0;
1740 outer_sym
.attr
.result
= 0;
1741 outer_sym
.attr
.flavor
= FL_VARIABLE
;
1742 outer_sym
.backend_decl
= outer_decl
;
1743 if (decl
!= OMP_CLAUSE_DECL (c
))
1744 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
1746 /* Create fake symtrees for it. */
1747 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
1748 symtree1
->n
.sym
= sym
;
1749 gcc_assert (symtree1
== root1
);
1751 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
1752 symtree2
->n
.sym
= &init_val_sym
;
1753 gcc_assert (symtree2
== root2
);
1755 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
1756 symtree3
->n
.sym
= &outer_sym
;
1757 gcc_assert (symtree3
== root3
);
1759 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
1762 omp_var_copy
[0] = *udr
->omp_out
;
1763 omp_var_copy
[1] = *udr
->omp_in
;
1764 *udr
->omp_out
= outer_sym
;
1765 *udr
->omp_in
= *sym
;
1766 if (udr
->initializer_ns
)
1768 omp_var_copy
[2] = *udr
->omp_priv
;
1769 omp_var_copy
[3] = *udr
->omp_orig
;
1770 *udr
->omp_priv
= *sym
;
1771 *udr
->omp_orig
= outer_sym
;
1775 /* Create expressions. */
1776 e1
= gfc_get_expr ();
1777 e1
->expr_type
= EXPR_VARIABLE
;
1779 e1
->symtree
= symtree1
;
1781 if (sym
->attr
.dimension
)
1783 e1
->ref
= ref
= gfc_get_ref ();
1784 ref
->type
= REF_ARRAY
;
1785 ref
->u
.ar
.where
= where
;
1786 ref
->u
.ar
.as
= sym
->as
;
1787 ref
->u
.ar
.type
= AR_FULL
;
1788 ref
->u
.ar
.dimen
= 0;
1790 t
= gfc_resolve_expr (e1
);
1794 if (backend_decl
!= NULL_TREE
)
1796 e2
= gfc_get_expr ();
1797 e2
->expr_type
= EXPR_VARIABLE
;
1799 e2
->symtree
= symtree2
;
1801 t
= gfc_resolve_expr (e2
);
1804 else if (udr
->initializer_ns
== NULL
)
1806 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
1807 e2
= gfc_default_initializer (&sym
->ts
);
1809 t
= gfc_resolve_expr (e2
);
1812 else if (n
->udr
->initializer
->op
== EXEC_ASSIGN
)
1814 e2
= gfc_copy_expr (n
->udr
->initializer
->expr2
);
1815 t
= gfc_resolve_expr (e2
);
1818 if (udr
&& udr
->initializer_ns
)
1820 struct omp_udr_find_orig_data cd
;
1822 cd
.omp_orig_seen
= false;
1823 gfc_code_walker (&n
->udr
->initializer
,
1824 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
1825 if (cd
.omp_orig_seen
)
1826 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
1829 e3
= gfc_copy_expr (e1
);
1830 e3
->symtree
= symtree3
;
1831 t
= gfc_resolve_expr (e3
);
1836 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
1840 e4
= gfc_add (e3
, e1
);
1843 e4
= gfc_multiply (e3
, e1
);
1845 case TRUTH_ANDIF_EXPR
:
1846 e4
= gfc_and (e3
, e1
);
1848 case TRUTH_ORIF_EXPR
:
1849 e4
= gfc_or (e3
, e1
);
1852 e4
= gfc_eqv (e3
, e1
);
1855 e4
= gfc_neqv (e3
, e1
);
1873 if (n
->udr
->combiner
->op
== EXEC_ASSIGN
)
1876 e3
= gfc_copy_expr (n
->udr
->combiner
->expr1
);
1877 e4
= gfc_copy_expr (n
->udr
->combiner
->expr2
);
1878 t
= gfc_resolve_expr (e3
);
1880 t
= gfc_resolve_expr (e4
);
1889 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
1890 intrinsic_sym
.ns
= sym
->ns
;
1891 intrinsic_sym
.name
= iname
;
1892 intrinsic_sym
.ts
= sym
->ts
;
1893 intrinsic_sym
.attr
.referenced
= 1;
1894 intrinsic_sym
.attr
.intrinsic
= 1;
1895 intrinsic_sym
.attr
.function
= 1;
1896 intrinsic_sym
.attr
.implicit_type
= 1;
1897 intrinsic_sym
.result
= &intrinsic_sym
;
1898 intrinsic_sym
.declared_at
= where
;
1900 symtree4
= gfc_new_symtree (&root4
, iname
);
1901 symtree4
->n
.sym
= &intrinsic_sym
;
1902 gcc_assert (symtree4
== root4
);
1904 e4
= gfc_get_expr ();
1905 e4
->expr_type
= EXPR_FUNCTION
;
1907 e4
->symtree
= symtree4
;
1908 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
1909 e4
->value
.function
.actual
->expr
= e3
;
1910 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
1911 e4
->value
.function
.actual
->next
->expr
= e1
;
1913 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1915 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1916 e1
= gfc_copy_expr (e1
);
1917 e3
= gfc_copy_expr (e3
);
1918 t
= gfc_resolve_expr (e4
);
1922 /* Create the init statement list. */
1925 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
1927 stmt
= gfc_trans_call (n
->udr
->initializer
, false,
1928 NULL_TREE
, NULL_TREE
, false);
1929 if (TREE_CODE (stmt
) != BIND_EXPR
)
1930 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1933 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
1935 /* Create the merge statement list. */
1938 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
1940 stmt
= gfc_trans_call (n
->udr
->combiner
, false,
1941 NULL_TREE
, NULL_TREE
, false);
1942 if (TREE_CODE (stmt
) != BIND_EXPR
)
1943 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1946 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
1948 /* And stick the placeholder VAR_DECL into the clause as well. */
1949 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
1951 gfc_current_locus
= old_loc
;
1964 gfc_free_array_spec (outer_sym
.as
);
1968 *udr
->omp_out
= omp_var_copy
[0];
1969 *udr
->omp_in
= omp_var_copy
[1];
1970 if (udr
->initializer_ns
)
1972 *udr
->omp_priv
= omp_var_copy
[2];
1973 *udr
->omp_orig
= omp_var_copy
[3];
1979 gfc_trans_omp_reduction_list (gfc_omp_namelist
*namelist
, tree list
,
1980 locus where
, bool mark_addressable
)
1982 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1983 if (namelist
->sym
->attr
.referenced
)
1985 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
1986 if (t
!= error_mark_node
)
1988 tree node
= build_omp_clause (gfc_get_location (&namelist
->where
),
1989 OMP_CLAUSE_REDUCTION
);
1990 OMP_CLAUSE_DECL (node
) = t
;
1991 if (mark_addressable
)
1992 TREE_ADDRESSABLE (t
) = 1;
1993 switch (namelist
->u
.reduction_op
)
1995 case OMP_REDUCTION_PLUS
:
1996 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
1998 case OMP_REDUCTION_MINUS
:
1999 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
2001 case OMP_REDUCTION_TIMES
:
2002 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
2004 case OMP_REDUCTION_AND
:
2005 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
2007 case OMP_REDUCTION_OR
:
2008 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
2010 case OMP_REDUCTION_EQV
:
2011 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
2013 case OMP_REDUCTION_NEQV
:
2014 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
2016 case OMP_REDUCTION_MAX
:
2017 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
2019 case OMP_REDUCTION_MIN
:
2020 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
2022 case OMP_REDUCTION_IAND
:
2023 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
2025 case OMP_REDUCTION_IOR
:
2026 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
2028 case OMP_REDUCTION_IEOR
:
2029 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
2031 case OMP_REDUCTION_USER
:
2032 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
2037 if (namelist
->sym
->attr
.dimension
2038 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
2039 || namelist
->sym
->attr
.allocatable
)
2040 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
2041 list
= gfc_trans_add_clause (node
, list
);
2048 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
2053 gfc_init_se (&se
, NULL
);
2054 gfc_conv_expr (&se
, expr
);
2055 gfc_add_block_to_block (block
, &se
.pre
);
2056 result
= gfc_evaluate_now (se
.expr
, block
);
2057 gfc_add_block_to_block (block
, &se
.post
);
2062 static vec
<tree
, va_heap
, vl_embed
> *doacross_steps
;
2065 /* Translate an array section or array element. */
2068 gfc_trans_omp_array_section (stmtblock_t
*block
, gfc_omp_namelist
*n
,
2069 tree decl
, bool element
, gomp_map_kind ptr_kind
,
2070 tree node
, tree
&node2
, tree
&node3
, tree
&node4
)
2075 gfc_init_se (&se
, NULL
);
2079 gfc_conv_expr_reference (&se
, n
->expr
);
2080 gfc_add_block_to_block (block
, &se
.pre
);
2082 OMP_CLAUSE_SIZE (node
) = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2086 gfc_conv_expr_descriptor (&se
, n
->expr
);
2087 ptr
= gfc_conv_array_data (se
.expr
);
2088 tree type
= TREE_TYPE (se
.expr
);
2089 gfc_add_block_to_block (block
, &se
.pre
);
2090 OMP_CLAUSE_SIZE (node
) = gfc_full_array_size (block
, se
.expr
,
2091 GFC_TYPE_ARRAY_RANK (type
));
2092 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2093 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2094 OMP_CLAUSE_SIZE (node
) = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2095 OMP_CLAUSE_SIZE (node
), elemsz
);
2097 gfc_add_block_to_block (block
, &se
.post
);
2098 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
2099 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2101 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2102 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
)))
2103 && ptr_kind
== GOMP_MAP_POINTER
)
2105 node4
= build_omp_clause (input_location
,
2107 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2108 OMP_CLAUSE_DECL (node4
) = decl
;
2109 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2110 decl
= build_fold_indirect_ref (decl
);
2112 ptr
= fold_convert (sizetype
, ptr
);
2113 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2115 tree type
= TREE_TYPE (decl
);
2116 ptr2
= gfc_conv_descriptor_data_get (decl
);
2117 node2
= build_omp_clause (input_location
,
2119 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2120 OMP_CLAUSE_DECL (node2
) = decl
;
2121 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2122 node3
= build_omp_clause (input_location
,
2124 OMP_CLAUSE_SET_MAP_KIND (node3
, ptr_kind
);
2125 OMP_CLAUSE_DECL (node3
)
2126 = gfc_conv_descriptor_data_get (decl
);
2127 if (ptr_kind
== GOMP_MAP_ATTACH_DETACH
)
2128 STRIP_NOPS (OMP_CLAUSE_DECL (node3
));
2132 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2133 ptr2
= build_fold_addr_expr (decl
);
2136 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2139 node3
= build_omp_clause (input_location
,
2141 OMP_CLAUSE_SET_MAP_KIND (node3
, ptr_kind
);
2142 OMP_CLAUSE_DECL (node3
) = decl
;
2144 ptr2
= fold_convert (sizetype
, ptr2
);
2145 OMP_CLAUSE_SIZE (node3
)
2146 = fold_build2 (MINUS_EXPR
, sizetype
, ptr
, ptr2
);
2150 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
2151 locus where
, bool declare_simd
= false)
2153 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
2155 enum omp_clause_code clause_code
;
2158 if (clauses
== NULL
)
2161 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
2163 gfc_omp_namelist
*n
= clauses
->lists
[list
];
2169 case OMP_LIST_REDUCTION
:
2170 /* An OpenACC async clause indicates the need to set reduction
2171 arguments addressable, to allow asynchronous copy-out. */
2172 omp_clauses
= gfc_trans_omp_reduction_list (n
, omp_clauses
, where
,
2175 case OMP_LIST_PRIVATE
:
2176 clause_code
= OMP_CLAUSE_PRIVATE
;
2178 case OMP_LIST_SHARED
:
2179 clause_code
= OMP_CLAUSE_SHARED
;
2181 case OMP_LIST_FIRSTPRIVATE
:
2182 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
2184 case OMP_LIST_LASTPRIVATE
:
2185 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
2187 case OMP_LIST_COPYIN
:
2188 clause_code
= OMP_CLAUSE_COPYIN
;
2190 case OMP_LIST_COPYPRIVATE
:
2191 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
2193 case OMP_LIST_UNIFORM
:
2194 clause_code
= OMP_CLAUSE_UNIFORM
;
2196 case OMP_LIST_USE_DEVICE
:
2197 case OMP_LIST_USE_DEVICE_PTR
:
2198 clause_code
= OMP_CLAUSE_USE_DEVICE_PTR
;
2200 case OMP_LIST_USE_DEVICE_ADDR
:
2201 clause_code
= OMP_CLAUSE_USE_DEVICE_ADDR
;
2203 case OMP_LIST_IS_DEVICE_PTR
:
2204 clause_code
= OMP_CLAUSE_IS_DEVICE_PTR
;
2209 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
2212 case OMP_LIST_ALIGNED
:
2213 for (; n
!= NULL
; n
= n
->next
)
2214 if (n
->sym
->attr
.referenced
|| declare_simd
)
2216 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
2217 if (t
!= error_mark_node
)
2219 tree node
= build_omp_clause (input_location
,
2220 OMP_CLAUSE_ALIGNED
);
2221 OMP_CLAUSE_DECL (node
) = t
;
2227 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
2230 gfc_init_se (&se
, NULL
);
2231 gfc_conv_expr (&se
, n
->expr
);
2232 gfc_add_block_to_block (block
, &se
.pre
);
2233 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
2234 gfc_add_block_to_block (block
, &se
.post
);
2236 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
2238 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2242 case OMP_LIST_LINEAR
:
2244 gfc_expr
*last_step_expr
= NULL
;
2245 tree last_step
= NULL_TREE
;
2246 bool last_step_parm
= false;
2248 for (; n
!= NULL
; n
= n
->next
)
2252 last_step_expr
= n
->expr
;
2253 last_step
= NULL_TREE
;
2254 last_step_parm
= false;
2256 if (n
->sym
->attr
.referenced
|| declare_simd
)
2258 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
2259 if (t
!= error_mark_node
)
2261 tree node
= build_omp_clause (input_location
,
2263 OMP_CLAUSE_DECL (node
) = t
;
2264 omp_clause_linear_kind kind
;
2265 switch (n
->u
.linear_op
)
2267 case OMP_LINEAR_DEFAULT
:
2268 kind
= OMP_CLAUSE_LINEAR_DEFAULT
;
2270 case OMP_LINEAR_REF
:
2271 kind
= OMP_CLAUSE_LINEAR_REF
;
2273 case OMP_LINEAR_VAL
:
2274 kind
= OMP_CLAUSE_LINEAR_VAL
;
2276 case OMP_LINEAR_UVAL
:
2277 kind
= OMP_CLAUSE_LINEAR_UVAL
;
2282 OMP_CLAUSE_LINEAR_KIND (node
) = kind
;
2283 if (last_step_expr
&& last_step
== NULL_TREE
)
2287 gfc_init_se (&se
, NULL
);
2288 gfc_conv_expr (&se
, last_step_expr
);
2289 gfc_add_block_to_block (block
, &se
.pre
);
2290 last_step
= gfc_evaluate_now (se
.expr
, block
);
2291 gfc_add_block_to_block (block
, &se
.post
);
2293 else if (last_step_expr
->expr_type
== EXPR_VARIABLE
)
2295 gfc_symbol
*s
= last_step_expr
->symtree
->n
.sym
;
2296 last_step
= gfc_trans_omp_variable (s
, true);
2297 last_step_parm
= true;
2301 = gfc_conv_constant_to_tree (last_step_expr
);
2305 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (node
) = 1;
2306 OMP_CLAUSE_LINEAR_STEP (node
) = last_step
;
2310 if (kind
== OMP_CLAUSE_LINEAR_REF
)
2313 if (n
->sym
->attr
.flavor
== FL_PROCEDURE
)
2315 type
= gfc_get_function_type (n
->sym
);
2316 type
= build_pointer_type (type
);
2319 type
= gfc_sym_type (n
->sym
);
2320 if (POINTER_TYPE_P (type
))
2321 type
= TREE_TYPE (type
);
2322 /* Otherwise to be determined what exactly
2324 tree t
= fold_convert (sizetype
, last_step
);
2325 t
= size_binop (MULT_EXPR
, t
,
2326 TYPE_SIZE_UNIT (type
));
2327 OMP_CLAUSE_LINEAR_STEP (node
) = t
;
2332 = gfc_typenode_for_spec (&n
->sym
->ts
);
2333 OMP_CLAUSE_LINEAR_STEP (node
)
2334 = fold_convert (type
, last_step
);
2337 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
2338 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
2339 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2345 case OMP_LIST_DEPEND
:
2346 for (; n
!= NULL
; n
= n
->next
)
2348 if (n
->u
.depend_op
== OMP_DEPEND_SINK_FIRST
)
2350 tree vec
= NULL_TREE
;
2354 tree addend
= integer_zero_node
, t
;
2358 addend
= gfc_conv_constant_to_tree (n
->expr
);
2359 if (TREE_CODE (addend
) == INTEGER_CST
2360 && tree_int_cst_sgn (addend
) == -1)
2363 addend
= const_unop (NEGATE_EXPR
,
2364 TREE_TYPE (addend
), addend
);
2367 t
= gfc_trans_omp_variable (n
->sym
, false);
2368 if (t
!= error_mark_node
)
2370 if (i
< vec_safe_length (doacross_steps
)
2371 && !integer_zerop (addend
)
2372 && (*doacross_steps
)[i
])
2374 tree step
= (*doacross_steps
)[i
];
2375 addend
= fold_convert (TREE_TYPE (step
), addend
);
2376 addend
= build2 (TRUNC_DIV_EXPR
,
2377 TREE_TYPE (step
), addend
, step
);
2379 vec
= tree_cons (addend
, t
, vec
);
2381 OMP_CLAUSE_DEPEND_SINK_NEGATIVE (vec
) = 1;
2384 || n
->next
->u
.depend_op
!= OMP_DEPEND_SINK
)
2388 if (vec
== NULL_TREE
)
2391 tree node
= build_omp_clause (input_location
,
2393 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_SINK
;
2394 OMP_CLAUSE_DECL (node
) = nreverse (vec
);
2395 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2399 if (!n
->sym
->attr
.referenced
)
2402 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_DEPEND
);
2403 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2405 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
2406 if (gfc_omp_privatize_by_reference (decl
))
2407 decl
= build_fold_indirect_ref (decl
);
2408 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2410 decl
= gfc_conv_descriptor_data_get (decl
);
2411 decl
= fold_convert (build_pointer_type (char_type_node
),
2413 decl
= build_fold_indirect_ref (decl
);
2415 else if (DECL_P (decl
))
2416 TREE_ADDRESSABLE (decl
) = 1;
2417 OMP_CLAUSE_DECL (node
) = decl
;
2422 gfc_init_se (&se
, NULL
);
2423 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2425 gfc_conv_expr_reference (&se
, n
->expr
);
2430 gfc_conv_expr_descriptor (&se
, n
->expr
);
2431 ptr
= gfc_conv_array_data (se
.expr
);
2433 gfc_add_block_to_block (block
, &se
.pre
);
2434 gfc_add_block_to_block (block
, &se
.post
);
2435 ptr
= fold_convert (build_pointer_type (char_type_node
),
2437 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2439 switch (n
->u
.depend_op
)
2442 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
2444 case OMP_DEPEND_OUT
:
2445 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
2447 case OMP_DEPEND_INOUT
:
2448 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
2453 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2457 for (; n
!= NULL
; n
= n
->next
)
2459 if (!n
->sym
->attr
.referenced
)
2462 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2463 tree node2
= NULL_TREE
;
2464 tree node3
= NULL_TREE
;
2465 tree node4
= NULL_TREE
;
2466 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
2468 TREE_ADDRESSABLE (decl
) = 1;
2469 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2471 tree present
= (gfc_omp_is_optional_argument (decl
)
2472 ? gfc_omp_check_optional_argument (decl
, true)
2474 if (n
->sym
->ts
.type
== BT_CLASS
)
2476 tree type
= TREE_TYPE (decl
);
2477 if (n
->sym
->attr
.optional
)
2478 sorry ("optional class parameter");
2479 if (POINTER_TYPE_P (type
))
2481 node4
= build_omp_clause (input_location
,
2483 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2484 OMP_CLAUSE_DECL (node4
) = decl
;
2485 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2486 decl
= build_fold_indirect_ref (decl
);
2488 tree ptr
= gfc_class_data_get (decl
);
2489 ptr
= build_fold_indirect_ref (ptr
);
2490 OMP_CLAUSE_DECL (node
) = ptr
;
2491 OMP_CLAUSE_SIZE (node
) = gfc_class_vtab_size_get (decl
);
2492 node2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2493 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2494 OMP_CLAUSE_DECL (node2
) = decl
;
2495 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2496 node3
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
2497 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_ATTACH_DETACH
);
2498 OMP_CLAUSE_DECL (node3
) = gfc_class_data_get (decl
);
2499 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2500 goto finalize_map_clause
;
2502 else if (POINTER_TYPE_P (TREE_TYPE (decl
))
2503 && (gfc_omp_privatize_by_reference (decl
)
2504 || GFC_DECL_GET_SCALAR_POINTER (decl
)
2505 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
2506 || GFC_DECL_CRAY_POINTEE (decl
)
2507 || GFC_DESCRIPTOR_TYPE_P
2508 (TREE_TYPE (TREE_TYPE (decl
)))
2509 || n
->sym
->ts
.type
== BT_DERIVED
))
2511 tree orig_decl
= decl
;
2512 node4
= build_omp_clause (input_location
,
2514 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2515 OMP_CLAUSE_DECL (node4
) = decl
;
2516 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2517 decl
= build_fold_indirect_ref (decl
);
2518 if ((TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
2519 || gfc_omp_is_optional_argument (orig_decl
))
2520 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
2521 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
2523 node3
= build_omp_clause (input_location
,
2525 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2526 OMP_CLAUSE_DECL (node3
) = decl
;
2527 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2528 decl
= build_fold_indirect_ref (decl
);
2531 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
))
2532 && n
->u
.map_op
!= OMP_MAP_ATTACH
2533 && n
->u
.map_op
!= OMP_MAP_DETACH
)
2535 tree type
= TREE_TYPE (decl
);
2536 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2538 ptr
= gfc_build_cond_assign_expr (block
, present
, ptr
,
2540 ptr
= fold_convert (build_pointer_type (char_type_node
),
2542 ptr
= build_fold_indirect_ref (ptr
);
2543 OMP_CLAUSE_DECL (node
) = ptr
;
2544 node2
= build_omp_clause (input_location
,
2546 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2547 OMP_CLAUSE_DECL (node2
) = decl
;
2548 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2549 node3
= build_omp_clause (input_location
,
2551 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2554 ptr
= gfc_conv_descriptor_data_get (decl
);
2555 ptr
= gfc_build_addr_expr (NULL
, ptr
);
2556 ptr
= gfc_build_cond_assign_expr (block
, present
, ptr
,
2558 ptr
= build_fold_indirect_ref (ptr
);
2559 OMP_CLAUSE_DECL (node3
) = ptr
;
2562 OMP_CLAUSE_DECL (node3
)
2563 = gfc_conv_descriptor_data_get (decl
);
2564 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2566 /* We have to check for n->sym->attr.dimension because
2567 of scalar coarrays. */
2568 if (n
->sym
->attr
.pointer
&& n
->sym
->attr
.dimension
)
2570 stmtblock_t cond_block
;
2572 = gfc_create_var (gfc_array_index_type
, NULL
);
2573 tree tem
, then_b
, else_b
, zero
, cond
;
2575 gfc_init_block (&cond_block
);
2577 = gfc_full_array_size (&cond_block
, decl
,
2578 GFC_TYPE_ARRAY_RANK (type
));
2579 gfc_add_modify (&cond_block
, size
, tem
);
2580 then_b
= gfc_finish_block (&cond_block
);
2581 gfc_init_block (&cond_block
);
2582 zero
= build_int_cst (gfc_array_index_type
, 0);
2583 gfc_add_modify (&cond_block
, size
, zero
);
2584 else_b
= gfc_finish_block (&cond_block
);
2585 tem
= gfc_conv_descriptor_data_get (decl
);
2586 tem
= fold_convert (pvoid_type_node
, tem
);
2587 cond
= fold_build2_loc (input_location
, NE_EXPR
,
2589 tem
, null_pointer_node
);
2592 tree tmp
= fold_build2_loc (input_location
,
2597 cond
= fold_build2_loc (input_location
,
2602 gfc_add_expr_to_block (block
,
2603 build3_loc (input_location
,
2608 OMP_CLAUSE_SIZE (node
) = size
;
2610 else if (n
->sym
->attr
.dimension
)
2612 stmtblock_t cond_block
;
2613 gfc_init_block (&cond_block
);
2614 tree size
= gfc_full_array_size (&cond_block
, decl
,
2615 GFC_TYPE_ARRAY_RANK (type
));
2618 tree var
= gfc_create_var (gfc_array_index_type
,
2620 tree cond
= fold_build2_loc (input_location
,
2625 gfc_add_modify (&cond_block
, var
, size
);
2626 cond
= build3_loc (input_location
, COND_EXPR
,
2627 void_type_node
, cond
,
2628 gfc_finish_block (&cond_block
),
2630 gfc_add_expr_to_block (block
, cond
);
2631 OMP_CLAUSE_SIZE (node
) = var
;
2635 gfc_add_block_to_block (block
, &cond_block
);
2636 OMP_CLAUSE_SIZE (node
) = size
;
2639 if (n
->sym
->attr
.dimension
)
2642 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2643 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2644 OMP_CLAUSE_SIZE (node
)
2645 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2646 OMP_CLAUSE_SIZE (node
), elemsz
);
2650 && TREE_CODE (decl
) == INDIRECT_REF
2651 && (TREE_CODE (TREE_OPERAND (decl
, 0))
2654 /* A single indirectref is handled by the middle end. */
2655 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
2656 decl
= TREE_OPERAND (decl
, 0);
2657 decl
= gfc_build_cond_assign_expr (block
, present
, decl
,
2659 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (decl
);
2662 OMP_CLAUSE_DECL (node
) = decl
;
2665 && n
->expr
->expr_type
== EXPR_VARIABLE
2666 && n
->expr
->ref
->type
== REF_COMPONENT
)
2670 for (gfc_ref
*ref
= n
->expr
->ref
; ref
; ref
= ref
->next
)
2671 if (ref
->type
== REF_COMPONENT
)
2674 symbol_attribute sym_attr
;
2676 if (lastcomp
->u
.c
.component
->ts
.type
== BT_CLASS
)
2677 sym_attr
= CLASS_DATA (lastcomp
->u
.c
.component
)->attr
;
2679 sym_attr
= lastcomp
->u
.c
.component
->attr
;
2681 gfc_init_se (&se
, NULL
);
2683 if (!sym_attr
.dimension
2684 && lastcomp
->u
.c
.component
->ts
.type
!= BT_CLASS
2685 && lastcomp
->u
.c
.component
->ts
.type
!= BT_DERIVED
)
2687 /* Last component is a scalar. */
2688 gfc_conv_expr (&se
, n
->expr
);
2689 gfc_add_block_to_block (block
, &se
.pre
);
2690 OMP_CLAUSE_DECL (node
) = se
.expr
;
2691 gfc_add_block_to_block (block
, &se
.post
);
2692 goto finalize_map_clause
;
2695 se
.expr
= gfc_maybe_dereference_var (n
->sym
, decl
);
2697 for (gfc_ref
*ref
= n
->expr
->ref
;
2698 ref
&& ref
!= lastcomp
->next
;
2701 if (ref
->type
== REF_COMPONENT
)
2703 if (ref
->u
.c
.sym
->attr
.extension
)
2704 conv_parent_component_references (&se
, ref
);
2706 gfc_conv_component_ref (&se
, ref
);
2709 sorry ("unhandled derived-type component");
2712 tree inner
= se
.expr
;
2714 /* Last component is a derived type or class pointer. */
2715 if (lastcomp
->u
.c
.component
->ts
.type
== BT_DERIVED
2716 || lastcomp
->u
.c
.component
->ts
.type
== BT_CLASS
)
2718 if (sym_attr
.allocatable
|| sym_attr
.pointer
)
2722 if (lastcomp
->u
.c
.component
->ts
.type
== BT_CLASS
)
2724 data
= gfc_class_data_get (inner
);
2725 size
= gfc_class_vtab_size_get (inner
);
2727 else /* BT_DERIVED. */
2730 size
= TYPE_SIZE_UNIT (TREE_TYPE (inner
));
2733 OMP_CLAUSE_DECL (node
)
2734 = build_fold_indirect_ref (data
);
2735 OMP_CLAUSE_SIZE (node
) = size
;
2736 node2
= build_omp_clause (input_location
,
2738 OMP_CLAUSE_SET_MAP_KIND (node2
,
2739 GOMP_MAP_ATTACH_DETACH
);
2740 OMP_CLAUSE_DECL (node2
) = data
;
2741 OMP_CLAUSE_SIZE (node2
) = size_int (0);
2745 OMP_CLAUSE_DECL (node
) = decl
;
2746 OMP_CLAUSE_SIZE (node
)
2747 = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
2750 else if (lastcomp
->next
2751 && lastcomp
->next
->type
== REF_ARRAY
2752 && lastcomp
->next
->u
.ar
.type
== AR_FULL
)
2754 /* Just pass the (auto-dereferenced) decl through for
2755 bare attach and detach clauses. */
2756 if (n
->u
.map_op
== OMP_MAP_ATTACH
2757 || n
->u
.map_op
== OMP_MAP_DETACH
)
2759 OMP_CLAUSE_DECL (node
) = inner
;
2760 OMP_CLAUSE_SIZE (node
) = size_zero_node
;
2761 goto finalize_map_clause
;
2764 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (inner
)))
2766 tree type
= TREE_TYPE (inner
);
2767 tree ptr
= gfc_conv_descriptor_data_get (inner
);
2768 ptr
= build_fold_indirect_ref (ptr
);
2769 OMP_CLAUSE_DECL (node
) = ptr
;
2770 node2
= build_omp_clause (input_location
,
2772 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2773 OMP_CLAUSE_DECL (node2
) = inner
;
2774 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2775 node3
= build_omp_clause (input_location
,
2777 OMP_CLAUSE_SET_MAP_KIND (node3
,
2778 GOMP_MAP_ATTACH_DETACH
);
2779 OMP_CLAUSE_DECL (node3
)
2780 = gfc_conv_descriptor_data_get (inner
);
2781 STRIP_NOPS (OMP_CLAUSE_DECL (node3
));
2782 OMP_CLAUSE_SIZE (node3
) = size_int (0);
2783 int rank
= GFC_TYPE_ARRAY_RANK (type
);
2784 OMP_CLAUSE_SIZE (node
)
2785 = gfc_full_array_size (block
, inner
, rank
);
2787 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2788 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2789 OMP_CLAUSE_SIZE (node
)
2790 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2791 OMP_CLAUSE_SIZE (node
), elemsz
);
2794 OMP_CLAUSE_DECL (node
) = inner
;
2796 else /* An array element or section. */
2800 && lastcomp
->next
->type
== REF_ARRAY
2801 && lastcomp
->next
->u
.ar
.type
== AR_ELEMENT
);
2803 gfc_trans_omp_array_section (block
, n
, inner
, element
,
2804 GOMP_MAP_ATTACH_DETACH
,
2805 node
, node2
, node3
, node4
);
2808 else /* An array element or array section. */
2810 bool element
= n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
;
2811 gfc_trans_omp_array_section (block
, n
, decl
, element
,
2812 GOMP_MAP_POINTER
, node
, node2
,
2816 finalize_map_clause
:
2817 switch (n
->u
.map_op
)
2820 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
2822 case OMP_MAP_IF_PRESENT
:
2823 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_IF_PRESENT
);
2825 case OMP_MAP_ATTACH
:
2826 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ATTACH
);
2829 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
2832 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
2834 case OMP_MAP_TOFROM
:
2835 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
2837 case OMP_MAP_ALWAYS_TO
:
2838 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TO
);
2840 case OMP_MAP_ALWAYS_FROM
:
2841 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_FROM
);
2843 case OMP_MAP_ALWAYS_TOFROM
:
2844 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALWAYS_TOFROM
);
2846 case OMP_MAP_RELEASE
:
2847 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_RELEASE
);
2849 case OMP_MAP_DELETE
:
2850 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DELETE
);
2852 case OMP_MAP_DETACH
:
2853 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_DETACH
);
2855 case OMP_MAP_FORCE_ALLOC
:
2856 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
2858 case OMP_MAP_FORCE_TO
:
2859 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
2861 case OMP_MAP_FORCE_FROM
:
2862 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
2864 case OMP_MAP_FORCE_TOFROM
:
2865 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
2867 case OMP_MAP_FORCE_PRESENT
:
2868 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
2870 case OMP_MAP_FORCE_DEVICEPTR
:
2871 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
2876 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2878 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
2880 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
2882 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
2887 case OMP_LIST_CACHE
:
2888 for (; n
!= NULL
; n
= n
->next
)
2890 if (!n
->sym
->attr
.referenced
)
2896 clause_code
= OMP_CLAUSE_TO
;
2899 clause_code
= OMP_CLAUSE_FROM
;
2901 case OMP_LIST_CACHE
:
2902 clause_code
= OMP_CLAUSE__CACHE_
;
2907 tree node
= build_omp_clause (input_location
, clause_code
);
2908 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2910 tree decl
= gfc_trans_omp_variable (n
->sym
, false);
2911 if (gfc_omp_privatize_by_reference (decl
))
2913 if (gfc_omp_is_allocatable_or_ptr (decl
))
2914 decl
= build_fold_indirect_ref (decl
);
2915 decl
= build_fold_indirect_ref (decl
);
2917 else if (DECL_P (decl
))
2918 TREE_ADDRESSABLE (decl
) = 1;
2919 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2921 tree type
= TREE_TYPE (decl
);
2922 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2923 ptr
= fold_convert (build_pointer_type (char_type_node
),
2925 ptr
= build_fold_indirect_ref (ptr
);
2926 OMP_CLAUSE_DECL (node
) = ptr
;
2927 OMP_CLAUSE_SIZE (node
)
2928 = gfc_full_array_size (block
, decl
,
2929 GFC_TYPE_ARRAY_RANK (type
));
2931 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2932 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2933 OMP_CLAUSE_SIZE (node
)
2934 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2935 OMP_CLAUSE_SIZE (node
), elemsz
);
2939 OMP_CLAUSE_DECL (node
) = decl
;
2940 if (gfc_omp_is_allocatable_or_ptr (decl
))
2941 OMP_CLAUSE_SIZE (node
)
2942 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
2948 gfc_init_se (&se
, NULL
);
2949 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2951 gfc_conv_expr_reference (&se
, n
->expr
);
2953 gfc_add_block_to_block (block
, &se
.pre
);
2954 OMP_CLAUSE_SIZE (node
)
2955 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2959 gfc_conv_expr_descriptor (&se
, n
->expr
);
2960 ptr
= gfc_conv_array_data (se
.expr
);
2961 tree type
= TREE_TYPE (se
.expr
);
2962 gfc_add_block_to_block (block
, &se
.pre
);
2963 OMP_CLAUSE_SIZE (node
)
2964 = gfc_full_array_size (block
, se
.expr
,
2965 GFC_TYPE_ARRAY_RANK (type
));
2967 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2968 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2969 OMP_CLAUSE_SIZE (node
)
2970 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2971 OMP_CLAUSE_SIZE (node
), elemsz
);
2973 gfc_add_block_to_block (block
, &se
.post
);
2974 ptr
= fold_convert (build_pointer_type (char_type_node
),
2976 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2978 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2986 if (clauses
->if_expr
)
2990 gfc_init_se (&se
, NULL
);
2991 gfc_conv_expr (&se
, clauses
->if_expr
);
2992 gfc_add_block_to_block (block
, &se
.pre
);
2993 if_var
= gfc_evaluate_now (se
.expr
, block
);
2994 gfc_add_block_to_block (block
, &se
.post
);
2996 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF
);
2997 OMP_CLAUSE_IF_MODIFIER (c
) = ERROR_MARK
;
2998 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2999 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3001 for (ifc
= 0; ifc
< OMP_IF_LAST
; ifc
++)
3002 if (clauses
->if_exprs
[ifc
])
3006 gfc_init_se (&se
, NULL
);
3007 gfc_conv_expr (&se
, clauses
->if_exprs
[ifc
]);
3008 gfc_add_block_to_block (block
, &se
.pre
);
3009 if_var
= gfc_evaluate_now (se
.expr
, block
);
3010 gfc_add_block_to_block (block
, &se
.post
);
3012 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF
);
3015 case OMP_IF_PARALLEL
:
3016 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_PARALLEL
;
3019 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASK
;
3021 case OMP_IF_TASKLOOP
:
3022 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TASKLOOP
;
3025 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET
;
3027 case OMP_IF_TARGET_DATA
:
3028 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_DATA
;
3030 case OMP_IF_TARGET_UPDATE
:
3031 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_UPDATE
;
3033 case OMP_IF_TARGET_ENTER_DATA
:
3034 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_ENTER_DATA
;
3036 case OMP_IF_TARGET_EXIT_DATA
:
3037 OMP_CLAUSE_IF_MODIFIER (c
) = OMP_TARGET_EXIT_DATA
;
3042 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
3043 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3046 if (clauses
->final_expr
)
3050 gfc_init_se (&se
, NULL
);
3051 gfc_conv_expr (&se
, clauses
->final_expr
);
3052 gfc_add_block_to_block (block
, &se
.pre
);
3053 final_var
= gfc_evaluate_now (se
.expr
, block
);
3054 gfc_add_block_to_block (block
, &se
.post
);
3056 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FINAL
);
3057 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
3058 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3061 if (clauses
->num_threads
)
3065 gfc_init_se (&se
, NULL
);
3066 gfc_conv_expr (&se
, clauses
->num_threads
);
3067 gfc_add_block_to_block (block
, &se
.pre
);
3068 num_threads
= gfc_evaluate_now (se
.expr
, block
);
3069 gfc_add_block_to_block (block
, &se
.post
);
3071 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_THREADS
);
3072 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
3073 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3076 chunk_size
= NULL_TREE
;
3077 if (clauses
->chunk_size
)
3079 gfc_init_se (&se
, NULL
);
3080 gfc_conv_expr (&se
, clauses
->chunk_size
);
3081 gfc_add_block_to_block (block
, &se
.pre
);
3082 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
3083 gfc_add_block_to_block (block
, &se
.post
);
3086 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
3088 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SCHEDULE
);
3089 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
3090 switch (clauses
->sched_kind
)
3092 case OMP_SCHED_STATIC
:
3093 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
3095 case OMP_SCHED_DYNAMIC
:
3096 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
3098 case OMP_SCHED_GUIDED
:
3099 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
3101 case OMP_SCHED_RUNTIME
:
3102 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
3104 case OMP_SCHED_AUTO
:
3105 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
3110 if (clauses
->sched_monotonic
)
3111 OMP_CLAUSE_SCHEDULE_KIND (c
)
3112 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
3113 | OMP_CLAUSE_SCHEDULE_MONOTONIC
);
3114 else if (clauses
->sched_nonmonotonic
)
3115 OMP_CLAUSE_SCHEDULE_KIND (c
)
3116 = (omp_clause_schedule_kind
) (OMP_CLAUSE_SCHEDULE_KIND (c
)
3117 | OMP_CLAUSE_SCHEDULE_NONMONOTONIC
);
3118 if (clauses
->sched_simd
)
3119 OMP_CLAUSE_SCHEDULE_SIMD (c
) = 1;
3120 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3123 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
3125 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEFAULT
);
3126 switch (clauses
->default_sharing
)
3128 case OMP_DEFAULT_NONE
:
3129 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
3131 case OMP_DEFAULT_SHARED
:
3132 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
3134 case OMP_DEFAULT_PRIVATE
:
3135 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
3137 case OMP_DEFAULT_FIRSTPRIVATE
:
3138 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
3140 case OMP_DEFAULT_PRESENT
:
3141 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRESENT
;
3146 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3149 if (clauses
->nowait
)
3151 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOWAIT
);
3152 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3155 if (clauses
->ordered
)
3157 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ORDERED
);
3158 OMP_CLAUSE_ORDERED_EXPR (c
)
3159 = clauses
->orderedc
? build_int_cst (integer_type_node
,
3160 clauses
->orderedc
) : NULL_TREE
;
3161 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3164 if (clauses
->untied
)
3166 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_UNTIED
);
3167 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3170 if (clauses
->mergeable
)
3172 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_MERGEABLE
);
3173 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3176 if (clauses
->collapse
)
3178 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_COLLAPSE
);
3179 OMP_CLAUSE_COLLAPSE_EXPR (c
)
3180 = build_int_cst (integer_type_node
, clauses
->collapse
);
3181 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3184 if (clauses
->inbranch
)
3186 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_INBRANCH
);
3187 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3190 if (clauses
->notinbranch
)
3192 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOTINBRANCH
);
3193 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3196 switch (clauses
->cancel
)
3198 case OMP_CANCEL_UNKNOWN
:
3200 case OMP_CANCEL_PARALLEL
:
3201 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PARALLEL
);
3202 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3204 case OMP_CANCEL_SECTIONS
:
3205 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SECTIONS
);
3206 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3209 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FOR
);
3210 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3212 case OMP_CANCEL_TASKGROUP
:
3213 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_TASKGROUP
);
3214 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3218 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
3220 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PROC_BIND
);
3221 switch (clauses
->proc_bind
)
3223 case OMP_PROC_BIND_MASTER
:
3224 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
3226 case OMP_PROC_BIND_SPREAD
:
3227 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
3229 case OMP_PROC_BIND_CLOSE
:
3230 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
3235 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3238 if (clauses
->safelen_expr
)
3242 gfc_init_se (&se
, NULL
);
3243 gfc_conv_expr (&se
, clauses
->safelen_expr
);
3244 gfc_add_block_to_block (block
, &se
.pre
);
3245 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
3246 gfc_add_block_to_block (block
, &se
.post
);
3248 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SAFELEN
);
3249 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
3250 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3253 if (clauses
->simdlen_expr
)
3257 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMDLEN
);
3258 OMP_CLAUSE_SIMDLEN_EXPR (c
)
3259 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
3260 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3266 gfc_init_se (&se
, NULL
);
3267 gfc_conv_expr (&se
, clauses
->simdlen_expr
);
3268 gfc_add_block_to_block (block
, &se
.pre
);
3269 simdlen_var
= gfc_evaluate_now (se
.expr
, block
);
3270 gfc_add_block_to_block (block
, &se
.post
);
3272 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMDLEN
);
3273 OMP_CLAUSE_SIMDLEN_EXPR (c
) = simdlen_var
;
3274 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3278 if (clauses
->num_teams
)
3282 gfc_init_se (&se
, NULL
);
3283 gfc_conv_expr (&se
, clauses
->num_teams
);
3284 gfc_add_block_to_block (block
, &se
.pre
);
3285 num_teams
= gfc_evaluate_now (se
.expr
, block
);
3286 gfc_add_block_to_block (block
, &se
.post
);
3288 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_TEAMS
);
3289 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
3290 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3293 if (clauses
->device
)
3297 gfc_init_se (&se
, NULL
);
3298 gfc_conv_expr (&se
, clauses
->device
);
3299 gfc_add_block_to_block (block
, &se
.pre
);
3300 device
= gfc_evaluate_now (se
.expr
, block
);
3301 gfc_add_block_to_block (block
, &se
.post
);
3303 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEVICE
);
3304 OMP_CLAUSE_DEVICE_ID (c
) = device
;
3305 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3308 if (clauses
->thread_limit
)
3312 gfc_init_se (&se
, NULL
);
3313 gfc_conv_expr (&se
, clauses
->thread_limit
);
3314 gfc_add_block_to_block (block
, &se
.pre
);
3315 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
3316 gfc_add_block_to_block (block
, &se
.post
);
3318 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_THREAD_LIMIT
);
3319 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
3320 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3323 chunk_size
= NULL_TREE
;
3324 if (clauses
->dist_chunk_size
)
3326 gfc_init_se (&se
, NULL
);
3327 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
3328 gfc_add_block_to_block (block
, &se
.pre
);
3329 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
3330 gfc_add_block_to_block (block
, &se
.post
);
3333 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
3335 c
= build_omp_clause (gfc_get_location (&where
),
3336 OMP_CLAUSE_DIST_SCHEDULE
);
3337 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
3338 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3341 if (clauses
->grainsize
)
3345 gfc_init_se (&se
, NULL
);
3346 gfc_conv_expr (&se
, clauses
->grainsize
);
3347 gfc_add_block_to_block (block
, &se
.pre
);
3348 grainsize
= gfc_evaluate_now (se
.expr
, block
);
3349 gfc_add_block_to_block (block
, &se
.post
);
3351 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_GRAINSIZE
);
3352 OMP_CLAUSE_GRAINSIZE_EXPR (c
) = grainsize
;
3353 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3356 if (clauses
->num_tasks
)
3360 gfc_init_se (&se
, NULL
);
3361 gfc_conv_expr (&se
, clauses
->num_tasks
);
3362 gfc_add_block_to_block (block
, &se
.pre
);
3363 num_tasks
= gfc_evaluate_now (se
.expr
, block
);
3364 gfc_add_block_to_block (block
, &se
.post
);
3366 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_TASKS
);
3367 OMP_CLAUSE_NUM_TASKS_EXPR (c
) = num_tasks
;
3368 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3371 if (clauses
->priority
)
3375 gfc_init_se (&se
, NULL
);
3376 gfc_conv_expr (&se
, clauses
->priority
);
3377 gfc_add_block_to_block (block
, &se
.pre
);
3378 priority
= gfc_evaluate_now (se
.expr
, block
);
3379 gfc_add_block_to_block (block
, &se
.post
);
3381 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_PRIORITY
);
3382 OMP_CLAUSE_PRIORITY_EXPR (c
) = priority
;
3383 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3390 gfc_init_se (&se
, NULL
);
3391 gfc_conv_expr (&se
, clauses
->hint
);
3392 gfc_add_block_to_block (block
, &se
.pre
);
3393 hint
= gfc_evaluate_now (se
.expr
, block
);
3394 gfc_add_block_to_block (block
, &se
.post
);
3396 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_HINT
);
3397 OMP_CLAUSE_HINT_EXPR (c
) = hint
;
3398 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3403 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SIMD
);
3404 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3406 if (clauses
->threads
)
3408 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_THREADS
);
3409 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3411 if (clauses
->nogroup
)
3413 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NOGROUP
);
3414 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3416 if (clauses
->defaultmap
)
3418 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEFAULTMAP
);
3419 OMP_CLAUSE_DEFAULTMAP_SET_KIND (c
, OMP_CLAUSE_DEFAULTMAP_TOFROM
,
3420 OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
);
3421 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3423 if (clauses
->depend_source
)
3425 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_DEPEND
);
3426 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_SOURCE
;
3427 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3432 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_ASYNC
);
3433 if (clauses
->async_expr
)
3434 OMP_CLAUSE_ASYNC_EXPR (c
)
3435 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
3437 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
3438 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3442 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_SEQ
);
3443 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3445 if (clauses
->par_auto
)
3447 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_AUTO
);
3448 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3450 if (clauses
->if_present
)
3452 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_IF_PRESENT
);
3453 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3455 if (clauses
->finalize
)
3457 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_FINALIZE
);
3458 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3460 if (clauses
->independent
)
3462 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_INDEPENDENT
);
3463 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3465 if (clauses
->wait_list
)
3469 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
3471 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_WAIT
);
3472 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
3473 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
3477 if (clauses
->num_gangs_expr
)
3480 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
3481 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_GANGS
);
3482 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
3483 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3485 if (clauses
->num_workers_expr
)
3487 tree num_workers_var
3488 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
3489 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_NUM_WORKERS
);
3490 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
3491 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3493 if (clauses
->vector_length_expr
)
3495 tree vector_length_var
3496 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
3497 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_VECTOR_LENGTH
);
3498 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
3499 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3501 if (clauses
->tile_list
)
3503 vec
<tree
, va_gc
> *tvec
;
3506 vec_alloc (tvec
, 4);
3508 for (el
= clauses
->tile_list
; el
; el
= el
->next
)
3509 vec_safe_push (tvec
, gfc_convert_expr_to_tree (block
, el
->expr
));
3511 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_TILE
);
3512 OMP_CLAUSE_TILE_LIST (c
) = build_tree_list_vec (tvec
);
3513 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3516 if (clauses
->vector
)
3518 if (clauses
->vector_expr
)
3521 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
3522 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_VECTOR
);
3523 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
3524 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3528 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_VECTOR
);
3529 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3532 if (clauses
->worker
)
3534 if (clauses
->worker_expr
)
3537 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
3538 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_WORKER
);
3539 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
3540 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3544 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_WORKER
);
3545 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3551 c
= build_omp_clause (gfc_get_location (&where
), OMP_CLAUSE_GANG
);
3552 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
3553 if (clauses
->gang_num_expr
)
3555 arg
= gfc_convert_expr_to_tree (block
, clauses
->gang_num_expr
);
3556 OMP_CLAUSE_GANG_EXPR (c
) = arg
;
3558 if (clauses
->gang_static
)
3560 arg
= clauses
->gang_static_expr
3561 ? gfc_convert_expr_to_tree (block
, clauses
->gang_static_expr
)
3562 : integer_minus_one_node
;
3563 OMP_CLAUSE_GANG_STATIC_EXPR (c
) = arg
;
3567 return nreverse (omp_clauses
);
3570 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
3573 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
3578 stmt
= gfc_trans_code (code
);
3579 if (TREE_CODE (stmt
) != BIND_EXPR
)
3581 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
3583 tree block
= poplevel (1, 0);
3584 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
3594 /* Translate OpenACC 'parallel', 'kernels', 'serial', 'data', 'host_data'
3598 gfc_trans_oacc_construct (gfc_code
*code
)
3601 tree stmt
, oacc_clauses
;
3602 enum tree_code construct_code
;
3606 case EXEC_OACC_PARALLEL
:
3607 construct_code
= OACC_PARALLEL
;
3609 case EXEC_OACC_KERNELS
:
3610 construct_code
= OACC_KERNELS
;
3612 case EXEC_OACC_SERIAL
:
3613 construct_code
= OACC_SERIAL
;
3615 case EXEC_OACC_DATA
:
3616 construct_code
= OACC_DATA
;
3618 case EXEC_OACC_HOST_DATA
:
3619 construct_code
= OACC_HOST_DATA
;
3625 gfc_start_block (&block
);
3626 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3628 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3629 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3631 gfc_add_expr_to_block (&block
, stmt
);
3632 return gfc_finish_block (&block
);
3635 /* update, enter_data, exit_data, cache. */
3637 gfc_trans_oacc_executable_directive (gfc_code
*code
)
3640 tree stmt
, oacc_clauses
;
3641 enum tree_code construct_code
;
3645 case EXEC_OACC_UPDATE
:
3646 construct_code
= OACC_UPDATE
;
3648 case EXEC_OACC_ENTER_DATA
:
3649 construct_code
= OACC_ENTER_DATA
;
3651 case EXEC_OACC_EXIT_DATA
:
3652 construct_code
= OACC_EXIT_DATA
;
3654 case EXEC_OACC_CACHE
:
3655 construct_code
= OACC_CACHE
;
3661 gfc_start_block (&block
);
3662 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3664 stmt
= build1_loc (input_location
, construct_code
, void_type_node
,
3666 gfc_add_expr_to_block (&block
, stmt
);
3667 return gfc_finish_block (&block
);
3671 gfc_trans_oacc_wait_directive (gfc_code
*code
)
3675 vec
<tree
, va_gc
> *args
;
3678 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3679 location_t loc
= input_location
;
3681 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
3684 vec_alloc (args
, nparms
+ 2);
3685 stmt
= builtin_decl_explicit (BUILT_IN_GOACC_WAIT
);
3687 gfc_start_block (&block
);
3689 if (clauses
->async_expr
)
3690 t
= gfc_convert_expr_to_tree (&block
, clauses
->async_expr
);
3692 t
= build_int_cst (integer_type_node
, -2);
3694 args
->quick_push (t
);
3695 args
->quick_push (build_int_cst (integer_type_node
, nparms
));
3697 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
3698 args
->quick_push (gfc_convert_expr_to_tree (&block
, el
->expr
));
3700 stmt
= build_call_expr_loc_vec (loc
, stmt
, args
);
3701 gfc_add_expr_to_block (&block
, stmt
);
3705 return gfc_finish_block (&block
);
3708 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
3709 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
3712 gfc_trans_omp_atomic (gfc_code
*code
)
3714 gfc_code
*atomic_code
= code
;
3718 gfc_expr
*expr2
, *e
;
3721 tree lhsaddr
, type
, rhs
, x
;
3722 enum tree_code op
= ERROR_MARK
;
3723 enum tree_code aop
= OMP_ATOMIC
;
3724 bool var_on_left
= false;
3725 enum omp_memory_order mo
3726 = ((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SEQ_CST
)
3727 ? OMP_MEMORY_ORDER_SEQ_CST
: OMP_MEMORY_ORDER_RELAXED
);
3729 code
= code
->block
->next
;
3730 gcc_assert (code
->op
== EXEC_ASSIGN
);
3731 var
= code
->expr1
->symtree
->n
.sym
;
3733 gfc_init_se (&lse
, NULL
);
3734 gfc_init_se (&rse
, NULL
);
3735 gfc_init_se (&vse
, NULL
);
3736 gfc_start_block (&block
);
3738 expr2
= code
->expr2
;
3739 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3740 != GFC_OMP_ATOMIC_WRITE
)
3741 && expr2
->expr_type
== EXPR_FUNCTION
3742 && expr2
->value
.function
.isym
3743 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3744 expr2
= expr2
->value
.function
.actual
->expr
;
3746 switch (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3748 case GFC_OMP_ATOMIC_READ
:
3749 gfc_conv_expr (&vse
, code
->expr1
);
3750 gfc_add_block_to_block (&block
, &vse
.pre
);
3752 gfc_conv_expr (&lse
, expr2
);
3753 gfc_add_block_to_block (&block
, &lse
.pre
);
3754 type
= TREE_TYPE (lse
.expr
);
3755 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
3757 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
3758 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
3759 x
= convert (TREE_TYPE (vse
.expr
), x
);
3760 gfc_add_modify (&block
, vse
.expr
, x
);
3762 gfc_add_block_to_block (&block
, &lse
.pre
);
3763 gfc_add_block_to_block (&block
, &rse
.pre
);
3765 return gfc_finish_block (&block
);
3766 case GFC_OMP_ATOMIC_CAPTURE
:
3767 aop
= OMP_ATOMIC_CAPTURE_NEW
;
3768 if (expr2
->expr_type
== EXPR_VARIABLE
)
3770 aop
= OMP_ATOMIC_CAPTURE_OLD
;
3771 gfc_conv_expr (&vse
, code
->expr1
);
3772 gfc_add_block_to_block (&block
, &vse
.pre
);
3774 gfc_conv_expr (&lse
, expr2
);
3775 gfc_add_block_to_block (&block
, &lse
.pre
);
3776 gfc_init_se (&lse
, NULL
);
3778 var
= code
->expr1
->symtree
->n
.sym
;
3779 expr2
= code
->expr2
;
3780 if (expr2
->expr_type
== EXPR_FUNCTION
3781 && expr2
->value
.function
.isym
3782 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3783 expr2
= expr2
->value
.function
.actual
->expr
;
3790 gfc_conv_expr (&lse
, code
->expr1
);
3791 gfc_add_block_to_block (&block
, &lse
.pre
);
3792 type
= TREE_TYPE (lse
.expr
);
3793 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
3795 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3796 == GFC_OMP_ATOMIC_WRITE
)
3797 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
3799 gfc_conv_expr (&rse
, expr2
);
3800 gfc_add_block_to_block (&block
, &rse
.pre
);
3802 else if (expr2
->expr_type
== EXPR_OP
)
3805 switch (expr2
->value
.op
.op
)
3807 case INTRINSIC_PLUS
:
3810 case INTRINSIC_TIMES
:
3813 case INTRINSIC_MINUS
:
3816 case INTRINSIC_DIVIDE
:
3817 if (expr2
->ts
.type
== BT_INTEGER
)
3818 op
= TRUNC_DIV_EXPR
;
3823 op
= TRUTH_ANDIF_EXPR
;
3826 op
= TRUTH_ORIF_EXPR
;
3831 case INTRINSIC_NEQV
:
3837 e
= expr2
->value
.op
.op1
;
3838 if (e
->expr_type
== EXPR_FUNCTION
3839 && e
->value
.function
.isym
3840 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3841 e
= e
->value
.function
.actual
->expr
;
3842 if (e
->expr_type
== EXPR_VARIABLE
3843 && e
->symtree
!= NULL
3844 && e
->symtree
->n
.sym
== var
)
3846 expr2
= expr2
->value
.op
.op2
;
3851 e
= expr2
->value
.op
.op2
;
3852 if (e
->expr_type
== EXPR_FUNCTION
3853 && e
->value
.function
.isym
3854 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3855 e
= e
->value
.function
.actual
->expr
;
3856 gcc_assert (e
->expr_type
== EXPR_VARIABLE
3857 && e
->symtree
!= NULL
3858 && e
->symtree
->n
.sym
== var
);
3859 expr2
= expr2
->value
.op
.op1
;
3860 var_on_left
= false;
3862 gfc_conv_expr (&rse
, expr2
);
3863 gfc_add_block_to_block (&block
, &rse
.pre
);
3867 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
3868 switch (expr2
->value
.function
.isym
->id
)
3888 e
= expr2
->value
.function
.actual
->expr
;
3889 gcc_assert (e
->expr_type
== EXPR_VARIABLE
3890 && e
->symtree
!= NULL
3891 && e
->symtree
->n
.sym
== var
);
3893 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
3894 gfc_add_block_to_block (&block
, &rse
.pre
);
3895 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
3897 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
3898 gfc_actual_arglist
*arg
;
3900 gfc_add_modify (&block
, accum
, rse
.expr
);
3901 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
3904 gfc_init_block (&rse
.pre
);
3905 gfc_conv_expr (&rse
, arg
->expr
);
3906 gfc_add_block_to_block (&block
, &rse
.pre
);
3907 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
3909 gfc_add_modify (&block
, accum
, x
);
3915 expr2
= expr2
->value
.function
.actual
->next
->expr
;
3918 lhsaddr
= save_expr (lhsaddr
);
3919 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
3920 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
3921 || !VAR_P (TREE_OPERAND (lhsaddr
, 0))))
3923 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
3924 it even after unsharing function body. */
3925 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
3926 DECL_CONTEXT (var
) = current_function_decl
;
3927 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
3928 NULL_TREE
, NULL_TREE
);
3931 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
3933 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
3934 == GFC_OMP_ATOMIC_WRITE
)
3935 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
3939 x
= convert (TREE_TYPE (rhs
),
3940 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
3942 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
3944 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
3947 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
3948 && TREE_CODE (type
) != COMPLEX_TYPE
)
3949 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
3950 TREE_TYPE (TREE_TYPE (rhs
)), x
);
3952 gfc_add_block_to_block (&block
, &lse
.pre
);
3953 gfc_add_block_to_block (&block
, &rse
.pre
);
3955 if (aop
== OMP_ATOMIC
)
3957 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
3958 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
3959 gfc_add_expr_to_block (&block
, x
);
3963 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
3966 expr2
= code
->expr2
;
3967 if (expr2
->expr_type
== EXPR_FUNCTION
3968 && expr2
->value
.function
.isym
3969 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3970 expr2
= expr2
->value
.function
.actual
->expr
;
3972 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
3973 gfc_conv_expr (&vse
, code
->expr1
);
3974 gfc_add_block_to_block (&block
, &vse
.pre
);
3976 gfc_init_se (&lse
, NULL
);
3977 gfc_conv_expr (&lse
, expr2
);
3978 gfc_add_block_to_block (&block
, &lse
.pre
);
3980 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
3981 OMP_ATOMIC_MEMORY_ORDER (x
) = mo
;
3982 x
= convert (TREE_TYPE (vse
.expr
), x
);
3983 gfc_add_modify (&block
, vse
.expr
, x
);
3986 return gfc_finish_block (&block
);
3990 gfc_trans_omp_barrier (void)
3992 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
3993 return build_call_expr_loc (input_location
, decl
, 0);
3997 gfc_trans_omp_cancel (gfc_code
*code
)
4000 tree ifc
= boolean_true_node
;
4002 switch (code
->ext
.omp_clauses
->cancel
)
4004 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
4005 case OMP_CANCEL_DO
: mask
= 2; break;
4006 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
4007 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
4008 default: gcc_unreachable ();
4010 gfc_start_block (&block
);
4011 if (code
->ext
.omp_clauses
->if_expr
)
4016 gfc_init_se (&se
, NULL
);
4017 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
);
4018 gfc_add_block_to_block (&block
, &se
.pre
);
4019 if_var
= gfc_evaluate_now (se
.expr
, &block
);
4020 gfc_add_block_to_block (&block
, &se
.post
);
4021 tree type
= TREE_TYPE (if_var
);
4022 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
4023 boolean_type_node
, if_var
,
4024 build_zero_cst (type
));
4026 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
4027 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
4028 ifc
= fold_convert (c_bool_type
, ifc
);
4029 gfc_add_expr_to_block (&block
,
4030 build_call_expr_loc (input_location
, decl
, 2,
4031 build_int_cst (integer_type_node
,
4033 return gfc_finish_block (&block
);
4037 gfc_trans_omp_cancellation_point (gfc_code
*code
)
4040 switch (code
->ext
.omp_clauses
->cancel
)
4042 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
4043 case OMP_CANCEL_DO
: mask
= 2; break;
4044 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
4045 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
4046 default: gcc_unreachable ();
4048 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
4049 return build_call_expr_loc (input_location
, decl
, 1,
4050 build_int_cst (integer_type_node
, mask
));
4054 gfc_trans_omp_critical (gfc_code
*code
)
4056 tree name
= NULL_TREE
, stmt
;
4057 if (code
->ext
.omp_clauses
!= NULL
)
4058 name
= get_identifier (code
->ext
.omp_clauses
->critical_name
);
4059 stmt
= gfc_trans_code (code
->block
->next
);
4060 return build3_loc (input_location
, OMP_CRITICAL
, void_type_node
, stmt
,
4064 typedef struct dovar_init_d
{
4071 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
4072 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
4075 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
, orig_decls
;
4076 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
4079 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
4080 int i
, collapse
= clauses
->collapse
;
4081 vec
<dovar_init
> inits
= vNULL
;
4084 vec
<tree
, va_heap
, vl_embed
> *saved_doacross_steps
= doacross_steps
;
4085 gfc_expr_list
*tile
= do_clauses
? do_clauses
->tile_list
: clauses
->tile_list
;
4087 /* Both collapsed and tiled loops are lowered the same way. In
4088 OpenACC, those clauses are not compatible, so prioritize the tile
4089 clause, if present. */
4093 for (gfc_expr_list
*el
= tile
; el
; el
= el
->next
)
4097 doacross_steps
= NULL
;
4098 if (clauses
->orderedc
)
4099 collapse
= clauses
->orderedc
;
4103 code
= code
->block
->next
;
4104 gcc_assert (code
->op
== EXEC_DO
);
4106 init
= make_tree_vec (collapse
);
4107 cond
= make_tree_vec (collapse
);
4108 incr
= make_tree_vec (collapse
);
4109 orig_decls
= clauses
->orderedc
? make_tree_vec (collapse
) : NULL_TREE
;
4113 gfc_start_block (&block
);
4117 /* simd schedule modifier is only useful for composite do simd and other
4118 constructs including that, where gfc_trans_omp_do is only called
4119 on the simd construct and DO's clauses are translated elsewhere. */
4120 do_clauses
->sched_simd
= false;
4122 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
4124 for (i
= 0; i
< collapse
; i
++)
4127 int dovar_found
= 0;
4132 gfc_omp_namelist
*n
= NULL
;
4133 if (op
!= EXEC_OMP_DISTRIBUTE
)
4134 for (n
= clauses
->lists
[(op
== EXEC_OMP_SIMD
&& collapse
== 1)
4135 ? OMP_LIST_LINEAR
: OMP_LIST_LASTPRIVATE
];
4136 n
!= NULL
; n
= n
->next
)
4137 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
4141 else if (n
== NULL
&& op
!= EXEC_OMP_SIMD
)
4142 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
4143 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
4149 /* Evaluate all the expressions in the iterator. */
4150 gfc_init_se (&se
, NULL
);
4151 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
4152 gfc_add_block_to_block (pblock
, &se
.pre
);
4154 type
= TREE_TYPE (dovar
);
4155 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
4157 gfc_init_se (&se
, NULL
);
4158 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
4159 gfc_add_block_to_block (pblock
, &se
.pre
);
4160 from
= gfc_evaluate_now (se
.expr
, pblock
);
4162 gfc_init_se (&se
, NULL
);
4163 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
4164 gfc_add_block_to_block (pblock
, &se
.pre
);
4165 to
= gfc_evaluate_now (se
.expr
, pblock
);
4167 gfc_init_se (&se
, NULL
);
4168 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
4169 gfc_add_block_to_block (pblock
, &se
.pre
);
4170 step
= gfc_evaluate_now (se
.expr
, pblock
);
4173 /* Special case simple loops. */
4176 if (integer_onep (step
))
4178 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
4183 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
4189 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
4190 /* The condition should not be folded. */
4191 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
4192 ? LE_EXPR
: GE_EXPR
,
4193 logical_type_node
, dovar
, to
);
4194 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
4196 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
4199 TREE_VEC_ELT (incr
, i
));
4203 /* STEP is not 1 or -1. Use:
4204 for (count = 0; count < (to + step - from) / step; count++)
4206 dovar = from + count * step;
4210 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
4211 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
4212 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
4214 tmp
= gfc_evaluate_now (tmp
, pblock
);
4215 count
= gfc_create_var (type
, "count");
4216 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
4217 build_int_cst (type
, 0));
4218 /* The condition should not be folded. */
4219 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
4222 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
4224 build_int_cst (type
, 1));
4225 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
4226 MODIFY_EXPR
, type
, count
,
4227 TREE_VEC_ELT (incr
, i
));
4229 /* Initialize DOVAR. */
4230 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
4231 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
4232 dovar_init e
= {dovar
, tmp
};
4233 inits
.safe_push (e
);
4234 if (clauses
->orderedc
)
4236 if (doacross_steps
== NULL
)
4237 vec_safe_grow_cleared (doacross_steps
, clauses
->orderedc
);
4238 (*doacross_steps
)[i
] = step
;
4242 TREE_VEC_ELT (orig_decls
, i
) = dovar_decl
;
4244 if (dovar_found
== 2
4245 && op
== EXEC_OMP_SIMD
4249 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
4250 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
4251 && OMP_CLAUSE_DECL (tmp
) == dovar
)
4253 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
4259 if (op
== EXEC_OMP_SIMD
)
4263 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
4264 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
4265 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
4268 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
4273 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
4274 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
4275 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
4277 if (dovar_found
== 2)
4284 /* If dovar is lastprivate, but different counter is used,
4285 dovar += step needs to be added to
4286 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
4287 will have the value on entry of the last loop, rather
4288 than value after iterator increment. */
4289 if (clauses
->orderedc
)
4291 if (clauses
->collapse
<= 1 || i
>= clauses
->collapse
)
4294 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4295 type
, count
, build_one_cst (type
));
4296 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
,
4298 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
4303 tmp
= gfc_evaluate_now (step
, pblock
);
4304 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
,
4307 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
4309 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4310 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4311 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
4313 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
4316 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
4317 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
4319 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
4323 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
4325 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4326 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4327 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
4329 tree l
= build_omp_clause (input_location
,
4330 OMP_CLAUSE_LASTPRIVATE
);
4331 OMP_CLAUSE_DECL (l
) = dovar_decl
;
4332 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
4333 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
4335 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
4339 gcc_assert (simple
|| c
!= NULL
);
4343 if (op
!= EXEC_OMP_SIMD
)
4344 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
4345 else if (collapse
== 1)
4347 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
4348 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
4349 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
4350 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
4353 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
4354 OMP_CLAUSE_DECL (tmp
) = count
;
4355 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
4358 if (i
+ 1 < collapse
)
4359 code
= code
->block
->next
;
4362 if (pblock
!= &block
)
4365 gfc_start_block (&block
);
4368 gfc_start_block (&body
);
4370 FOR_EACH_VEC_ELT (inits
, ix
, di
)
4371 gfc_add_modify (&body
, di
->var
, di
->init
);
4374 /* Cycle statement is implemented with a goto. Exit statement must not be
4375 present for this loop. */
4376 cycle_label
= gfc_build_label_decl (NULL_TREE
);
4378 /* Put these labels where they can be found later. */
4380 code
->cycle_label
= cycle_label
;
4381 code
->exit_label
= NULL_TREE
;
4383 /* Main loop body. */
4384 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
4385 gfc_add_expr_to_block (&body
, tmp
);
4387 /* Label for cycle statements (if needed). */
4388 if (TREE_USED (cycle_label
))
4390 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
4391 gfc_add_expr_to_block (&body
, tmp
);
4394 /* End of loop body. */
4397 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
4398 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
4399 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
4400 case EXEC_OMP_TASKLOOP
: stmt
= make_node (OMP_TASKLOOP
); break;
4401 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
4402 default: gcc_unreachable ();
4405 TREE_TYPE (stmt
) = void_type_node
;
4406 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
4407 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
4408 OMP_FOR_INIT (stmt
) = init
;
4409 OMP_FOR_COND (stmt
) = cond
;
4410 OMP_FOR_INCR (stmt
) = incr
;
4412 OMP_FOR_ORIG_DECLS (stmt
) = orig_decls
;
4413 gfc_add_expr_to_block (&block
, stmt
);
4415 vec_free (doacross_steps
);
4416 doacross_steps
= saved_doacross_steps
;
4418 return gfc_finish_block (&block
);
4421 /* Translate combined OpenACC 'parallel loop', 'kernels loop', 'serial loop'
4425 gfc_trans_oacc_combined_directive (gfc_code
*code
)
4427 stmtblock_t block
, *pblock
= NULL
;
4428 gfc_omp_clauses construct_clauses
, loop_clauses
;
4429 tree stmt
, oacc_clauses
= NULL_TREE
;
4430 enum tree_code construct_code
;
4431 location_t loc
= input_location
;
4435 case EXEC_OACC_PARALLEL_LOOP
:
4436 construct_code
= OACC_PARALLEL
;
4438 case EXEC_OACC_KERNELS_LOOP
:
4439 construct_code
= OACC_KERNELS
;
4441 case EXEC_OACC_SERIAL_LOOP
:
4442 construct_code
= OACC_SERIAL
;
4448 gfc_start_block (&block
);
4450 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
4451 if (code
->ext
.omp_clauses
!= NULL
)
4453 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
4454 sizeof (construct_clauses
));
4455 loop_clauses
.collapse
= construct_clauses
.collapse
;
4456 loop_clauses
.gang
= construct_clauses
.gang
;
4457 loop_clauses
.gang_static
= construct_clauses
.gang_static
;
4458 loop_clauses
.gang_num_expr
= construct_clauses
.gang_num_expr
;
4459 loop_clauses
.gang_static_expr
= construct_clauses
.gang_static_expr
;
4460 loop_clauses
.vector
= construct_clauses
.vector
;
4461 loop_clauses
.vector_expr
= construct_clauses
.vector_expr
;
4462 loop_clauses
.worker
= construct_clauses
.worker
;
4463 loop_clauses
.worker_expr
= construct_clauses
.worker_expr
;
4464 loop_clauses
.seq
= construct_clauses
.seq
;
4465 loop_clauses
.par_auto
= construct_clauses
.par_auto
;
4466 loop_clauses
.independent
= construct_clauses
.independent
;
4467 loop_clauses
.tile_list
= construct_clauses
.tile_list
;
4468 loop_clauses
.lists
[OMP_LIST_PRIVATE
]
4469 = construct_clauses
.lists
[OMP_LIST_PRIVATE
];
4470 loop_clauses
.lists
[OMP_LIST_REDUCTION
]
4471 = construct_clauses
.lists
[OMP_LIST_REDUCTION
];
4472 construct_clauses
.gang
= false;
4473 construct_clauses
.gang_static
= false;
4474 construct_clauses
.gang_num_expr
= NULL
;
4475 construct_clauses
.gang_static_expr
= NULL
;
4476 construct_clauses
.vector
= false;
4477 construct_clauses
.vector_expr
= NULL
;
4478 construct_clauses
.worker
= false;
4479 construct_clauses
.worker_expr
= NULL
;
4480 construct_clauses
.seq
= false;
4481 construct_clauses
.par_auto
= false;
4482 construct_clauses
.independent
= false;
4483 construct_clauses
.independent
= false;
4484 construct_clauses
.tile_list
= NULL
;
4485 construct_clauses
.lists
[OMP_LIST_PRIVATE
] = NULL
;
4486 if (construct_code
== OACC_KERNELS
)
4487 construct_clauses
.lists
[OMP_LIST_REDUCTION
] = NULL
;
4488 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
4491 if (!loop_clauses
.seq
)
4495 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
4496 protected_set_expr_location (stmt
, loc
);
4497 if (TREE_CODE (stmt
) != BIND_EXPR
)
4498 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4501 stmt
= build2_loc (loc
, construct_code
, void_type_node
, stmt
, oacc_clauses
);
4502 gfc_add_expr_to_block (&block
, stmt
);
4503 return gfc_finish_block (&block
);
4507 gfc_trans_omp_flush (void)
4509 tree decl
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
4510 return build_call_expr_loc (input_location
, decl
, 0);
4514 gfc_trans_omp_master (gfc_code
*code
)
4516 tree stmt
= gfc_trans_code (code
->block
->next
);
4517 if (IS_EMPTY_STMT (stmt
))
4519 return build1_v (OMP_MASTER
, stmt
);
4523 gfc_trans_omp_ordered (gfc_code
*code
)
4527 if (!code
->ext
.omp_clauses
->simd
)
4528 return gfc_trans_code (code
->block
? code
->block
->next
: NULL
);
4529 code
->ext
.omp_clauses
->threads
= 0;
4531 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, code
->ext
.omp_clauses
,
4533 return build2_loc (input_location
, OMP_ORDERED
, void_type_node
,
4534 code
->block
? gfc_trans_code (code
->block
->next
)
4535 : NULL_TREE
, omp_clauses
);
4539 gfc_trans_omp_parallel (gfc_code
*code
)
4542 tree stmt
, omp_clauses
;
4544 gfc_start_block (&block
);
4545 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4548 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4549 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4550 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4552 gfc_add_expr_to_block (&block
, stmt
);
4553 return gfc_finish_block (&block
);
4560 GFC_OMP_SPLIT_PARALLEL
,
4561 GFC_OMP_SPLIT_DISTRIBUTE
,
4562 GFC_OMP_SPLIT_TEAMS
,
4563 GFC_OMP_SPLIT_TARGET
,
4564 GFC_OMP_SPLIT_TASKLOOP
,
4570 GFC_OMP_MASK_SIMD
= (1 << GFC_OMP_SPLIT_SIMD
),
4571 GFC_OMP_MASK_DO
= (1 << GFC_OMP_SPLIT_DO
),
4572 GFC_OMP_MASK_PARALLEL
= (1 << GFC_OMP_SPLIT_PARALLEL
),
4573 GFC_OMP_MASK_DISTRIBUTE
= (1 << GFC_OMP_SPLIT_DISTRIBUTE
),
4574 GFC_OMP_MASK_TEAMS
= (1 << GFC_OMP_SPLIT_TEAMS
),
4575 GFC_OMP_MASK_TARGET
= (1 << GFC_OMP_SPLIT_TARGET
),
4576 GFC_OMP_MASK_TASKLOOP
= (1 << GFC_OMP_SPLIT_TASKLOOP
)
4580 gfc_split_omp_clauses (gfc_code
*code
,
4581 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
4583 int mask
= 0, innermost
= 0;
4584 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
4587 case EXEC_OMP_DISTRIBUTE
:
4588 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4590 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4591 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4592 innermost
= GFC_OMP_SPLIT_DO
;
4594 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4595 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
4596 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4597 innermost
= GFC_OMP_SPLIT_SIMD
;
4599 case EXEC_OMP_DISTRIBUTE_SIMD
:
4600 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4601 innermost
= GFC_OMP_SPLIT_SIMD
;
4604 innermost
= GFC_OMP_SPLIT_DO
;
4606 case EXEC_OMP_DO_SIMD
:
4607 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4608 innermost
= GFC_OMP_SPLIT_SIMD
;
4610 case EXEC_OMP_PARALLEL
:
4611 innermost
= GFC_OMP_SPLIT_PARALLEL
;
4613 case EXEC_OMP_PARALLEL_DO
:
4614 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4615 innermost
= GFC_OMP_SPLIT_DO
;
4617 case EXEC_OMP_PARALLEL_DO_SIMD
:
4618 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4619 innermost
= GFC_OMP_SPLIT_SIMD
;
4622 innermost
= GFC_OMP_SPLIT_SIMD
;
4624 case EXEC_OMP_TARGET
:
4625 innermost
= GFC_OMP_SPLIT_TARGET
;
4627 case EXEC_OMP_TARGET_PARALLEL
:
4628 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
;
4629 innermost
= GFC_OMP_SPLIT_PARALLEL
;
4631 case EXEC_OMP_TARGET_PARALLEL_DO
:
4632 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4633 innermost
= GFC_OMP_SPLIT_DO
;
4635 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
4636 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
4637 | GFC_OMP_MASK_SIMD
;
4638 innermost
= GFC_OMP_SPLIT_SIMD
;
4640 case EXEC_OMP_TARGET_SIMD
:
4641 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_SIMD
;
4642 innermost
= GFC_OMP_SPLIT_SIMD
;
4644 case EXEC_OMP_TARGET_TEAMS
:
4645 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
4646 innermost
= GFC_OMP_SPLIT_TEAMS
;
4648 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4649 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
4650 | GFC_OMP_MASK_DISTRIBUTE
;
4651 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4653 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4654 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4655 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4656 innermost
= GFC_OMP_SPLIT_DO
;
4658 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4659 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4660 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4661 innermost
= GFC_OMP_SPLIT_SIMD
;
4663 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4664 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
4665 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4666 innermost
= GFC_OMP_SPLIT_SIMD
;
4668 case EXEC_OMP_TASKLOOP
:
4669 innermost
= GFC_OMP_SPLIT_TASKLOOP
;
4671 case EXEC_OMP_TASKLOOP_SIMD
:
4672 mask
= GFC_OMP_MASK_TASKLOOP
| GFC_OMP_MASK_SIMD
;
4673 innermost
= GFC_OMP_SPLIT_SIMD
;
4675 case EXEC_OMP_TEAMS
:
4676 innermost
= GFC_OMP_SPLIT_TEAMS
;
4678 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4679 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
4680 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
4682 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4683 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4684 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
4685 innermost
= GFC_OMP_SPLIT_DO
;
4687 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4688 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
4689 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
4690 innermost
= GFC_OMP_SPLIT_SIMD
;
4692 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4693 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
4694 innermost
= GFC_OMP_SPLIT_SIMD
;
4701 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
4704 if (code
->ext
.omp_clauses
!= NULL
)
4706 if (mask
& GFC_OMP_MASK_TARGET
)
4708 /* First the clauses that are unique to some constructs. */
4709 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
4710 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
4711 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_IS_DEVICE_PTR
]
4712 = code
->ext
.omp_clauses
->lists
[OMP_LIST_IS_DEVICE_PTR
];
4713 clausesa
[GFC_OMP_SPLIT_TARGET
].device
4714 = code
->ext
.omp_clauses
->device
;
4715 clausesa
[GFC_OMP_SPLIT_TARGET
].defaultmap
4716 = code
->ext
.omp_clauses
->defaultmap
;
4717 clausesa
[GFC_OMP_SPLIT_TARGET
].if_exprs
[OMP_IF_TARGET
]
4718 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TARGET
];
4719 /* And this is copied to all. */
4720 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
4721 = code
->ext
.omp_clauses
->if_expr
;
4723 if (mask
& GFC_OMP_MASK_TEAMS
)
4725 /* First the clauses that are unique to some constructs. */
4726 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
4727 = code
->ext
.omp_clauses
->num_teams
;
4728 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
4729 = code
->ext
.omp_clauses
->thread_limit
;
4730 /* Shared and default clauses are allowed on parallel, teams
4732 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
4733 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4734 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
4735 = code
->ext
.omp_clauses
->default_sharing
;
4737 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4739 /* First the clauses that are unique to some constructs. */
4740 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
4741 = code
->ext
.omp_clauses
->dist_sched_kind
;
4742 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
4743 = code
->ext
.omp_clauses
->dist_chunk_size
;
4744 /* Duplicate collapse. */
4745 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
4746 = code
->ext
.omp_clauses
->collapse
;
4748 if (mask
& GFC_OMP_MASK_PARALLEL
)
4750 /* First the clauses that are unique to some constructs. */
4751 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
4752 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
4753 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
4754 = code
->ext
.omp_clauses
->num_threads
;
4755 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
4756 = code
->ext
.omp_clauses
->proc_bind
;
4757 /* Shared and default clauses are allowed on parallel, teams
4759 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
4760 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4761 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
4762 = code
->ext
.omp_clauses
->default_sharing
;
4763 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_exprs
[OMP_IF_PARALLEL
]
4764 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_PARALLEL
];
4765 /* And this is copied to all. */
4766 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
4767 = code
->ext
.omp_clauses
->if_expr
;
4769 if (mask
& GFC_OMP_MASK_DO
)
4771 /* First the clauses that are unique to some constructs. */
4772 clausesa
[GFC_OMP_SPLIT_DO
].ordered
4773 = code
->ext
.omp_clauses
->ordered
;
4774 clausesa
[GFC_OMP_SPLIT_DO
].orderedc
4775 = code
->ext
.omp_clauses
->orderedc
;
4776 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
4777 = code
->ext
.omp_clauses
->sched_kind
;
4778 if (innermost
== GFC_OMP_SPLIT_SIMD
)
4779 clausesa
[GFC_OMP_SPLIT_DO
].sched_simd
4780 = code
->ext
.omp_clauses
->sched_simd
;
4781 clausesa
[GFC_OMP_SPLIT_DO
].sched_monotonic
4782 = code
->ext
.omp_clauses
->sched_monotonic
;
4783 clausesa
[GFC_OMP_SPLIT_DO
].sched_nonmonotonic
4784 = code
->ext
.omp_clauses
->sched_nonmonotonic
;
4785 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
4786 = code
->ext
.omp_clauses
->chunk_size
;
4787 clausesa
[GFC_OMP_SPLIT_DO
].nowait
4788 = code
->ext
.omp_clauses
->nowait
;
4789 /* Duplicate collapse. */
4790 clausesa
[GFC_OMP_SPLIT_DO
].collapse
4791 = code
->ext
.omp_clauses
->collapse
;
4793 if (mask
& GFC_OMP_MASK_SIMD
)
4795 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
4796 = code
->ext
.omp_clauses
->safelen_expr
;
4797 clausesa
[GFC_OMP_SPLIT_SIMD
].simdlen_expr
4798 = code
->ext
.omp_clauses
->simdlen_expr
;
4799 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
4800 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
4801 /* Duplicate collapse. */
4802 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
4803 = code
->ext
.omp_clauses
->collapse
;
4805 if (mask
& GFC_OMP_MASK_TASKLOOP
)
4807 /* First the clauses that are unique to some constructs. */
4808 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].nogroup
4809 = code
->ext
.omp_clauses
->nogroup
;
4810 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].grainsize
4811 = code
->ext
.omp_clauses
->grainsize
;
4812 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].num_tasks
4813 = code
->ext
.omp_clauses
->num_tasks
;
4814 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].priority
4815 = code
->ext
.omp_clauses
->priority
;
4816 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].final_expr
4817 = code
->ext
.omp_clauses
->final_expr
;
4818 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].untied
4819 = code
->ext
.omp_clauses
->untied
;
4820 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].mergeable
4821 = code
->ext
.omp_clauses
->mergeable
;
4822 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_exprs
[OMP_IF_TASKLOOP
]
4823 = code
->ext
.omp_clauses
->if_exprs
[OMP_IF_TASKLOOP
];
4824 /* And this is copied to all. */
4825 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].if_expr
4826 = code
->ext
.omp_clauses
->if_expr
;
4827 /* Shared and default clauses are allowed on parallel, teams
4829 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].lists
[OMP_LIST_SHARED
]
4830 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
4831 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].default_sharing
4832 = code
->ext
.omp_clauses
->default_sharing
;
4833 /* Duplicate collapse. */
4834 clausesa
[GFC_OMP_SPLIT_TASKLOOP
].collapse
4835 = code
->ext
.omp_clauses
->collapse
;
4837 /* Private clause is supported on all constructs,
4838 it is enough to put it on the innermost one. For
4839 !$ omp parallel do put it on parallel though,
4840 as that's what we did for OpenMP 3.1. */
4841 clausesa
[innermost
== GFC_OMP_SPLIT_DO
4842 ? (int) GFC_OMP_SPLIT_PARALLEL
4843 : innermost
].lists
[OMP_LIST_PRIVATE
]
4844 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
4845 /* Firstprivate clause is supported on all constructs but
4846 simd. Put it on the outermost of those and duplicate
4847 on parallel and teams. */
4848 if (mask
& GFC_OMP_MASK_TARGET
)
4849 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_FIRSTPRIVATE
]
4850 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4851 if (mask
& GFC_OMP_MASK_TEAMS
)
4852 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
4853 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4854 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4855 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
4856 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4857 if (mask
& GFC_OMP_MASK_PARALLEL
)
4858 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
4859 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4860 else if (mask
& GFC_OMP_MASK_DO
)
4861 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
4862 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
4863 /* Lastprivate is allowed on distribute, do and simd.
4864 In parallel do{, simd} we actually want to put it on
4865 parallel rather than do. */
4866 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
4867 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_LASTPRIVATE
]
4868 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4869 if (mask
& GFC_OMP_MASK_PARALLEL
)
4870 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
4871 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4872 else if (mask
& GFC_OMP_MASK_DO
)
4873 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
4874 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4875 if (mask
& GFC_OMP_MASK_SIMD
)
4876 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
4877 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
4878 /* Reduction is allowed on simd, do, parallel and teams.
4879 Duplicate it on all of them, but omit on do if
4880 parallel is present. */
4881 if (mask
& GFC_OMP_MASK_TEAMS
)
4882 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_REDUCTION
]
4883 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4884 if (mask
& GFC_OMP_MASK_PARALLEL
)
4885 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_REDUCTION
]
4886 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4887 else if (mask
& GFC_OMP_MASK_DO
)
4888 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_REDUCTION
]
4889 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4890 if (mask
& GFC_OMP_MASK_SIMD
)
4891 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_REDUCTION
]
4892 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
4893 /* Linear clause is supported on do and simd,
4894 put it on the innermost one. */
4895 clausesa
[innermost
].lists
[OMP_LIST_LINEAR
]
4896 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
4898 if ((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
4899 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
4900 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
4904 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
4905 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
4908 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4909 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
4912 gfc_start_block (&block
);
4914 gfc_init_block (&block
);
4916 if (clausesa
== NULL
)
4918 clausesa
= clausesa_buf
;
4919 gfc_split_omp_clauses (code
, clausesa
);
4923 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
4924 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
4925 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
4928 if (TREE_CODE (body
) != BIND_EXPR
)
4929 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
4933 else if (TREE_CODE (body
) != BIND_EXPR
)
4934 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
4937 stmt
= make_node (OMP_FOR
);
4938 TREE_TYPE (stmt
) = void_type_node
;
4939 OMP_FOR_BODY (stmt
) = body
;
4940 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
4944 gfc_add_expr_to_block (&block
, stmt
);
4945 return gfc_finish_block (&block
);
4949 gfc_trans_omp_parallel_do (gfc_code
*code
, stmtblock_t
*pblock
,
4950 gfc_omp_clauses
*clausesa
)
4952 stmtblock_t block
, *new_pblock
= pblock
;
4953 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4954 tree stmt
, omp_clauses
= NULL_TREE
;
4957 gfc_start_block (&block
);
4959 gfc_init_block (&block
);
4961 if (clausesa
== NULL
)
4963 clausesa
= clausesa_buf
;
4964 gfc_split_omp_clauses (code
, clausesa
);
4967 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
4971 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
4972 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
4973 new_pblock
= &block
;
4977 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DO
, new_pblock
,
4978 &clausesa
[GFC_OMP_SPLIT_DO
], omp_clauses
);
4981 if (TREE_CODE (stmt
) != BIND_EXPR
)
4982 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4986 else if (TREE_CODE (stmt
) != BIND_EXPR
)
4987 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
4988 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
4990 OMP_PARALLEL_COMBINED (stmt
) = 1;
4991 gfc_add_expr_to_block (&block
, stmt
);
4992 return gfc_finish_block (&block
);
4996 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
4997 gfc_omp_clauses
*clausesa
)
5000 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
5001 tree stmt
, omp_clauses
= NULL_TREE
;
5004 gfc_start_block (&block
);
5006 gfc_init_block (&block
);
5008 if (clausesa
== NULL
)
5010 clausesa
= clausesa_buf
;
5011 gfc_split_omp_clauses (code
, clausesa
);
5015 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
5019 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
5022 if (TREE_CODE (stmt
) != BIND_EXPR
)
5023 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5027 else if (TREE_CODE (stmt
) != BIND_EXPR
)
5028 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
5031 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
5033 OMP_PARALLEL_COMBINED (stmt
) = 1;
5035 gfc_add_expr_to_block (&block
, stmt
);
5036 return gfc_finish_block (&block
);
5040 gfc_trans_omp_parallel_sections (gfc_code
*code
)
5043 gfc_omp_clauses section_clauses
;
5044 tree stmt
, omp_clauses
;
5046 memset (§ion_clauses
, 0, sizeof (section_clauses
));
5047 section_clauses
.nowait
= true;
5049 gfc_start_block (&block
);
5050 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
5053 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
5054 if (TREE_CODE (stmt
) != BIND_EXPR
)
5055 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5058 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
5060 OMP_PARALLEL_COMBINED (stmt
) = 1;
5061 gfc_add_expr_to_block (&block
, stmt
);
5062 return gfc_finish_block (&block
);
5066 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
5069 gfc_omp_clauses workshare_clauses
;
5070 tree stmt
, omp_clauses
;
5072 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
5073 workshare_clauses
.nowait
= true;
5075 gfc_start_block (&block
);
5076 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
5079 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
5080 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5081 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
5083 OMP_PARALLEL_COMBINED (stmt
) = 1;
5084 gfc_add_expr_to_block (&block
, stmt
);
5085 return gfc_finish_block (&block
);
5089 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
5091 stmtblock_t block
, body
;
5092 tree omp_clauses
, stmt
;
5093 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
5095 gfc_start_block (&block
);
5097 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
5099 gfc_init_block (&body
);
5100 for (code
= code
->block
; code
; code
= code
->block
)
5102 /* Last section is special because of lastprivate, so even if it
5103 is empty, chain it in. */
5104 stmt
= gfc_trans_omp_code (code
->next
,
5105 has_lastprivate
&& code
->block
== NULL
);
5106 if (! IS_EMPTY_STMT (stmt
))
5108 stmt
= build1_v (OMP_SECTION
, stmt
);
5109 gfc_add_expr_to_block (&body
, stmt
);
5112 stmt
= gfc_finish_block (&body
);
5114 stmt
= build2_loc (input_location
, OMP_SECTIONS
, void_type_node
, stmt
,
5116 gfc_add_expr_to_block (&block
, stmt
);
5118 return gfc_finish_block (&block
);
5122 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
5124 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
5125 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5126 stmt
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, stmt
,
5132 gfc_trans_omp_task (gfc_code
*code
)
5135 tree stmt
, omp_clauses
;
5137 gfc_start_block (&block
);
5138 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
5141 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5142 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5143 stmt
= build2_loc (input_location
, OMP_TASK
, void_type_node
, stmt
,
5145 gfc_add_expr_to_block (&block
, stmt
);
5146 return gfc_finish_block (&block
);
5150 gfc_trans_omp_taskgroup (gfc_code
*code
)
5152 tree body
= gfc_trans_code (code
->block
->next
);
5153 tree stmt
= make_node (OMP_TASKGROUP
);
5154 TREE_TYPE (stmt
) = void_type_node
;
5155 OMP_TASKGROUP_BODY (stmt
) = body
;
5156 OMP_TASKGROUP_CLAUSES (stmt
) = NULL_TREE
;
5161 gfc_trans_omp_taskwait (void)
5163 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
5164 return build_call_expr_loc (input_location
, decl
, 0);
5168 gfc_trans_omp_taskyield (void)
5170 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
5171 return build_call_expr_loc (input_location
, decl
, 0);
5175 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
5178 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
5179 tree stmt
, omp_clauses
= NULL_TREE
;
5181 gfc_start_block (&block
);
5182 if (clausesa
== NULL
)
5184 clausesa
= clausesa_buf
;
5185 gfc_split_omp_clauses (code
, clausesa
);
5189 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
5193 case EXEC_OMP_DISTRIBUTE
:
5194 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
5195 case EXEC_OMP_TEAMS_DISTRIBUTE
:
5196 /* This is handled in gfc_trans_omp_do. */
5199 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
5200 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5201 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5202 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
5203 if (TREE_CODE (stmt
) != BIND_EXPR
)
5204 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5208 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
5209 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5210 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5211 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
5212 if (TREE_CODE (stmt
) != BIND_EXPR
)
5213 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5217 case EXEC_OMP_DISTRIBUTE_SIMD
:
5218 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
5219 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
5220 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
5221 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
5222 if (TREE_CODE (stmt
) != BIND_EXPR
)
5223 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5232 tree distribute
= make_node (OMP_DISTRIBUTE
);
5233 TREE_TYPE (distribute
) = void_type_node
;
5234 OMP_FOR_BODY (distribute
) = stmt
;
5235 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
5238 gfc_add_expr_to_block (&block
, stmt
);
5239 return gfc_finish_block (&block
);
5243 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
,
5247 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
5249 bool combined
= true;
5251 gfc_start_block (&block
);
5252 if (clausesa
== NULL
)
5254 clausesa
= clausesa_buf
;
5255 gfc_split_omp_clauses (code
, clausesa
);
5260 = chainon (omp_clauses
,
5261 gfc_trans_omp_clauses (&block
,
5262 &clausesa
[GFC_OMP_SPLIT_TEAMS
],
5268 case EXEC_OMP_TARGET_TEAMS
:
5269 case EXEC_OMP_TEAMS
:
5270 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5273 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
5274 case EXEC_OMP_TEAMS_DISTRIBUTE
:
5275 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
5276 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
5280 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
5285 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5286 stmt
= build2_loc (input_location
, OMP_TEAMS
, void_type_node
, stmt
,
5289 OMP_TEAMS_COMBINED (stmt
) = 1;
5291 gfc_add_expr_to_block (&block
, stmt
);
5292 return gfc_finish_block (&block
);
5296 gfc_trans_omp_target (gfc_code
*code
)
5299 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
5300 tree stmt
, omp_clauses
= NULL_TREE
;
5302 gfc_start_block (&block
);
5303 gfc_split_omp_clauses (code
, clausesa
);
5306 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
5310 case EXEC_OMP_TARGET
:
5312 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5313 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5315 case EXEC_OMP_TARGET_PARALLEL
:
5319 gfc_start_block (&iblock
);
5321 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
5323 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5324 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
5326 gfc_add_expr_to_block (&iblock
, stmt
);
5327 stmt
= gfc_finish_block (&iblock
);
5328 if (TREE_CODE (stmt
) != BIND_EXPR
)
5329 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5334 case EXEC_OMP_TARGET_PARALLEL_DO
:
5335 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
5336 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
5337 if (TREE_CODE (stmt
) != BIND_EXPR
)
5338 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5342 case EXEC_OMP_TARGET_SIMD
:
5343 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
5344 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
5345 if (TREE_CODE (stmt
) != BIND_EXPR
)
5346 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5352 && (clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
5353 || clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
))
5355 gfc_omp_clauses clausesb
;
5357 /* For combined !$omp target teams, the num_teams and
5358 thread_limit clauses are evaluated before entering the
5359 target construct. */
5360 memset (&clausesb
, '\0', sizeof (clausesb
));
5361 clausesb
.num_teams
= clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
;
5362 clausesb
.thread_limit
= clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
;
5363 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
= NULL
;
5364 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
= NULL
;
5366 = gfc_trans_omp_clauses (&block
, &clausesb
, code
->loc
);
5368 stmt
= gfc_trans_omp_teams (code
, clausesa
, teams_clauses
);
5373 stmt
= gfc_trans_omp_teams (code
, clausesa
, NULL_TREE
);
5375 if (TREE_CODE (stmt
) != BIND_EXPR
)
5376 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5383 stmt
= build2_loc (input_location
, OMP_TARGET
, void_type_node
, stmt
,
5385 if (code
->op
!= EXEC_OMP_TARGET
)
5386 OMP_TARGET_COMBINED (stmt
) = 1;
5388 gfc_add_expr_to_block (&block
, stmt
);
5389 return gfc_finish_block (&block
);
5393 gfc_trans_omp_taskloop (gfc_code
*code
)
5396 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
5397 tree stmt
, omp_clauses
= NULL_TREE
;
5399 gfc_start_block (&block
);
5400 gfc_split_omp_clauses (code
, clausesa
);
5403 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TASKLOOP
],
5407 case EXEC_OMP_TASKLOOP
:
5408 /* This is handled in gfc_trans_omp_do. */
5411 case EXEC_OMP_TASKLOOP_SIMD
:
5412 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
5413 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
5414 if (TREE_CODE (stmt
) != BIND_EXPR
)
5415 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
5424 tree taskloop
= make_node (OMP_TASKLOOP
);
5425 TREE_TYPE (taskloop
) = void_type_node
;
5426 OMP_FOR_BODY (taskloop
) = stmt
;
5427 OMP_FOR_CLAUSES (taskloop
) = omp_clauses
;
5430 gfc_add_expr_to_block (&block
, stmt
);
5431 return gfc_finish_block (&block
);
5435 gfc_trans_omp_target_data (gfc_code
*code
)
5438 tree stmt
, omp_clauses
;
5440 gfc_start_block (&block
);
5441 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
5443 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5444 stmt
= build2_loc (input_location
, OMP_TARGET_DATA
, void_type_node
, stmt
,
5446 gfc_add_expr_to_block (&block
, stmt
);
5447 return gfc_finish_block (&block
);
5451 gfc_trans_omp_target_enter_data (gfc_code
*code
)
5454 tree stmt
, omp_clauses
;
5456 gfc_start_block (&block
);
5457 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
5459 stmt
= build1_loc (input_location
, OMP_TARGET_ENTER_DATA
, void_type_node
,
5461 gfc_add_expr_to_block (&block
, stmt
);
5462 return gfc_finish_block (&block
);
5466 gfc_trans_omp_target_exit_data (gfc_code
*code
)
5469 tree stmt
, omp_clauses
;
5471 gfc_start_block (&block
);
5472 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
5474 stmt
= build1_loc (input_location
, OMP_TARGET_EXIT_DATA
, void_type_node
,
5476 gfc_add_expr_to_block (&block
, stmt
);
5477 return gfc_finish_block (&block
);
5481 gfc_trans_omp_target_update (gfc_code
*code
)
5484 tree stmt
, omp_clauses
;
5486 gfc_start_block (&block
);
5487 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
5489 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
5491 gfc_add_expr_to_block (&block
, stmt
);
5492 return gfc_finish_block (&block
);
5496 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
5498 tree res
, tmp
, stmt
;
5499 stmtblock_t block
, *pblock
= NULL
;
5500 stmtblock_t singleblock
;
5501 int saved_ompws_flags
;
5502 bool singleblock_in_progress
= false;
5503 /* True if previous gfc_code in workshare construct is not workshared. */
5504 bool prev_singleunit
;
5506 code
= code
->block
->next
;
5510 gfc_start_block (&block
);
5513 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
5514 prev_singleunit
= false;
5516 /* Translate statements one by one to trees until we reach
5517 the end of the workshare construct. Adjacent gfc_codes that
5518 are a single unit of work are clustered and encapsulated in a
5519 single OMP_SINGLE construct. */
5520 for (; code
; code
= code
->next
)
5522 if (code
->here
!= 0)
5524 res
= gfc_trans_label_here (code
);
5525 gfc_add_expr_to_block (pblock
, res
);
5528 /* No dependence analysis, use for clauses with wait.
5529 If this is the last gfc_code, use default omp_clauses. */
5530 if (code
->next
== NULL
&& clauses
->nowait
)
5531 ompws_flags
|= OMPWS_NOWAIT
;
5533 /* By default, every gfc_code is a single unit of work. */
5534 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
5535 ompws_flags
&= ~(OMPWS_SCALARIZER_WS
| OMPWS_SCALARIZER_BODY
);
5544 res
= gfc_trans_assign (code
);
5547 case EXEC_POINTER_ASSIGN
:
5548 res
= gfc_trans_pointer_assign (code
);
5551 case EXEC_INIT_ASSIGN
:
5552 res
= gfc_trans_init_assign (code
);
5556 res
= gfc_trans_forall (code
);
5560 res
= gfc_trans_where (code
);
5563 case EXEC_OMP_ATOMIC
:
5564 res
= gfc_trans_omp_directive (code
);
5567 case EXEC_OMP_PARALLEL
:
5568 case EXEC_OMP_PARALLEL_DO
:
5569 case EXEC_OMP_PARALLEL_SECTIONS
:
5570 case EXEC_OMP_PARALLEL_WORKSHARE
:
5571 case EXEC_OMP_CRITICAL
:
5572 saved_ompws_flags
= ompws_flags
;
5574 res
= gfc_trans_omp_directive (code
);
5575 ompws_flags
= saved_ompws_flags
;
5579 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
5582 gfc_set_backend_locus (&code
->loc
);
5584 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
5586 if (prev_singleunit
)
5588 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
5589 /* Add current gfc_code to single block. */
5590 gfc_add_expr_to_block (&singleblock
, res
);
5593 /* Finish single block and add it to pblock. */
5594 tmp
= gfc_finish_block (&singleblock
);
5595 tmp
= build2_loc (input_location
, OMP_SINGLE
,
5596 void_type_node
, tmp
, NULL_TREE
);
5597 gfc_add_expr_to_block (pblock
, tmp
);
5598 /* Add current gfc_code to pblock. */
5599 gfc_add_expr_to_block (pblock
, res
);
5600 singleblock_in_progress
= false;
5605 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
5607 /* Start single block. */
5608 gfc_init_block (&singleblock
);
5609 gfc_add_expr_to_block (&singleblock
, res
);
5610 singleblock_in_progress
= true;
5613 /* Add the new statement to the block. */
5614 gfc_add_expr_to_block (pblock
, res
);
5616 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
5620 /* Finish remaining SINGLE block, if we were in the middle of one. */
5621 if (singleblock_in_progress
)
5623 /* Finish single block and add it to pblock. */
5624 tmp
= gfc_finish_block (&singleblock
);
5625 tmp
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, tmp
,
5627 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
5629 gfc_add_expr_to_block (pblock
, tmp
);
5632 stmt
= gfc_finish_block (pblock
);
5633 if (TREE_CODE (stmt
) != BIND_EXPR
)
5635 if (!IS_EMPTY_STMT (stmt
))
5637 tree bindblock
= poplevel (1, 0);
5638 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
5646 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
5647 stmt
= gfc_trans_omp_barrier ();
5654 gfc_trans_oacc_declare (gfc_code
*code
)
5657 tree stmt
, oacc_clauses
;
5658 enum tree_code construct_code
;
5660 construct_code
= OACC_DATA
;
5662 gfc_start_block (&block
);
5664 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.oacc_declare
->clauses
,
5666 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
5667 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
5669 gfc_add_expr_to_block (&block
, stmt
);
5671 return gfc_finish_block (&block
);
5675 gfc_trans_oacc_directive (gfc_code
*code
)
5679 case EXEC_OACC_PARALLEL_LOOP
:
5680 case EXEC_OACC_KERNELS_LOOP
:
5681 case EXEC_OACC_SERIAL_LOOP
:
5682 return gfc_trans_oacc_combined_directive (code
);
5683 case EXEC_OACC_PARALLEL
:
5684 case EXEC_OACC_KERNELS
:
5685 case EXEC_OACC_SERIAL
:
5686 case EXEC_OACC_DATA
:
5687 case EXEC_OACC_HOST_DATA
:
5688 return gfc_trans_oacc_construct (code
);
5689 case EXEC_OACC_LOOP
:
5690 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
5692 case EXEC_OACC_UPDATE
:
5693 case EXEC_OACC_CACHE
:
5694 case EXEC_OACC_ENTER_DATA
:
5695 case EXEC_OACC_EXIT_DATA
:
5696 return gfc_trans_oacc_executable_directive (code
);
5697 case EXEC_OACC_WAIT
:
5698 return gfc_trans_oacc_wait_directive (code
);
5699 case EXEC_OACC_ATOMIC
:
5700 return gfc_trans_omp_atomic (code
);
5701 case EXEC_OACC_DECLARE
:
5702 return gfc_trans_oacc_declare (code
);
5709 gfc_trans_omp_directive (gfc_code
*code
)
5713 case EXEC_OMP_ATOMIC
:
5714 return gfc_trans_omp_atomic (code
);
5715 case EXEC_OMP_BARRIER
:
5716 return gfc_trans_omp_barrier ();
5717 case EXEC_OMP_CANCEL
:
5718 return gfc_trans_omp_cancel (code
);
5719 case EXEC_OMP_CANCELLATION_POINT
:
5720 return gfc_trans_omp_cancellation_point (code
);
5721 case EXEC_OMP_CRITICAL
:
5722 return gfc_trans_omp_critical (code
);
5723 case EXEC_OMP_DISTRIBUTE
:
5726 case EXEC_OMP_TASKLOOP
:
5727 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
5729 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
5730 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
5731 case EXEC_OMP_DISTRIBUTE_SIMD
:
5732 return gfc_trans_omp_distribute (code
, NULL
);
5733 case EXEC_OMP_DO_SIMD
:
5734 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
5735 case EXEC_OMP_FLUSH
:
5736 return gfc_trans_omp_flush ();
5737 case EXEC_OMP_MASTER
:
5738 return gfc_trans_omp_master (code
);
5739 case EXEC_OMP_ORDERED
:
5740 return gfc_trans_omp_ordered (code
);
5741 case EXEC_OMP_PARALLEL
:
5742 return gfc_trans_omp_parallel (code
);
5743 case EXEC_OMP_PARALLEL_DO
:
5744 return gfc_trans_omp_parallel_do (code
, NULL
, NULL
);
5745 case EXEC_OMP_PARALLEL_DO_SIMD
:
5746 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
5747 case EXEC_OMP_PARALLEL_SECTIONS
:
5748 return gfc_trans_omp_parallel_sections (code
);
5749 case EXEC_OMP_PARALLEL_WORKSHARE
:
5750 return gfc_trans_omp_parallel_workshare (code
);
5751 case EXEC_OMP_SECTIONS
:
5752 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
5753 case EXEC_OMP_SINGLE
:
5754 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
5755 case EXEC_OMP_TARGET
:
5756 case EXEC_OMP_TARGET_PARALLEL
:
5757 case EXEC_OMP_TARGET_PARALLEL_DO
:
5758 case EXEC_OMP_TARGET_PARALLEL_DO_SIMD
:
5759 case EXEC_OMP_TARGET_SIMD
:
5760 case EXEC_OMP_TARGET_TEAMS
:
5761 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
5762 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5763 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5764 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
5765 return gfc_trans_omp_target (code
);
5766 case EXEC_OMP_TARGET_DATA
:
5767 return gfc_trans_omp_target_data (code
);
5768 case EXEC_OMP_TARGET_ENTER_DATA
:
5769 return gfc_trans_omp_target_enter_data (code
);
5770 case EXEC_OMP_TARGET_EXIT_DATA
:
5771 return gfc_trans_omp_target_exit_data (code
);
5772 case EXEC_OMP_TARGET_UPDATE
:
5773 return gfc_trans_omp_target_update (code
);
5775 return gfc_trans_omp_task (code
);
5776 case EXEC_OMP_TASKGROUP
:
5777 return gfc_trans_omp_taskgroup (code
);
5778 case EXEC_OMP_TASKLOOP_SIMD
:
5779 return gfc_trans_omp_taskloop (code
);
5780 case EXEC_OMP_TASKWAIT
:
5781 return gfc_trans_omp_taskwait ();
5782 case EXEC_OMP_TASKYIELD
:
5783 return gfc_trans_omp_taskyield ();
5784 case EXEC_OMP_TEAMS
:
5785 case EXEC_OMP_TEAMS_DISTRIBUTE
:
5786 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
5787 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
5788 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
5789 return gfc_trans_omp_teams (code
, NULL
, NULL_TREE
);
5790 case EXEC_OMP_WORKSHARE
:
5791 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
5798 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
5803 gfc_omp_declare_simd
*ods
;
5804 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
5806 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
5807 tree fndecl
= ns
->proc_name
->backend_decl
;
5809 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
5810 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
5811 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
5812 DECL_ATTRIBUTES (fndecl
) = c
;