/* OpenMP directive translation -- generate GCC trees from gfc_code.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
28 #include "fold-const.h"
29 #include "gimple-expr.h"
30 #include "gimplify.h" /* For create_tmp_var_raw. */
31 #include "stringpool.h"
33 #include "diagnostic-core.h" /* For internal_error. */
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
41 #include "gomp-constants.h"
45 /* True if OpenMP should privatize what this DECL points to rather
46 than the DECL itself. */
49 gfc_omp_privatize_by_reference (const_tree decl
)
51 tree type
= TREE_TYPE (decl
);
53 if (TREE_CODE (type
) == REFERENCE_TYPE
54 && (!DECL_ARTIFICIAL (decl
) || TREE_CODE (decl
) == PARM_DECL
))
57 if (TREE_CODE (type
) == POINTER_TYPE
)
59 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
60 that have POINTER_TYPE type and aren't scalar pointers, scalar
61 allocatables, Cray pointees or C pointers are supposed to be
62 privatized by reference. */
63 if (GFC_DECL_GET_SCALAR_POINTER (decl
)
64 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
65 || GFC_DECL_CRAY_POINTEE (decl
)
66 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
69 if (!DECL_ARTIFICIAL (decl
)
70 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
73 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
75 if (DECL_LANG_SPECIFIC (decl
)
76 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
83 /* True if OpenMP sharing attribute of DECL is predetermined. */
85 enum omp_clause_default_kind
86 gfc_omp_predetermined_sharing (tree decl
)
88 /* Associate names preserve the association established during ASSOCIATE.
89 As they are implemented either as pointers to the selector or array
90 descriptor and shouldn't really change in the ASSOCIATE region,
91 this decl can be either shared or firstprivate. If it is a pointer,
92 use firstprivate, as it is cheaper that way, otherwise make it shared. */
93 if (GFC_DECL_ASSOCIATE_VAR_P (decl
))
95 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
96 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
98 return OMP_CLAUSE_DEFAULT_SHARED
;
101 if (DECL_ARTIFICIAL (decl
)
102 && ! GFC_DECL_RESULT (decl
)
103 && ! (DECL_LANG_SPECIFIC (decl
)
104 && GFC_DECL_SAVED_DESCRIPTOR (decl
)))
105 return OMP_CLAUSE_DEFAULT_SHARED
;
107 /* Cray pointees shouldn't be listed in any clauses and should be
108 gimplified to dereference of the corresponding Cray pointer.
109 Make them all private, so that they are emitted in the debug
111 if (GFC_DECL_CRAY_POINTEE (decl
))
112 return OMP_CLAUSE_DEFAULT_PRIVATE
;
114 /* Assumed-size arrays are predetermined shared. */
115 if (TREE_CODE (decl
) == PARM_DECL
116 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl
))
117 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl
)) == GFC_ARRAY_UNKNOWN
118 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl
),
119 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl
)) - 1)
121 return OMP_CLAUSE_DEFAULT_SHARED
;
123 /* Dummy procedures aren't considered variables by OpenMP, thus are
124 disallowed in OpenMP clauses. They are represented as PARM_DECLs
125 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
126 to avoid complaining about their uses with default(none). */
127 if (TREE_CODE (decl
) == PARM_DECL
128 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
129 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == FUNCTION_TYPE
)
130 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
132 /* COMMON and EQUIVALENCE decls are shared. They
133 are only referenced through DECL_VALUE_EXPR of the variables
134 contained in them. If those are privatized, they will not be
135 gimplified to the COMMON or EQUIVALENCE decls. */
136 if (GFC_DECL_COMMON_OR_EQUIV (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
137 return OMP_CLAUSE_DEFAULT_SHARED
;
139 if (GFC_DECL_RESULT (decl
) && ! DECL_HAS_VALUE_EXPR_P (decl
))
140 return OMP_CLAUSE_DEFAULT_SHARED
;
142 /* These are either array or derived parameters, or vtables.
143 In the former cases, the OpenMP standard doesn't consider them to be
144 variables at all (they can't be redefined), but they can nevertheless appear
145 in parallel/task regions and for default(none) purposes treat them as shared.
146 For vtables likely the same handling is desirable. */
147 if (TREE_CODE (decl
) == VAR_DECL
148 && TREE_READONLY (decl
)
149 && TREE_STATIC (decl
))
150 return OMP_CLAUSE_DEFAULT_SHARED
;
152 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
155 /* Return decl that should be used when reporting DEFAULT(NONE)
159 gfc_omp_report_decl (tree decl
)
161 if (DECL_ARTIFICIAL (decl
)
162 && DECL_LANG_SPECIFIC (decl
)
163 && GFC_DECL_SAVED_DESCRIPTOR (decl
))
164 return GFC_DECL_SAVED_DESCRIPTOR (decl
);
169 /* Return true if TYPE has any allocatable components. */
172 gfc_has_alloc_comps (tree type
, tree decl
)
176 if (POINTER_TYPE_P (type
))
178 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
179 type
= TREE_TYPE (type
);
180 else if (GFC_DECL_GET_SCALAR_POINTER (decl
))
184 if (GFC_DESCRIPTOR_TYPE_P (type
) || GFC_ARRAY_TYPE_P (type
))
185 type
= gfc_get_element_type (type
);
187 if (TREE_CODE (type
) != RECORD_TYPE
)
190 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
192 ftype
= TREE_TYPE (field
);
193 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
195 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
196 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
198 if (gfc_has_alloc_comps (ftype
, field
))
204 /* Return true if DECL in private clause needs
205 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
207 gfc_omp_private_outer_ref (tree decl
)
209 tree type
= TREE_TYPE (decl
);
211 if (GFC_DESCRIPTOR_TYPE_P (type
)
212 && GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
)
215 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
))
218 if (gfc_omp_privatize_by_reference (decl
))
219 type
= TREE_TYPE (type
);
221 if (gfc_has_alloc_comps (type
, decl
))
227 /* Callback for gfc_omp_unshare_expr. */
230 gfc_omp_unshare_expr_r (tree
*tp
, int *walk_subtrees
, void *)
233 enum tree_code code
= TREE_CODE (t
);
235 /* Stop at types, decls, constants like copy_tree_r. */
236 if (TREE_CODE_CLASS (code
) == tcc_type
237 || TREE_CODE_CLASS (code
) == tcc_declaration
238 || TREE_CODE_CLASS (code
) == tcc_constant
241 else if (handled_component_p (t
)
242 || TREE_CODE (t
) == MEM_REF
)
244 *tp
= unshare_expr (t
);
251 /* Unshare in expr anything that the FE which normally doesn't
252 care much about tree sharing (because during gimplification
253 everything is unshared) could cause problems with tree sharing
254 at omp-low.c time. */
257 gfc_omp_unshare_expr (tree expr
)
259 walk_tree (&expr
, gfc_omp_unshare_expr_r
, NULL
, NULL
);
/* Kind of operation gfc_walk_alloc_comps should perform on the
   allocatable components it visits.  */

enum walk_alloc_comps
{
  WALK_ALLOC_COMPS_DTOR,
  WALK_ALLOC_COMPS_DEFAULT_CTOR,
  WALK_ALLOC_COMPS_COPY_CTOR
};
270 /* Handle allocatable components in OpenMP clauses. */
273 gfc_walk_alloc_comps (tree decl
, tree dest
, tree var
,
274 enum walk_alloc_comps kind
)
276 stmtblock_t block
, tmpblock
;
277 tree type
= TREE_TYPE (decl
), then_b
, tem
, field
;
278 gfc_init_block (&block
);
280 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
282 if (GFC_DESCRIPTOR_TYPE_P (type
))
284 gfc_init_block (&tmpblock
);
285 tem
= gfc_full_array_size (&tmpblock
, decl
,
286 GFC_TYPE_ARRAY_RANK (type
));
287 then_b
= gfc_finish_block (&tmpblock
);
288 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (then_b
));
289 tem
= gfc_omp_unshare_expr (tem
);
290 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
291 gfc_array_index_type
, tem
,
296 if (!TYPE_DOMAIN (type
)
297 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
298 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
299 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
301 tem
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
302 TYPE_SIZE_UNIT (type
),
303 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
304 tem
= size_binop (MINUS_EXPR
, tem
, size_one_node
);
307 tem
= array_type_nelts (type
);
308 tem
= fold_convert (gfc_array_index_type
, tem
);
311 tree nelems
= gfc_evaluate_now (tem
, &block
);
312 tree index
= gfc_create_var (gfc_array_index_type
, "S");
314 gfc_init_block (&tmpblock
);
315 tem
= gfc_conv_array_data (decl
);
316 tree declvar
= build_fold_indirect_ref_loc (input_location
, tem
);
317 tree declvref
= gfc_build_array_ref (declvar
, index
, NULL
);
318 tree destvar
, destvref
= NULL_TREE
;
321 tem
= gfc_conv_array_data (dest
);
322 destvar
= build_fold_indirect_ref_loc (input_location
, tem
);
323 destvref
= gfc_build_array_ref (destvar
, index
, NULL
);
325 gfc_add_expr_to_block (&tmpblock
,
326 gfc_walk_alloc_comps (declvref
, destvref
,
330 gfc_init_loopinfo (&loop
);
332 loop
.from
[0] = gfc_index_zero_node
;
333 loop
.loopvar
[0] = index
;
335 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
336 gfc_add_block_to_block (&block
, &loop
.pre
);
337 return gfc_finish_block (&block
);
339 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var
))
341 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
343 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
344 type
= TREE_TYPE (decl
);
347 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
);
348 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
350 tree ftype
= TREE_TYPE (field
);
351 tree declf
, destf
= NULL_TREE
;
352 bool has_alloc_comps
= gfc_has_alloc_comps (ftype
, field
);
353 if ((!GFC_DESCRIPTOR_TYPE_P (ftype
)
354 || GFC_TYPE_ARRAY_AKIND (ftype
) != GFC_ARRAY_ALLOCATABLE
)
355 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field
)
358 declf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
359 decl
, field
, NULL_TREE
);
361 destf
= fold_build3_loc (input_location
, COMPONENT_REF
, ftype
,
362 dest
, field
, NULL_TREE
);
367 case WALK_ALLOC_COMPS_DTOR
:
369 case WALK_ALLOC_COMPS_DEFAULT_CTOR
:
370 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
371 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
373 gfc_add_modify (&block
, unshare_expr (destf
),
374 unshare_expr (declf
));
375 tem
= gfc_duplicate_allocatable_nocopy
376 (destf
, declf
, ftype
,
377 GFC_TYPE_ARRAY_RANK (ftype
));
379 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
380 tem
= gfc_duplicate_allocatable_nocopy (destf
, declf
, ftype
, 0);
382 case WALK_ALLOC_COMPS_COPY_CTOR
:
383 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
384 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
385 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
,
386 GFC_TYPE_ARRAY_RANK (ftype
),
388 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
389 tem
= gfc_duplicate_allocatable (destf
, declf
, ftype
, 0,
394 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
397 gfc_init_block (&tmpblock
);
398 gfc_add_expr_to_block (&tmpblock
,
399 gfc_walk_alloc_comps (declf
, destf
,
401 then_b
= gfc_finish_block (&tmpblock
);
402 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
403 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
404 tem
= gfc_conv_descriptor_data_get (unshare_expr (declf
));
405 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
406 tem
= unshare_expr (declf
);
411 tem
= fold_convert (pvoid_type_node
, tem
);
412 tem
= fold_build2_loc (input_location
, NE_EXPR
,
413 boolean_type_node
, tem
,
415 then_b
= build3_loc (input_location
, COND_EXPR
, void_type_node
,
417 build_empty_stmt (input_location
));
419 gfc_add_expr_to_block (&block
, then_b
);
421 if (kind
== WALK_ALLOC_COMPS_DTOR
)
423 if (GFC_DESCRIPTOR_TYPE_P (ftype
)
424 && GFC_TYPE_ARRAY_AKIND (ftype
) == GFC_ARRAY_ALLOCATABLE
)
426 tem
= gfc_trans_dealloc_allocated (unshare_expr (declf
),
428 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
430 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field
))
432 tem
= gfc_call_free (unshare_expr (declf
));
433 gfc_add_expr_to_block (&block
, gfc_omp_unshare_expr (tem
));
438 return gfc_finish_block (&block
);
441 /* Return code to initialize DECL with its default constructor, or
442 NULL if there's nothing to do. */
445 gfc_omp_clause_default_ctor (tree clause
, tree decl
, tree outer
)
447 tree type
= TREE_TYPE (decl
), size
, ptr
, cond
, then_b
, else_b
;
448 stmtblock_t block
, cond_block
;
450 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_PRIVATE
451 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LASTPRIVATE
452 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
453 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_REDUCTION
);
455 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
456 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
457 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
459 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
462 gfc_start_block (&block
);
463 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
464 OMP_CLAUSE_DECL (clause
),
465 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
466 gfc_add_expr_to_block (&block
, tem
);
467 return gfc_finish_block (&block
);
472 gcc_assert (outer
!= NULL_TREE
);
474 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
475 "not currently allocated" allocation status if outer
476 array is "not currently allocated", otherwise should be allocated. */
477 gfc_start_block (&block
);
479 gfc_init_block (&cond_block
);
481 if (GFC_DESCRIPTOR_TYPE_P (type
))
483 gfc_add_modify (&cond_block
, decl
, outer
);
484 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
485 size
= gfc_conv_descriptor_ubound_get (decl
, rank
);
486 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
488 gfc_conv_descriptor_lbound_get (decl
, rank
));
489 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
490 size
, gfc_index_one_node
);
491 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
492 size
= fold_build2_loc (input_location
, MULT_EXPR
,
493 gfc_array_index_type
, size
,
494 gfc_conv_descriptor_stride_get (decl
, rank
));
495 tree esize
= fold_convert (gfc_array_index_type
,
496 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
497 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
499 size
= unshare_expr (size
);
500 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
504 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
505 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
506 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
507 if (GFC_DESCRIPTOR_TYPE_P (type
))
508 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
), ptr
);
510 gfc_add_modify (&cond_block
, unshare_expr (decl
),
511 fold_convert (TREE_TYPE (decl
), ptr
));
512 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
514 tree tem
= gfc_walk_alloc_comps (outer
, decl
,
515 OMP_CLAUSE_DECL (clause
),
516 WALK_ALLOC_COMPS_DEFAULT_CTOR
);
517 gfc_add_expr_to_block (&cond_block
, tem
);
519 then_b
= gfc_finish_block (&cond_block
);
521 /* Reduction clause requires allocated ALLOCATABLE. */
522 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_REDUCTION
)
524 gfc_init_block (&cond_block
);
525 if (GFC_DESCRIPTOR_TYPE_P (type
))
526 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (decl
),
529 gfc_add_modify (&cond_block
, unshare_expr (decl
),
530 build_zero_cst (TREE_TYPE (decl
)));
531 else_b
= gfc_finish_block (&cond_block
);
533 tree tem
= fold_convert (pvoid_type_node
,
534 GFC_DESCRIPTOR_TYPE_P (type
)
535 ? gfc_conv_descriptor_data_get (outer
) : outer
);
536 tem
= unshare_expr (tem
);
537 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
538 tem
, null_pointer_node
);
539 gfc_add_expr_to_block (&block
,
540 build3_loc (input_location
, COND_EXPR
,
541 void_type_node
, cond
, then_b
,
545 gfc_add_expr_to_block (&block
, then_b
);
547 return gfc_finish_block (&block
);
550 /* Build and return code for a copy constructor from SRC to DEST. */
553 gfc_omp_clause_copy_ctor (tree clause
, tree dest
, tree src
)
555 tree type
= TREE_TYPE (dest
), ptr
, size
, call
;
556 tree cond
, then_b
, else_b
;
557 stmtblock_t block
, cond_block
;
559 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_FIRSTPRIVATE
560 || OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
562 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
563 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
564 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
566 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
568 gfc_start_block (&block
);
569 gfc_add_modify (&block
, dest
, src
);
570 tree tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
571 WALK_ALLOC_COMPS_COPY_CTOR
);
572 gfc_add_expr_to_block (&block
, tem
);
573 return gfc_finish_block (&block
);
576 return build2_v (MODIFY_EXPR
, dest
, src
);
579 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
580 and copied from SRC. */
581 gfc_start_block (&block
);
583 gfc_init_block (&cond_block
);
585 gfc_add_modify (&cond_block
, dest
, src
);
586 if (GFC_DESCRIPTOR_TYPE_P (type
))
588 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
589 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
590 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
592 gfc_conv_descriptor_lbound_get (dest
, rank
));
593 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
594 size
, gfc_index_one_node
);
595 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
596 size
= fold_build2_loc (input_location
, MULT_EXPR
,
597 gfc_array_index_type
, size
,
598 gfc_conv_descriptor_stride_get (dest
, rank
));
599 tree esize
= fold_convert (gfc_array_index_type
,
600 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
601 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
603 size
= unshare_expr (size
);
604 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
608 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
609 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
610 gfc_allocate_using_malloc (&cond_block
, ptr
, size
, NULL_TREE
);
611 if (GFC_DESCRIPTOR_TYPE_P (type
))
612 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
), ptr
);
614 gfc_add_modify (&cond_block
, unshare_expr (dest
),
615 fold_convert (TREE_TYPE (dest
), ptr
));
617 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
618 ? gfc_conv_descriptor_data_get (src
) : src
;
619 srcptr
= unshare_expr (srcptr
);
620 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
621 call
= build_call_expr_loc (input_location
,
622 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
624 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
625 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
627 tree tem
= gfc_walk_alloc_comps (src
, dest
,
628 OMP_CLAUSE_DECL (clause
),
629 WALK_ALLOC_COMPS_COPY_CTOR
);
630 gfc_add_expr_to_block (&cond_block
, tem
);
632 then_b
= gfc_finish_block (&cond_block
);
634 gfc_init_block (&cond_block
);
635 if (GFC_DESCRIPTOR_TYPE_P (type
))
636 gfc_conv_descriptor_data_set (&cond_block
, unshare_expr (dest
),
639 gfc_add_modify (&cond_block
, unshare_expr (dest
),
640 build_zero_cst (TREE_TYPE (dest
)));
641 else_b
= gfc_finish_block (&cond_block
);
643 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
644 unshare_expr (srcptr
), null_pointer_node
);
645 gfc_add_expr_to_block (&block
,
646 build3_loc (input_location
, COND_EXPR
,
647 void_type_node
, cond
, then_b
, else_b
));
649 return gfc_finish_block (&block
);
652 /* Similarly, except use an intrinsic or pointer assignment operator
656 gfc_omp_clause_assign_op (tree clause
, tree dest
, tree src
)
658 tree type
= TREE_TYPE (dest
), ptr
, size
, call
, nonalloc
;
659 tree cond
, then_b
, else_b
;
660 stmtblock_t block
, cond_block
, cond_block2
, inner_block
;
662 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
663 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
664 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
666 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
668 gfc_start_block (&block
);
669 /* First dealloc any allocatable components in DEST. */
670 tree tem
= gfc_walk_alloc_comps (dest
, NULL_TREE
,
671 OMP_CLAUSE_DECL (clause
),
672 WALK_ALLOC_COMPS_DTOR
);
673 gfc_add_expr_to_block (&block
, tem
);
674 /* Then copy over toplevel data. */
675 gfc_add_modify (&block
, dest
, src
);
676 /* Finally allocate any allocatable components and copy. */
677 tem
= gfc_walk_alloc_comps (src
, dest
, OMP_CLAUSE_DECL (clause
),
678 WALK_ALLOC_COMPS_COPY_CTOR
);
679 gfc_add_expr_to_block (&block
, tem
);
680 return gfc_finish_block (&block
);
683 return build2_v (MODIFY_EXPR
, dest
, src
);
686 gfc_start_block (&block
);
688 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
690 then_b
= gfc_walk_alloc_comps (dest
, NULL_TREE
, OMP_CLAUSE_DECL (clause
),
691 WALK_ALLOC_COMPS_DTOR
);
692 tree tem
= fold_convert (pvoid_type_node
,
693 GFC_DESCRIPTOR_TYPE_P (type
)
694 ? gfc_conv_descriptor_data_get (dest
) : dest
);
695 tem
= unshare_expr (tem
);
696 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
697 tem
, null_pointer_node
);
698 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
699 then_b
, build_empty_stmt (input_location
));
700 gfc_add_expr_to_block (&block
, tem
);
703 gfc_init_block (&cond_block
);
705 if (GFC_DESCRIPTOR_TYPE_P (type
))
707 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
708 size
= gfc_conv_descriptor_ubound_get (src
, rank
);
709 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
711 gfc_conv_descriptor_lbound_get (src
, rank
));
712 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
713 size
, gfc_index_one_node
);
714 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
715 size
= fold_build2_loc (input_location
, MULT_EXPR
,
716 gfc_array_index_type
, size
,
717 gfc_conv_descriptor_stride_get (src
, rank
));
718 tree esize
= fold_convert (gfc_array_index_type
,
719 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
720 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
722 size
= unshare_expr (size
);
723 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
727 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
728 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
730 tree destptr
= GFC_DESCRIPTOR_TYPE_P (type
)
731 ? gfc_conv_descriptor_data_get (dest
) : dest
;
732 destptr
= unshare_expr (destptr
);
733 destptr
= fold_convert (pvoid_type_node
, destptr
);
734 gfc_add_modify (&cond_block
, ptr
, destptr
);
736 nonalloc
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
737 destptr
, null_pointer_node
);
739 if (GFC_DESCRIPTOR_TYPE_P (type
))
742 for (i
= 0; i
< GFC_TYPE_ARRAY_RANK (type
); i
++)
744 tree rank
= gfc_rank_cst
[i
];
745 tree tem
= gfc_conv_descriptor_ubound_get (src
, rank
);
746 tem
= fold_build2_loc (input_location
, MINUS_EXPR
,
747 gfc_array_index_type
, tem
,
748 gfc_conv_descriptor_lbound_get (src
, rank
));
749 tem
= fold_build2_loc (input_location
, PLUS_EXPR
,
750 gfc_array_index_type
, tem
,
751 gfc_conv_descriptor_lbound_get (dest
, rank
));
752 tem
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
753 tem
, gfc_conv_descriptor_ubound_get (dest
,
755 cond
= fold_build2_loc (input_location
, TRUTH_ORIF_EXPR
,
756 boolean_type_node
, cond
, tem
);
760 gfc_init_block (&cond_block2
);
762 if (GFC_DESCRIPTOR_TYPE_P (type
))
764 gfc_init_block (&inner_block
);
765 gfc_allocate_using_malloc (&inner_block
, ptr
, size
, NULL_TREE
);
766 then_b
= gfc_finish_block (&inner_block
);
768 gfc_init_block (&inner_block
);
769 gfc_add_modify (&inner_block
, ptr
,
770 gfc_call_realloc (&inner_block
, ptr
, size
));
771 else_b
= gfc_finish_block (&inner_block
);
773 gfc_add_expr_to_block (&cond_block2
,
774 build3_loc (input_location
, COND_EXPR
,
776 unshare_expr (nonalloc
),
778 gfc_add_modify (&cond_block2
, dest
, src
);
779 gfc_conv_descriptor_data_set (&cond_block2
, unshare_expr (dest
), ptr
);
783 gfc_allocate_using_malloc (&cond_block2
, ptr
, size
, NULL_TREE
);
784 gfc_add_modify (&cond_block2
, unshare_expr (dest
),
785 fold_convert (type
, ptr
));
787 then_b
= gfc_finish_block (&cond_block2
);
788 else_b
= build_empty_stmt (input_location
);
790 gfc_add_expr_to_block (&cond_block
,
791 build3_loc (input_location
, COND_EXPR
,
792 void_type_node
, unshare_expr (cond
),
795 tree srcptr
= GFC_DESCRIPTOR_TYPE_P (type
)
796 ? gfc_conv_descriptor_data_get (src
) : src
;
797 srcptr
= unshare_expr (srcptr
);
798 srcptr
= fold_convert (pvoid_type_node
, srcptr
);
799 call
= build_call_expr_loc (input_location
,
800 builtin_decl_explicit (BUILT_IN_MEMCPY
), 3, ptr
,
802 gfc_add_expr_to_block (&cond_block
, fold_convert (void_type_node
, call
));
803 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
805 tree tem
= gfc_walk_alloc_comps (src
, dest
,
806 OMP_CLAUSE_DECL (clause
),
807 WALK_ALLOC_COMPS_COPY_CTOR
);
808 gfc_add_expr_to_block (&cond_block
, tem
);
810 then_b
= gfc_finish_block (&cond_block
);
812 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_COPYIN
)
814 gfc_init_block (&cond_block
);
815 if (GFC_DESCRIPTOR_TYPE_P (type
))
816 gfc_add_expr_to_block (&cond_block
,
817 gfc_trans_dealloc_allocated (unshare_expr (dest
),
821 destptr
= gfc_evaluate_now (destptr
, &cond_block
);
822 gfc_add_expr_to_block (&cond_block
, gfc_call_free (destptr
));
823 gfc_add_modify (&cond_block
, unshare_expr (dest
),
824 build_zero_cst (TREE_TYPE (dest
)));
826 else_b
= gfc_finish_block (&cond_block
);
828 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
829 unshare_expr (srcptr
), null_pointer_node
);
830 gfc_add_expr_to_block (&block
,
831 build3_loc (input_location
, COND_EXPR
,
832 void_type_node
, cond
,
836 gfc_add_expr_to_block (&block
, then_b
);
838 return gfc_finish_block (&block
);
842 gfc_omp_linear_clause_add_loop (stmtblock_t
*block
, tree dest
, tree src
,
843 tree add
, tree nelems
)
845 stmtblock_t tmpblock
;
846 tree desta
, srca
, index
= gfc_create_var (gfc_array_index_type
, "S");
847 nelems
= gfc_evaluate_now (nelems
, block
);
849 gfc_init_block (&tmpblock
);
850 if (TREE_CODE (TREE_TYPE (dest
)) == ARRAY_TYPE
)
852 desta
= gfc_build_array_ref (dest
, index
, NULL
);
853 srca
= gfc_build_array_ref (src
, index
, NULL
);
857 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest
)));
858 tree idx
= fold_build2 (MULT_EXPR
, sizetype
,
859 fold_convert (sizetype
, index
),
860 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest
))));
861 desta
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
862 TREE_TYPE (dest
), dest
,
864 srca
= build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR
,
865 TREE_TYPE (src
), src
,
868 gfc_add_modify (&tmpblock
, desta
,
869 fold_build2 (PLUS_EXPR
, TREE_TYPE (desta
),
873 gfc_init_loopinfo (&loop
);
875 loop
.from
[0] = gfc_index_zero_node
;
876 loop
.loopvar
[0] = index
;
878 gfc_trans_scalarizing_loops (&loop
, &tmpblock
);
879 gfc_add_block_to_block (block
, &loop
.pre
);
882 /* Build and return code for a constructor of DEST that initializes
883 it to SRC plus ADD (ADD is scalar integer). */
886 gfc_omp_clause_linear_ctor (tree clause
, tree dest
, tree src
, tree add
)
888 tree type
= TREE_TYPE (dest
), ptr
, size
, nelems
= NULL_TREE
;
891 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_LINEAR
);
893 gfc_start_block (&block
);
894 add
= gfc_evaluate_now (add
, &block
);
896 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
897 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
898 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
900 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
901 if (!TYPE_DOMAIN (type
)
902 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == NULL_TREE
903 || TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
904 || TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) == error_mark_node
)
906 nelems
= fold_build2 (EXACT_DIV_EXPR
, sizetype
,
907 TYPE_SIZE_UNIT (type
),
908 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
909 nelems
= size_binop (MINUS_EXPR
, nelems
, size_one_node
);
912 nelems
= array_type_nelts (type
);
913 nelems
= fold_convert (gfc_array_index_type
, nelems
);
915 gfc_omp_linear_clause_add_loop (&block
, dest
, src
, add
, nelems
);
916 return gfc_finish_block (&block
);
919 /* Allocatable arrays in LINEAR clauses need to be allocated
920 and copied from SRC. */
921 gfc_add_modify (&block
, dest
, src
);
922 if (GFC_DESCRIPTOR_TYPE_P (type
))
924 tree rank
= gfc_rank_cst
[GFC_TYPE_ARRAY_RANK (type
) - 1];
925 size
= gfc_conv_descriptor_ubound_get (dest
, rank
);
926 size
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
928 gfc_conv_descriptor_lbound_get (dest
, rank
));
929 size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
930 size
, gfc_index_one_node
);
931 if (GFC_TYPE_ARRAY_RANK (type
) > 1)
932 size
= fold_build2_loc (input_location
, MULT_EXPR
,
933 gfc_array_index_type
, size
,
934 gfc_conv_descriptor_stride_get (dest
, rank
));
935 tree esize
= fold_convert (gfc_array_index_type
,
936 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
937 nelems
= gfc_evaluate_now (unshare_expr (size
), &block
);
938 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
939 nelems
, unshare_expr (esize
));
940 size
= gfc_evaluate_now (fold_convert (size_type_node
, size
),
942 nelems
= fold_build2_loc (input_location
, MINUS_EXPR
,
943 gfc_array_index_type
, nelems
,
947 size
= fold_convert (size_type_node
, TYPE_SIZE_UNIT (TREE_TYPE (type
)));
948 ptr
= gfc_create_var (pvoid_type_node
, NULL
);
949 gfc_allocate_using_malloc (&block
, ptr
, size
, NULL_TREE
);
950 if (GFC_DESCRIPTOR_TYPE_P (type
))
952 gfc_conv_descriptor_data_set (&block
, unshare_expr (dest
), ptr
);
953 tree etype
= gfc_get_element_type (type
);
954 ptr
= fold_convert (build_pointer_type (etype
), ptr
);
955 tree srcptr
= gfc_conv_descriptor_data_get (unshare_expr (src
));
956 srcptr
= fold_convert (build_pointer_type (etype
), srcptr
);
957 gfc_omp_linear_clause_add_loop (&block
, ptr
, srcptr
, add
, nelems
);
961 gfc_add_modify (&block
, unshare_expr (dest
),
962 fold_convert (TREE_TYPE (dest
), ptr
));
963 ptr
= fold_convert (TREE_TYPE (dest
), ptr
);
964 tree dstm
= build_fold_indirect_ref (ptr
);
965 tree srcm
= build_fold_indirect_ref (unshare_expr (src
));
966 gfc_add_modify (&block
, dstm
,
967 fold_build2 (PLUS_EXPR
, TREE_TYPE (add
), srcm
, add
));
969 return gfc_finish_block (&block
);
972 /* Build and return code destructing DECL. Return NULL if nothing
976 gfc_omp_clause_dtor (tree clause
, tree decl
)
978 tree type
= TREE_TYPE (decl
), tem
;
980 if ((! GFC_DESCRIPTOR_TYPE_P (type
)
981 || GFC_TYPE_ARRAY_AKIND (type
) != GFC_ARRAY_ALLOCATABLE
)
982 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause
)))
984 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
985 return gfc_walk_alloc_comps (decl
, NULL_TREE
,
986 OMP_CLAUSE_DECL (clause
),
987 WALK_ALLOC_COMPS_DTOR
);
991 if (GFC_DESCRIPTOR_TYPE_P (type
))
992 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
993 to be deallocated if they were allocated. */
994 tem
= gfc_trans_dealloc_allocated (decl
, false, NULL
);
996 tem
= gfc_call_free (decl
);
997 tem
= gfc_omp_unshare_expr (tem
);
999 if (gfc_has_alloc_comps (type
, OMP_CLAUSE_DECL (clause
)))
1004 gfc_init_block (&block
);
1005 gfc_add_expr_to_block (&block
,
1006 gfc_walk_alloc_comps (decl
, NULL_TREE
,
1007 OMP_CLAUSE_DECL (clause
),
1008 WALK_ALLOC_COMPS_DTOR
));
1009 gfc_add_expr_to_block (&block
, tem
);
1010 then_b
= gfc_finish_block (&block
);
1012 tem
= fold_convert (pvoid_type_node
,
1013 GFC_DESCRIPTOR_TYPE_P (type
)
1014 ? gfc_conv_descriptor_data_get (decl
) : decl
);
1015 tem
= unshare_expr (tem
);
1016 tree cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1017 tem
, null_pointer_node
);
1018 tem
= build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
1019 then_b
, build_empty_stmt (input_location
));
1026 gfc_omp_finish_clause (tree c
, gimple_seq
*pre_p
)
1028 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
1031 tree decl
= OMP_CLAUSE_DECL (c
);
1032 tree c2
= NULL_TREE
, c3
= NULL_TREE
, c4
= NULL_TREE
;
1033 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
1035 if (!gfc_omp_privatize_by_reference (decl
)
1036 && !GFC_DECL_GET_SCALAR_POINTER (decl
)
1037 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1038 && !GFC_DECL_CRAY_POINTEE (decl
)
1039 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
1041 tree orig_decl
= decl
;
1042 c4
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1043 OMP_CLAUSE_SET_MAP_KIND (c4
, GOMP_MAP_POINTER
);
1044 OMP_CLAUSE_DECL (c4
) = decl
;
1045 OMP_CLAUSE_SIZE (c4
) = size_int (0);
1046 decl
= build_fold_indirect_ref (decl
);
1047 OMP_CLAUSE_DECL (c
) = decl
;
1048 OMP_CLAUSE_SIZE (c
) = NULL_TREE
;
1049 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1050 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1051 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1053 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1054 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1055 OMP_CLAUSE_DECL (c3
) = unshare_expr (decl
);
1056 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1057 decl
= build_fold_indirect_ref (decl
);
1058 OMP_CLAUSE_DECL (c
) = decl
;
1061 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1064 gfc_start_block (&block
);
1065 tree type
= TREE_TYPE (decl
);
1066 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1067 ptr
= fold_convert (build_pointer_type (char_type_node
), ptr
);
1068 ptr
= build_fold_indirect_ref (ptr
);
1069 OMP_CLAUSE_DECL (c
) = ptr
;
1070 c2
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1071 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_TO_PSET
);
1072 OMP_CLAUSE_DECL (c2
) = decl
;
1073 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (type
);
1074 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
1075 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_POINTER
);
1076 OMP_CLAUSE_DECL (c3
) = gfc_conv_descriptor_data_get (decl
);
1077 OMP_CLAUSE_SIZE (c3
) = size_int (0);
1078 tree size
= create_tmp_var (gfc_array_index_type
);
1079 tree elemsz
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
1080 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
1081 if (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER
1082 || GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
)
1084 stmtblock_t cond_block
;
1085 tree tem
, then_b
, else_b
, zero
, cond
;
1087 gfc_init_block (&cond_block
);
1088 tem
= gfc_full_array_size (&cond_block
, decl
,
1089 GFC_TYPE_ARRAY_RANK (type
));
1090 gfc_add_modify (&cond_block
, size
, tem
);
1091 gfc_add_modify (&cond_block
, size
,
1092 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1094 then_b
= gfc_finish_block (&cond_block
);
1095 gfc_init_block (&cond_block
);
1096 zero
= build_int_cst (gfc_array_index_type
, 0);
1097 gfc_add_modify (&cond_block
, size
, zero
);
1098 else_b
= gfc_finish_block (&cond_block
);
1099 tem
= gfc_conv_descriptor_data_get (decl
);
1100 tem
= fold_convert (pvoid_type_node
, tem
);
1101 cond
= fold_build2_loc (input_location
, NE_EXPR
,
1102 boolean_type_node
, tem
, null_pointer_node
);
1103 gfc_add_expr_to_block (&block
, build3_loc (input_location
, COND_EXPR
,
1104 void_type_node
, cond
,
1109 gfc_add_modify (&block
, size
,
1110 gfc_full_array_size (&block
, decl
,
1111 GFC_TYPE_ARRAY_RANK (type
)));
1112 gfc_add_modify (&block
, size
,
1113 fold_build2 (MULT_EXPR
, gfc_array_index_type
,
1116 OMP_CLAUSE_SIZE (c
) = size
;
1117 tree stmt
= gfc_finish_block (&block
);
1118 gimplify_and_add (stmt
, pre_p
);
1121 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
1123 = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
1124 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1127 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (last
);
1128 OMP_CLAUSE_CHAIN (last
) = c2
;
1133 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (last
);
1134 OMP_CLAUSE_CHAIN (last
) = c3
;
1139 OMP_CLAUSE_CHAIN (c4
) = OMP_CLAUSE_CHAIN (last
);
1140 OMP_CLAUSE_CHAIN (last
) = c4
;
1146 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1147 disregarded in OpenMP construct, because it is going to be
1148 remapped during OpenMP lowering. SHARED is true if DECL
1149 is going to be shared, false if it is going to be privatized. */
1152 gfc_omp_disregard_value_expr (tree decl
, bool shared
)
1154 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1155 && DECL_HAS_VALUE_EXPR_P (decl
))
1157 tree value
= DECL_VALUE_EXPR (decl
);
1159 if (TREE_CODE (value
) == COMPONENT_REF
1160 && TREE_CODE (TREE_OPERAND (value
, 0)) == VAR_DECL
1161 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1163 /* If variable in COMMON or EQUIVALENCE is privatized, return
1164 true, as just that variable is supposed to be privatized,
1165 not the whole COMMON or whole EQUIVALENCE.
1166 For shared variables in COMMON or EQUIVALENCE, let them be
1167 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1168 from the same COMMON or EQUIVALENCE just one sharing of the
1169 whole COMMON or EQUIVALENCE is enough. */
1174 if (GFC_DECL_RESULT (decl
) && DECL_HAS_VALUE_EXPR_P (decl
))
1180 /* Return true if DECL that is shared iff SHARED is true should
1181 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1185 gfc_omp_private_debug_clause (tree decl
, bool shared
)
1187 if (GFC_DECL_CRAY_POINTEE (decl
))
1190 if (GFC_DECL_COMMON_OR_EQUIV (decl
)
1191 && DECL_HAS_VALUE_EXPR_P (decl
))
1193 tree value
= DECL_VALUE_EXPR (decl
);
1195 if (TREE_CODE (value
) == COMPONENT_REF
1196 && TREE_CODE (TREE_OPERAND (value
, 0)) == VAR_DECL
1197 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value
, 0)))
1204 /* Register language specific type size variables as potentially OpenMP
1205 firstprivate variables. */
1208 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
1210 if (GFC_ARRAY_TYPE_P (type
) || GFC_DESCRIPTOR_TYPE_P (type
))
1214 gcc_assert (TYPE_LANG_SPECIFIC (type
) != NULL
);
1215 for (r
= 0; r
< GFC_TYPE_ARRAY_RANK (type
); r
++)
1217 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_LBOUND (type
, r
));
1218 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_UBOUND (type
, r
));
1219 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_STRIDE (type
, r
));
1221 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_SIZE (type
));
1222 omp_firstprivatize_variable (ctx
, GFC_TYPE_ARRAY_OFFSET (type
));
1228 gfc_trans_add_clause (tree node
, tree tail
)
1230 OMP_CLAUSE_CHAIN (node
) = tail
;
1235 gfc_trans_omp_variable (gfc_symbol
*sym
, bool declare_simd
)
1240 gfc_symbol
*proc_sym
;
1241 gfc_formal_arglist
*f
;
1243 gcc_assert (sym
->attr
.dummy
);
1244 proc_sym
= sym
->ns
->proc_name
;
1245 if (proc_sym
->attr
.entry_master
)
1247 if (gfc_return_by_reference (proc_sym
))
1250 if (proc_sym
->ts
.type
== BT_CHARACTER
)
1253 for (f
= gfc_sym_get_dummy_args (proc_sym
); f
; f
= f
->next
)
1259 return build_int_cst (integer_type_node
, cnt
);
1262 tree t
= gfc_get_symbol_decl (sym
);
1266 bool alternate_entry
;
1269 return_value
= sym
->attr
.function
&& sym
->result
== sym
;
1270 alternate_entry
= sym
->attr
.function
&& sym
->attr
.entry
1271 && sym
->result
== sym
;
1272 entry_master
= sym
->attr
.result
1273 && sym
->ns
->proc_name
->attr
.entry_master
1274 && !gfc_return_by_reference (sym
->ns
->proc_name
);
1275 parent_decl
= current_function_decl
1276 ? DECL_CONTEXT (current_function_decl
) : NULL_TREE
;
1278 if ((t
== parent_decl
&& return_value
)
1279 || (sym
->ns
&& sym
->ns
->proc_name
1280 && sym
->ns
->proc_name
->backend_decl
== parent_decl
1281 && (alternate_entry
|| entry_master
)))
1286 /* Special case for assigning the return value of a function.
1287 Self recursive functions must have an explicit return value. */
1288 if (return_value
&& (t
== current_function_decl
|| parent_flag
))
1289 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1291 /* Similarly for alternate entry points. */
1292 else if (alternate_entry
1293 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1296 gfc_entry_list
*el
= NULL
;
1298 for (el
= sym
->ns
->entries
; el
; el
= el
->next
)
1301 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1306 else if (entry_master
1307 && (sym
->ns
->proc_name
->backend_decl
== current_function_decl
1309 t
= gfc_get_fake_result_decl (sym
, parent_flag
);
1315 gfc_trans_omp_variable_list (enum omp_clause_code code
,
1316 gfc_omp_namelist
*namelist
, tree list
,
1319 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1320 if (namelist
->sym
->attr
.referenced
|| declare_simd
)
1322 tree t
= gfc_trans_omp_variable (namelist
->sym
, declare_simd
);
1323 if (t
!= error_mark_node
)
1325 tree node
= build_omp_clause (input_location
, code
);
1326 OMP_CLAUSE_DECL (node
) = t
;
1327 list
= gfc_trans_add_clause (node
, list
);
1333 struct omp_udr_find_orig_data
1335 gfc_omp_udr
*omp_udr
;
1340 omp_udr_find_orig (gfc_expr
**e
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1343 struct omp_udr_find_orig_data
*cd
= (struct omp_udr_find_orig_data
*) data
;
1344 if ((*e
)->expr_type
== EXPR_VARIABLE
1345 && (*e
)->symtree
->n
.sym
== cd
->omp_udr
->omp_orig
)
1346 cd
->omp_orig_seen
= true;
1352 gfc_trans_omp_array_reduction_or_udr (tree c
, gfc_omp_namelist
*n
, locus where
)
1354 gfc_symbol
*sym
= n
->sym
;
1355 gfc_symtree
*root1
= NULL
, *root2
= NULL
, *root3
= NULL
, *root4
= NULL
;
1356 gfc_symtree
*symtree1
, *symtree2
, *symtree3
, *symtree4
= NULL
;
1357 gfc_symbol init_val_sym
, outer_sym
, intrinsic_sym
;
1358 gfc_symbol omp_var_copy
[4];
1359 gfc_expr
*e1
, *e2
, *e3
, *e4
;
1361 tree decl
, backend_decl
, stmt
, type
, outer_decl
;
1362 locus old_loc
= gfc_current_locus
;
1365 gfc_omp_udr
*udr
= n
->udr
? n
->udr
->udr
: NULL
;
1367 decl
= OMP_CLAUSE_DECL (c
);
1368 gfc_current_locus
= where
;
1369 type
= TREE_TYPE (decl
);
1370 outer_decl
= create_tmp_var_raw (type
);
1371 if (TREE_CODE (decl
) == PARM_DECL
1372 && TREE_CODE (type
) == REFERENCE_TYPE
1373 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type
))
1374 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type
)) == GFC_ARRAY_ALLOCATABLE
)
1376 decl
= build_fold_indirect_ref (decl
);
1377 type
= TREE_TYPE (type
);
1380 /* Create a fake symbol for init value. */
1381 memset (&init_val_sym
, 0, sizeof (init_val_sym
));
1382 init_val_sym
.ns
= sym
->ns
;
1383 init_val_sym
.name
= sym
->name
;
1384 init_val_sym
.ts
= sym
->ts
;
1385 init_val_sym
.attr
.referenced
= 1;
1386 init_val_sym
.declared_at
= where
;
1387 init_val_sym
.attr
.flavor
= FL_VARIABLE
;
1388 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1389 backend_decl
= omp_reduction_init (c
, gfc_sym_type (&init_val_sym
));
1390 else if (udr
->initializer_ns
)
1391 backend_decl
= NULL
;
1393 switch (sym
->ts
.type
)
1399 backend_decl
= build_zero_cst (gfc_sym_type (&init_val_sym
));
1402 backend_decl
= NULL_TREE
;
1405 init_val_sym
.backend_decl
= backend_decl
;
1407 /* Create a fake symbol for the outer array reference. */
1410 outer_sym
.as
= gfc_copy_array_spec (sym
->as
);
1411 outer_sym
.attr
.dummy
= 0;
1412 outer_sym
.attr
.result
= 0;
1413 outer_sym
.attr
.flavor
= FL_VARIABLE
;
1414 outer_sym
.backend_decl
= outer_decl
;
1415 if (decl
!= OMP_CLAUSE_DECL (c
))
1416 outer_sym
.backend_decl
= build_fold_indirect_ref (outer_decl
);
1418 /* Create fake symtrees for it. */
1419 symtree1
= gfc_new_symtree (&root1
, sym
->name
);
1420 symtree1
->n
.sym
= sym
;
1421 gcc_assert (symtree1
== root1
);
1423 symtree2
= gfc_new_symtree (&root2
, sym
->name
);
1424 symtree2
->n
.sym
= &init_val_sym
;
1425 gcc_assert (symtree2
== root2
);
1427 symtree3
= gfc_new_symtree (&root3
, sym
->name
);
1428 symtree3
->n
.sym
= &outer_sym
;
1429 gcc_assert (symtree3
== root3
);
1431 memset (omp_var_copy
, 0, sizeof omp_var_copy
);
1434 omp_var_copy
[0] = *udr
->omp_out
;
1435 omp_var_copy
[1] = *udr
->omp_in
;
1436 *udr
->omp_out
= outer_sym
;
1437 *udr
->omp_in
= *sym
;
1438 if (udr
->initializer_ns
)
1440 omp_var_copy
[2] = *udr
->omp_priv
;
1441 omp_var_copy
[3] = *udr
->omp_orig
;
1442 *udr
->omp_priv
= *sym
;
1443 *udr
->omp_orig
= outer_sym
;
1447 /* Create expressions. */
1448 e1
= gfc_get_expr ();
1449 e1
->expr_type
= EXPR_VARIABLE
;
1451 e1
->symtree
= symtree1
;
1453 if (sym
->attr
.dimension
)
1455 e1
->ref
= ref
= gfc_get_ref ();
1456 ref
->type
= REF_ARRAY
;
1457 ref
->u
.ar
.where
= where
;
1458 ref
->u
.ar
.as
= sym
->as
;
1459 ref
->u
.ar
.type
= AR_FULL
;
1460 ref
->u
.ar
.dimen
= 0;
1462 t
= gfc_resolve_expr (e1
);
1466 if (backend_decl
!= NULL_TREE
)
1468 e2
= gfc_get_expr ();
1469 e2
->expr_type
= EXPR_VARIABLE
;
1471 e2
->symtree
= symtree2
;
1473 t
= gfc_resolve_expr (e2
);
1476 else if (udr
->initializer_ns
== NULL
)
1478 gcc_assert (sym
->ts
.type
== BT_DERIVED
);
1479 e2
= gfc_default_initializer (&sym
->ts
);
1481 t
= gfc_resolve_expr (e2
);
1484 else if (n
->udr
->initializer
->op
== EXEC_ASSIGN
)
1486 e2
= gfc_copy_expr (n
->udr
->initializer
->expr2
);
1487 t
= gfc_resolve_expr (e2
);
1490 if (udr
&& udr
->initializer_ns
)
1492 struct omp_udr_find_orig_data cd
;
1494 cd
.omp_orig_seen
= false;
1495 gfc_code_walker (&n
->udr
->initializer
,
1496 gfc_dummy_code_callback
, omp_udr_find_orig
, &cd
);
1497 if (cd
.omp_orig_seen
)
1498 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
) = 1;
1501 e3
= gfc_copy_expr (e1
);
1502 e3
->symtree
= symtree3
;
1503 t
= gfc_resolve_expr (e3
);
1508 switch (OMP_CLAUSE_REDUCTION_CODE (c
))
1512 e4
= gfc_add (e3
, e1
);
1515 e4
= gfc_multiply (e3
, e1
);
1517 case TRUTH_ANDIF_EXPR
:
1518 e4
= gfc_and (e3
, e1
);
1520 case TRUTH_ORIF_EXPR
:
1521 e4
= gfc_or (e3
, e1
);
1524 e4
= gfc_eqv (e3
, e1
);
1527 e4
= gfc_neqv (e3
, e1
);
1545 if (n
->udr
->combiner
->op
== EXEC_ASSIGN
)
1548 e3
= gfc_copy_expr (n
->udr
->combiner
->expr1
);
1549 e4
= gfc_copy_expr (n
->udr
->combiner
->expr2
);
1550 t
= gfc_resolve_expr (e3
);
1552 t
= gfc_resolve_expr (e4
);
1561 memset (&intrinsic_sym
, 0, sizeof (intrinsic_sym
));
1562 intrinsic_sym
.ns
= sym
->ns
;
1563 intrinsic_sym
.name
= iname
;
1564 intrinsic_sym
.ts
= sym
->ts
;
1565 intrinsic_sym
.attr
.referenced
= 1;
1566 intrinsic_sym
.attr
.intrinsic
= 1;
1567 intrinsic_sym
.attr
.function
= 1;
1568 intrinsic_sym
.result
= &intrinsic_sym
;
1569 intrinsic_sym
.declared_at
= where
;
1571 symtree4
= gfc_new_symtree (&root4
, iname
);
1572 symtree4
->n
.sym
= &intrinsic_sym
;
1573 gcc_assert (symtree4
== root4
);
1575 e4
= gfc_get_expr ();
1576 e4
->expr_type
= EXPR_FUNCTION
;
1578 e4
->symtree
= symtree4
;
1579 e4
->value
.function
.actual
= gfc_get_actual_arglist ();
1580 e4
->value
.function
.actual
->expr
= e3
;
1581 e4
->value
.function
.actual
->next
= gfc_get_actual_arglist ();
1582 e4
->value
.function
.actual
->next
->expr
= e1
;
1584 if (OMP_CLAUSE_REDUCTION_CODE (c
) != ERROR_MARK
)
1586 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1587 e1
= gfc_copy_expr (e1
);
1588 e3
= gfc_copy_expr (e3
);
1589 t
= gfc_resolve_expr (e4
);
1593 /* Create the init statement list. */
1596 stmt
= gfc_trans_assignment (e1
, e2
, false, false);
1598 stmt
= gfc_trans_call (n
->udr
->initializer
, false,
1599 NULL_TREE
, NULL_TREE
, false);
1600 if (TREE_CODE (stmt
) != BIND_EXPR
)
1601 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1604 OMP_CLAUSE_REDUCTION_INIT (c
) = stmt
;
1606 /* Create the merge statement list. */
1609 stmt
= gfc_trans_assignment (e3
, e4
, false, true);
1611 stmt
= gfc_trans_call (n
->udr
->combiner
, false,
1612 NULL_TREE
, NULL_TREE
, false);
1613 if (TREE_CODE (stmt
) != BIND_EXPR
)
1614 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
1617 OMP_CLAUSE_REDUCTION_MERGE (c
) = stmt
;
1619 /* And stick the placeholder VAR_DECL into the clause as well. */
1620 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = outer_decl
;
1622 gfc_current_locus
= old_loc
;
1635 gfc_free_array_spec (outer_sym
.as
);
1639 *udr
->omp_out
= omp_var_copy
[0];
1640 *udr
->omp_in
= omp_var_copy
[1];
1641 if (udr
->initializer_ns
)
1643 *udr
->omp_priv
= omp_var_copy
[2];
1644 *udr
->omp_orig
= omp_var_copy
[3];
1650 gfc_trans_omp_reduction_list (gfc_omp_namelist
*namelist
, tree list
,
1653 for (; namelist
!= NULL
; namelist
= namelist
->next
)
1654 if (namelist
->sym
->attr
.referenced
)
1656 tree t
= gfc_trans_omp_variable (namelist
->sym
, false);
1657 if (t
!= error_mark_node
)
1659 tree node
= build_omp_clause (where
.lb
->location
,
1660 OMP_CLAUSE_REDUCTION
);
1661 OMP_CLAUSE_DECL (node
) = t
;
1662 switch (namelist
->u
.reduction_op
)
1664 case OMP_REDUCTION_PLUS
:
1665 OMP_CLAUSE_REDUCTION_CODE (node
) = PLUS_EXPR
;
1667 case OMP_REDUCTION_MINUS
:
1668 OMP_CLAUSE_REDUCTION_CODE (node
) = MINUS_EXPR
;
1670 case OMP_REDUCTION_TIMES
:
1671 OMP_CLAUSE_REDUCTION_CODE (node
) = MULT_EXPR
;
1673 case OMP_REDUCTION_AND
:
1674 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ANDIF_EXPR
;
1676 case OMP_REDUCTION_OR
:
1677 OMP_CLAUSE_REDUCTION_CODE (node
) = TRUTH_ORIF_EXPR
;
1679 case OMP_REDUCTION_EQV
:
1680 OMP_CLAUSE_REDUCTION_CODE (node
) = EQ_EXPR
;
1682 case OMP_REDUCTION_NEQV
:
1683 OMP_CLAUSE_REDUCTION_CODE (node
) = NE_EXPR
;
1685 case OMP_REDUCTION_MAX
:
1686 OMP_CLAUSE_REDUCTION_CODE (node
) = MAX_EXPR
;
1688 case OMP_REDUCTION_MIN
:
1689 OMP_CLAUSE_REDUCTION_CODE (node
) = MIN_EXPR
;
1691 case OMP_REDUCTION_IAND
:
1692 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_AND_EXPR
;
1694 case OMP_REDUCTION_IOR
:
1695 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_IOR_EXPR
;
1697 case OMP_REDUCTION_IEOR
:
1698 OMP_CLAUSE_REDUCTION_CODE (node
) = BIT_XOR_EXPR
;
1700 case OMP_REDUCTION_USER
:
1701 OMP_CLAUSE_REDUCTION_CODE (node
) = ERROR_MARK
;
1706 if (namelist
->sym
->attr
.dimension
1707 || namelist
->u
.reduction_op
== OMP_REDUCTION_USER
1708 || namelist
->sym
->attr
.allocatable
)
1709 gfc_trans_omp_array_reduction_or_udr (node
, namelist
, where
);
1710 list
= gfc_trans_add_clause (node
, list
);
1717 gfc_convert_expr_to_tree (stmtblock_t
*block
, gfc_expr
*expr
)
1722 gfc_init_se (&se
, NULL
);
1723 gfc_conv_expr (&se
, expr
);
1724 gfc_add_block_to_block (block
, &se
.pre
);
1725 result
= gfc_evaluate_now (se
.expr
, block
);
1726 gfc_add_block_to_block (block
, &se
.post
);
1732 gfc_trans_omp_clauses (stmtblock_t
*block
, gfc_omp_clauses
*clauses
,
1733 locus where
, bool declare_simd
= false)
1735 tree omp_clauses
= NULL_TREE
, chunk_size
, c
;
1737 enum omp_clause_code clause_code
;
1740 if (clauses
== NULL
)
1743 for (list
= 0; list
< OMP_LIST_NUM
; list
++)
1745 gfc_omp_namelist
*n
= clauses
->lists
[list
];
1751 case OMP_LIST_REDUCTION
:
1752 omp_clauses
= gfc_trans_omp_reduction_list (n
, omp_clauses
, where
);
1754 case OMP_LIST_PRIVATE
:
1755 clause_code
= OMP_CLAUSE_PRIVATE
;
1757 case OMP_LIST_SHARED
:
1758 clause_code
= OMP_CLAUSE_SHARED
;
1760 case OMP_LIST_FIRSTPRIVATE
:
1761 clause_code
= OMP_CLAUSE_FIRSTPRIVATE
;
1763 case OMP_LIST_LASTPRIVATE
:
1764 clause_code
= OMP_CLAUSE_LASTPRIVATE
;
1766 case OMP_LIST_COPYIN
:
1767 clause_code
= OMP_CLAUSE_COPYIN
;
1769 case OMP_LIST_COPYPRIVATE
:
1770 clause_code
= OMP_CLAUSE_COPYPRIVATE
;
1772 case OMP_LIST_UNIFORM
:
1773 clause_code
= OMP_CLAUSE_UNIFORM
;
1775 case OMP_LIST_USE_DEVICE
:
1776 clause_code
= OMP_CLAUSE_USE_DEVICE
;
1778 case OMP_LIST_DEVICE_RESIDENT
:
1779 clause_code
= OMP_CLAUSE_DEVICE_RESIDENT
;
1781 case OMP_LIST_CACHE
:
1782 clause_code
= OMP_CLAUSE__CACHE_
;
1787 = gfc_trans_omp_variable_list (clause_code
, n
, omp_clauses
,
1790 case OMP_LIST_ALIGNED
:
1791 for (; n
!= NULL
; n
= n
->next
)
1792 if (n
->sym
->attr
.referenced
|| declare_simd
)
1794 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1795 if (t
!= error_mark_node
)
1797 tree node
= build_omp_clause (input_location
,
1798 OMP_CLAUSE_ALIGNED
);
1799 OMP_CLAUSE_DECL (node
) = t
;
1805 alignment_var
= gfc_conv_constant_to_tree (n
->expr
);
1808 gfc_init_se (&se
, NULL
);
1809 gfc_conv_expr (&se
, n
->expr
);
1810 gfc_add_block_to_block (block
, &se
.pre
);
1811 alignment_var
= gfc_evaluate_now (se
.expr
, block
);
1812 gfc_add_block_to_block (block
, &se
.post
);
1814 OMP_CLAUSE_ALIGNED_ALIGNMENT (node
) = alignment_var
;
1816 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1820 case OMP_LIST_LINEAR
:
1822 gfc_expr
*last_step_expr
= NULL
;
1823 tree last_step
= NULL_TREE
;
1825 for (; n
!= NULL
; n
= n
->next
)
1829 last_step_expr
= n
->expr
;
1830 last_step
= NULL_TREE
;
1832 if (n
->sym
->attr
.referenced
|| declare_simd
)
1834 tree t
= gfc_trans_omp_variable (n
->sym
, declare_simd
);
1835 if (t
!= error_mark_node
)
1837 tree node
= build_omp_clause (input_location
,
1839 OMP_CLAUSE_DECL (node
) = t
;
1840 if (last_step_expr
&& last_step
== NULL_TREE
)
1844 = gfc_conv_constant_to_tree (last_step_expr
);
1847 gfc_init_se (&se
, NULL
);
1848 gfc_conv_expr (&se
, last_step_expr
);
1849 gfc_add_block_to_block (block
, &se
.pre
);
1850 last_step
= gfc_evaluate_now (se
.expr
, block
);
1851 gfc_add_block_to_block (block
, &se
.post
);
1854 OMP_CLAUSE_LINEAR_STEP (node
)
1855 = fold_convert (gfc_typenode_for_spec (&n
->sym
->ts
),
1857 if (n
->sym
->attr
.dimension
|| n
->sym
->attr
.allocatable
)
1858 OMP_CLAUSE_LINEAR_ARRAY (node
) = 1;
1859 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1865 case OMP_LIST_DEPEND
:
1866 for (; n
!= NULL
; n
= n
->next
)
1868 if (!n
->sym
->attr
.referenced
)
1871 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_DEPEND
);
1872 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
1874 tree decl
= gfc_get_symbol_decl (n
->sym
);
1875 if (gfc_omp_privatize_by_reference (decl
))
1876 decl
= build_fold_indirect_ref (decl
);
1877 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1879 decl
= gfc_conv_descriptor_data_get (decl
);
1880 decl
= fold_convert (build_pointer_type (char_type_node
),
1882 decl
= build_fold_indirect_ref (decl
);
1884 else if (DECL_P (decl
))
1885 TREE_ADDRESSABLE (decl
) = 1;
1886 OMP_CLAUSE_DECL (node
) = decl
;
1891 gfc_init_se (&se
, NULL
);
1892 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
1894 gfc_conv_expr_reference (&se
, n
->expr
);
1899 gfc_conv_expr_descriptor (&se
, n
->expr
);
1900 ptr
= gfc_conv_array_data (se
.expr
);
1902 gfc_add_block_to_block (block
, &se
.pre
);
1903 gfc_add_block_to_block (block
, &se
.post
);
1904 ptr
= fold_convert (build_pointer_type (char_type_node
),
1906 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
1908 switch (n
->u
.depend_op
)
1911 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_IN
;
1913 case OMP_DEPEND_OUT
:
1914 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_OUT
;
1916 case OMP_DEPEND_INOUT
:
1917 OMP_CLAUSE_DEPEND_KIND (node
) = OMP_CLAUSE_DEPEND_INOUT
;
1922 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
1926 for (; n
!= NULL
; n
= n
->next
)
1928 if (!n
->sym
->attr
.referenced
)
1931 tree node
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
1932 tree node2
= NULL_TREE
;
1933 tree node3
= NULL_TREE
;
1934 tree node4
= NULL_TREE
;
1935 tree decl
= gfc_get_symbol_decl (n
->sym
);
1937 TREE_ADDRESSABLE (decl
) = 1;
1938 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
1940 if (POINTER_TYPE_P (TREE_TYPE (decl
))
1941 && (gfc_omp_privatize_by_reference (decl
)
1942 || GFC_DECL_GET_SCALAR_POINTER (decl
)
1943 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl
)
1944 || GFC_DECL_CRAY_POINTEE (decl
)
1945 || GFC_DESCRIPTOR_TYPE_P
1946 (TREE_TYPE (TREE_TYPE (decl
)))))
1948 tree orig_decl
= decl
;
1949 node4
= build_omp_clause (input_location
,
1951 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
1952 OMP_CLAUSE_DECL (node4
) = decl
;
1953 OMP_CLAUSE_SIZE (node4
) = size_int (0);
1954 decl
= build_fold_indirect_ref (decl
);
1955 if (TREE_CODE (TREE_TYPE (orig_decl
)) == REFERENCE_TYPE
1956 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl
)
1957 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl
)))
1959 node3
= build_omp_clause (input_location
,
1961 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
1962 OMP_CLAUSE_DECL (node3
) = decl
;
1963 OMP_CLAUSE_SIZE (node3
) = size_int (0);
1964 decl
= build_fold_indirect_ref (decl
);
1967 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
1969 tree type
= TREE_TYPE (decl
);
1970 tree ptr
= gfc_conv_descriptor_data_get (decl
);
1971 ptr
= fold_convert (build_pointer_type (char_type_node
),
1973 ptr
= build_fold_indirect_ref (ptr
);
1974 OMP_CLAUSE_DECL (node
) = ptr
;
1975 node2
= build_omp_clause (input_location
,
1977 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
1978 OMP_CLAUSE_DECL (node2
) = decl
;
1979 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
1980 node3
= build_omp_clause (input_location
,
1982 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
1983 OMP_CLAUSE_DECL (node3
)
1984 = gfc_conv_descriptor_data_get (decl
);
1985 OMP_CLAUSE_SIZE (node3
) = size_int (0);
1987 /* We have to check for n->sym->attr.dimension because
1988 of scalar coarrays. */
1989 if (n
->sym
->attr
.pointer
&& n
->sym
->attr
.dimension
)
1991 stmtblock_t cond_block
;
1993 = gfc_create_var (gfc_array_index_type
, NULL
);
1994 tree tem
, then_b
, else_b
, zero
, cond
;
1996 gfc_init_block (&cond_block
);
1998 = gfc_full_array_size (&cond_block
, decl
,
1999 GFC_TYPE_ARRAY_RANK (type
));
2000 gfc_add_modify (&cond_block
, size
, tem
);
2001 then_b
= gfc_finish_block (&cond_block
);
2002 gfc_init_block (&cond_block
);
2003 zero
= build_int_cst (gfc_array_index_type
, 0);
2004 gfc_add_modify (&cond_block
, size
, zero
);
2005 else_b
= gfc_finish_block (&cond_block
);
2006 tem
= gfc_conv_descriptor_data_get (decl
);
2007 tem
= fold_convert (pvoid_type_node
, tem
);
2008 cond
= fold_build2_loc (input_location
, NE_EXPR
,
2010 tem
, null_pointer_node
);
2011 gfc_add_expr_to_block (block
,
2012 build3_loc (input_location
,
2017 OMP_CLAUSE_SIZE (node
) = size
;
2019 else if (n
->sym
->attr
.dimension
)
2020 OMP_CLAUSE_SIZE (node
)
2021 = gfc_full_array_size (block
, decl
,
2022 GFC_TYPE_ARRAY_RANK (type
));
2023 if (n
->sym
->attr
.dimension
)
2026 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2027 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2028 OMP_CLAUSE_SIZE (node
)
2029 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2030 OMP_CLAUSE_SIZE (node
), elemsz
);
2034 OMP_CLAUSE_DECL (node
) = decl
;
2039 gfc_init_se (&se
, NULL
);
2040 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2042 gfc_conv_expr_reference (&se
, n
->expr
);
2043 gfc_add_block_to_block (block
, &se
.pre
);
2045 OMP_CLAUSE_SIZE (node
)
2046 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2050 gfc_conv_expr_descriptor (&se
, n
->expr
);
2051 ptr
= gfc_conv_array_data (se
.expr
);
2052 tree type
= TREE_TYPE (se
.expr
);
2053 gfc_add_block_to_block (block
, &se
.pre
);
2054 OMP_CLAUSE_SIZE (node
)
2055 = gfc_full_array_size (block
, se
.expr
,
2056 GFC_TYPE_ARRAY_RANK (type
));
2058 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2059 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2060 OMP_CLAUSE_SIZE (node
)
2061 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2062 OMP_CLAUSE_SIZE (node
), elemsz
);
2064 gfc_add_block_to_block (block
, &se
.post
);
2065 ptr
= fold_convert (build_pointer_type (char_type_node
),
2067 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2069 if (POINTER_TYPE_P (TREE_TYPE (decl
))
2070 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
2072 node4
= build_omp_clause (input_location
,
2074 OMP_CLAUSE_SET_MAP_KIND (node4
, GOMP_MAP_POINTER
);
2075 OMP_CLAUSE_DECL (node4
) = decl
;
2076 OMP_CLAUSE_SIZE (node4
) = size_int (0);
2077 decl
= build_fold_indirect_ref (decl
);
2079 ptr
= fold_convert (sizetype
, ptr
);
2080 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2082 tree type
= TREE_TYPE (decl
);
2083 ptr2
= gfc_conv_descriptor_data_get (decl
);
2084 node2
= build_omp_clause (input_location
,
2086 OMP_CLAUSE_SET_MAP_KIND (node2
, GOMP_MAP_TO_PSET
);
2087 OMP_CLAUSE_DECL (node2
) = decl
;
2088 OMP_CLAUSE_SIZE (node2
) = TYPE_SIZE_UNIT (type
);
2089 node3
= build_omp_clause (input_location
,
2091 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2092 OMP_CLAUSE_DECL (node3
)
2093 = gfc_conv_descriptor_data_get (decl
);
2097 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
2098 ptr2
= build_fold_addr_expr (decl
);
2101 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl
)));
2104 node3
= build_omp_clause (input_location
,
2106 OMP_CLAUSE_SET_MAP_KIND (node3
, GOMP_MAP_POINTER
);
2107 OMP_CLAUSE_DECL (node3
) = decl
;
2109 ptr2
= fold_convert (sizetype
, ptr2
);
2110 OMP_CLAUSE_SIZE (node3
)
2111 = fold_build2 (MINUS_EXPR
, sizetype
, ptr
, ptr2
);
2113 switch (n
->u
.map_op
)
2116 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_ALLOC
);
2119 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TO
);
2122 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FROM
);
2124 case OMP_MAP_TOFROM
:
2125 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_TOFROM
);
2127 case OMP_MAP_FORCE_ALLOC
:
2128 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_ALLOC
);
2130 case OMP_MAP_FORCE_DEALLOC
:
2131 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEALLOC
);
2133 case OMP_MAP_FORCE_TO
:
2134 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TO
);
2136 case OMP_MAP_FORCE_FROM
:
2137 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_FROM
);
2139 case OMP_MAP_FORCE_TOFROM
:
2140 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_TOFROM
);
2142 case OMP_MAP_FORCE_PRESENT
:
2143 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_PRESENT
);
2145 case OMP_MAP_FORCE_DEVICEPTR
:
2146 OMP_CLAUSE_SET_MAP_KIND (node
, GOMP_MAP_FORCE_DEVICEPTR
);
2151 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2153 omp_clauses
= gfc_trans_add_clause (node2
, omp_clauses
);
2155 omp_clauses
= gfc_trans_add_clause (node3
, omp_clauses
);
2157 omp_clauses
= gfc_trans_add_clause (node4
, omp_clauses
);
2162 for (; n
!= NULL
; n
= n
->next
)
2164 if (!n
->sym
->attr
.referenced
)
2167 tree node
= build_omp_clause (input_location
,
2169 ? OMP_CLAUSE_TO
: OMP_CLAUSE_FROM
);
2170 if (n
->expr
== NULL
|| n
->expr
->ref
->u
.ar
.type
== AR_FULL
)
2172 tree decl
= gfc_get_symbol_decl (n
->sym
);
2173 if (gfc_omp_privatize_by_reference (decl
))
2174 decl
= build_fold_indirect_ref (decl
);
2175 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl
)))
2177 tree type
= TREE_TYPE (decl
);
2178 tree ptr
= gfc_conv_descriptor_data_get (decl
);
2179 ptr
= fold_convert (build_pointer_type (char_type_node
),
2181 ptr
= build_fold_indirect_ref (ptr
);
2182 OMP_CLAUSE_DECL (node
) = ptr
;
2183 OMP_CLAUSE_SIZE (node
)
2184 = gfc_full_array_size (block
, decl
,
2185 GFC_TYPE_ARRAY_RANK (type
));
2187 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2188 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2189 OMP_CLAUSE_SIZE (node
)
2190 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2191 OMP_CLAUSE_SIZE (node
), elemsz
);
2194 OMP_CLAUSE_DECL (node
) = decl
;
2199 gfc_init_se (&se
, NULL
);
2200 if (n
->expr
->ref
->u
.ar
.type
== AR_ELEMENT
)
2202 gfc_conv_expr_reference (&se
, n
->expr
);
2204 gfc_add_block_to_block (block
, &se
.pre
);
2205 OMP_CLAUSE_SIZE (node
)
2206 = TYPE_SIZE_UNIT (TREE_TYPE (ptr
));
2210 gfc_conv_expr_descriptor (&se
, n
->expr
);
2211 ptr
= gfc_conv_array_data (se
.expr
);
2212 tree type
= TREE_TYPE (se
.expr
);
2213 gfc_add_block_to_block (block
, &se
.pre
);
2214 OMP_CLAUSE_SIZE (node
)
2215 = gfc_full_array_size (block
, se
.expr
,
2216 GFC_TYPE_ARRAY_RANK (type
));
2218 = TYPE_SIZE_UNIT (gfc_get_element_type (type
));
2219 elemsz
= fold_convert (gfc_array_index_type
, elemsz
);
2220 OMP_CLAUSE_SIZE (node
)
2221 = fold_build2 (MULT_EXPR
, gfc_array_index_type
,
2222 OMP_CLAUSE_SIZE (node
), elemsz
);
2224 gfc_add_block_to_block (block
, &se
.post
);
2225 ptr
= fold_convert (build_pointer_type (char_type_node
),
2227 OMP_CLAUSE_DECL (node
) = build_fold_indirect_ref (ptr
);
2229 omp_clauses
= gfc_trans_add_clause (node
, omp_clauses
);
2237 if (clauses
->if_expr
)
2241 gfc_init_se (&se
, NULL
);
2242 gfc_conv_expr (&se
, clauses
->if_expr
);
2243 gfc_add_block_to_block (block
, &se
.pre
);
2244 if_var
= gfc_evaluate_now (se
.expr
, block
);
2245 gfc_add_block_to_block (block
, &se
.post
);
2247 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_IF
);
2248 OMP_CLAUSE_IF_EXPR (c
) = if_var
;
2249 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2252 if (clauses
->final_expr
)
2256 gfc_init_se (&se
, NULL
);
2257 gfc_conv_expr (&se
, clauses
->final_expr
);
2258 gfc_add_block_to_block (block
, &se
.pre
);
2259 final_var
= gfc_evaluate_now (se
.expr
, block
);
2260 gfc_add_block_to_block (block
, &se
.post
);
2262 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FINAL
);
2263 OMP_CLAUSE_FINAL_EXPR (c
) = final_var
;
2264 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2267 if (clauses
->num_threads
)
2271 gfc_init_se (&se
, NULL
);
2272 gfc_conv_expr (&se
, clauses
->num_threads
);
2273 gfc_add_block_to_block (block
, &se
.pre
);
2274 num_threads
= gfc_evaluate_now (se
.expr
, block
);
2275 gfc_add_block_to_block (block
, &se
.post
);
2277 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_THREADS
);
2278 OMP_CLAUSE_NUM_THREADS_EXPR (c
) = num_threads
;
2279 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2282 chunk_size
= NULL_TREE
;
2283 if (clauses
->chunk_size
)
2285 gfc_init_se (&se
, NULL
);
2286 gfc_conv_expr (&se
, clauses
->chunk_size
);
2287 gfc_add_block_to_block (block
, &se
.pre
);
2288 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2289 gfc_add_block_to_block (block
, &se
.post
);
2292 if (clauses
->sched_kind
!= OMP_SCHED_NONE
)
2294 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SCHEDULE
);
2295 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2296 switch (clauses
->sched_kind
)
2298 case OMP_SCHED_STATIC
:
2299 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_STATIC
;
2301 case OMP_SCHED_DYNAMIC
:
2302 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_DYNAMIC
;
2304 case OMP_SCHED_GUIDED
:
2305 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_GUIDED
;
2307 case OMP_SCHED_RUNTIME
:
2308 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_RUNTIME
;
2310 case OMP_SCHED_AUTO
:
2311 OMP_CLAUSE_SCHEDULE_KIND (c
) = OMP_CLAUSE_SCHEDULE_AUTO
;
2316 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2319 if (clauses
->default_sharing
!= OMP_DEFAULT_UNKNOWN
)
2321 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEFAULT
);
2322 switch (clauses
->default_sharing
)
2324 case OMP_DEFAULT_NONE
:
2325 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_NONE
;
2327 case OMP_DEFAULT_SHARED
:
2328 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_SHARED
;
2330 case OMP_DEFAULT_PRIVATE
:
2331 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_PRIVATE
;
2333 case OMP_DEFAULT_FIRSTPRIVATE
:
2334 OMP_CLAUSE_DEFAULT_KIND (c
) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
2339 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2342 if (clauses
->nowait
)
2344 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOWAIT
);
2345 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2348 if (clauses
->ordered
)
2350 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
2351 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2354 if (clauses
->untied
)
2356 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_UNTIED
);
2357 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2360 if (clauses
->mergeable
)
2362 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_MERGEABLE
);
2363 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2366 if (clauses
->collapse
)
2368 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_COLLAPSE
);
2369 OMP_CLAUSE_COLLAPSE_EXPR (c
)
2370 = build_int_cst (integer_type_node
, clauses
->collapse
);
2371 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2374 if (clauses
->inbranch
)
2376 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INBRANCH
);
2377 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2380 if (clauses
->notinbranch
)
2382 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NOTINBRANCH
);
2383 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2386 switch (clauses
->cancel
)
2388 case OMP_CANCEL_UNKNOWN
:
2390 case OMP_CANCEL_PARALLEL
:
2391 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PARALLEL
);
2392 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2394 case OMP_CANCEL_SECTIONS
:
2395 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SECTIONS
);
2396 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2399 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_FOR
);
2400 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2402 case OMP_CANCEL_TASKGROUP
:
2403 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_TASKGROUP
);
2404 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2408 if (clauses
->proc_bind
!= OMP_PROC_BIND_UNKNOWN
)
2410 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_PROC_BIND
);
2411 switch (clauses
->proc_bind
)
2413 case OMP_PROC_BIND_MASTER
:
2414 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_MASTER
;
2416 case OMP_PROC_BIND_SPREAD
:
2417 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_SPREAD
;
2419 case OMP_PROC_BIND_CLOSE
:
2420 OMP_CLAUSE_PROC_BIND_KIND (c
) = OMP_CLAUSE_PROC_BIND_CLOSE
;
2425 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2428 if (clauses
->safelen_expr
)
2432 gfc_init_se (&se
, NULL
);
2433 gfc_conv_expr (&se
, clauses
->safelen_expr
);
2434 gfc_add_block_to_block (block
, &se
.pre
);
2435 safelen_var
= gfc_evaluate_now (se
.expr
, block
);
2436 gfc_add_block_to_block (block
, &se
.post
);
2438 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SAFELEN
);
2439 OMP_CLAUSE_SAFELEN_EXPR (c
) = safelen_var
;
2440 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2443 if (clauses
->simdlen_expr
)
2445 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_SIMDLEN
);
2446 OMP_CLAUSE_SIMDLEN_EXPR (c
)
2447 = gfc_conv_constant_to_tree (clauses
->simdlen_expr
);
2448 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2451 if (clauses
->num_teams
)
2455 gfc_init_se (&se
, NULL
);
2456 gfc_conv_expr (&se
, clauses
->num_teams
);
2457 gfc_add_block_to_block (block
, &se
.pre
);
2458 num_teams
= gfc_evaluate_now (se
.expr
, block
);
2459 gfc_add_block_to_block (block
, &se
.post
);
2461 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_TEAMS
);
2462 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
2463 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2466 if (clauses
->device
)
2470 gfc_init_se (&se
, NULL
);
2471 gfc_conv_expr (&se
, clauses
->device
);
2472 gfc_add_block_to_block (block
, &se
.pre
);
2473 device
= gfc_evaluate_now (se
.expr
, block
);
2474 gfc_add_block_to_block (block
, &se
.post
);
2476 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DEVICE
);
2477 OMP_CLAUSE_DEVICE_ID (c
) = device
;
2478 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2481 if (clauses
->thread_limit
)
2485 gfc_init_se (&se
, NULL
);
2486 gfc_conv_expr (&se
, clauses
->thread_limit
);
2487 gfc_add_block_to_block (block
, &se
.pre
);
2488 thread_limit
= gfc_evaluate_now (se
.expr
, block
);
2489 gfc_add_block_to_block (block
, &se
.post
);
2491 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_THREAD_LIMIT
);
2492 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
2493 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2496 chunk_size
= NULL_TREE
;
2497 if (clauses
->dist_chunk_size
)
2499 gfc_init_se (&se
, NULL
);
2500 gfc_conv_expr (&se
, clauses
->dist_chunk_size
);
2501 gfc_add_block_to_block (block
, &se
.pre
);
2502 chunk_size
= gfc_evaluate_now (se
.expr
, block
);
2503 gfc_add_block_to_block (block
, &se
.post
);
2506 if (clauses
->dist_sched_kind
!= OMP_SCHED_NONE
)
2508 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_DIST_SCHEDULE
);
2509 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c
) = chunk_size
;
2510 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2515 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ASYNC
);
2516 if (clauses
->async_expr
)
2517 OMP_CLAUSE_ASYNC_EXPR (c
)
2518 = gfc_convert_expr_to_tree (block
, clauses
->async_expr
);
2520 OMP_CLAUSE_ASYNC_EXPR (c
) = NULL
;
2521 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2525 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_ORDERED
);
2526 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2528 if (clauses
->independent
)
2530 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_INDEPENDENT
);
2531 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2533 if (clauses
->wait_list
)
2537 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2539 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WAIT
);
2540 OMP_CLAUSE_DECL (c
) = gfc_convert_expr_to_tree (block
, el
->expr
);
2541 OMP_CLAUSE_CHAIN (c
) = omp_clauses
;
2545 if (clauses
->num_gangs_expr
)
2548 = gfc_convert_expr_to_tree (block
, clauses
->num_gangs_expr
);
2549 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_GANGS
);
2550 OMP_CLAUSE_NUM_GANGS_EXPR (c
) = num_gangs_var
;
2551 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2553 if (clauses
->num_workers_expr
)
2555 tree num_workers_var
2556 = gfc_convert_expr_to_tree (block
, clauses
->num_workers_expr
);
2557 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_NUM_WORKERS
);
2558 OMP_CLAUSE_NUM_WORKERS_EXPR (c
) = num_workers_var
;
2559 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2561 if (clauses
->vector_length_expr
)
2563 tree vector_length_var
2564 = gfc_convert_expr_to_tree (block
, clauses
->vector_length_expr
);
2565 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR_LENGTH
);
2566 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c
) = vector_length_var
;
2567 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2569 if (clauses
->vector
)
2571 if (clauses
->vector_expr
)
2574 = gfc_convert_expr_to_tree (block
, clauses
->vector_expr
);
2575 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2576 OMP_CLAUSE_VECTOR_EXPR (c
) = vector_var
;
2577 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2581 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_VECTOR
);
2582 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2585 if (clauses
->worker
)
2587 if (clauses
->worker_expr
)
2590 = gfc_convert_expr_to_tree (block
, clauses
->worker_expr
);
2591 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2592 OMP_CLAUSE_WORKER_EXPR (c
) = worker_var
;
2593 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2597 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_WORKER
);
2598 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2603 if (clauses
->gang_expr
)
2606 = gfc_convert_expr_to_tree (block
, clauses
->gang_expr
);
2607 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
2608 OMP_CLAUSE_GANG_EXPR (c
) = gang_var
;
2609 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2613 c
= build_omp_clause (where
.lb
->location
, OMP_CLAUSE_GANG
);
2614 omp_clauses
= gfc_trans_add_clause (c
, omp_clauses
);
2618 return nreverse (omp_clauses
);
2621 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
2624 gfc_trans_omp_code (gfc_code
*code
, bool force_empty
)
2629 stmt
= gfc_trans_code (code
);
2630 if (TREE_CODE (stmt
) != BIND_EXPR
)
2632 if (!IS_EMPTY_STMT (stmt
) || force_empty
)
2634 tree block
= poplevel (1, 0);
2635 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, block
);
2645 /* Trans OpenACC directives. */
2646 /* parallel, kernels, data and host_data. */
2648 gfc_trans_oacc_construct (gfc_code
*code
)
2651 tree stmt
, oacc_clauses
;
2652 enum tree_code construct_code
;
2656 case EXEC_OACC_PARALLEL
:
2657 construct_code
= OACC_PARALLEL
;
2659 case EXEC_OACC_KERNELS
:
2660 construct_code
= OACC_KERNELS
;
2662 case EXEC_OACC_DATA
:
2663 construct_code
= OACC_DATA
;
2665 case EXEC_OACC_HOST_DATA
:
2666 construct_code
= OACC_HOST_DATA
;
2672 gfc_start_block (&block
);
2673 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
2675 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
2676 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
2678 gfc_add_expr_to_block (&block
, stmt
);
2679 return gfc_finish_block (&block
);
2682 /* update, enter_data, exit_data, cache. */
2684 gfc_trans_oacc_executable_directive (gfc_code
*code
)
2687 tree stmt
, oacc_clauses
;
2688 enum tree_code construct_code
;
2692 case EXEC_OACC_UPDATE
:
2693 construct_code
= OACC_UPDATE
;
2695 case EXEC_OACC_ENTER_DATA
:
2696 construct_code
= OACC_ENTER_DATA
;
2698 case EXEC_OACC_EXIT_DATA
:
2699 construct_code
= OACC_EXIT_DATA
;
2701 case EXEC_OACC_CACHE
:
2702 construct_code
= OACC_CACHE
;
2708 gfc_start_block (&block
);
2709 oacc_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
2711 stmt
= build1_loc (input_location
, construct_code
, void_type_node
,
2713 gfc_add_expr_to_block (&block
, stmt
);
2714 return gfc_finish_block (&block
);
2718 gfc_trans_oacc_wait_directive (gfc_code
*code
)
2722 vec
<tree
, va_gc
> *args
;
2725 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
2726 location_t loc
= input_location
;
2728 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2731 vec_alloc (args
, nparms
+ 2);
2732 stmt
= builtin_decl_explicit (BUILT_IN_GOACC_WAIT
);
2734 gfc_start_block (&block
);
2736 if (clauses
->async_expr
)
2737 t
= gfc_convert_expr_to_tree (&block
, clauses
->async_expr
);
2739 t
= build_int_cst (integer_type_node
, -2);
2741 args
->quick_push (t
);
2742 args
->quick_push (build_int_cst (integer_type_node
, nparms
));
2744 for (el
= clauses
->wait_list
; el
; el
= el
->next
)
2745 args
->quick_push (gfc_convert_expr_to_tree (&block
, el
->expr
));
2747 stmt
= build_call_expr_loc_vec (loc
, stmt
, args
);
2748 gfc_add_expr_to_block (&block
, stmt
);
2752 return gfc_finish_block (&block
);
2755 static tree
gfc_trans_omp_sections (gfc_code
*, gfc_omp_clauses
*);
2756 static tree
gfc_trans_omp_workshare (gfc_code
*, gfc_omp_clauses
*);
2759 gfc_trans_omp_atomic (gfc_code
*code
)
2761 gfc_code
*atomic_code
= code
;
2765 gfc_expr
*expr2
, *e
;
2768 tree lhsaddr
, type
, rhs
, x
;
2769 enum tree_code op
= ERROR_MARK
;
2770 enum tree_code aop
= OMP_ATOMIC
;
2771 bool var_on_left
= false;
2772 bool seq_cst
= (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SEQ_CST
) != 0;
2774 code
= code
->block
->next
;
2775 gcc_assert (code
->op
== EXEC_ASSIGN
);
2776 var
= code
->expr1
->symtree
->n
.sym
;
2778 gfc_init_se (&lse
, NULL
);
2779 gfc_init_se (&rse
, NULL
);
2780 gfc_init_se (&vse
, NULL
);
2781 gfc_start_block (&block
);
2783 expr2
= code
->expr2
;
2784 if (expr2
->expr_type
== EXPR_FUNCTION
2785 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2786 expr2
= expr2
->value
.function
.actual
->expr
;
2788 switch (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2790 case GFC_OMP_ATOMIC_READ
:
2791 gfc_conv_expr (&vse
, code
->expr1
);
2792 gfc_add_block_to_block (&block
, &vse
.pre
);
2794 gfc_conv_expr (&lse
, expr2
);
2795 gfc_add_block_to_block (&block
, &lse
.pre
);
2796 type
= TREE_TYPE (lse
.expr
);
2797 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
2799 x
= build1 (OMP_ATOMIC_READ
, type
, lhsaddr
);
2800 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
2801 x
= convert (TREE_TYPE (vse
.expr
), x
);
2802 gfc_add_modify (&block
, vse
.expr
, x
);
2804 gfc_add_block_to_block (&block
, &lse
.pre
);
2805 gfc_add_block_to_block (&block
, &rse
.pre
);
2807 return gfc_finish_block (&block
);
2808 case GFC_OMP_ATOMIC_CAPTURE
:
2809 aop
= OMP_ATOMIC_CAPTURE_NEW
;
2810 if (expr2
->expr_type
== EXPR_VARIABLE
)
2812 aop
= OMP_ATOMIC_CAPTURE_OLD
;
2813 gfc_conv_expr (&vse
, code
->expr1
);
2814 gfc_add_block_to_block (&block
, &vse
.pre
);
2816 gfc_conv_expr (&lse
, expr2
);
2817 gfc_add_block_to_block (&block
, &lse
.pre
);
2818 gfc_init_se (&lse
, NULL
);
2820 var
= code
->expr1
->symtree
->n
.sym
;
2821 expr2
= code
->expr2
;
2822 if (expr2
->expr_type
== EXPR_FUNCTION
2823 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2824 expr2
= expr2
->value
.function
.actual
->expr
;
2831 gfc_conv_expr (&lse
, code
->expr1
);
2832 gfc_add_block_to_block (&block
, &lse
.pre
);
2833 type
= TREE_TYPE (lse
.expr
);
2834 lhsaddr
= gfc_build_addr_expr (NULL
, lse
.expr
);
2836 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2837 == GFC_OMP_ATOMIC_WRITE
)
2838 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
2840 gfc_conv_expr (&rse
, expr2
);
2841 gfc_add_block_to_block (&block
, &rse
.pre
);
2843 else if (expr2
->expr_type
== EXPR_OP
)
2846 switch (expr2
->value
.op
.op
)
2848 case INTRINSIC_PLUS
:
2851 case INTRINSIC_TIMES
:
2854 case INTRINSIC_MINUS
:
2857 case INTRINSIC_DIVIDE
:
2858 if (expr2
->ts
.type
== BT_INTEGER
)
2859 op
= TRUNC_DIV_EXPR
;
2864 op
= TRUTH_ANDIF_EXPR
;
2867 op
= TRUTH_ORIF_EXPR
;
2872 case INTRINSIC_NEQV
:
2878 e
= expr2
->value
.op
.op1
;
2879 if (e
->expr_type
== EXPR_FUNCTION
2880 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2881 e
= e
->value
.function
.actual
->expr
;
2882 if (e
->expr_type
== EXPR_VARIABLE
2883 && e
->symtree
!= NULL
2884 && e
->symtree
->n
.sym
== var
)
2886 expr2
= expr2
->value
.op
.op2
;
2891 e
= expr2
->value
.op
.op2
;
2892 if (e
->expr_type
== EXPR_FUNCTION
2893 && e
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
2894 e
= e
->value
.function
.actual
->expr
;
2895 gcc_assert (e
->expr_type
== EXPR_VARIABLE
2896 && e
->symtree
!= NULL
2897 && e
->symtree
->n
.sym
== var
);
2898 expr2
= expr2
->value
.op
.op1
;
2899 var_on_left
= false;
2901 gfc_conv_expr (&rse
, expr2
);
2902 gfc_add_block_to_block (&block
, &rse
.pre
);
2906 gcc_assert (expr2
->expr_type
== EXPR_FUNCTION
);
2907 switch (expr2
->value
.function
.isym
->id
)
2927 e
= expr2
->value
.function
.actual
->expr
;
2928 gcc_assert (e
->expr_type
== EXPR_VARIABLE
2929 && e
->symtree
!= NULL
2930 && e
->symtree
->n
.sym
== var
);
2932 gfc_conv_expr (&rse
, expr2
->value
.function
.actual
->next
->expr
);
2933 gfc_add_block_to_block (&block
, &rse
.pre
);
2934 if (expr2
->value
.function
.actual
->next
->next
!= NULL
)
2936 tree accum
= gfc_create_var (TREE_TYPE (rse
.expr
), NULL
);
2937 gfc_actual_arglist
*arg
;
2939 gfc_add_modify (&block
, accum
, rse
.expr
);
2940 for (arg
= expr2
->value
.function
.actual
->next
->next
; arg
;
2943 gfc_init_block (&rse
.pre
);
2944 gfc_conv_expr (&rse
, arg
->expr
);
2945 gfc_add_block_to_block (&block
, &rse
.pre
);
2946 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (accum
),
2948 gfc_add_modify (&block
, accum
, x
);
2954 expr2
= expr2
->value
.function
.actual
->next
->expr
;
2957 lhsaddr
= save_expr (lhsaddr
);
2958 if (TREE_CODE (lhsaddr
) != SAVE_EXPR
2959 && (TREE_CODE (lhsaddr
) != ADDR_EXPR
2960 || TREE_CODE (TREE_OPERAND (lhsaddr
, 0)) != VAR_DECL
))
2962 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
2963 it even after unsharing function body. */
2964 tree var
= create_tmp_var_raw (TREE_TYPE (lhsaddr
));
2965 DECL_CONTEXT (var
) = current_function_decl
;
2966 lhsaddr
= build4 (TARGET_EXPR
, TREE_TYPE (lhsaddr
), var
, lhsaddr
,
2967 NULL_TREE
, NULL_TREE
);
2970 rhs
= gfc_evaluate_now (rse
.expr
, &block
);
2972 if (((atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_MASK
)
2973 == GFC_OMP_ATOMIC_WRITE
)
2974 || (atomic_code
->ext
.omp_atomic
& GFC_OMP_ATOMIC_SWAP
))
2978 x
= convert (TREE_TYPE (rhs
),
2979 build_fold_indirect_ref_loc (input_location
, lhsaddr
));
2981 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), x
, rhs
);
2983 x
= fold_build2_loc (input_location
, op
, TREE_TYPE (rhs
), rhs
, x
);
2986 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
2987 && TREE_CODE (type
) != COMPLEX_TYPE
)
2988 x
= fold_build1_loc (input_location
, REALPART_EXPR
,
2989 TREE_TYPE (TREE_TYPE (rhs
)), x
);
2991 gfc_add_block_to_block (&block
, &lse
.pre
);
2992 gfc_add_block_to_block (&block
, &rse
.pre
);
2994 if (aop
== OMP_ATOMIC
)
2996 x
= build2_v (OMP_ATOMIC
, lhsaddr
, convert (type
, x
));
2997 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
2998 gfc_add_expr_to_block (&block
, x
);
3002 if (aop
== OMP_ATOMIC_CAPTURE_NEW
)
3005 expr2
= code
->expr2
;
3006 if (expr2
->expr_type
== EXPR_FUNCTION
3007 && expr2
->value
.function
.isym
->id
== GFC_ISYM_CONVERSION
)
3008 expr2
= expr2
->value
.function
.actual
->expr
;
3010 gcc_assert (expr2
->expr_type
== EXPR_VARIABLE
);
3011 gfc_conv_expr (&vse
, code
->expr1
);
3012 gfc_add_block_to_block (&block
, &vse
.pre
);
3014 gfc_init_se (&lse
, NULL
);
3015 gfc_conv_expr (&lse
, expr2
);
3016 gfc_add_block_to_block (&block
, &lse
.pre
);
3018 x
= build2 (aop
, type
, lhsaddr
, convert (type
, x
));
3019 OMP_ATOMIC_SEQ_CST (x
) = seq_cst
;
3020 x
= convert (TREE_TYPE (vse
.expr
), x
);
3021 gfc_add_modify (&block
, vse
.expr
, x
);
3024 return gfc_finish_block (&block
);
3028 gfc_trans_omp_barrier (void)
3030 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
3031 return build_call_expr_loc (input_location
, decl
, 0);
3035 gfc_trans_omp_cancel (gfc_code
*code
)
3038 tree ifc
= boolean_true_node
;
3040 switch (code
->ext
.omp_clauses
->cancel
)
3042 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3043 case OMP_CANCEL_DO
: mask
= 2; break;
3044 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3045 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3046 default: gcc_unreachable ();
3048 gfc_start_block (&block
);
3049 if (code
->ext
.omp_clauses
->if_expr
)
3054 gfc_init_se (&se
, NULL
);
3055 gfc_conv_expr (&se
, code
->ext
.omp_clauses
->if_expr
);
3056 gfc_add_block_to_block (&block
, &se
.pre
);
3057 if_var
= gfc_evaluate_now (se
.expr
, &block
);
3058 gfc_add_block_to_block (&block
, &se
.post
);
3059 tree type
= TREE_TYPE (if_var
);
3060 ifc
= fold_build2_loc (input_location
, NE_EXPR
,
3061 boolean_type_node
, if_var
,
3062 build_zero_cst (type
));
3064 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
3065 tree c_bool_type
= TREE_TYPE (TREE_TYPE (decl
));
3066 ifc
= fold_convert (c_bool_type
, ifc
);
3067 gfc_add_expr_to_block (&block
,
3068 build_call_expr_loc (input_location
, decl
, 2,
3069 build_int_cst (integer_type_node
,
3071 return gfc_finish_block (&block
);
3075 gfc_trans_omp_cancellation_point (gfc_code
*code
)
3078 switch (code
->ext
.omp_clauses
->cancel
)
3080 case OMP_CANCEL_PARALLEL
: mask
= 1; break;
3081 case OMP_CANCEL_DO
: mask
= 2; break;
3082 case OMP_CANCEL_SECTIONS
: mask
= 4; break;
3083 case OMP_CANCEL_TASKGROUP
: mask
= 8; break;
3084 default: gcc_unreachable ();
3086 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT
);
3087 return build_call_expr_loc (input_location
, decl
, 1,
3088 build_int_cst (integer_type_node
, mask
));
3092 gfc_trans_omp_critical (gfc_code
*code
)
3094 tree name
= NULL_TREE
, stmt
;
3095 if (code
->ext
.omp_name
!= NULL
)
3096 name
= get_identifier (code
->ext
.omp_name
);
3097 stmt
= gfc_trans_code (code
->block
->next
);
3098 return build2_loc (input_location
, OMP_CRITICAL
, void_type_node
, stmt
, name
);
3101 typedef struct dovar_init_d
{
3108 gfc_trans_omp_do (gfc_code
*code
, gfc_exec_op op
, stmtblock_t
*pblock
,
3109 gfc_omp_clauses
*do_clauses
, tree par_clauses
)
3112 tree dovar
, stmt
, from
, to
, step
, type
, init
, cond
, incr
;
3113 tree count
= NULL_TREE
, cycle_label
, tmp
, omp_clauses
;
3116 gfc_omp_clauses
*clauses
= code
->ext
.omp_clauses
;
3117 int i
, collapse
= clauses
->collapse
;
3118 vec
<dovar_init
> inits
= vNULL
;
3125 code
= code
->block
->next
;
3126 gcc_assert (code
->op
== EXEC_DO
);
3128 init
= make_tree_vec (collapse
);
3129 cond
= make_tree_vec (collapse
);
3130 incr
= make_tree_vec (collapse
);
3134 gfc_start_block (&block
);
3138 omp_clauses
= gfc_trans_omp_clauses (pblock
, do_clauses
, code
->loc
);
3140 for (i
= 0; i
< collapse
; i
++)
3143 int dovar_found
= 0;
3148 gfc_omp_namelist
*n
= NULL
;
3149 if (op
!= EXEC_OMP_DISTRIBUTE
)
3150 for (n
= clauses
->lists
[(op
== EXEC_OMP_SIMD
&& collapse
== 1)
3151 ? OMP_LIST_LINEAR
: OMP_LIST_LASTPRIVATE
];
3152 n
!= NULL
; n
= n
->next
)
3153 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3157 else if (n
== NULL
&& op
!= EXEC_OMP_SIMD
)
3158 for (n
= clauses
->lists
[OMP_LIST_PRIVATE
]; n
!= NULL
; n
= n
->next
)
3159 if (code
->ext
.iterator
->var
->symtree
->n
.sym
== n
->sym
)
3165 /* Evaluate all the expressions in the iterator. */
3166 gfc_init_se (&se
, NULL
);
3167 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
3168 gfc_add_block_to_block (pblock
, &se
.pre
);
3170 type
= TREE_TYPE (dovar
);
3171 gcc_assert (TREE_CODE (type
) == INTEGER_TYPE
);
3173 gfc_init_se (&se
, NULL
);
3174 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
3175 gfc_add_block_to_block (pblock
, &se
.pre
);
3176 from
= gfc_evaluate_now (se
.expr
, pblock
);
3178 gfc_init_se (&se
, NULL
);
3179 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
3180 gfc_add_block_to_block (pblock
, &se
.pre
);
3181 to
= gfc_evaluate_now (se
.expr
, pblock
);
3183 gfc_init_se (&se
, NULL
);
3184 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
3185 gfc_add_block_to_block (pblock
, &se
.pre
);
3186 step
= gfc_evaluate_now (se
.expr
, pblock
);
3189 /* Special case simple loops. */
3190 if (TREE_CODE (dovar
) == VAR_DECL
)
3192 if (integer_onep (step
))
3194 else if (tree_int_cst_equal (step
, integer_minus_one_node
))
3199 = gfc_trans_omp_variable (code
->ext
.iterator
->var
->symtree
->n
.sym
,
3205 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, dovar
, from
);
3206 /* The condition should not be folded. */
3207 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, simple
> 0
3208 ? LE_EXPR
: GE_EXPR
,
3209 boolean_type_node
, dovar
, to
);
3210 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3212 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3215 TREE_VEC_ELT (incr
, i
));
3219 /* STEP is not 1 or -1. Use:
3220 for (count = 0; count < (to + step - from) / step; count++)
3222 dovar = from + count * step;
3226 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, step
, from
);
3227 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, to
, tmp
);
3228 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, type
, tmp
,
3230 tmp
= gfc_evaluate_now (tmp
, pblock
);
3231 count
= gfc_create_var (type
, "count");
3232 TREE_VEC_ELT (init
, i
) = build2_v (MODIFY_EXPR
, count
,
3233 build_int_cst (type
, 0));
3234 /* The condition should not be folded. */
3235 TREE_VEC_ELT (cond
, i
) = build2_loc (input_location
, LT_EXPR
,
3238 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
, PLUS_EXPR
,
3240 build_int_cst (type
, 1));
3241 TREE_VEC_ELT (incr
, i
) = fold_build2_loc (input_location
,
3242 MODIFY_EXPR
, type
, count
,
3243 TREE_VEC_ELT (incr
, i
));
3245 /* Initialize DOVAR. */
3246 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, type
, count
, step
);
3247 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, from
, tmp
);
3248 dovar_init e
= {dovar
, tmp
};
3249 inits
.safe_push (e
);
3252 if (dovar_found
== 2
3253 && op
== EXEC_OMP_SIMD
3257 for (tmp
= omp_clauses
; tmp
; tmp
= OMP_CLAUSE_CHAIN (tmp
))
3258 if (OMP_CLAUSE_CODE (tmp
) == OMP_CLAUSE_LINEAR
3259 && OMP_CLAUSE_DECL (tmp
) == dovar
)
3261 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3267 if (op
== EXEC_OMP_SIMD
)
3271 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3272 OMP_CLAUSE_LINEAR_STEP (tmp
) = step
;
3273 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3276 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3281 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3282 OMP_CLAUSE_DECL (tmp
) = dovar_decl
;
3283 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3285 if (dovar_found
== 2)
3292 /* If dovar is lastprivate, but different counter is used,
3293 dovar += step needs to be added to
3294 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3295 will have the value on entry of the last loop, rather
3296 than value after iterator increment. */
3297 tmp
= gfc_evaluate_now (step
, pblock
);
3298 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, dovar
,
3300 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
,
3302 for (c
= omp_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3303 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3304 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3306 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = tmp
;
3309 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3310 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3312 OMP_CLAUSE_LINEAR_STMT (c
) = tmp
;
3316 if (c
== NULL
&& op
== EXEC_OMP_DO
&& par_clauses
!= NULL
)
3318 for (c
= par_clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3319 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
3320 && OMP_CLAUSE_DECL (c
) == dovar_decl
)
3322 tree l
= build_omp_clause (input_location
,
3323 OMP_CLAUSE_LASTPRIVATE
);
3324 OMP_CLAUSE_DECL (l
) = dovar_decl
;
3325 OMP_CLAUSE_CHAIN (l
) = omp_clauses
;
3326 OMP_CLAUSE_LASTPRIVATE_STMT (l
) = tmp
;
3328 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_SHARED
);
3332 gcc_assert (simple
|| c
!= NULL
);
3336 if (op
!= EXEC_OMP_SIMD
)
3337 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
3338 else if (collapse
== 1)
3340 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
3341 OMP_CLAUSE_LINEAR_STEP (tmp
) = build_int_cst (type
, 1);
3342 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp
) = 1;
3343 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp
) = 1;
3346 tmp
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
3347 OMP_CLAUSE_DECL (tmp
) = count
;
3348 omp_clauses
= gfc_trans_add_clause (tmp
, omp_clauses
);
3351 if (i
+ 1 < collapse
)
3352 code
= code
->block
->next
;
3355 if (pblock
!= &block
)
3358 gfc_start_block (&block
);
3361 gfc_start_block (&body
);
3363 FOR_EACH_VEC_ELT (inits
, ix
, di
)
3364 gfc_add_modify (&body
, di
->var
, di
->init
);
3367 /* Cycle statement is implemented with a goto. Exit statement must not be
3368 present for this loop. */
3369 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3371 /* Put these labels where they can be found later. */
3373 code
->cycle_label
= cycle_label
;
3374 code
->exit_label
= NULL_TREE
;
3376 /* Main loop body. */
3377 tmp
= gfc_trans_omp_code (code
->block
->next
, true);
3378 gfc_add_expr_to_block (&body
, tmp
);
3380 /* Label for cycle statements (if needed). */
3381 if (TREE_USED (cycle_label
))
3383 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3384 gfc_add_expr_to_block (&body
, tmp
);
3387 /* End of loop body. */
3390 case EXEC_OMP_SIMD
: stmt
= make_node (OMP_SIMD
); break;
3391 case EXEC_OMP_DO
: stmt
= make_node (OMP_FOR
); break;
3392 case EXEC_OMP_DISTRIBUTE
: stmt
= make_node (OMP_DISTRIBUTE
); break;
3393 case EXEC_OACC_LOOP
: stmt
= make_node (OACC_LOOP
); break;
3394 default: gcc_unreachable ();
3397 TREE_TYPE (stmt
) = void_type_node
;
3398 OMP_FOR_BODY (stmt
) = gfc_finish_block (&body
);
3399 OMP_FOR_CLAUSES (stmt
) = omp_clauses
;
3400 OMP_FOR_INIT (stmt
) = init
;
3401 OMP_FOR_COND (stmt
) = cond
;
3402 OMP_FOR_INCR (stmt
) = incr
;
3403 gfc_add_expr_to_block (&block
, stmt
);
3405 return gfc_finish_block (&block
);
3408 /* parallel loop and kernels loop. */
3410 gfc_trans_oacc_combined_directive (gfc_code
*code
)
3412 stmtblock_t block
, *pblock
= NULL
;
3413 gfc_omp_clauses construct_clauses
, loop_clauses
;
3414 tree stmt
, oacc_clauses
= NULL_TREE
;
3415 enum tree_code construct_code
;
3419 case EXEC_OACC_PARALLEL_LOOP
:
3420 construct_code
= OACC_PARALLEL
;
3422 case EXEC_OACC_KERNELS_LOOP
:
3423 construct_code
= OACC_KERNELS
;
3429 gfc_start_block (&block
);
3431 memset (&loop_clauses
, 0, sizeof (loop_clauses
));
3432 if (code
->ext
.omp_clauses
!= NULL
)
3434 memcpy (&construct_clauses
, code
->ext
.omp_clauses
,
3435 sizeof (construct_clauses
));
3436 loop_clauses
.collapse
= construct_clauses
.collapse
;
3437 loop_clauses
.gang
= construct_clauses
.gang
;
3438 loop_clauses
.vector
= construct_clauses
.vector
;
3439 loop_clauses
.worker
= construct_clauses
.worker
;
3440 loop_clauses
.seq
= construct_clauses
.seq
;
3441 loop_clauses
.independent
= construct_clauses
.independent
;
3442 construct_clauses
.collapse
= 0;
3443 construct_clauses
.gang
= false;
3444 construct_clauses
.vector
= false;
3445 construct_clauses
.worker
= false;
3446 construct_clauses
.seq
= false;
3447 construct_clauses
.independent
= false;
3448 oacc_clauses
= gfc_trans_omp_clauses (&block
, &construct_clauses
,
3451 if (!loop_clauses
.seq
)
3455 stmt
= gfc_trans_omp_do (code
, EXEC_OACC_LOOP
, pblock
, &loop_clauses
, NULL
);
3456 if (TREE_CODE (stmt
) != BIND_EXPR
)
3457 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3460 stmt
= build2_loc (input_location
, construct_code
, void_type_node
, stmt
,
3462 if (code
->op
== EXEC_OACC_KERNELS_LOOP
)
3463 OACC_KERNELS_COMBINED (stmt
) = 1;
3465 OACC_PARALLEL_COMBINED (stmt
) = 1;
3466 gfc_add_expr_to_block (&block
, stmt
);
3467 return gfc_finish_block (&block
);
3471 gfc_trans_omp_flush (void)
3473 tree decl
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
3474 return build_call_expr_loc (input_location
, decl
, 0);
3478 gfc_trans_omp_master (gfc_code
*code
)
3480 tree stmt
= gfc_trans_code (code
->block
->next
);
3481 if (IS_EMPTY_STMT (stmt
))
3483 return build1_v (OMP_MASTER
, stmt
);
3487 gfc_trans_omp_ordered (gfc_code
*code
)
3489 return build1_v (OMP_ORDERED
, gfc_trans_code (code
->block
->next
));
3493 gfc_trans_omp_parallel (gfc_code
*code
)
3496 tree stmt
, omp_clauses
;
3498 gfc_start_block (&block
);
3499 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3501 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3502 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3504 gfc_add_expr_to_block (&block
, stmt
);
3505 return gfc_finish_block (&block
);
/* Indices of the constituent constructs a combined OpenMP directive is
   split into, and the corresponding bit masks used while distributing
   clauses in gfc_split_omp_clauses.  */

enum
{
  GFC_OMP_SPLIT_SIMD,
  GFC_OMP_SPLIT_DO,
  GFC_OMP_SPLIT_PARALLEL,
  GFC_OMP_SPLIT_DISTRIBUTE,
  GFC_OMP_SPLIT_TEAMS,
  GFC_OMP_SPLIT_TARGET,
  GFC_OMP_SPLIT_NUM
};

enum
{
  GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
  GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
  GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
  GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
  GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
  GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET)
};
3530 gfc_split_omp_clauses (gfc_code
*code
,
3531 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
])
3533 int mask
= 0, innermost
= 0;
3534 memset (clausesa
, 0, GFC_OMP_SPLIT_NUM
* sizeof (gfc_omp_clauses
));
3537 case EXEC_OMP_DISTRIBUTE
:
3538 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3540 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
3541 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3542 innermost
= GFC_OMP_SPLIT_DO
;
3544 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
3545 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_PARALLEL
3546 | GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3547 innermost
= GFC_OMP_SPLIT_SIMD
;
3549 case EXEC_OMP_DISTRIBUTE_SIMD
:
3550 mask
= GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3551 innermost
= GFC_OMP_SPLIT_SIMD
;
3554 innermost
= GFC_OMP_SPLIT_DO
;
3556 case EXEC_OMP_DO_SIMD
:
3557 mask
= GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3558 innermost
= GFC_OMP_SPLIT_SIMD
;
3560 case EXEC_OMP_PARALLEL
:
3561 innermost
= GFC_OMP_SPLIT_PARALLEL
;
3563 case EXEC_OMP_PARALLEL_DO
:
3564 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3565 innermost
= GFC_OMP_SPLIT_DO
;
3567 case EXEC_OMP_PARALLEL_DO_SIMD
:
3568 mask
= GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3569 innermost
= GFC_OMP_SPLIT_SIMD
;
3572 innermost
= GFC_OMP_SPLIT_SIMD
;
3574 case EXEC_OMP_TARGET
:
3575 innermost
= GFC_OMP_SPLIT_TARGET
;
3577 case EXEC_OMP_TARGET_TEAMS
:
3578 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
;
3579 innermost
= GFC_OMP_SPLIT_TEAMS
;
3581 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
3582 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
3583 | GFC_OMP_MASK_DISTRIBUTE
;
3584 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3586 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3587 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3588 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3589 innermost
= GFC_OMP_SPLIT_DO
;
3591 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3592 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3593 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3594 innermost
= GFC_OMP_SPLIT_SIMD
;
3596 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
3597 mask
= GFC_OMP_MASK_TARGET
| GFC_OMP_MASK_TEAMS
3598 | GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3599 innermost
= GFC_OMP_SPLIT_SIMD
;
3601 case EXEC_OMP_TEAMS
:
3602 innermost
= GFC_OMP_SPLIT_TEAMS
;
3604 case EXEC_OMP_TEAMS_DISTRIBUTE
:
3605 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
;
3606 innermost
= GFC_OMP_SPLIT_DISTRIBUTE
;
3608 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
3609 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3610 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
;
3611 innermost
= GFC_OMP_SPLIT_DO
;
3613 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
3614 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
3615 | GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
| GFC_OMP_MASK_SIMD
;
3616 innermost
= GFC_OMP_SPLIT_SIMD
;
3618 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
3619 mask
= GFC_OMP_MASK_TEAMS
| GFC_OMP_MASK_DISTRIBUTE
| GFC_OMP_MASK_SIMD
;
3620 innermost
= GFC_OMP_SPLIT_SIMD
;
3627 clausesa
[innermost
] = *code
->ext
.omp_clauses
;
3630 if (code
->ext
.omp_clauses
!= NULL
)
3632 if (mask
& GFC_OMP_MASK_TARGET
)
3634 /* First the clauses that are unique to some constructs. */
3635 clausesa
[GFC_OMP_SPLIT_TARGET
].lists
[OMP_LIST_MAP
]
3636 = code
->ext
.omp_clauses
->lists
[OMP_LIST_MAP
];
3637 clausesa
[GFC_OMP_SPLIT_TARGET
].device
3638 = code
->ext
.omp_clauses
->device
;
3640 if (mask
& GFC_OMP_MASK_TEAMS
)
3642 /* First the clauses that are unique to some constructs. */
3643 clausesa
[GFC_OMP_SPLIT_TEAMS
].num_teams
3644 = code
->ext
.omp_clauses
->num_teams
;
3645 clausesa
[GFC_OMP_SPLIT_TEAMS
].thread_limit
3646 = code
->ext
.omp_clauses
->thread_limit
;
3647 /* Shared and default clauses are allowed on parallel and teams. */
3648 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_SHARED
]
3649 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
3650 clausesa
[GFC_OMP_SPLIT_TEAMS
].default_sharing
3651 = code
->ext
.omp_clauses
->default_sharing
;
3653 if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
3655 /* First the clauses that are unique to some constructs. */
3656 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_sched_kind
3657 = code
->ext
.omp_clauses
->dist_sched_kind
;
3658 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].dist_chunk_size
3659 = code
->ext
.omp_clauses
->dist_chunk_size
;
3660 /* Duplicate collapse. */
3661 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].collapse
3662 = code
->ext
.omp_clauses
->collapse
;
3664 if (mask
& GFC_OMP_MASK_PARALLEL
)
3666 /* First the clauses that are unique to some constructs. */
3667 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_COPYIN
]
3668 = code
->ext
.omp_clauses
->lists
[OMP_LIST_COPYIN
];
3669 clausesa
[GFC_OMP_SPLIT_PARALLEL
].num_threads
3670 = code
->ext
.omp_clauses
->num_threads
;
3671 clausesa
[GFC_OMP_SPLIT_PARALLEL
].proc_bind
3672 = code
->ext
.omp_clauses
->proc_bind
;
3673 /* Shared and default clauses are allowed on parallel and teams. */
3674 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_SHARED
]
3675 = code
->ext
.omp_clauses
->lists
[OMP_LIST_SHARED
];
3676 clausesa
[GFC_OMP_SPLIT_PARALLEL
].default_sharing
3677 = code
->ext
.omp_clauses
->default_sharing
;
3679 if (mask
& GFC_OMP_MASK_DO
)
3681 /* First the clauses that are unique to some constructs. */
3682 clausesa
[GFC_OMP_SPLIT_DO
].ordered
3683 = code
->ext
.omp_clauses
->ordered
;
3684 clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
3685 = code
->ext
.omp_clauses
->sched_kind
;
3686 clausesa
[GFC_OMP_SPLIT_DO
].chunk_size
3687 = code
->ext
.omp_clauses
->chunk_size
;
3688 clausesa
[GFC_OMP_SPLIT_DO
].nowait
3689 = code
->ext
.omp_clauses
->nowait
;
3690 /* Duplicate collapse. */
3691 clausesa
[GFC_OMP_SPLIT_DO
].collapse
3692 = code
->ext
.omp_clauses
->collapse
;
3694 if (mask
& GFC_OMP_MASK_SIMD
)
3696 clausesa
[GFC_OMP_SPLIT_SIMD
].safelen_expr
3697 = code
->ext
.omp_clauses
->safelen_expr
;
3698 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LINEAR
]
3699 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LINEAR
];
3700 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_ALIGNED
]
3701 = code
->ext
.omp_clauses
->lists
[OMP_LIST_ALIGNED
];
3702 /* Duplicate collapse. */
3703 clausesa
[GFC_OMP_SPLIT_SIMD
].collapse
3704 = code
->ext
.omp_clauses
->collapse
;
3706 /* Private clause is supported on all constructs but target,
3707 it is enough to put it on the innermost one. For
3708 !$ omp do put it on parallel though,
3709 as that's what we did for OpenMP 3.1. */
3710 clausesa
[innermost
== GFC_OMP_SPLIT_DO
3711 ? (int) GFC_OMP_SPLIT_PARALLEL
3712 : innermost
].lists
[OMP_LIST_PRIVATE
]
3713 = code
->ext
.omp_clauses
->lists
[OMP_LIST_PRIVATE
];
3714 /* Firstprivate clause is supported on all constructs but
3715 target and simd. Put it on the outermost of those and
3716 duplicate on parallel. */
3717 if (mask
& GFC_OMP_MASK_TEAMS
)
3718 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_FIRSTPRIVATE
]
3719 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3720 else if (mask
& GFC_OMP_MASK_DISTRIBUTE
)
3721 clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
].lists
[OMP_LIST_FIRSTPRIVATE
]
3722 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3723 if (mask
& GFC_OMP_MASK_PARALLEL
)
3724 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_FIRSTPRIVATE
]
3725 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3726 else if (mask
& GFC_OMP_MASK_DO
)
3727 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_FIRSTPRIVATE
]
3728 = code
->ext
.omp_clauses
->lists
[OMP_LIST_FIRSTPRIVATE
];
3729 /* Lastprivate is allowed on do and simd. In
3730 parallel do{, simd} we actually want to put it on
3731 parallel rather than do. */
3732 if (mask
& GFC_OMP_MASK_PARALLEL
)
3733 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_LASTPRIVATE
]
3734 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3735 else if (mask
& GFC_OMP_MASK_DO
)
3736 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_LASTPRIVATE
]
3737 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3738 if (mask
& GFC_OMP_MASK_SIMD
)
3739 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_LASTPRIVATE
]
3740 = code
->ext
.omp_clauses
->lists
[OMP_LIST_LASTPRIVATE
];
3741 /* Reduction is allowed on simd, do, parallel and teams.
3742 Duplicate it on all of them, but omit on do if
3743 parallel is present. */
3744 if (mask
& GFC_OMP_MASK_TEAMS
)
3745 clausesa
[GFC_OMP_SPLIT_TEAMS
].lists
[OMP_LIST_REDUCTION
]
3746 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3747 if (mask
& GFC_OMP_MASK_PARALLEL
)
3748 clausesa
[GFC_OMP_SPLIT_PARALLEL
].lists
[OMP_LIST_REDUCTION
]
3749 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3750 else if (mask
& GFC_OMP_MASK_DO
)
3751 clausesa
[GFC_OMP_SPLIT_DO
].lists
[OMP_LIST_REDUCTION
]
3752 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3753 if (mask
& GFC_OMP_MASK_SIMD
)
3754 clausesa
[GFC_OMP_SPLIT_SIMD
].lists
[OMP_LIST_REDUCTION
]
3755 = code
->ext
.omp_clauses
->lists
[OMP_LIST_REDUCTION
];
3756 /* FIXME: This is currently being discussed. */
3757 if (mask
& GFC_OMP_MASK_PARALLEL
)
3758 clausesa
[GFC_OMP_SPLIT_PARALLEL
].if_expr
3759 = code
->ext
.omp_clauses
->if_expr
;
3761 clausesa
[GFC_OMP_SPLIT_TARGET
].if_expr
3762 = code
->ext
.omp_clauses
->if_expr
;
3764 if ((mask
& (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
3765 == (GFC_OMP_MASK_PARALLEL
| GFC_OMP_MASK_DO
))
3766 clausesa
[GFC_OMP_SPLIT_DO
].nowait
= true;
3770 gfc_trans_omp_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
3771 gfc_omp_clauses
*clausesa
, tree omp_clauses
)
3774 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3775 tree stmt
, body
, omp_do_clauses
= NULL_TREE
;
3778 gfc_start_block (&block
);
3780 gfc_init_block (&block
);
3782 if (clausesa
== NULL
)
3784 clausesa
= clausesa_buf
;
3785 gfc_split_omp_clauses (code
, clausesa
);
3789 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DO
], code
->loc
);
3790 body
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, pblock
? pblock
: &block
,
3791 &clausesa
[GFC_OMP_SPLIT_SIMD
], omp_clauses
);
3794 if (TREE_CODE (body
) != BIND_EXPR
)
3795 body
= build3_v (BIND_EXPR
, NULL
, body
, poplevel (1, 0));
3799 else if (TREE_CODE (body
) != BIND_EXPR
)
3800 body
= build3_v (BIND_EXPR
, NULL
, body
, NULL_TREE
);
3803 stmt
= make_node (OMP_FOR
);
3804 TREE_TYPE (stmt
) = void_type_node
;
3805 OMP_FOR_BODY (stmt
) = body
;
3806 OMP_FOR_CLAUSES (stmt
) = omp_do_clauses
;
3810 gfc_add_expr_to_block (&block
, stmt
);
3811 return gfc_finish_block (&block
);
3815 gfc_trans_omp_parallel_do (gfc_code
*code
, stmtblock_t
*pblock
,
3816 gfc_omp_clauses
*clausesa
)
3818 stmtblock_t block
, *new_pblock
= pblock
;
3819 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3820 tree stmt
, omp_clauses
= NULL_TREE
;
3823 gfc_start_block (&block
);
3825 gfc_init_block (&block
);
3827 if (clausesa
== NULL
)
3829 clausesa
= clausesa_buf
;
3830 gfc_split_omp_clauses (code
, clausesa
);
3833 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
3837 if (!clausesa
[GFC_OMP_SPLIT_DO
].ordered
3838 && clausesa
[GFC_OMP_SPLIT_DO
].sched_kind
!= OMP_SCHED_STATIC
)
3839 new_pblock
= &block
;
3843 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DO
, new_pblock
,
3844 &clausesa
[GFC_OMP_SPLIT_DO
], omp_clauses
);
3847 if (TREE_CODE (stmt
) != BIND_EXPR
)
3848 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3852 else if (TREE_CODE (stmt
) != BIND_EXPR
)
3853 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3854 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3856 OMP_PARALLEL_COMBINED (stmt
) = 1;
3857 gfc_add_expr_to_block (&block
, stmt
);
3858 return gfc_finish_block (&block
);
3862 gfc_trans_omp_parallel_do_simd (gfc_code
*code
, stmtblock_t
*pblock
,
3863 gfc_omp_clauses
*clausesa
)
3866 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
3867 tree stmt
, omp_clauses
= NULL_TREE
;
3870 gfc_start_block (&block
);
3872 gfc_init_block (&block
);
3874 if (clausesa
== NULL
)
3876 clausesa
= clausesa_buf
;
3877 gfc_split_omp_clauses (code
, clausesa
);
3881 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_PARALLEL
],
3885 stmt
= gfc_trans_omp_do_simd (code
, pblock
, clausesa
, omp_clauses
);
3888 if (TREE_CODE (stmt
) != BIND_EXPR
)
3889 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3893 else if (TREE_CODE (stmt
) != BIND_EXPR
)
3894 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, NULL_TREE
);
3897 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3899 OMP_PARALLEL_COMBINED (stmt
) = 1;
3901 gfc_add_expr_to_block (&block
, stmt
);
3902 return gfc_finish_block (&block
);
3906 gfc_trans_omp_parallel_sections (gfc_code
*code
)
3909 gfc_omp_clauses section_clauses
;
3910 tree stmt
, omp_clauses
;
3912 memset (§ion_clauses
, 0, sizeof (section_clauses
));
3913 section_clauses
.nowait
= true;
3915 gfc_start_block (&block
);
3916 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3919 stmt
= gfc_trans_omp_sections (code
, §ion_clauses
);
3920 if (TREE_CODE (stmt
) != BIND_EXPR
)
3921 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3924 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3926 OMP_PARALLEL_COMBINED (stmt
) = 1;
3927 gfc_add_expr_to_block (&block
, stmt
);
3928 return gfc_finish_block (&block
);
3932 gfc_trans_omp_parallel_workshare (gfc_code
*code
)
3935 gfc_omp_clauses workshare_clauses
;
3936 tree stmt
, omp_clauses
;
3938 memset (&workshare_clauses
, 0, sizeof (workshare_clauses
));
3939 workshare_clauses
.nowait
= true;
3941 gfc_start_block (&block
);
3942 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
3945 stmt
= gfc_trans_omp_workshare (code
, &workshare_clauses
);
3946 if (TREE_CODE (stmt
) != BIND_EXPR
)
3947 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
3950 stmt
= build2_loc (input_location
, OMP_PARALLEL
, void_type_node
, stmt
,
3952 OMP_PARALLEL_COMBINED (stmt
) = 1;
3953 gfc_add_expr_to_block (&block
, stmt
);
3954 return gfc_finish_block (&block
);
3958 gfc_trans_omp_sections (gfc_code
*code
, gfc_omp_clauses
*clauses
)
3960 stmtblock_t block
, body
;
3961 tree omp_clauses
, stmt
;
3962 bool has_lastprivate
= clauses
->lists
[OMP_LIST_LASTPRIVATE
] != NULL
;
3964 gfc_start_block (&block
);
3966 omp_clauses
= gfc_trans_omp_clauses (&block
, clauses
, code
->loc
);
3968 gfc_init_block (&body
);
3969 for (code
= code
->block
; code
; code
= code
->block
)
3971 /* Last section is special because of lastprivate, so even if it
3972 is empty, chain it in. */
3973 stmt
= gfc_trans_omp_code (code
->next
,
3974 has_lastprivate
&& code
->block
== NULL
);
3975 if (! IS_EMPTY_STMT (stmt
))
3977 stmt
= build1_v (OMP_SECTION
, stmt
);
3978 gfc_add_expr_to_block (&body
, stmt
);
3981 stmt
= gfc_finish_block (&body
);
3983 stmt
= build2_loc (input_location
, OMP_SECTIONS
, void_type_node
, stmt
,
3985 gfc_add_expr_to_block (&block
, stmt
);
3987 return gfc_finish_block (&block
);
3991 gfc_trans_omp_single (gfc_code
*code
, gfc_omp_clauses
*clauses
)
3993 tree omp_clauses
= gfc_trans_omp_clauses (NULL
, clauses
, code
->loc
);
3994 tree stmt
= gfc_trans_omp_code (code
->block
->next
, true);
3995 stmt
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, stmt
,
4001 gfc_trans_omp_task (gfc_code
*code
)
4004 tree stmt
, omp_clauses
;
4006 gfc_start_block (&block
);
4007 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4009 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4010 stmt
= build2_loc (input_location
, OMP_TASK
, void_type_node
, stmt
,
4012 gfc_add_expr_to_block (&block
, stmt
);
4013 return gfc_finish_block (&block
);
4017 gfc_trans_omp_taskgroup (gfc_code
*code
)
4019 tree stmt
= gfc_trans_code (code
->block
->next
);
4020 return build1_loc (input_location
, OMP_TASKGROUP
, void_type_node
, stmt
);
4024 gfc_trans_omp_taskwait (void)
4026 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
4027 return build_call_expr_loc (input_location
, decl
, 0);
4031 gfc_trans_omp_taskyield (void)
4033 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
4034 return build_call_expr_loc (input_location
, decl
, 0);
4038 gfc_trans_omp_distribute (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
4041 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4042 tree stmt
, omp_clauses
= NULL_TREE
;
4044 gfc_start_block (&block
);
4045 if (clausesa
== NULL
)
4047 clausesa
= clausesa_buf
;
4048 gfc_split_omp_clauses (code
, clausesa
);
4052 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4056 case EXEC_OMP_DISTRIBUTE
:
4057 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4058 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4059 /* This is handled in gfc_trans_omp_do. */
4062 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4063 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4064 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4065 stmt
= gfc_trans_omp_parallel_do (code
, &block
, clausesa
);
4066 if (TREE_CODE (stmt
) != BIND_EXPR
)
4067 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4071 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4072 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4073 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4074 stmt
= gfc_trans_omp_parallel_do_simd (code
, &block
, clausesa
);
4075 if (TREE_CODE (stmt
) != BIND_EXPR
)
4076 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4080 case EXEC_OMP_DISTRIBUTE_SIMD
:
4081 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4082 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4083 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_SIMD
, &block
,
4084 &clausesa
[GFC_OMP_SPLIT_SIMD
], NULL_TREE
);
4085 if (TREE_CODE (stmt
) != BIND_EXPR
)
4086 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4095 tree distribute
= make_node (OMP_DISTRIBUTE
);
4096 TREE_TYPE (distribute
) = void_type_node
;
4097 OMP_FOR_BODY (distribute
) = stmt
;
4098 OMP_FOR_CLAUSES (distribute
) = omp_clauses
;
4101 gfc_add_expr_to_block (&block
, stmt
);
4102 return gfc_finish_block (&block
);
4106 gfc_trans_omp_teams (gfc_code
*code
, gfc_omp_clauses
*clausesa
)
4109 gfc_omp_clauses clausesa_buf
[GFC_OMP_SPLIT_NUM
];
4110 tree stmt
, omp_clauses
= NULL_TREE
;
4111 bool combined
= true;
4113 gfc_start_block (&block
);
4114 if (clausesa
== NULL
)
4116 clausesa
= clausesa_buf
;
4117 gfc_split_omp_clauses (code
, clausesa
);
4121 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TEAMS
],
4125 case EXEC_OMP_TARGET_TEAMS
:
4126 case EXEC_OMP_TEAMS
:
4127 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4130 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4131 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4132 stmt
= gfc_trans_omp_do (code
, EXEC_OMP_DISTRIBUTE
, NULL
,
4133 &clausesa
[GFC_OMP_SPLIT_DISTRIBUTE
],
4137 stmt
= gfc_trans_omp_distribute (code
, clausesa
);
4140 stmt
= build2_loc (input_location
, OMP_TEAMS
, void_type_node
, stmt
,
4143 OMP_TEAMS_COMBINED (stmt
) = 1;
4144 gfc_add_expr_to_block (&block
, stmt
);
4145 return gfc_finish_block (&block
);
4149 gfc_trans_omp_target (gfc_code
*code
)
4152 gfc_omp_clauses clausesa
[GFC_OMP_SPLIT_NUM
];
4153 tree stmt
, omp_clauses
= NULL_TREE
;
4155 gfc_start_block (&block
);
4156 gfc_split_omp_clauses (code
, clausesa
);
4159 = gfc_trans_omp_clauses (&block
, &clausesa
[GFC_OMP_SPLIT_TARGET
],
4161 if (code
->op
== EXEC_OMP_TARGET
)
4162 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4166 stmt
= gfc_trans_omp_teams (code
, clausesa
);
4167 if (TREE_CODE (stmt
) != BIND_EXPR
)
4168 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, poplevel (1, 0));
4173 stmt
= build2_loc (input_location
, OMP_TARGET
, void_type_node
, stmt
,
4175 gfc_add_expr_to_block (&block
, stmt
);
4176 return gfc_finish_block (&block
);
4180 gfc_trans_omp_target_data (gfc_code
*code
)
4183 tree stmt
, omp_clauses
;
4185 gfc_start_block (&block
);
4186 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4188 stmt
= gfc_trans_omp_code (code
->block
->next
, true);
4189 stmt
= build2_loc (input_location
, OMP_TARGET_DATA
, void_type_node
, stmt
,
4191 gfc_add_expr_to_block (&block
, stmt
);
4192 return gfc_finish_block (&block
);
4196 gfc_trans_omp_target_update (gfc_code
*code
)
4199 tree stmt
, omp_clauses
;
4201 gfc_start_block (&block
);
4202 omp_clauses
= gfc_trans_omp_clauses (&block
, code
->ext
.omp_clauses
,
4204 stmt
= build1_loc (input_location
, OMP_TARGET_UPDATE
, void_type_node
,
4206 gfc_add_expr_to_block (&block
, stmt
);
4207 return gfc_finish_block (&block
);
4211 gfc_trans_omp_workshare (gfc_code
*code
, gfc_omp_clauses
*clauses
)
4213 tree res
, tmp
, stmt
;
4214 stmtblock_t block
, *pblock
= NULL
;
4215 stmtblock_t singleblock
;
4216 int saved_ompws_flags
;
4217 bool singleblock_in_progress
= false;
4218 /* True if previous gfc_code in workshare construct is not workshared. */
4219 bool prev_singleunit
;
4221 code
= code
->block
->next
;
4225 gfc_start_block (&block
);
4228 ompws_flags
= OMPWS_WORKSHARE_FLAG
;
4229 prev_singleunit
= false;
4231 /* Translate statements one by one to trees until we reach
4232 the end of the workshare construct. Adjacent gfc_codes that
4233 are a single unit of work are clustered and encapsulated in a
4234 single OMP_SINGLE construct. */
4235 for (; code
; code
= code
->next
)
4237 if (code
->here
!= 0)
4239 res
= gfc_trans_label_here (code
);
4240 gfc_add_expr_to_block (pblock
, res
);
4243 /* No dependence analysis, use for clauses with wait.
4244 If this is the last gfc_code, use default omp_clauses. */
4245 if (code
->next
== NULL
&& clauses
->nowait
)
4246 ompws_flags
|= OMPWS_NOWAIT
;
4248 /* By default, every gfc_code is a single unit of work. */
4249 ompws_flags
|= OMPWS_CURR_SINGLEUNIT
;
4250 ompws_flags
&= ~OMPWS_SCALARIZER_WS
;
4259 res
= gfc_trans_assign (code
);
4262 case EXEC_POINTER_ASSIGN
:
4263 res
= gfc_trans_pointer_assign (code
);
4266 case EXEC_INIT_ASSIGN
:
4267 res
= gfc_trans_init_assign (code
);
4271 res
= gfc_trans_forall (code
);
4275 res
= gfc_trans_where (code
);
4278 case EXEC_OMP_ATOMIC
:
4279 res
= gfc_trans_omp_directive (code
);
4282 case EXEC_OMP_PARALLEL
:
4283 case EXEC_OMP_PARALLEL_DO
:
4284 case EXEC_OMP_PARALLEL_SECTIONS
:
4285 case EXEC_OMP_PARALLEL_WORKSHARE
:
4286 case EXEC_OMP_CRITICAL
:
4287 saved_ompws_flags
= ompws_flags
;
4289 res
= gfc_trans_omp_directive (code
);
4290 ompws_flags
= saved_ompws_flags
;
4294 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
4297 gfc_set_backend_locus (&code
->loc
);
4299 if (res
!= NULL_TREE
&& ! IS_EMPTY_STMT (res
))
4301 if (prev_singleunit
)
4303 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
4304 /* Add current gfc_code to single block. */
4305 gfc_add_expr_to_block (&singleblock
, res
);
4308 /* Finish single block and add it to pblock. */
4309 tmp
= gfc_finish_block (&singleblock
);
4310 tmp
= build2_loc (input_location
, OMP_SINGLE
,
4311 void_type_node
, tmp
, NULL_TREE
);
4312 gfc_add_expr_to_block (pblock
, tmp
);
4313 /* Add current gfc_code to pblock. */
4314 gfc_add_expr_to_block (pblock
, res
);
4315 singleblock_in_progress
= false;
4320 if (ompws_flags
& OMPWS_CURR_SINGLEUNIT
)
4322 /* Start single block. */
4323 gfc_init_block (&singleblock
);
4324 gfc_add_expr_to_block (&singleblock
, res
);
4325 singleblock_in_progress
= true;
4328 /* Add the new statement to the block. */
4329 gfc_add_expr_to_block (pblock
, res
);
4331 prev_singleunit
= (ompws_flags
& OMPWS_CURR_SINGLEUNIT
) != 0;
4335 /* Finish remaining SINGLE block, if we were in the middle of one. */
4336 if (singleblock_in_progress
)
4338 /* Finish single block and add it to pblock. */
4339 tmp
= gfc_finish_block (&singleblock
);
4340 tmp
= build2_loc (input_location
, OMP_SINGLE
, void_type_node
, tmp
,
4342 ? build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
)
4344 gfc_add_expr_to_block (pblock
, tmp
);
4347 stmt
= gfc_finish_block (pblock
);
4348 if (TREE_CODE (stmt
) != BIND_EXPR
)
4350 if (!IS_EMPTY_STMT (stmt
))
4352 tree bindblock
= poplevel (1, 0);
4353 stmt
= build3_v (BIND_EXPR
, NULL
, stmt
, bindblock
);
4361 if (IS_EMPTY_STMT (stmt
) && !clauses
->nowait
)
4362 stmt
= gfc_trans_omp_barrier ();
4369 gfc_trans_oacc_declare (stmtblock_t
*block
, gfc_namespace
*ns
)
4372 oacc_clauses
= gfc_trans_omp_clauses (block
, ns
->oacc_declare_clauses
,
4373 ns
->oacc_declare_clauses
->loc
);
4374 return build1_loc (ns
->oacc_declare_clauses
->loc
.lb
->location
,
4375 OACC_DECLARE
, void_type_node
, oacc_clauses
);
4379 gfc_trans_oacc_directive (gfc_code
*code
)
4383 case EXEC_OACC_PARALLEL_LOOP
:
4384 case EXEC_OACC_KERNELS_LOOP
:
4385 return gfc_trans_oacc_combined_directive (code
);
4386 case EXEC_OACC_PARALLEL
:
4387 case EXEC_OACC_KERNELS
:
4388 case EXEC_OACC_DATA
:
4389 case EXEC_OACC_HOST_DATA
:
4390 return gfc_trans_oacc_construct (code
);
4391 case EXEC_OACC_LOOP
:
4392 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
4394 case EXEC_OACC_UPDATE
:
4395 case EXEC_OACC_CACHE
:
4396 case EXEC_OACC_ENTER_DATA
:
4397 case EXEC_OACC_EXIT_DATA
:
4398 return gfc_trans_oacc_executable_directive (code
);
4399 case EXEC_OACC_WAIT
:
4400 return gfc_trans_oacc_wait_directive (code
);
4407 gfc_trans_omp_directive (gfc_code
*code
)
4411 case EXEC_OMP_ATOMIC
:
4412 return gfc_trans_omp_atomic (code
);
4413 case EXEC_OMP_BARRIER
:
4414 return gfc_trans_omp_barrier ();
4415 case EXEC_OMP_CANCEL
:
4416 return gfc_trans_omp_cancel (code
);
4417 case EXEC_OMP_CANCELLATION_POINT
:
4418 return gfc_trans_omp_cancellation_point (code
);
4419 case EXEC_OMP_CRITICAL
:
4420 return gfc_trans_omp_critical (code
);
4421 case EXEC_OMP_DISTRIBUTE
:
4424 return gfc_trans_omp_do (code
, code
->op
, NULL
, code
->ext
.omp_clauses
,
4426 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO
:
4427 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD
:
4428 case EXEC_OMP_DISTRIBUTE_SIMD
:
4429 return gfc_trans_omp_distribute (code
, NULL
);
4430 case EXEC_OMP_DO_SIMD
:
4431 return gfc_trans_omp_do_simd (code
, NULL
, NULL
, NULL_TREE
);
4432 case EXEC_OMP_FLUSH
:
4433 return gfc_trans_omp_flush ();
4434 case EXEC_OMP_MASTER
:
4435 return gfc_trans_omp_master (code
);
4436 case EXEC_OMP_ORDERED
:
4437 return gfc_trans_omp_ordered (code
);
4438 case EXEC_OMP_PARALLEL
:
4439 return gfc_trans_omp_parallel (code
);
4440 case EXEC_OMP_PARALLEL_DO
:
4441 return gfc_trans_omp_parallel_do (code
, NULL
, NULL
);
4442 case EXEC_OMP_PARALLEL_DO_SIMD
:
4443 return gfc_trans_omp_parallel_do_simd (code
, NULL
, NULL
);
4444 case EXEC_OMP_PARALLEL_SECTIONS
:
4445 return gfc_trans_omp_parallel_sections (code
);
4446 case EXEC_OMP_PARALLEL_WORKSHARE
:
4447 return gfc_trans_omp_parallel_workshare (code
);
4448 case EXEC_OMP_SECTIONS
:
4449 return gfc_trans_omp_sections (code
, code
->ext
.omp_clauses
);
4450 case EXEC_OMP_SINGLE
:
4451 return gfc_trans_omp_single (code
, code
->ext
.omp_clauses
);
4452 case EXEC_OMP_TARGET
:
4453 case EXEC_OMP_TARGET_TEAMS
:
4454 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE
:
4455 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4456 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4457 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD
:
4458 return gfc_trans_omp_target (code
);
4459 case EXEC_OMP_TARGET_DATA
:
4460 return gfc_trans_omp_target_data (code
);
4461 case EXEC_OMP_TARGET_UPDATE
:
4462 return gfc_trans_omp_target_update (code
);
4464 return gfc_trans_omp_task (code
);
4465 case EXEC_OMP_TASKGROUP
:
4466 return gfc_trans_omp_taskgroup (code
);
4467 case EXEC_OMP_TASKWAIT
:
4468 return gfc_trans_omp_taskwait ();
4469 case EXEC_OMP_TASKYIELD
:
4470 return gfc_trans_omp_taskyield ();
4471 case EXEC_OMP_TEAMS
:
4472 case EXEC_OMP_TEAMS_DISTRIBUTE
:
4473 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO
:
4474 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD
:
4475 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD
:
4476 return gfc_trans_omp_teams (code
, NULL
);
4477 case EXEC_OMP_WORKSHARE
:
4478 return gfc_trans_omp_workshare (code
, code
->ext
.omp_clauses
);
4485 gfc_trans_omp_declare_simd (gfc_namespace
*ns
)
4490 gfc_omp_declare_simd
*ods
;
4491 for (ods
= ns
->omp_declare_simd
; ods
; ods
= ods
->next
)
4493 tree c
= gfc_trans_omp_clauses (NULL
, ods
->clauses
, ods
->where
, true);
4494 tree fndecl
= ns
->proc_name
->backend_decl
;
4496 c
= tree_cons (NULL_TREE
, c
, NULL_TREE
);
4497 c
= build_tree_list (get_identifier ("omp declare simd"), c
);
4498 TREE_CHAIN (c
) = DECL_ATTRIBUTES (fndecl
);
4499 DECL_ATTRIBUTES (fndecl
) = c
;