1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2020 Free Software Foundation, Inc.
9 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
63 /* Lowering of OMP parallel and workshare constructs proceeds in two
64 phases. The first phase scans the function looking for OMP statements
65 and then for variables that must be replaced to satisfy data sharing
66 clauses. The second phase expands code for the constructs, as well as
67 re-gimplifying things when variables have been replaced with complex
70 Final code generation is done by pass_expand_omp. The flowgraph is
71 scanned for regions which are then moved to a new
72 function, to be invoked by the thread library, or offloaded. */
74 /* Context structure. Used to store information about each parallel
75 directive in the code. */
79 /* This field must be at the beginning, as we do "inheritance": Some
80 callback functions for tree-inline.c (e.g., omp_copy_decl)
81 receive a copy_body_data pointer that is up-casted to an
82 omp_context pointer. */
85 /* The tree of contexts corresponding to the encountered constructs. */
86 struct omp_context
*outer
;
89 /* Map variables to fields in a structure that allows communication
90 between sending and receiving threads. */
96 /* These are used just by task contexts, if task firstprivate fn is
97 needed. srecord_type is used to communicate from the thread
98 that encountered the task construct to task firstprivate fn,
99 record_type is allocated by GOMP_task, initialized by task firstprivate
100 fn and passed to the task body fn. */
101 splay_tree sfield_map
;
104 /* A chain of variables to add to the top-level block surrounding the
105 construct. In the case of a parallel, this is in the child function. */
108 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
109 barriers should jump to during omplower pass. */
112 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
116 /* For task reductions registered in this context, a vector containing
117 the length of the private copies block (if constant, otherwise NULL)
118 and then offsets (if constant, otherwise NULL) for each entry. */
119 vec
<tree
> task_reductions
;
121 /* A hash map from the reduction clauses to the registered array
123 hash_map
<tree
, unsigned> *task_reduction_map
;
125 /* And a hash map from the lastprivate(conditional:) variables to their
126 corresponding tracking loop iteration variables. */
127 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
129 /* A tree_list of the reduction clauses in this context. This is
130 only used for checking the consistency of OpenACC reduction
131 clauses in scan_omp_for and is not guaranteed to contain a valid
132 value outside of this function. */
133 tree local_reduction_clauses
;
135 /* A tree_list of the reduction clauses in outer contexts. This is
136 only used for checking the consistency of OpenACC reduction
137 clauses in scan_omp_for and is not guaranteed to contain a valid
138 value outside of this function. */
139 tree outer_reduction_clauses
;
141 /* Nesting depth of this context. Used to beautify error messages re
142 invalid gotos. The outermost ctx is depth 1, with depth 0 being
143 reserved for the main body of the function. */
146 /* True if this parallel directive is nested within another. */
149 /* True if this construct can be cancelled. */
152 /* True if lower_omp_1 should look up lastprivate conditional in parent
154 bool combined_into_simd_safelen1
;
156 /* True if there is nested scan context with inclusive clause. */
159 /* True if there is nested scan context with exclusive clause. */
162 /* True in the second simd loop of for simd with inscan reductions. */
163 bool for_simd_scan_phase
;
165 /* True if there is order(concurrent) clause on the construct. */
166 bool order_concurrent
;
168 /* True if there is bind clause on the construct (i.e. a loop construct). */
172 static splay_tree all_contexts
;
173 static int taskreg_nesting_level
;
174 static int target_nesting_level
;
175 static bitmap task_shared_vars
;
176 static bitmap global_nonaddressable_vars
;
177 static vec
<omp_context
*> taskreg_contexts
;
179 static void scan_omp (gimple_seq
*, omp_context
*);
180 static tree
scan_omp_1_op (tree
*, int *, void *);
/* Convenience macro: the GIMPLE statement codes that are pure containers,
   whose sub-statements should be walked by walk_gimple_stmt callbacks.  */
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
192 /* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
196 is_oacc_parallel_or_serial (omp_context
*ctx
)
198 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
199 return ((outer_type
== GIMPLE_OMP_TARGET
)
200 && ((gimple_omp_target_kind (ctx
->stmt
)
201 == GF_OMP_TARGET_KIND_OACC_PARALLEL
)
202 || (gimple_omp_target_kind (ctx
->stmt
)
203 == GF_OMP_TARGET_KIND_OACC_SERIAL
)));
206 /* Return true if CTX corresponds to an oacc kernels region. */
209 is_oacc_kernels (omp_context
*ctx
)
211 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
212 return ((outer_type
== GIMPLE_OMP_TARGET
)
213 && (gimple_omp_target_kind (ctx
->stmt
)
214 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
217 /* If DECL is the artificial dummy VAR_DECL created for non-static
218 data member privatization, return the underlying "this" parameter,
219 otherwise return NULL. */
222 omp_member_access_dummy_var (tree decl
)
225 || !DECL_ARTIFICIAL (decl
)
226 || !DECL_IGNORED_P (decl
)
227 || !DECL_HAS_VALUE_EXPR_P (decl
)
228 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
231 tree v
= DECL_VALUE_EXPR (decl
);
232 if (TREE_CODE (v
) != COMPONENT_REF
)
236 switch (TREE_CODE (v
))
242 case POINTER_PLUS_EXPR
:
243 v
= TREE_OPERAND (v
, 0);
246 if (DECL_CONTEXT (v
) == current_function_decl
247 && DECL_ARTIFICIAL (v
)
248 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
256 /* Helper for unshare_and_remap, called through walk_tree. */
259 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
261 tree
*pair
= (tree
*) data
;
264 *tp
= unshare_expr (pair
[1]);
267 else if (IS_TYPE_OR_DECL_P (*tp
))
272 /* Return unshare_expr (X) with all occurrences of FROM
276 unshare_and_remap (tree x
, tree from
, tree to
)
278 tree pair
[2] = { from
, to
};
279 x
= unshare_expr (x
);
280 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
284 /* Convenience function for calling scan_omp_1_op on tree operands. */
287 scan_omp_op (tree
*tp
, omp_context
*ctx
)
289 struct walk_stmt_info wi
;
291 memset (&wi
, 0, sizeof (wi
));
293 wi
.want_locations
= true;
295 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
298 static void lower_omp (gimple_seq
*, omp_context
*);
299 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
300 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
302 /* Return true if CTX is for an omp parallel. */
305 is_parallel_ctx (omp_context
*ctx
)
307 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
311 /* Return true if CTX is for an omp task. */
314 is_task_ctx (omp_context
*ctx
)
316 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
320 /* Return true if CTX is for an omp taskloop. */
323 is_taskloop_ctx (omp_context
*ctx
)
325 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
326 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
330 /* Return true if CTX is for a host omp teams. */
333 is_host_teams_ctx (omp_context
*ctx
)
335 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
336 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
339 /* Return true if CTX is for an omp parallel or omp task or host omp teams
340 (the last one is strictly not a task region in OpenMP speak, but we
341 need to treat it similarly). */
344 is_taskreg_ctx (omp_context
*ctx
)
346 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
349 /* Return true if EXPR is variable sized. */
352 is_variable_sized (const_tree expr
)
354 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
357 /* Lookup variables. The "maybe" form
358 allows for the variable form to not have been entered, otherwise we
359 assert that the variable must have been entered. */
362 lookup_decl (tree var
, omp_context
*ctx
)
364 tree
*n
= ctx
->cb
.decl_map
->get (var
);
369 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
371 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
372 return n
? *n
: NULL_TREE
;
376 lookup_field (tree var
, omp_context
*ctx
)
379 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
380 return (tree
) n
->value
;
384 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
387 n
= splay_tree_lookup (ctx
->sfield_map
388 ? ctx
->sfield_map
: ctx
->field_map
, key
);
389 return (tree
) n
->value
;
393 lookup_sfield (tree var
, omp_context
*ctx
)
395 return lookup_sfield ((splay_tree_key
) var
, ctx
);
399 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
402 n
= splay_tree_lookup (ctx
->field_map
, key
);
403 return n
? (tree
) n
->value
: NULL_TREE
;
407 maybe_lookup_field (tree var
, omp_context
*ctx
)
409 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
412 /* Return true if DECL should be copied by pointer. SHARED_CTX is
413 the parallel context if DECL is to be shared. */
416 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
418 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
419 || TYPE_ATOMIC (TREE_TYPE (decl
)))
422 /* We can only use copy-in/copy-out semantics for shared variables
423 when we know the value is not accessible from an outer scope. */
426 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
428 /* ??? Trivially accessible from anywhere. But why would we even
429 be passing an address in this case? Should we simply assert
430 this to be false, or should we have a cleanup pass that removes
431 these from the list of mappings? */
432 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
435 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
436 without analyzing the expression whether or not its location
437 is accessible to anyone else. In the case of nested parallel
438 regions it certainly may be. */
439 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
442 /* Do not use copy-in/copy-out for variables that have their
444 if (is_global_var (decl
))
446 /* For file scope vars, track whether we've seen them as
447 non-addressable initially and in that case, keep the same
448 answer for the duration of the pass, even when they are made
449 addressable later on e.g. through reduction expansion. Global
450 variables which weren't addressable before the pass will not
451 have their privatized copies address taken. See PR91216. */
452 if (!TREE_ADDRESSABLE (decl
))
454 if (!global_nonaddressable_vars
)
455 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
456 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
458 else if (!global_nonaddressable_vars
459 || !bitmap_bit_p (global_nonaddressable_vars
,
463 else if (TREE_ADDRESSABLE (decl
))
466 /* lower_send_shared_vars only uses copy-in, but not copy-out
468 if (TREE_READONLY (decl
)
469 || ((TREE_CODE (decl
) == RESULT_DECL
470 || TREE_CODE (decl
) == PARM_DECL
)
471 && DECL_BY_REFERENCE (decl
)))
474 /* Disallow copy-in/out in nested parallel if
475 decl is shared in outer parallel, otherwise
476 each thread could store the shared variable
477 in its own copy-in location, making the
478 variable no longer really shared. */
479 if (shared_ctx
->is_nested
)
483 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
484 if ((is_taskreg_ctx (up
)
485 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
486 && is_gimple_omp_offloaded (up
->stmt
)))
487 && maybe_lookup_decl (decl
, up
))
494 if (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
)
496 for (c
= gimple_omp_target_clauses (up
->stmt
);
497 c
; c
= OMP_CLAUSE_CHAIN (c
))
498 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
499 && OMP_CLAUSE_DECL (c
) == decl
)
503 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
504 c
; c
= OMP_CLAUSE_CHAIN (c
))
505 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
506 && OMP_CLAUSE_DECL (c
) == decl
)
510 goto maybe_mark_addressable_and_ret
;
514 /* For tasks avoid using copy-in/out. As tasks can be
515 deferred or executed in different thread, when GOMP_task
516 returns, the task hasn't necessarily terminated. */
517 if (is_task_ctx (shared_ctx
))
520 maybe_mark_addressable_and_ret
:
521 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
522 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
524 /* Taking address of OUTER in lower_send_shared_vars
525 might need regimplification of everything that uses the
527 if (!task_shared_vars
)
528 task_shared_vars
= BITMAP_ALLOC (NULL
);
529 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
530 TREE_ADDRESSABLE (outer
) = 1;
539 /* Construct a new automatic decl similar to VAR. */
542 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
544 tree copy
= copy_var_decl (var
, name
, type
);
546 DECL_CONTEXT (copy
) = current_function_decl
;
547 DECL_CHAIN (copy
) = ctx
->block_vars
;
548 /* If VAR is listed in task_shared_vars, it means it wasn't
549 originally addressable and is just because task needs to take
550 it's address. But we don't need to take address of privatizations
552 if (TREE_ADDRESSABLE (var
)
553 && ((task_shared_vars
554 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
555 || (global_nonaddressable_vars
556 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
557 TREE_ADDRESSABLE (copy
) = 0;
558 ctx
->block_vars
= copy
;
564 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
566 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
569 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
572 omp_build_component_ref (tree obj
, tree field
)
574 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
575 if (TREE_THIS_VOLATILE (field
))
576 TREE_THIS_VOLATILE (ret
) |= 1;
577 if (TREE_READONLY (field
))
578 TREE_READONLY (ret
) |= 1;
582 /* Build tree nodes to access the field for VAR on the receiver side. */
585 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
587 tree x
, field
= lookup_field (var
, ctx
);
589 /* If the receiver record type was remapped in the child function,
590 remap the field into the new record type. */
591 x
= maybe_lookup_field (field
, ctx
);
595 x
= build_simple_mem_ref (ctx
->receiver_decl
);
596 TREE_THIS_NOTRAP (x
) = 1;
597 x
= omp_build_component_ref (x
, field
);
600 x
= build_simple_mem_ref (x
);
601 TREE_THIS_NOTRAP (x
) = 1;
607 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
608 of a parallel, this is a component reference; for workshare constructs
609 this is some variable. */
612 build_outer_var_ref (tree var
, omp_context
*ctx
,
613 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
616 omp_context
*outer
= ctx
->outer
;
617 while (outer
&& gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
618 outer
= outer
->outer
;
620 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
622 else if (is_variable_sized (var
))
624 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
625 x
= build_outer_var_ref (x
, ctx
, code
);
626 x
= build_simple_mem_ref (x
);
628 else if (is_taskreg_ctx (ctx
))
630 bool by_ref
= use_pointer_for_field (var
, NULL
);
631 x
= build_receiver_ref (var
, by_ref
, ctx
);
633 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
634 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
636 || (code
== OMP_CLAUSE_PRIVATE
637 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
638 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
639 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
641 /* #pragma omp simd isn't a worksharing construct, and can reference
642 even private vars in its linear etc. clauses.
643 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
644 to private vars in all worksharing constructs. */
646 if (outer
&& is_taskreg_ctx (outer
))
647 x
= lookup_decl (var
, outer
);
649 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
653 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
657 = splay_tree_lookup (outer
->field_map
,
658 (splay_tree_key
) &DECL_UID (var
));
661 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
664 x
= lookup_decl (var
, outer
);
668 tree field
= (tree
) n
->value
;
669 /* If the receiver record type was remapped in the child function,
670 remap the field into the new record type. */
671 x
= maybe_lookup_field (field
, outer
);
675 x
= build_simple_mem_ref (outer
->receiver_decl
);
676 x
= omp_build_component_ref (x
, field
);
677 if (use_pointer_for_field (var
, outer
))
678 x
= build_simple_mem_ref (x
);
682 x
= lookup_decl (var
, outer
);
683 else if (omp_is_reference (var
))
684 /* This can happen with orphaned constructs. If var is reference, it is
685 possible it is shared and as such valid. */
687 else if (omp_member_access_dummy_var (var
))
694 tree t
= omp_member_access_dummy_var (var
);
697 x
= DECL_VALUE_EXPR (var
);
698 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
700 x
= unshare_and_remap (x
, t
, o
);
702 x
= unshare_expr (x
);
706 if (omp_is_reference (var
))
707 x
= build_simple_mem_ref (x
);
712 /* Build tree nodes to access the field for VAR on the sender side. */
715 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
717 tree field
= lookup_sfield (key
, ctx
);
718 return omp_build_component_ref (ctx
->sender_decl
, field
);
722 build_sender_ref (tree var
, omp_context
*ctx
)
724 return build_sender_ref ((splay_tree_key
) var
, ctx
);
727 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
728 BASE_POINTERS_RESTRICT, declare the field with restrict. */
731 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
733 tree field
, type
, sfield
= NULL_TREE
;
734 splay_tree_key key
= (splay_tree_key
) var
;
736 if ((mask
& 16) != 0)
738 key
= (splay_tree_key
) &DECL_NAME (var
);
739 gcc_checking_assert (key
!= (splay_tree_key
) var
);
743 key
= (splay_tree_key
) &DECL_UID (var
);
744 gcc_checking_assert (key
!= (splay_tree_key
) var
);
746 gcc_assert ((mask
& 1) == 0
747 || !splay_tree_lookup (ctx
->field_map
, key
));
748 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
749 || !splay_tree_lookup (ctx
->sfield_map
, key
));
750 gcc_assert ((mask
& 3) == 3
751 || !is_gimple_omp_oacc (ctx
->stmt
));
753 type
= TREE_TYPE (var
);
754 if ((mask
& 16) != 0)
755 type
= lang_hooks
.decls
.omp_array_data (var
, true);
757 /* Prevent redeclaring the var in the split-off function with a restrict
758 pointer type. Note that we only clear type itself, restrict qualifiers in
759 the pointed-to type will be ignored by points-to analysis. */
760 if (POINTER_TYPE_P (type
)
761 && TYPE_RESTRICT (type
))
762 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
766 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
767 type
= build_pointer_type (build_pointer_type (type
));
770 type
= build_pointer_type (type
);
771 else if ((mask
& 3) == 1 && omp_is_reference (var
))
772 type
= TREE_TYPE (type
);
774 field
= build_decl (DECL_SOURCE_LOCATION (var
),
775 FIELD_DECL
, DECL_NAME (var
), type
);
777 /* Remember what variable this field was created for. This does have a
778 side effect of making dwarf2out ignore this member, so for helpful
779 debugging we clear it later in delete_omp_context. */
780 DECL_ABSTRACT_ORIGIN (field
) = var
;
781 if ((mask
& 16) == 0 && type
== TREE_TYPE (var
))
783 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
784 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
785 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
788 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
792 insert_field_into_struct (ctx
->record_type
, field
);
793 if (ctx
->srecord_type
)
795 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
796 FIELD_DECL
, DECL_NAME (var
), type
);
797 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
798 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
799 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
800 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
801 insert_field_into_struct (ctx
->srecord_type
, sfield
);
806 if (ctx
->srecord_type
== NULL_TREE
)
810 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
811 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
812 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
814 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
815 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
816 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
817 insert_field_into_struct (ctx
->srecord_type
, sfield
);
818 splay_tree_insert (ctx
->sfield_map
,
819 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
820 (splay_tree_value
) sfield
);
824 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
825 : ctx
->srecord_type
, field
);
829 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
830 if ((mask
& 2) && ctx
->sfield_map
)
831 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
835 install_var_local (tree var
, omp_context
*ctx
)
837 tree new_var
= omp_copy_decl_1 (var
, ctx
);
838 insert_decl_map (&ctx
->cb
, var
, new_var
);
842 /* Adjust the replacement for DECL in CTX for the new context. This means
843 copying the DECL_VALUE_EXPR, and fixing up the type. */
846 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
850 new_decl
= lookup_decl (decl
, ctx
);
852 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
854 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
855 && DECL_HAS_VALUE_EXPR_P (decl
))
857 tree ve
= DECL_VALUE_EXPR (decl
);
858 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
859 SET_DECL_VALUE_EXPR (new_decl
, ve
);
860 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
863 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
865 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
866 if (size
== error_mark_node
)
867 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
868 DECL_SIZE (new_decl
) = size
;
870 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
871 if (size
== error_mark_node
)
872 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
873 DECL_SIZE_UNIT (new_decl
) = size
;
877 /* The callback for remap_decl. Search all containing contexts for a
878 mapping of the variable; this avoids having to duplicate the splay
879 tree ahead of time. We know a mapping doesn't already exist in the
880 given context. Create new mappings to implement default semantics. */
883 omp_copy_decl (tree var
, copy_body_data
*cb
)
885 omp_context
*ctx
= (omp_context
*) cb
;
888 if (TREE_CODE (var
) == LABEL_DECL
)
890 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
892 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
893 DECL_CONTEXT (new_var
) = current_function_decl
;
894 insert_decl_map (&ctx
->cb
, var
, new_var
);
898 while (!is_taskreg_ctx (ctx
))
903 new_var
= maybe_lookup_decl (var
, ctx
);
908 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
911 return error_mark_node
;
914 /* Create a new context, with OUTER_CTX being the surrounding context. */
917 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
919 omp_context
*ctx
= XCNEW (omp_context
);
921 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
922 (splay_tree_value
) ctx
);
927 ctx
->outer
= outer_ctx
;
928 ctx
->cb
= outer_ctx
->cb
;
929 ctx
->cb
.block
= NULL
;
930 ctx
->depth
= outer_ctx
->depth
+ 1;
934 ctx
->cb
.src_fn
= current_function_decl
;
935 ctx
->cb
.dst_fn
= current_function_decl
;
936 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
937 gcc_checking_assert (ctx
->cb
.src_node
);
938 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
939 ctx
->cb
.src_cfun
= cfun
;
940 ctx
->cb
.copy_decl
= omp_copy_decl
;
941 ctx
->cb
.eh_lp_nr
= 0;
942 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
943 ctx
->cb
.adjust_array_error_bounds
= true;
944 ctx
->cb
.dont_remap_vla_if_no_change
= true;
948 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
953 static gimple_seq
maybe_catch_exception (gimple_seq
);
955 /* Finalize task copyfn. */
958 finalize_task_copyfn (gomp_task
*task_stmt
)
960 struct function
*child_cfun
;
962 gimple_seq seq
= NULL
, new_seq
;
965 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
966 if (child_fn
== NULL_TREE
)
969 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
970 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
972 push_cfun (child_cfun
);
973 bind
= gimplify_body (child_fn
, false);
974 gimple_seq_add_stmt (&seq
, bind
);
975 new_seq
= maybe_catch_exception (seq
);
978 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
980 gimple_seq_add_stmt (&seq
, bind
);
982 gimple_set_body (child_fn
, seq
);
985 /* Inform the callgraph about the new function. */
986 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
987 node
->parallelized_function
= 1;
988 cgraph_node::add_new_function (child_fn
, false);
991 /* Destroy a omp_context data structures. Called through the splay tree
992 value delete callback. */
995 delete_omp_context (splay_tree_value value
)
997 omp_context
*ctx
= (omp_context
*) value
;
999 delete ctx
->cb
.decl_map
;
1002 splay_tree_delete (ctx
->field_map
);
1003 if (ctx
->sfield_map
)
1004 splay_tree_delete (ctx
->sfield_map
);
1006 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1007 it produces corrupt debug information. */
1008 if (ctx
->record_type
)
1011 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
1012 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1014 if (ctx
->srecord_type
)
1017 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
1018 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1021 if (is_task_ctx (ctx
))
1022 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
1024 if (ctx
->task_reduction_map
)
1026 ctx
->task_reductions
.release ();
1027 delete ctx
->task_reduction_map
;
1030 delete ctx
->lastprivate_conditional_map
;
1035 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1039 fixup_child_record_type (omp_context
*ctx
)
1041 tree f
, type
= ctx
->record_type
;
1043 if (!ctx
->receiver_decl
)
1045 /* ??? It isn't sufficient to just call remap_type here, because
1046 variably_modified_type_p doesn't work the way we expect for
1047 record types. Testing each field for whether it needs remapping
1048 and creating a new record by hand works, however. */
1049 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1050 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1054 tree name
, new_fields
= NULL
;
1056 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1057 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1058 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1059 TYPE_DECL
, name
, type
);
1060 TYPE_NAME (type
) = name
;
1062 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1064 tree new_f
= copy_node (f
);
1065 DECL_CONTEXT (new_f
) = type
;
1066 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1067 DECL_CHAIN (new_f
) = new_fields
;
1068 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1069 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1071 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1075 /* Arrange to be able to look up the receiver field
1076 given the sender field. */
1077 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1078 (splay_tree_value
) new_f
);
1080 TYPE_FIELDS (type
) = nreverse (new_fields
);
1084 /* In a target region we never modify any of the pointers in *.omp_data_i,
1085 so attempt to help the optimizers. */
1086 if (is_gimple_omp_offloaded (ctx
->stmt
))
1087 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1089 TREE_TYPE (ctx
->receiver_decl
)
1090 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1093 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1094 specified by CLAUSES. */
1097 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1100 bool scan_array_reductions
= false;
1102 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1106 switch (OMP_CLAUSE_CODE (c
))
1108 case OMP_CLAUSE_PRIVATE
:
1109 decl
= OMP_CLAUSE_DECL (c
);
1110 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1112 else if (!is_variable_sized (decl
))
1113 install_var_local (decl
, ctx
);
1116 case OMP_CLAUSE_SHARED
:
1117 decl
= OMP_CLAUSE_DECL (c
);
1118 /* Ignore shared directives in teams construct inside of
1119 target construct. */
1120 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1121 && !is_host_teams_ctx (ctx
))
1123 /* Global variables don't need to be copied,
1124 the receiver side will use them directly. */
1125 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1126 if (is_global_var (odecl
))
1128 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1131 gcc_assert (is_taskreg_ctx (ctx
));
1132 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1133 || !is_variable_sized (decl
));
1134 /* Global variables don't need to be copied,
1135 the receiver side will use them directly. */
1136 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1138 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1140 use_pointer_for_field (decl
, ctx
);
1143 by_ref
= use_pointer_for_field (decl
, NULL
);
1144 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1145 || TREE_ADDRESSABLE (decl
)
1147 || omp_is_reference (decl
))
1149 by_ref
= use_pointer_for_field (decl
, ctx
);
1150 install_var_field (decl
, by_ref
, 3, ctx
);
1151 install_var_local (decl
, ctx
);
1154 /* We don't need to copy const scalar vars back. */
1155 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1158 case OMP_CLAUSE_REDUCTION
:
1159 if (is_oacc_parallel_or_serial (ctx
) || is_oacc_kernels (ctx
))
1160 ctx
->local_reduction_clauses
1161 = tree_cons (NULL
, c
, ctx
->local_reduction_clauses
);
1164 case OMP_CLAUSE_IN_REDUCTION
:
1165 decl
= OMP_CLAUSE_DECL (c
);
1166 if (TREE_CODE (decl
) == MEM_REF
)
1168 tree t
= TREE_OPERAND (decl
, 0);
1169 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1170 t
= TREE_OPERAND (t
, 0);
1171 if (TREE_CODE (t
) == INDIRECT_REF
1172 || TREE_CODE (t
) == ADDR_EXPR
)
1173 t
= TREE_OPERAND (t
, 0);
1174 install_var_local (t
, ctx
);
1175 if (is_taskreg_ctx (ctx
)
1176 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1177 || (is_task_ctx (ctx
)
1178 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1179 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1180 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1181 == POINTER_TYPE
)))))
1182 && !is_variable_sized (t
)
1183 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1184 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1185 && !is_task_ctx (ctx
))))
1187 by_ref
= use_pointer_for_field (t
, NULL
);
1188 if (is_task_ctx (ctx
)
1189 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1190 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1192 install_var_field (t
, false, 1, ctx
);
1193 install_var_field (t
, by_ref
, 2, ctx
);
1196 install_var_field (t
, by_ref
, 3, ctx
);
1200 if (is_task_ctx (ctx
)
1201 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1202 && OMP_CLAUSE_REDUCTION_TASK (c
)
1203 && is_parallel_ctx (ctx
)))
1205 /* Global variables don't need to be copied,
1206 the receiver side will use them directly. */
1207 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1209 by_ref
= use_pointer_for_field (decl
, ctx
);
1210 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1211 install_var_field (decl
, by_ref
, 3, ctx
);
1213 install_var_local (decl
, ctx
);
1216 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1217 && OMP_CLAUSE_REDUCTION_TASK (c
))
1219 install_var_local (decl
, ctx
);
1224 case OMP_CLAUSE_LASTPRIVATE
:
1225 /* Let the corresponding firstprivate clause create
1227 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1231 case OMP_CLAUSE_FIRSTPRIVATE
:
1232 case OMP_CLAUSE_LINEAR
:
1233 decl
= OMP_CLAUSE_DECL (c
);
1235 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1236 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1237 && is_gimple_omp_offloaded (ctx
->stmt
))
1239 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1240 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1241 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1242 install_var_field (decl
, true, 3, ctx
);
1244 install_var_field (decl
, false, 3, ctx
);
1246 if (is_variable_sized (decl
))
1248 if (is_task_ctx (ctx
))
1249 install_var_field (decl
, false, 1, ctx
);
1252 else if (is_taskreg_ctx (ctx
))
1255 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1256 by_ref
= use_pointer_for_field (decl
, NULL
);
1258 if (is_task_ctx (ctx
)
1259 && (global
|| by_ref
|| omp_is_reference (decl
)))
1261 install_var_field (decl
, false, 1, ctx
);
1263 install_var_field (decl
, by_ref
, 2, ctx
);
1266 install_var_field (decl
, by_ref
, 3, ctx
);
1268 install_var_local (decl
, ctx
);
1271 case OMP_CLAUSE_USE_DEVICE_PTR
:
1272 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1273 decl
= OMP_CLAUSE_DECL (c
);
1275 /* Fortran array descriptors. */
1276 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1277 install_var_field (decl
, false, 19, ctx
);
1278 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1279 && !omp_is_reference (decl
)
1280 && !omp_is_allocatable_or_ptr (decl
))
1281 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1282 install_var_field (decl
, true, 11, ctx
);
1284 install_var_field (decl
, false, 11, ctx
);
1285 if (DECL_SIZE (decl
)
1286 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1288 tree decl2
= DECL_VALUE_EXPR (decl
);
1289 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1290 decl2
= TREE_OPERAND (decl2
, 0);
1291 gcc_assert (DECL_P (decl2
));
1292 install_var_local (decl2
, ctx
);
1294 install_var_local (decl
, ctx
);
1297 case OMP_CLAUSE_IS_DEVICE_PTR
:
1298 decl
= OMP_CLAUSE_DECL (c
);
1301 case OMP_CLAUSE__LOOPTEMP_
:
1302 case OMP_CLAUSE__REDUCTEMP_
:
1303 gcc_assert (is_taskreg_ctx (ctx
));
1304 decl
= OMP_CLAUSE_DECL (c
);
1305 install_var_field (decl
, false, 3, ctx
);
1306 install_var_local (decl
, ctx
);
1309 case OMP_CLAUSE_COPYPRIVATE
:
1310 case OMP_CLAUSE_COPYIN
:
1311 decl
= OMP_CLAUSE_DECL (c
);
1312 by_ref
= use_pointer_for_field (decl
, NULL
);
1313 install_var_field (decl
, by_ref
, 3, ctx
);
1316 case OMP_CLAUSE_FINAL
:
1318 case OMP_CLAUSE_NUM_THREADS
:
1319 case OMP_CLAUSE_NUM_TEAMS
:
1320 case OMP_CLAUSE_THREAD_LIMIT
:
1321 case OMP_CLAUSE_DEVICE
:
1322 case OMP_CLAUSE_SCHEDULE
:
1323 case OMP_CLAUSE_DIST_SCHEDULE
:
1324 case OMP_CLAUSE_DEPEND
:
1325 case OMP_CLAUSE_PRIORITY
:
1326 case OMP_CLAUSE_GRAINSIZE
:
1327 case OMP_CLAUSE_NUM_TASKS
:
1328 case OMP_CLAUSE_NUM_GANGS
:
1329 case OMP_CLAUSE_NUM_WORKERS
:
1330 case OMP_CLAUSE_VECTOR_LENGTH
:
1332 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1336 case OMP_CLAUSE_FROM
:
1337 case OMP_CLAUSE_MAP
:
1339 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1340 decl
= OMP_CLAUSE_DECL (c
);
1341 /* Global variables with "omp declare target" attribute
1342 don't need to be copied, the receiver side will use them
1343 directly. However, global variables with "omp declare target link"
1344 attribute need to be copied. Or when ALWAYS modifier is used. */
1345 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1347 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1348 && (OMP_CLAUSE_MAP_KIND (c
)
1349 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1350 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1351 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1352 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1353 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1354 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
1355 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1356 && varpool_node::get_create (decl
)->offloadable
1357 && !lookup_attribute ("omp declare target link",
1358 DECL_ATTRIBUTES (decl
)))
1360 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1361 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1363 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1364 not offloaded; there is nothing to map for those. */
1365 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1366 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1367 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1370 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1371 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1372 || (OMP_CLAUSE_MAP_KIND (c
)
1373 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1375 if (TREE_CODE (decl
) == COMPONENT_REF
1376 || (TREE_CODE (decl
) == INDIRECT_REF
1377 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1378 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1379 == REFERENCE_TYPE
)))
1381 if (DECL_SIZE (decl
)
1382 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1384 tree decl2
= DECL_VALUE_EXPR (decl
);
1385 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1386 decl2
= TREE_OPERAND (decl2
, 0);
1387 gcc_assert (DECL_P (decl2
));
1388 install_var_local (decl2
, ctx
);
1390 install_var_local (decl
, ctx
);
1395 if (DECL_SIZE (decl
)
1396 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1398 tree decl2
= DECL_VALUE_EXPR (decl
);
1399 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1400 decl2
= TREE_OPERAND (decl2
, 0);
1401 gcc_assert (DECL_P (decl2
));
1402 install_var_field (decl2
, true, 3, ctx
);
1403 install_var_local (decl2
, ctx
);
1404 install_var_local (decl
, ctx
);
1408 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1409 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1410 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1411 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1412 install_var_field (decl
, true, 7, ctx
);
1414 install_var_field (decl
, true, 3, ctx
);
1415 if (is_gimple_omp_offloaded (ctx
->stmt
)
1416 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1417 install_var_local (decl
, ctx
);
1422 tree base
= get_base_address (decl
);
1423 tree nc
= OMP_CLAUSE_CHAIN (c
);
1426 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1427 && OMP_CLAUSE_DECL (nc
) == base
1428 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1429 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1431 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1432 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1438 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1439 decl
= OMP_CLAUSE_DECL (c
);
1441 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1442 (splay_tree_key
) decl
));
1444 = build_decl (OMP_CLAUSE_LOCATION (c
),
1445 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1446 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1447 insert_field_into_struct (ctx
->record_type
, field
);
1448 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1449 (splay_tree_value
) field
);
1454 case OMP_CLAUSE_ORDER
:
1455 ctx
->order_concurrent
= true;
1458 case OMP_CLAUSE_BIND
:
1462 case OMP_CLAUSE_NOWAIT
:
1463 case OMP_CLAUSE_ORDERED
:
1464 case OMP_CLAUSE_COLLAPSE
:
1465 case OMP_CLAUSE_UNTIED
:
1466 case OMP_CLAUSE_MERGEABLE
:
1467 case OMP_CLAUSE_PROC_BIND
:
1468 case OMP_CLAUSE_SAFELEN
:
1469 case OMP_CLAUSE_SIMDLEN
:
1470 case OMP_CLAUSE_THREADS
:
1471 case OMP_CLAUSE_SIMD
:
1472 case OMP_CLAUSE_NOGROUP
:
1473 case OMP_CLAUSE_DEFAULTMAP
:
1474 case OMP_CLAUSE_ASYNC
:
1475 case OMP_CLAUSE_WAIT
:
1476 case OMP_CLAUSE_GANG
:
1477 case OMP_CLAUSE_WORKER
:
1478 case OMP_CLAUSE_VECTOR
:
1479 case OMP_CLAUSE_INDEPENDENT
:
1480 case OMP_CLAUSE_AUTO
:
1481 case OMP_CLAUSE_SEQ
:
1482 case OMP_CLAUSE_TILE
:
1483 case OMP_CLAUSE__SIMT_
:
1484 case OMP_CLAUSE_DEFAULT
:
1485 case OMP_CLAUSE_NONTEMPORAL
:
1486 case OMP_CLAUSE_IF_PRESENT
:
1487 case OMP_CLAUSE_FINALIZE
:
1488 case OMP_CLAUSE_TASK_REDUCTION
:
1491 case OMP_CLAUSE_ALIGNED
:
1492 decl
= OMP_CLAUSE_DECL (c
);
1493 if (is_global_var (decl
)
1494 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1495 install_var_local (decl
, ctx
);
1498 case OMP_CLAUSE__CONDTEMP_
:
1499 decl
= OMP_CLAUSE_DECL (c
);
1500 if (is_parallel_ctx (ctx
))
1502 install_var_field (decl
, false, 3, ctx
);
1503 install_var_local (decl
, ctx
);
1505 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1506 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1507 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1508 install_var_local (decl
, ctx
);
1511 case OMP_CLAUSE__CACHE_
:
1517 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1519 switch (OMP_CLAUSE_CODE (c
))
1521 case OMP_CLAUSE_LASTPRIVATE
:
1522 /* Let the corresponding firstprivate clause create
1524 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1525 scan_array_reductions
= true;
1526 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1530 case OMP_CLAUSE_FIRSTPRIVATE
:
1531 case OMP_CLAUSE_PRIVATE
:
1532 case OMP_CLAUSE_LINEAR
:
1533 case OMP_CLAUSE_IS_DEVICE_PTR
:
1534 decl
= OMP_CLAUSE_DECL (c
);
1535 if (is_variable_sized (decl
))
1537 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1538 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1539 && is_gimple_omp_offloaded (ctx
->stmt
))
1541 tree decl2
= DECL_VALUE_EXPR (decl
);
1542 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1543 decl2
= TREE_OPERAND (decl2
, 0);
1544 gcc_assert (DECL_P (decl2
));
1545 install_var_local (decl2
, ctx
);
1546 fixup_remapped_decl (decl2
, ctx
, false);
1548 install_var_local (decl
, ctx
);
1550 fixup_remapped_decl (decl
, ctx
,
1551 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1552 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1553 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1554 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1555 scan_array_reductions
= true;
1558 case OMP_CLAUSE_REDUCTION
:
1559 case OMP_CLAUSE_IN_REDUCTION
:
1560 decl
= OMP_CLAUSE_DECL (c
);
1561 if (TREE_CODE (decl
) != MEM_REF
)
1563 if (is_variable_sized (decl
))
1564 install_var_local (decl
, ctx
);
1565 fixup_remapped_decl (decl
, ctx
, false);
1567 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1568 scan_array_reductions
= true;
1571 case OMP_CLAUSE_TASK_REDUCTION
:
1572 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1573 scan_array_reductions
= true;
1576 case OMP_CLAUSE_SHARED
:
1577 /* Ignore shared directives in teams construct inside of
1578 target construct. */
1579 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1580 && !is_host_teams_ctx (ctx
))
1582 decl
= OMP_CLAUSE_DECL (c
);
1583 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1585 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1587 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1590 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1591 install_var_field (decl
, by_ref
, 11, ctx
);
1594 fixup_remapped_decl (decl
, ctx
, false);
1597 case OMP_CLAUSE_MAP
:
1598 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1600 decl
= OMP_CLAUSE_DECL (c
);
1602 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1603 && (OMP_CLAUSE_MAP_KIND (c
)
1604 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1605 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1606 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1607 && varpool_node::get_create (decl
)->offloadable
)
1611 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1612 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1613 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1614 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1616 tree new_decl
= lookup_decl (decl
, ctx
);
1617 TREE_TYPE (new_decl
)
1618 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1620 else if (DECL_SIZE (decl
)
1621 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1623 tree decl2
= DECL_VALUE_EXPR (decl
);
1624 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1625 decl2
= TREE_OPERAND (decl2
, 0);
1626 gcc_assert (DECL_P (decl2
));
1627 fixup_remapped_decl (decl2
, ctx
, false);
1628 fixup_remapped_decl (decl
, ctx
, true);
1631 fixup_remapped_decl (decl
, ctx
, false);
1635 case OMP_CLAUSE_COPYPRIVATE
:
1636 case OMP_CLAUSE_COPYIN
:
1637 case OMP_CLAUSE_DEFAULT
:
1639 case OMP_CLAUSE_NUM_THREADS
:
1640 case OMP_CLAUSE_NUM_TEAMS
:
1641 case OMP_CLAUSE_THREAD_LIMIT
:
1642 case OMP_CLAUSE_DEVICE
:
1643 case OMP_CLAUSE_SCHEDULE
:
1644 case OMP_CLAUSE_DIST_SCHEDULE
:
1645 case OMP_CLAUSE_NOWAIT
:
1646 case OMP_CLAUSE_ORDERED
:
1647 case OMP_CLAUSE_COLLAPSE
:
1648 case OMP_CLAUSE_UNTIED
:
1649 case OMP_CLAUSE_FINAL
:
1650 case OMP_CLAUSE_MERGEABLE
:
1651 case OMP_CLAUSE_PROC_BIND
:
1652 case OMP_CLAUSE_SAFELEN
:
1653 case OMP_CLAUSE_SIMDLEN
:
1654 case OMP_CLAUSE_ALIGNED
:
1655 case OMP_CLAUSE_DEPEND
:
1656 case OMP_CLAUSE__LOOPTEMP_
:
1657 case OMP_CLAUSE__REDUCTEMP_
:
1659 case OMP_CLAUSE_FROM
:
1660 case OMP_CLAUSE_PRIORITY
:
1661 case OMP_CLAUSE_GRAINSIZE
:
1662 case OMP_CLAUSE_NUM_TASKS
:
1663 case OMP_CLAUSE_THREADS
:
1664 case OMP_CLAUSE_SIMD
:
1665 case OMP_CLAUSE_NOGROUP
:
1666 case OMP_CLAUSE_DEFAULTMAP
:
1667 case OMP_CLAUSE_ORDER
:
1668 case OMP_CLAUSE_BIND
:
1669 case OMP_CLAUSE_USE_DEVICE_PTR
:
1670 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1671 case OMP_CLAUSE_NONTEMPORAL
:
1672 case OMP_CLAUSE_ASYNC
:
1673 case OMP_CLAUSE_WAIT
:
1674 case OMP_CLAUSE_NUM_GANGS
:
1675 case OMP_CLAUSE_NUM_WORKERS
:
1676 case OMP_CLAUSE_VECTOR_LENGTH
:
1677 case OMP_CLAUSE_GANG
:
1678 case OMP_CLAUSE_WORKER
:
1679 case OMP_CLAUSE_VECTOR
:
1680 case OMP_CLAUSE_INDEPENDENT
:
1681 case OMP_CLAUSE_AUTO
:
1682 case OMP_CLAUSE_SEQ
:
1683 case OMP_CLAUSE_TILE
:
1684 case OMP_CLAUSE__SIMT_
:
1685 case OMP_CLAUSE_IF_PRESENT
:
1686 case OMP_CLAUSE_FINALIZE
:
1687 case OMP_CLAUSE__CONDTEMP_
:
1690 case OMP_CLAUSE__CACHE_
:
1696 gcc_checking_assert (!scan_array_reductions
1697 || !is_gimple_omp_oacc (ctx
->stmt
));
1698 if (scan_array_reductions
)
1700 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1701 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1702 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1703 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1704 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1706 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1707 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1709 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1710 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1711 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1712 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1713 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1714 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1718 /* Create a new name for omp child function. Returns an identifier. */
1721 create_omp_child_function_name (bool task_copy
)
1723 return clone_function_name_numbered (current_function_decl
,
1724 task_copy
? "_omp_cpyfn" : "_omp_fn");
1727 /* Return true if CTX may belong to offloaded code: either if current function
1728 is offloaded, or any enclosing context corresponds to a target region. */
1731 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1733 if (cgraph_node::get (current_function_decl
)->offloadable
)
1735 for (; ctx
; ctx
= ctx
->outer
)
1736 if (is_gimple_omp_offloaded (ctx
->stmt
))
1741 /* Build a decl for the omp child function. It'll not contain a body
1742 yet, just the bare decl. */
1745 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1747 tree decl
, type
, name
, t
;
1749 name
= create_omp_child_function_name (task_copy
);
1751 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1752 ptr_type_node
, NULL_TREE
);
1754 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1756 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1758 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1761 ctx
->cb
.dst_fn
= decl
;
1763 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1765 TREE_STATIC (decl
) = 1;
1766 TREE_USED (decl
) = 1;
1767 DECL_ARTIFICIAL (decl
) = 1;
1768 DECL_IGNORED_P (decl
) = 0;
1769 TREE_PUBLIC (decl
) = 0;
1770 DECL_UNINLINABLE (decl
) = 1;
1771 DECL_EXTERNAL (decl
) = 0;
1772 DECL_CONTEXT (decl
) = NULL_TREE
;
1773 DECL_INITIAL (decl
) = make_node (BLOCK
);
1774 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1775 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1776 /* Remove omp declare simd attribute from the new attributes. */
1777 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1779 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1782 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1783 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1784 *p
= TREE_CHAIN (*p
);
1787 tree chain
= TREE_CHAIN (*p
);
1788 *p
= copy_node (*p
);
1789 p
= &TREE_CHAIN (*p
);
1793 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1794 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1795 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1796 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1797 DECL_FUNCTION_VERSIONED (decl
)
1798 = DECL_FUNCTION_VERSIONED (current_function_decl
);
1800 if (omp_maybe_offloaded_ctx (ctx
))
1802 cgraph_node::get_create (decl
)->offloadable
= 1;
1803 if (ENABLE_OFFLOADING
)
1804 g
->have_offload
= true;
1807 if (cgraph_node::get_create (decl
)->offloadable
1808 && !lookup_attribute ("omp declare target",
1809 DECL_ATTRIBUTES (current_function_decl
)))
1811 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1812 ? "omp target entrypoint"
1813 : "omp declare target");
1814 DECL_ATTRIBUTES (decl
)
1815 = tree_cons (get_identifier (target_attr
),
1816 NULL_TREE
, DECL_ATTRIBUTES (decl
));
1819 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1820 RESULT_DECL
, NULL_TREE
, void_type_node
);
1821 DECL_ARTIFICIAL (t
) = 1;
1822 DECL_IGNORED_P (t
) = 1;
1823 DECL_CONTEXT (t
) = decl
;
1824 DECL_RESULT (decl
) = t
;
1826 tree data_name
= get_identifier (".omp_data_i");
1827 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1829 DECL_ARTIFICIAL (t
) = 1;
1830 DECL_NAMELESS (t
) = 1;
1831 DECL_ARG_TYPE (t
) = ptr_type_node
;
1832 DECL_CONTEXT (t
) = current_function_decl
;
1834 TREE_READONLY (t
) = 1;
1835 DECL_ARGUMENTS (decl
) = t
;
1837 ctx
->receiver_decl
= t
;
1840 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1841 PARM_DECL
, get_identifier (".omp_data_o"),
1843 DECL_ARTIFICIAL (t
) = 1;
1844 DECL_NAMELESS (t
) = 1;
1845 DECL_ARG_TYPE (t
) = ptr_type_node
;
1846 DECL_CONTEXT (t
) = current_function_decl
;
1848 TREE_ADDRESSABLE (t
) = 1;
1849 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1850 DECL_ARGUMENTS (decl
) = t
;
1853 /* Allocate memory for the function structure. The call to
1854 allocate_struct_function clobbers CFUN, so we need to restore
1856 push_struct_function (decl
);
1857 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1858 init_tree_ssa (cfun
);
1862 /* Callback for walk_gimple_seq. Check if combined parallel
1863 contains gimple_omp_for_combined_into_p OMP_FOR. */
1866 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1867 bool *handled_ops_p
,
1868 struct walk_stmt_info
*wi
)
1870 gimple
*stmt
= gsi_stmt (*gsi_p
);
1872 *handled_ops_p
= true;
1873 switch (gimple_code (stmt
))
1877 case GIMPLE_OMP_FOR
:
1878 if (gimple_omp_for_combined_into_p (stmt
)
1879 && gimple_omp_for_kind (stmt
)
1880 == *(const enum gf_mask
*) (wi
->info
))
1883 return integer_zero_node
;
1892 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1895 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1896 omp_context
*outer_ctx
)
1898 struct walk_stmt_info wi
;
1900 memset (&wi
, 0, sizeof (wi
));
1902 wi
.info
= (void *) &msk
;
1903 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1904 if (wi
.info
!= (void *) &msk
)
1906 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1907 struct omp_for_data fd
;
1908 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1909 /* We need two temporaries with fd.loop.v type (istart/iend)
1910 and then (fd.collapse - 1) temporaries with the same
1911 type for count2 ... countN-1 vars if not constant. */
1912 size_t count
= 2, i
;
1913 tree type
= fd
.iter_type
;
1915 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1917 count
+= fd
.collapse
- 1;
1918 /* If there are lastprivate clauses on the inner
1919 GIMPLE_OMP_FOR, add one more temporaries for the total number
1920 of iterations (product of count1 ... countN-1). */
1921 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
1922 OMP_CLAUSE_LASTPRIVATE
))
1924 else if (msk
== GF_OMP_FOR_KIND_FOR
1925 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1926 OMP_CLAUSE_LASTPRIVATE
))
1929 for (i
= 0; i
< count
; i
++)
1931 tree temp
= create_tmp_var (type
);
1932 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
1933 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1934 OMP_CLAUSE_DECL (c
) = temp
;
1935 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1936 gimple_omp_taskreg_set_clauses (stmt
, c
);
1939 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
1940 && omp_find_clause (gimple_omp_task_clauses (stmt
),
1941 OMP_CLAUSE_REDUCTION
))
1943 tree type
= build_pointer_type (pointer_sized_int_node
);
1944 tree temp
= create_tmp_var (type
);
1945 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1946 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1947 OMP_CLAUSE_DECL (c
) = temp
;
1948 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
1949 gimple_omp_task_set_clauses (stmt
, c
);
1953 /* Scan an OpenMP parallel directive. */
1956 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1960 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
1962 /* Ignore parallel directives with empty bodies, unless there
1963 are copyin clauses. */
1965 && empty_body_p (gimple_omp_body (stmt
))
1966 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1967 OMP_CLAUSE_COPYIN
) == NULL
)
1969 gsi_replace (gsi
, gimple_build_nop (), false);
1973 if (gimple_omp_parallel_combined_p (stmt
))
1974 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
1975 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1976 OMP_CLAUSE_REDUCTION
);
1977 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
1978 if (OMP_CLAUSE_REDUCTION_TASK (c
))
1980 tree type
= build_pointer_type (pointer_sized_int_node
);
1981 tree temp
= create_tmp_var (type
);
1982 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1984 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1985 OMP_CLAUSE_DECL (c
) = temp
;
1986 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
1987 gimple_omp_parallel_set_clauses (stmt
, c
);
1990 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
1993 ctx
= new_omp_context (stmt
, outer_ctx
);
1994 taskreg_contexts
.safe_push (ctx
);
1995 if (taskreg_nesting_level
> 1)
1996 ctx
->is_nested
= true;
1997 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1998 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1999 name
= create_tmp_var_name (".omp_data_s");
2000 name
= build_decl (gimple_location (stmt
),
2001 TYPE_DECL
, name
, ctx
->record_type
);
2002 DECL_ARTIFICIAL (name
) = 1;
2003 DECL_NAMELESS (name
) = 1;
2004 TYPE_NAME (ctx
->record_type
) = name
;
2005 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2006 create_omp_child_function (ctx
, false);
2007 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2009 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
2010 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2012 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2013 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2016 /* Scan an OpenMP task directive. */
2019 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2023 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
2025 /* Ignore task directives with empty bodies, unless they have depend
2028 && gimple_omp_body (stmt
)
2029 && empty_body_p (gimple_omp_body (stmt
))
2030 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
2032 gsi_replace (gsi
, gimple_build_nop (), false);
2036 if (gimple_omp_task_taskloop_p (stmt
))
2037 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
2039 ctx
= new_omp_context (stmt
, outer_ctx
);
2041 if (gimple_omp_task_taskwait_p (stmt
))
2043 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2047 taskreg_contexts
.safe_push (ctx
);
2048 if (taskreg_nesting_level
> 1)
2049 ctx
->is_nested
= true;
2050 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2051 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2052 name
= create_tmp_var_name (".omp_data_s");
2053 name
= build_decl (gimple_location (stmt
),
2054 TYPE_DECL
, name
, ctx
->record_type
);
2055 DECL_ARTIFICIAL (name
) = 1;
2056 DECL_NAMELESS (name
) = 1;
2057 TYPE_NAME (ctx
->record_type
) = name
;
2058 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2059 create_omp_child_function (ctx
, false);
2060 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2062 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2064 if (ctx
->srecord_type
)
2066 name
= create_tmp_var_name (".omp_data_a");
2067 name
= build_decl (gimple_location (stmt
),
2068 TYPE_DECL
, name
, ctx
->srecord_type
);
2069 DECL_ARTIFICIAL (name
) = 1;
2070 DECL_NAMELESS (name
) = 1;
2071 TYPE_NAME (ctx
->srecord_type
) = name
;
2072 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2073 create_omp_child_function (ctx
, true);
2076 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2078 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2080 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2081 t
= build_int_cst (long_integer_type_node
, 0);
2082 gimple_omp_task_set_arg_size (stmt
, t
);
2083 t
= build_int_cst (long_integer_type_node
, 1);
2084 gimple_omp_task_set_arg_align (stmt
, t
);
2088 /* Helper function for finish_taskreg_scan, called through walk_tree.
2089 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2090 tree, replace it in the expression. */
2093 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2097 omp_context
*ctx
= (omp_context
*) data
;
2098 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2101 if (DECL_HAS_VALUE_EXPR_P (t
))
2102 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2107 else if (IS_TYPE_OR_DECL_P (*tp
))
2112 /* If any decls have been made addressable during scan_omp,
2113 adjust their fields if needed, and layout record types
2114 of parallel/task constructs. */
2117 finish_taskreg_scan (omp_context
*ctx
)
2119 if (ctx
->record_type
== NULL_TREE
)
2122 /* If any task_shared_vars were needed, verify all
2123 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2124 statements if use_pointer_for_field hasn't changed
2125 because of that. If it did, update field types now. */
2126 if (task_shared_vars
)
2130 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2131 c
; c
= OMP_CLAUSE_CHAIN (c
))
2132 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2133 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2135 tree decl
= OMP_CLAUSE_DECL (c
);
2137 /* Global variables don't need to be copied,
2138 the receiver side will use them directly. */
2139 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2141 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2142 || !use_pointer_for_field (decl
, ctx
))
2144 tree field
= lookup_field (decl
, ctx
);
2145 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2146 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2148 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2149 TREE_THIS_VOLATILE (field
) = 0;
2150 DECL_USER_ALIGN (field
) = 0;
2151 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2152 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2153 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2154 if (ctx
->srecord_type
)
2156 tree sfield
= lookup_sfield (decl
, ctx
);
2157 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2158 TREE_THIS_VOLATILE (sfield
) = 0;
2159 DECL_USER_ALIGN (sfield
) = 0;
2160 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2161 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2162 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2167 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2169 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2170 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2173 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2174 expects to find it at the start of data. */
2175 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2176 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2180 *p
= DECL_CHAIN (*p
);
2184 p
= &DECL_CHAIN (*p
);
2185 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2186 TYPE_FIELDS (ctx
->record_type
) = f
;
2188 layout_type (ctx
->record_type
);
2189 fixup_child_record_type (ctx
);
2191 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2193 layout_type (ctx
->record_type
);
2194 fixup_child_record_type (ctx
);
2198 location_t loc
= gimple_location (ctx
->stmt
);
2199 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2200 /* Move VLA fields to the end. */
2201 p
= &TYPE_FIELDS (ctx
->record_type
);
2203 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2204 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2207 *p
= TREE_CHAIN (*p
);
2208 TREE_CHAIN (*q
) = NULL_TREE
;
2209 q
= &TREE_CHAIN (*q
);
2212 p
= &DECL_CHAIN (*p
);
2214 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2216 /* Move fields corresponding to first and second _looptemp_
2217 clause first. There are filled by GOMP_taskloop
2218 and thus need to be in specific positions. */
2219 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2220 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2221 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2222 OMP_CLAUSE__LOOPTEMP_
);
2223 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2224 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2225 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2226 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
2227 p
= &TYPE_FIELDS (ctx
->record_type
);
2229 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2230 *p
= DECL_CHAIN (*p
);
2232 p
= &DECL_CHAIN (*p
);
2233 DECL_CHAIN (f1
) = f2
;
2236 DECL_CHAIN (f2
) = f3
;
2237 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2240 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2241 TYPE_FIELDS (ctx
->record_type
) = f1
;
2242 if (ctx
->srecord_type
)
2244 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2245 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2247 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2248 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2250 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2251 *p
= DECL_CHAIN (*p
);
2253 p
= &DECL_CHAIN (*p
);
2254 DECL_CHAIN (f1
) = f2
;
2255 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2258 DECL_CHAIN (f2
) = f3
;
2259 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2262 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2263 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2266 layout_type (ctx
->record_type
);
2267 fixup_child_record_type (ctx
);
2268 if (ctx
->srecord_type
)
2269 layout_type (ctx
->srecord_type
);
2270 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2271 TYPE_SIZE_UNIT (ctx
->record_type
));
2272 if (TREE_CODE (t
) != INTEGER_CST
)
2274 t
= unshare_expr (t
);
2275 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2277 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2278 t
= build_int_cst (long_integer_type_node
,
2279 TYPE_ALIGN_UNIT (ctx
->record_type
));
2280 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2284 /* Find the enclosing offload context. */
2286 static omp_context
*
2287 enclosing_target_ctx (omp_context
*ctx
)
2289 for (; ctx
; ctx
= ctx
->outer
)
2290 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2296 /* Return true if ctx is part of an oacc kernels region. */
2299 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2301 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2303 gimple
*stmt
= ctx
->stmt
;
2304 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2305 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2312 /* Check the parallelism clauses inside a kernels regions.
2313 Until kernels handling moves to use the same loop indirection
2314 scheme as parallel, we need to do this checking early. */
2317 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2319 bool checking
= true;
2320 unsigned outer_mask
= 0;
2321 unsigned this_mask
= 0;
2322 bool has_seq
= false, has_auto
= false;
2325 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2329 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2331 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2334 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2336 switch (OMP_CLAUSE_CODE (c
))
2338 case OMP_CLAUSE_GANG
:
2339 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2341 case OMP_CLAUSE_WORKER
:
2342 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2344 case OMP_CLAUSE_VECTOR
:
2345 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2347 case OMP_CLAUSE_SEQ
:
2350 case OMP_CLAUSE_AUTO
:
2360 if (has_seq
&& (this_mask
|| has_auto
))
2361 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2362 " OpenACC loop specifiers");
2363 else if (has_auto
&& this_mask
)
2364 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2365 " OpenACC loop specifiers");
2367 if (this_mask
& outer_mask
)
2368 error_at (gimple_location (stmt
), "inner loop uses same"
2369 " OpenACC parallelism as containing loop");
2372 return outer_mask
| this_mask
;
2375 /* Scan a GIMPLE_OMP_FOR. */
2377 static omp_context
*
2378 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2382 tree clauses
= gimple_omp_for_clauses (stmt
);
2384 ctx
= new_omp_context (stmt
, outer_ctx
);
2386 if (is_gimple_omp_oacc (stmt
))
2388 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2390 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
2391 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2393 char const *check
= NULL
;
2395 switch (OMP_CLAUSE_CODE (c
))
2397 case OMP_CLAUSE_GANG
:
2401 case OMP_CLAUSE_WORKER
:
2405 case OMP_CLAUSE_VECTOR
:
2413 if (check
&& OMP_CLAUSE_OPERAND (c
, 0))
2414 error_at (gimple_location (stmt
),
2415 "argument not permitted on %qs clause in"
2416 " OpenACC %<parallel%> or %<serial%>", check
);
2419 if (tgt
&& is_oacc_kernels (tgt
))
2421 /* Strip out reductions, as they are not handled yet. */
2422 tree
*prev_ptr
= &clauses
;
2424 while (tree probe
= *prev_ptr
)
2426 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2428 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2429 *prev_ptr
= *next_ptr
;
2431 prev_ptr
= next_ptr
;
2434 gimple_omp_for_set_clauses (stmt
, clauses
);
2435 check_oacc_kernel_gwv (stmt
, ctx
);
2438 /* Collect all variables named in reductions on this loop. Ensure
2439 that, if this loop has a reduction on some variable v, and there is
2440 a reduction on v somewhere in an outer context, then there is a
2441 reduction on v on all intervening loops as well. */
2442 tree local_reduction_clauses
= NULL
;
2443 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2445 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
2446 local_reduction_clauses
2447 = tree_cons (NULL
, c
, local_reduction_clauses
);
2449 if (ctx
->outer_reduction_clauses
== NULL
&& ctx
->outer
!= NULL
)
2450 ctx
->outer_reduction_clauses
2451 = chainon (unshare_expr (ctx
->outer
->local_reduction_clauses
),
2452 ctx
->outer
->outer_reduction_clauses
);
2453 tree outer_reduction_clauses
= ctx
->outer_reduction_clauses
;
2454 tree local_iter
= local_reduction_clauses
;
2455 for (; local_iter
; local_iter
= TREE_CHAIN (local_iter
))
2457 tree local_clause
= TREE_VALUE (local_iter
);
2458 tree local_var
= OMP_CLAUSE_DECL (local_clause
);
2459 tree_code local_op
= OMP_CLAUSE_REDUCTION_CODE (local_clause
);
2460 bool have_outer_reduction
= false;
2461 tree ctx_iter
= outer_reduction_clauses
;
2462 for (; ctx_iter
; ctx_iter
= TREE_CHAIN (ctx_iter
))
2464 tree outer_clause
= TREE_VALUE (ctx_iter
);
2465 tree outer_var
= OMP_CLAUSE_DECL (outer_clause
);
2466 tree_code outer_op
= OMP_CLAUSE_REDUCTION_CODE (outer_clause
);
2467 if (outer_var
== local_var
&& outer_op
!= local_op
)
2469 warning_at (OMP_CLAUSE_LOCATION (local_clause
), 0,
2470 "conflicting reduction operations for %qE",
2472 inform (OMP_CLAUSE_LOCATION (outer_clause
),
2473 "location of the previous reduction for %qE",
2476 if (outer_var
== local_var
)
2478 have_outer_reduction
= true;
2482 if (have_outer_reduction
)
2484 /* There is a reduction on outer_var both on this loop and on
2485 some enclosing loop. Walk up the context tree until such a
2486 loop with a reduction on outer_var is found, and complain
2487 about all intervening loops that do not have such a
2489 struct omp_context
*curr_loop
= ctx
->outer
;
2491 while (curr_loop
!= NULL
)
2493 tree curr_iter
= curr_loop
->local_reduction_clauses
;
2494 for (; curr_iter
; curr_iter
= TREE_CHAIN (curr_iter
))
2496 tree curr_clause
= TREE_VALUE (curr_iter
);
2497 tree curr_var
= OMP_CLAUSE_DECL (curr_clause
);
2498 if (curr_var
== local_var
)
2505 warning_at (gimple_location (curr_loop
->stmt
), 0,
2506 "nested loop in reduction needs "
2507 "reduction clause for %qE",
2511 curr_loop
= curr_loop
->outer
;
2515 ctx
->local_reduction_clauses
= local_reduction_clauses
;
2516 ctx
->outer_reduction_clauses
2517 = chainon (unshare_expr (ctx
->local_reduction_clauses
),
2518 ctx
->outer_reduction_clauses
);
2521 scan_sharing_clauses (clauses
, ctx
);
2523 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2524 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2526 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2527 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2528 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2529 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2531 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2535 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2538 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2539 omp_context
*outer_ctx
)
2541 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2542 gsi_replace (gsi
, bind
, false);
2543 gimple_seq seq
= NULL
;
2544 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2545 tree cond
= create_tmp_var_raw (integer_type_node
);
2546 DECL_CONTEXT (cond
) = current_function_decl
;
2547 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2548 gimple_bind_set_vars (bind
, cond
);
2549 gimple_call_set_lhs (g
, cond
);
2550 gimple_seq_add_stmt (&seq
, g
);
2551 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2552 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2553 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2554 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2555 gimple_seq_add_stmt (&seq
, g
);
2556 g
= gimple_build_label (lab1
);
2557 gimple_seq_add_stmt (&seq
, g
);
2558 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2559 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2560 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2561 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2562 gimple_omp_for_set_clauses (new_stmt
, clause
);
2563 gimple_seq_add_stmt (&seq
, new_stmt
);
2564 g
= gimple_build_goto (lab3
);
2565 gimple_seq_add_stmt (&seq
, g
);
2566 g
= gimple_build_label (lab2
);
2567 gimple_seq_add_stmt (&seq
, g
);
2568 gimple_seq_add_stmt (&seq
, stmt
);
2569 g
= gimple_build_label (lab3
);
2570 gimple_seq_add_stmt (&seq
, g
);
2571 gimple_bind_set_body (bind
, seq
);
2573 scan_omp_for (new_stmt
, outer_ctx
);
2574 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2577 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2578 struct walk_stmt_info
*);
2579 static omp_context
*maybe_lookup_ctx (gimple
*);
2581 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2582 for scan phase loop. */
2585 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2586 omp_context
*outer_ctx
)
2588 /* The only change between inclusive and exclusive scan will be
2589 within the first simd loop, so just use inclusive in the
2590 worksharing loop. */
2591 outer_ctx
->scan_inclusive
= true;
2592 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2593 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
2595 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2596 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2597 gsi_replace (gsi
, input_stmt
, false);
2598 gimple_seq input_body
= NULL
;
2599 gimple_seq_add_stmt (&input_body
, stmt
);
2600 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
2602 gimple_stmt_iterator input1_gsi
= gsi_none ();
2603 struct walk_stmt_info wi
;
2604 memset (&wi
, 0, sizeof (wi
));
2606 wi
.info
= (void *) &input1_gsi
;
2607 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2608 gcc_assert (!gsi_end_p (input1_gsi
));
2610 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
2611 gsi_next (&input1_gsi
);
2612 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
2613 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
2614 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
2615 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2616 std::swap (input_stmt1
, scan_stmt1
);
2618 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
2619 gimple_omp_set_body (input_stmt1
, NULL
);
2621 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
2622 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
2624 gimple_omp_set_body (input_stmt1
, input_body1
);
2625 gimple_omp_set_body (scan_stmt1
, NULL
);
2627 gimple_stmt_iterator input2_gsi
= gsi_none ();
2628 memset (&wi
, 0, sizeof (wi
));
2630 wi
.info
= (void *) &input2_gsi
;
2631 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
2633 gcc_assert (!gsi_end_p (input2_gsi
));
2635 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
2636 gsi_next (&input2_gsi
);
2637 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
2638 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
2639 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2640 std::swap (input_stmt2
, scan_stmt2
);
2642 gimple_omp_set_body (input_stmt2
, NULL
);
2644 gimple_omp_set_body (input_stmt
, input_body
);
2645 gimple_omp_set_body (scan_stmt
, scan_body
);
2647 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
2648 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
2650 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
2651 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
2653 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
2656 /* Scan an OpenMP sections directive. */
2659 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2663 ctx
= new_omp_context (stmt
, outer_ctx
);
2664 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2665 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2668 /* Scan an OpenMP single directive. */
2671 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2676 ctx
= new_omp_context (stmt
, outer_ctx
);
2677 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2678 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2679 name
= create_tmp_var_name (".omp_copy_s");
2680 name
= build_decl (gimple_location (stmt
),
2681 TYPE_DECL
, name
, ctx
->record_type
);
2682 TYPE_NAME (ctx
->record_type
) = name
;
2684 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2685 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2687 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2688 ctx
->record_type
= NULL
;
2690 layout_type (ctx
->record_type
);
2693 /* Scan a GIMPLE_OMP_TARGET. */
2696 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2700 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2701 tree clauses
= gimple_omp_target_clauses (stmt
);
2703 ctx
= new_omp_context (stmt
, outer_ctx
);
2704 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2705 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2706 name
= create_tmp_var_name (".omp_data_t");
2707 name
= build_decl (gimple_location (stmt
),
2708 TYPE_DECL
, name
, ctx
->record_type
);
2709 DECL_ARTIFICIAL (name
) = 1;
2710 DECL_NAMELESS (name
) = 1;
2711 TYPE_NAME (ctx
->record_type
) = name
;
2712 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2716 create_omp_child_function (ctx
, false);
2717 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2720 scan_sharing_clauses (clauses
, ctx
);
2721 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2723 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2724 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2727 TYPE_FIELDS (ctx
->record_type
)
2728 = nreverse (TYPE_FIELDS (ctx
->record_type
));
2731 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
2732 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
2734 field
= DECL_CHAIN (field
))
2735 gcc_assert (DECL_ALIGN (field
) == align
);
2737 layout_type (ctx
->record_type
);
2739 fixup_child_record_type (ctx
);
2743 /* Scan an OpenMP teams directive. */
2746 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
2748 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
2750 if (!gimple_omp_teams_host (stmt
))
2752 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2753 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2756 taskreg_contexts
.safe_push (ctx
);
2757 gcc_assert (taskreg_nesting_level
== 1);
2758 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2759 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2760 tree name
= create_tmp_var_name (".omp_data_s");
2761 name
= build_decl (gimple_location (stmt
),
2762 TYPE_DECL
, name
, ctx
->record_type
);
2763 DECL_ARTIFICIAL (name
) = 1;
2764 DECL_NAMELESS (name
) = 1;
2765 TYPE_NAME (ctx
->record_type
) = name
;
2766 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2767 create_omp_child_function (ctx
, false);
2768 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2770 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2771 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2773 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2774 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2777 /* Check nesting restrictions. */
2779 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
2783 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2784 inside an OpenACC CTX. */
2785 if (!(is_gimple_omp (stmt
)
2786 && is_gimple_omp_oacc (stmt
))
2787 /* Except for atomic codes that we share with OpenMP. */
2788 && !(gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2789 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2791 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2793 error_at (gimple_location (stmt
),
2794 "non-OpenACC construct inside of OpenACC routine");
2798 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
2799 if (is_gimple_omp (octx
->stmt
)
2800 && is_gimple_omp_oacc (octx
->stmt
))
2802 error_at (gimple_location (stmt
),
2803 "non-OpenACC construct inside of OpenACC region");
2810 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
2812 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
2814 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
2815 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
2819 if (ctx
->order_concurrent
2820 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
2821 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2822 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2824 error_at (gimple_location (stmt
),
2825 "OpenMP constructs other than %<parallel%>, %<loop%>"
2826 " or %<simd%> may not be nested inside a region with"
2827 " the %<order(concurrent)%> clause");
2830 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
2832 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2833 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2835 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
2836 && (ctx
->outer
== NULL
2837 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
2838 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
2839 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
2840 != GF_OMP_FOR_KIND_FOR
)
2841 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
2843 error_at (gimple_location (stmt
),
2844 "%<ordered simd threads%> must be closely "
2845 "nested inside of %<for simd%> region");
2851 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2852 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
2853 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
2855 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
2856 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
2858 error_at (gimple_location (stmt
),
2859 "OpenMP constructs other than "
2860 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2861 "not be nested inside %<simd%> region");
2864 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2866 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
2867 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
2868 && omp_find_clause (gimple_omp_for_clauses (stmt
),
2869 OMP_CLAUSE_BIND
) == NULL_TREE
))
2870 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
2872 error_at (gimple_location (stmt
),
2873 "only %<distribute%>, %<parallel%> or %<loop%> "
2874 "regions are allowed to be strictly nested inside "
2875 "%<teams%> region");
2879 else if (ctx
->order_concurrent
2880 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
2881 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
2882 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
2883 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
2886 error_at (gimple_location (stmt
),
2887 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2888 "%<simd%> may not be nested inside a %<loop%> region");
2890 error_at (gimple_location (stmt
),
2891 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2892 "%<simd%> may not be nested inside a region with "
2893 "the %<order(concurrent)%> clause");
2897 switch (gimple_code (stmt
))
2899 case GIMPLE_OMP_FOR
:
2900 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
2902 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
2904 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
2906 error_at (gimple_location (stmt
),
2907 "%<distribute%> region must be strictly nested "
2908 "inside %<teams%> construct");
2913 /* We split taskloop into task and nested taskloop in it. */
2914 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2916 /* For now, hope this will change and loop bind(parallel) will not
2917 be allowed in lots of contexts. */
2918 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
2919 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
2921 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
2926 switch (gimple_code (ctx
->stmt
))
2928 case GIMPLE_OMP_FOR
:
2929 ok
= (gimple_omp_for_kind (ctx
->stmt
)
2930 == GF_OMP_FOR_KIND_OACC_LOOP
);
2933 case GIMPLE_OMP_TARGET
:
2934 switch (gimple_omp_target_kind (ctx
->stmt
))
2936 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
2937 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
2938 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
2949 else if (oacc_get_fn_attrib (current_function_decl
))
2953 error_at (gimple_location (stmt
),
2954 "OpenACC loop directive must be associated with"
2955 " an OpenACC compute region");
2961 if (is_gimple_call (stmt
)
2962 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2963 == BUILT_IN_GOMP_CANCEL
2964 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2965 == BUILT_IN_GOMP_CANCELLATION_POINT
))
2967 const char *bad
= NULL
;
2968 const char *kind
= NULL
;
2969 const char *construct
2970 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2971 == BUILT_IN_GOMP_CANCEL
)
2973 : "cancellation point";
2976 error_at (gimple_location (stmt
), "orphaned %qs construct",
2980 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
2981 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
2985 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
2987 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2988 == BUILT_IN_GOMP_CANCEL
2989 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2990 ctx
->cancellable
= true;
2994 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2995 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
2997 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2998 == BUILT_IN_GOMP_CANCEL
2999 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3001 ctx
->cancellable
= true;
3002 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3004 warning_at (gimple_location (stmt
), 0,
3005 "%<cancel for%> inside "
3006 "%<nowait%> for construct");
3007 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3008 OMP_CLAUSE_ORDERED
))
3009 warning_at (gimple_location (stmt
), 0,
3010 "%<cancel for%> inside "
3011 "%<ordered%> for construct");
3016 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
3017 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
3019 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3020 == BUILT_IN_GOMP_CANCEL
3021 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3023 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
3025 ctx
->cancellable
= true;
3026 if (omp_find_clause (gimple_omp_sections_clauses
3029 warning_at (gimple_location (stmt
), 0,
3030 "%<cancel sections%> inside "
3031 "%<nowait%> sections construct");
3035 gcc_assert (ctx
->outer
3036 && gimple_code (ctx
->outer
->stmt
)
3037 == GIMPLE_OMP_SECTIONS
);
3038 ctx
->outer
->cancellable
= true;
3039 if (omp_find_clause (gimple_omp_sections_clauses
3042 warning_at (gimple_location (stmt
), 0,
3043 "%<cancel sections%> inside "
3044 "%<nowait%> sections construct");
3050 if (!is_task_ctx (ctx
)
3051 && (!is_taskloop_ctx (ctx
)
3052 || ctx
->outer
== NULL
3053 || !is_task_ctx (ctx
->outer
)))
3057 for (omp_context
*octx
= ctx
->outer
;
3058 octx
; octx
= octx
->outer
)
3060 switch (gimple_code (octx
->stmt
))
3062 case GIMPLE_OMP_TASKGROUP
:
3064 case GIMPLE_OMP_TARGET
:
3065 if (gimple_omp_target_kind (octx
->stmt
)
3066 != GF_OMP_TARGET_KIND_REGION
)
3069 case GIMPLE_OMP_PARALLEL
:
3070 case GIMPLE_OMP_TEAMS
:
3071 error_at (gimple_location (stmt
),
3072 "%<%s taskgroup%> construct not closely "
3073 "nested inside of %<taskgroup%> region",
3076 case GIMPLE_OMP_TASK
:
3077 if (gimple_omp_task_taskloop_p (octx
->stmt
)
3079 && is_taskloop_ctx (octx
->outer
))
3082 = gimple_omp_for_clauses (octx
->outer
->stmt
);
3083 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
3092 ctx
->cancellable
= true;
3097 error_at (gimple_location (stmt
), "invalid arguments");
3102 error_at (gimple_location (stmt
),
3103 "%<%s %s%> construct not closely nested inside of %qs",
3104 construct
, kind
, bad
);
3109 case GIMPLE_OMP_SECTIONS
:
3110 case GIMPLE_OMP_SINGLE
:
3111 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3112 switch (gimple_code (ctx
->stmt
))
3114 case GIMPLE_OMP_FOR
:
3115 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3116 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3119 case GIMPLE_OMP_SECTIONS
:
3120 case GIMPLE_OMP_SINGLE
:
3121 case GIMPLE_OMP_ORDERED
:
3122 case GIMPLE_OMP_MASTER
:
3123 case GIMPLE_OMP_TASK
:
3124 case GIMPLE_OMP_CRITICAL
:
3125 if (is_gimple_call (stmt
))
3127 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3128 != BUILT_IN_GOMP_BARRIER
)
3130 error_at (gimple_location (stmt
),
3131 "barrier region may not be closely nested inside "
3132 "of work-sharing, %<loop%>, %<critical%>, "
3133 "%<ordered%>, %<master%>, explicit %<task%> or "
3134 "%<taskloop%> region");
3137 error_at (gimple_location (stmt
),
3138 "work-sharing region may not be closely nested inside "
3139 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3140 "%<master%>, explicit %<task%> or %<taskloop%> region");
3142 case GIMPLE_OMP_PARALLEL
:
3143 case GIMPLE_OMP_TEAMS
:
3145 case GIMPLE_OMP_TARGET
:
3146 if (gimple_omp_target_kind (ctx
->stmt
)
3147 == GF_OMP_TARGET_KIND_REGION
)
3154 case GIMPLE_OMP_MASTER
:
3155 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3156 switch (gimple_code (ctx
->stmt
))
3158 case GIMPLE_OMP_FOR
:
3159 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3160 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3163 case GIMPLE_OMP_SECTIONS
:
3164 case GIMPLE_OMP_SINGLE
:
3165 case GIMPLE_OMP_TASK
:
3166 error_at (gimple_location (stmt
),
3167 "%<master%> region may not be closely nested inside "
3168 "of work-sharing, %<loop%>, explicit %<task%> or "
3169 "%<taskloop%> region");
3171 case GIMPLE_OMP_PARALLEL
:
3172 case GIMPLE_OMP_TEAMS
:
3174 case GIMPLE_OMP_TARGET
:
3175 if (gimple_omp_target_kind (ctx
->stmt
)
3176 == GF_OMP_TARGET_KIND_REGION
)
3183 case GIMPLE_OMP_TASK
:
3184 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3185 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3186 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3187 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3189 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3190 error_at (OMP_CLAUSE_LOCATION (c
),
3191 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3192 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3196 case GIMPLE_OMP_ORDERED
:
3197 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3198 c
; c
= OMP_CLAUSE_CHAIN (c
))
3200 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
3202 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3203 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3206 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3207 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
3208 || kind
== OMP_CLAUSE_DEPEND_SINK
)
3211 /* Look for containing ordered(N) loop. */
3213 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3215 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3216 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3218 error_at (OMP_CLAUSE_LOCATION (c
),
3219 "%<ordered%> construct with %<depend%> clause "
3220 "must be closely nested inside an %<ordered%> "
3224 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
3226 error_at (OMP_CLAUSE_LOCATION (c
),
3227 "%<ordered%> construct with %<depend%> clause "
3228 "must be closely nested inside a loop with "
3229 "%<ordered%> clause with a parameter");
3235 error_at (OMP_CLAUSE_LOCATION (c
),
3236 "invalid depend kind in omp %<ordered%> %<depend%>");
3240 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3241 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3243 /* ordered simd must be closely nested inside of simd region,
3244 and simd region must not encounter constructs other than
3245 ordered simd, therefore ordered simd may be either orphaned,
3246 or ctx->stmt must be simd. The latter case is handled already
3250 error_at (gimple_location (stmt
),
3251 "%<ordered%> %<simd%> must be closely nested inside "
3256 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3257 switch (gimple_code (ctx
->stmt
))
3259 case GIMPLE_OMP_CRITICAL
:
3260 case GIMPLE_OMP_TASK
:
3261 case GIMPLE_OMP_ORDERED
:
3262 ordered_in_taskloop
:
3263 error_at (gimple_location (stmt
),
3264 "%<ordered%> region may not be closely nested inside "
3265 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3266 "%<taskloop%> region");
3268 case GIMPLE_OMP_FOR
:
3269 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3270 goto ordered_in_taskloop
;
3272 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3273 OMP_CLAUSE_ORDERED
);
3276 error_at (gimple_location (stmt
),
3277 "%<ordered%> region must be closely nested inside "
3278 "a loop region with an %<ordered%> clause");
3281 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
3282 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
3284 error_at (gimple_location (stmt
),
3285 "%<ordered%> region without %<depend%> clause may "
3286 "not be closely nested inside a loop region with "
3287 "an %<ordered%> clause with a parameter");
3291 case GIMPLE_OMP_TARGET
:
3292 if (gimple_omp_target_kind (ctx
->stmt
)
3293 != GF_OMP_TARGET_KIND_REGION
)
3296 case GIMPLE_OMP_PARALLEL
:
3297 case GIMPLE_OMP_TEAMS
:
3298 error_at (gimple_location (stmt
),
3299 "%<ordered%> region must be closely nested inside "
3300 "a loop region with an %<ordered%> clause");
3306 case GIMPLE_OMP_CRITICAL
:
3309 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3310 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3311 if (gomp_critical
*other_crit
3312 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3313 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3315 error_at (gimple_location (stmt
),
3316 "%<critical%> region may not be nested inside "
3317 "a %<critical%> region with the same name");
3322 case GIMPLE_OMP_TEAMS
:
3325 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3326 || (gimple_omp_target_kind (ctx
->stmt
)
3327 != GF_OMP_TARGET_KIND_REGION
))
3329 /* Teams construct can appear either strictly nested inside of
3330 target construct with no intervening stmts, or can be encountered
3331 only by initial task (so must not appear inside any OpenMP
3333 error_at (gimple_location (stmt
),
3334 "%<teams%> construct must be closely nested inside of "
3335 "%<target%> construct or not nested in any OpenMP "
3340 case GIMPLE_OMP_TARGET
:
3341 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3342 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3343 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3344 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3346 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3347 error_at (OMP_CLAUSE_LOCATION (c
),
3348 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3349 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3352 if (is_gimple_omp_offloaded (stmt
)
3353 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3355 error_at (gimple_location (stmt
),
3356 "OpenACC region inside of OpenACC routine, nested "
3357 "parallelism not supported yet");
3360 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3362 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3364 if (is_gimple_omp (stmt
)
3365 && is_gimple_omp_oacc (stmt
)
3366 && is_gimple_omp (ctx
->stmt
))
3368 error_at (gimple_location (stmt
),
3369 "OpenACC construct inside of non-OpenACC region");
3375 const char *stmt_name
, *ctx_stmt_name
;
3376 switch (gimple_omp_target_kind (stmt
))
3378 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3379 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3380 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3381 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3382 stmt_name
= "target enter data"; break;
3383 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3384 stmt_name
= "target exit data"; break;
3385 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3386 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3387 case GF_OMP_TARGET_KIND_OACC_SERIAL
: stmt_name
= "serial"; break;
3388 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3389 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3390 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
3391 stmt_name
= "enter/exit data"; break;
3392 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3393 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3395 default: gcc_unreachable ();
3397 switch (gimple_omp_target_kind (ctx
->stmt
))
3399 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3400 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3401 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3402 ctx_stmt_name
= "parallel"; break;
3403 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3404 ctx_stmt_name
= "kernels"; break;
3405 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3406 ctx_stmt_name
= "serial"; break;
3407 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3408 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3409 ctx_stmt_name
= "host_data"; break;
3410 default: gcc_unreachable ();
3413 /* OpenACC/OpenMP mismatch? */
3414 if (is_gimple_omp_oacc (stmt
)
3415 != is_gimple_omp_oacc (ctx
->stmt
))
3417 error_at (gimple_location (stmt
),
3418 "%s %qs construct inside of %s %qs region",
3419 (is_gimple_omp_oacc (stmt
)
3420 ? "OpenACC" : "OpenMP"), stmt_name
,
3421 (is_gimple_omp_oacc (ctx
->stmt
)
3422 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3425 if (is_gimple_omp_offloaded (ctx
->stmt
))
3427 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3428 if (is_gimple_omp_oacc (ctx
->stmt
))
3430 error_at (gimple_location (stmt
),
3431 "%qs construct inside of %qs region",
3432 stmt_name
, ctx_stmt_name
);
3437 warning_at (gimple_location (stmt
), 0,
3438 "%qs construct inside of %qs region",
3439 stmt_name
, ctx_stmt_name
);
3451 /* Helper function scan_omp.
3453 Callback for walk_tree or operators in walk_gimple_stmt used to
3454 scan for OMP directives in TP. */
3457 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3459 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3460 omp_context
*ctx
= (omp_context
*) wi
->info
;
3463 switch (TREE_CODE (t
))
3471 tree repl
= remap_decl (t
, &ctx
->cb
);
3472 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3478 if (ctx
&& TYPE_P (t
))
3479 *tp
= remap_type (t
, &ctx
->cb
);
3480 else if (!DECL_P (t
))
3485 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3486 if (tem
!= TREE_TYPE (t
))
3488 if (TREE_CODE (t
) == INTEGER_CST
)
3489 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3491 TREE_TYPE (t
) = tem
;
3501 /* Return true if FNDECL is a setjmp or a longjmp. */
3504 setjmp_or_longjmp_p (const_tree fndecl
)
3506 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3507 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3510 tree declname
= DECL_NAME (fndecl
);
3512 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3513 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3514 || !TREE_PUBLIC (fndecl
))
3517 const char *name
= IDENTIFIER_POINTER (declname
);
3518 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3521 /* Return true if FNDECL is an omp_* runtime API call. */
3524 omp_runtime_api_call (const_tree fndecl
)
3526 tree declname
= DECL_NAME (fndecl
);
3528 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3529 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3530 || !TREE_PUBLIC (fndecl
))
3533 const char *name
= IDENTIFIER_POINTER (declname
);
3534 if (strncmp (name
, "omp_", 4) != 0)
3537 static const char *omp_runtime_apis
[] =
3539 /* This array has 3 sections. First omp_* calls that don't
3540 have any suffixes. */
3542 "target_associate_ptr",
3543 "target_disassociate_ptr",
3545 "target_is_present",
3547 "target_memcpy_rect",
3549 /* Now omp_* calls that are available as omp_* and omp_*_. */
3552 "destroy_nest_lock",
3555 "get_affinity_format",
3557 "get_default_device",
3559 "get_initial_device",
3561 "get_max_active_levels",
3562 "get_max_task_priority",
3570 "get_partition_num_places",
3582 "is_initial_device",
3584 "pause_resource_all",
3585 "set_affinity_format",
3593 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3594 "get_ancestor_thread_num",
3595 "get_partition_place_nums",
3596 "get_place_num_procs",
3597 "get_place_proc_ids",
3600 "set_default_device",
3602 "set_max_active_levels",
3609 for (unsigned i
= 0; i
< ARRAY_SIZE (omp_runtime_apis
); i
++)
3611 if (omp_runtime_apis
[i
] == NULL
)
3616 size_t len
= strlen (omp_runtime_apis
[i
]);
3617 if (strncmp (name
+ 4, omp_runtime_apis
[i
], len
) == 0
3618 && (name
[4 + len
] == '\0'
3620 && name
[4 + len
] == '_'
3621 && (name
[4 + len
+ 1] == '\0'
3623 && strcmp (name
+ 4 + len
+ 1, "8_") == 0)))))
3629 /* Helper function for scan_omp.
3631 Callback for walk_gimple_stmt used to scan for OMP directives in
3632 the current statement in GSI. */
3635 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3636 struct walk_stmt_info
*wi
)
3638 gimple
*stmt
= gsi_stmt (*gsi
);
3639 omp_context
*ctx
= (omp_context
*) wi
->info
;
3641 if (gimple_has_location (stmt
))
3642 input_location
= gimple_location (stmt
);
3644 /* Check the nesting restrictions. */
3645 bool remove
= false;
3646 if (is_gimple_omp (stmt
))
3647 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3648 else if (is_gimple_call (stmt
))
3650 tree fndecl
= gimple_call_fndecl (stmt
);
3654 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3655 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3656 && setjmp_or_longjmp_p (fndecl
)
3660 error_at (gimple_location (stmt
),
3661 "setjmp/longjmp inside %<simd%> construct");
3663 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3664 switch (DECL_FUNCTION_CODE (fndecl
))
3666 case BUILT_IN_GOMP_BARRIER
:
3667 case BUILT_IN_GOMP_CANCEL
:
3668 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3669 case BUILT_IN_GOMP_TASKYIELD
:
3670 case BUILT_IN_GOMP_TASKWAIT
:
3671 case BUILT_IN_GOMP_TASKGROUP_START
:
3672 case BUILT_IN_GOMP_TASKGROUP_END
:
3673 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3680 omp_context
*octx
= ctx
;
3681 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
&& ctx
->outer
)
3683 if (octx
->order_concurrent
&& omp_runtime_api_call (fndecl
))
3686 error_at (gimple_location (stmt
),
3687 "OpenMP runtime API call %qD in a region with "
3688 "%<order(concurrent)%> clause", fndecl
);
3695 stmt
= gimple_build_nop ();
3696 gsi_replace (gsi
, stmt
, false);
3699 *handled_ops_p
= true;
3701 switch (gimple_code (stmt
))
3703 case GIMPLE_OMP_PARALLEL
:
3704 taskreg_nesting_level
++;
3705 scan_omp_parallel (gsi
, ctx
);
3706 taskreg_nesting_level
--;
3709 case GIMPLE_OMP_TASK
:
3710 taskreg_nesting_level
++;
3711 scan_omp_task (gsi
, ctx
);
3712 taskreg_nesting_level
--;
3715 case GIMPLE_OMP_FOR
:
3716 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3717 == GF_OMP_FOR_KIND_SIMD
)
3718 && gimple_omp_for_combined_into_p (stmt
)
3719 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
3721 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
3722 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
);
3723 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
3725 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3729 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3730 == GF_OMP_FOR_KIND_SIMD
)
3731 && omp_maybe_offloaded_ctx (ctx
)
3732 && omp_max_simt_vf ())
3733 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3735 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
3738 case GIMPLE_OMP_SECTIONS
:
3739 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
3742 case GIMPLE_OMP_SINGLE
:
3743 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
3746 case GIMPLE_OMP_SCAN
:
3747 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
3749 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
3750 ctx
->scan_inclusive
= true;
3751 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
3752 ctx
->scan_exclusive
= true;
3755 case GIMPLE_OMP_SECTION
:
3756 case GIMPLE_OMP_MASTER
:
3757 case GIMPLE_OMP_ORDERED
:
3758 case GIMPLE_OMP_CRITICAL
:
3759 ctx
= new_omp_context (stmt
, ctx
);
3760 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3763 case GIMPLE_OMP_TASKGROUP
:
3764 ctx
= new_omp_context (stmt
, ctx
);
3765 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
3766 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3769 case GIMPLE_OMP_TARGET
:
3770 if (is_gimple_omp_offloaded (stmt
))
3772 taskreg_nesting_level
++;
3773 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3774 taskreg_nesting_level
--;
3777 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3780 case GIMPLE_OMP_TEAMS
:
3781 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
3783 taskreg_nesting_level
++;
3784 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3785 taskreg_nesting_level
--;
3788 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3795 *handled_ops_p
= false;
3797 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
3799 var
= DECL_CHAIN (var
))
3800 insert_decl_map (&ctx
->cb
, var
, var
);
3804 *handled_ops_p
= false;
3812 /* Scan all the statements starting at the current statement. CTX
3813 contains context information about the OMP directives and
3814 clauses found during the scan. */
3817 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
3819 location_t saved_location
;
3820 struct walk_stmt_info wi
;
3822 memset (&wi
, 0, sizeof (wi
));
3824 wi
.want_locations
= true;
3826 saved_location
= input_location
;
3827 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
3828 input_location
= saved_location
;
3831 /* Re-gimplification and code generation routines. */
3833 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3834 of BIND if in a method. */
3837 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
3839 if (DECL_ARGUMENTS (current_function_decl
)
3840 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
3841 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
3844 tree vars
= gimple_bind_vars (bind
);
3845 for (tree
*pvar
= &vars
; *pvar
; )
3846 if (omp_member_access_dummy_var (*pvar
))
3847 *pvar
= DECL_CHAIN (*pvar
);
3849 pvar
= &DECL_CHAIN (*pvar
);
3850 gimple_bind_set_vars (bind
, vars
);
3854 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3855 block and its subblocks. */
3858 remove_member_access_dummy_vars (tree block
)
3860 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
3861 if (omp_member_access_dummy_var (*pvar
))
3862 *pvar
= DECL_CHAIN (*pvar
);
3864 pvar
= &DECL_CHAIN (*pvar
);
3866 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
3867 remove_member_access_dummy_vars (block
);
3870 /* If a context was created for STMT when it was scanned, return it. */
3872 static omp_context
*
3873 maybe_lookup_ctx (gimple
*stmt
)
3876 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
3877 return n
? (omp_context
*) n
->value
: NULL
;
3881 /* Find the mapping for DECL in CTX or the immediately enclosing
3882 context that has a mapping for DECL.
3884 If CTX is a nested parallel directive, we may have to use the decl
3885 mappings created in CTX's parent context. Suppose that we have the
3886 following parallel nesting (variable UIDs showed for clarity):
3889 #omp parallel shared(iD.1562) -> outer parallel
3890 iD.1562 = iD.1562 + 1;
3892 #omp parallel shared (iD.1562) -> inner parallel
3893 iD.1562 = iD.1562 - 1;
3895 Each parallel structure will create a distinct .omp_data_s structure
3896 for copying iD.1562 in/out of the directive:
3898 outer parallel .omp_data_s.1.i -> iD.1562
3899 inner parallel .omp_data_s.2.i -> iD.1562
3901 A shared variable mapping will produce a copy-out operation before
3902 the parallel directive and a copy-in operation after it. So, in
3903 this case we would have:
3906 .omp_data_o.1.i = iD.1562;
3907 #omp parallel shared(iD.1562) -> outer parallel
3908 .omp_data_i.1 = &.omp_data_o.1
3909 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3911 .omp_data_o.2.i = iD.1562; -> **
3912 #omp parallel shared(iD.1562) -> inner parallel
3913 .omp_data_i.2 = &.omp_data_o.2
3914 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3917 ** This is a problem. The symbol iD.1562 cannot be referenced
3918 inside the body of the outer parallel region. But since we are
3919 emitting this copy operation while expanding the inner parallel
3920 directive, we need to access the CTX structure of the outer
3921 parallel directive to get the correct mapping:
3923 .omp_data_o.2.i = .omp_data_i.1->i
3925 Since there may be other workshare or parallel directives enclosing
3926 the parallel directive, it may be necessary to walk up the context
3927 parent chain. This is not a problem in general because nested
3928 parallelism happens only rarely. */
3931 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3936 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3937 t
= maybe_lookup_decl (decl
, up
);
3939 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
3941 return t
? t
: decl
;
3945 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3946 in outer contexts. */
3949 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3954 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3955 t
= maybe_lookup_decl (decl
, up
);
3957 return t
? t
: decl
;
3961 /* Construct the initialization value for reduction operation OP. */
3964 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
3973 case TRUTH_ORIF_EXPR
:
3974 case TRUTH_XOR_EXPR
:
3976 return build_zero_cst (type
);
3979 case TRUTH_AND_EXPR
:
3980 case TRUTH_ANDIF_EXPR
:
3982 return fold_convert_loc (loc
, type
, integer_one_node
);
3985 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
3988 if (SCALAR_FLOAT_TYPE_P (type
))
3990 REAL_VALUE_TYPE max
, min
;
3991 if (HONOR_INFINITIES (type
))
3994 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
3997 real_maxval (&min
, 1, TYPE_MODE (type
));
3998 return build_real (type
, min
);
4000 else if (POINTER_TYPE_P (type
))
4003 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4004 return wide_int_to_tree (type
, min
);
4008 gcc_assert (INTEGRAL_TYPE_P (type
));
4009 return TYPE_MIN_VALUE (type
);
4013 if (SCALAR_FLOAT_TYPE_P (type
))
4015 REAL_VALUE_TYPE max
;
4016 if (HONOR_INFINITIES (type
))
4019 real_maxval (&max
, 0, TYPE_MODE (type
));
4020 return build_real (type
, max
);
4022 else if (POINTER_TYPE_P (type
))
4025 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4026 return wide_int_to_tree (type
, max
);
4030 gcc_assert (INTEGRAL_TYPE_P (type
));
4031 return TYPE_MAX_VALUE (type
);
4039 /* Construct the initialization value for reduction CLAUSE. */
4042 omp_reduction_init (tree clause
, tree type
)
4044 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
4045 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
4048 /* Return alignment to be assumed for var in CLAUSE, which should be
4049 OMP_CLAUSE_ALIGNED. */
4052 omp_clause_aligned_alignment (tree clause
)
4054 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
4055 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
4057 /* Otherwise return implementation defined alignment. */
4058 unsigned int al
= 1;
4059 opt_scalar_mode mode_iter
;
4060 auto_vector_modes modes
;
4061 targetm
.vectorize
.autovectorize_vector_modes (&modes
, true);
4062 static enum mode_class classes
[]
4063 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
4064 for (int i
= 0; i
< 4; i
+= 2)
4065 /* The for loop above dictates that we only walk through scalar classes. */
4066 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
4068 scalar_mode mode
= mode_iter
.require ();
4069 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
4070 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
4072 machine_mode alt_vmode
;
4073 for (unsigned int j
= 0; j
< modes
.length (); ++j
)
4074 if (related_vector_mode (modes
[j
], mode
).exists (&alt_vmode
)
4075 && known_ge (GET_MODE_SIZE (alt_vmode
), GET_MODE_SIZE (vmode
)))
4078 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
4079 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
4081 type
= build_vector_type_for_mode (type
, vmode
);
4082 if (TYPE_MODE (type
) != vmode
)
4084 if (TYPE_ALIGN_UNIT (type
) > al
)
4085 al
= TYPE_ALIGN_UNIT (type
);
4087 return build_int_cst (integer_type_node
, al
);
4091 /* This structure is part of the interface between lower_rec_simd_input_clauses
4092 and lower_rec_input_clauses. */
4094 class omplow_simd_context
{
4096 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4100 vec
<tree
, va_heap
> simt_eargs
;
4101 gimple_seq simt_dlist
;
4102 poly_uint64_pod max_vf
;
4106 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4110 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4111 omplow_simd_context
*sctx
, tree
&ivar
,
4112 tree
&lvar
, tree
*rvar
= NULL
,
4115 if (known_eq (sctx
->max_vf
, 0U))
4117 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
4118 if (maybe_gt (sctx
->max_vf
, 1U))
4120 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4121 OMP_CLAUSE_SAFELEN
);
4124 poly_uint64 safe_len
;
4125 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4126 || maybe_lt (safe_len
, 1U))
4129 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
4132 if (maybe_gt (sctx
->max_vf
, 1U))
4134 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4135 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4138 if (known_eq (sctx
->max_vf
, 1U))
4143 if (is_gimple_reg (new_var
))
4145 ivar
= lvar
= new_var
;
4148 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4149 ivar
= lvar
= create_tmp_var (type
);
4150 TREE_ADDRESSABLE (ivar
) = 1;
4151 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4152 NULL
, DECL_ATTRIBUTES (ivar
));
4153 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4154 tree clobber
= build_clobber (type
);
4155 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4156 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
4160 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4161 tree avar
= create_tmp_var_raw (atype
);
4162 if (TREE_ADDRESSABLE (new_var
))
4163 TREE_ADDRESSABLE (avar
) = 1;
4164 DECL_ATTRIBUTES (avar
)
4165 = tree_cons (get_identifier ("omp simd array"), NULL
,
4166 DECL_ATTRIBUTES (avar
));
4167 gimple_add_tmp_var (avar
);
4169 if (rvar
&& !ctx
->for_simd_scan_phase
)
4171 /* For inscan reductions, create another array temporary,
4172 which will hold the reduced value. */
4173 iavar
= create_tmp_var_raw (atype
);
4174 if (TREE_ADDRESSABLE (new_var
))
4175 TREE_ADDRESSABLE (iavar
) = 1;
4176 DECL_ATTRIBUTES (iavar
)
4177 = tree_cons (get_identifier ("omp simd array"), NULL
,
4178 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4179 DECL_ATTRIBUTES (iavar
)));
4180 gimple_add_tmp_var (iavar
);
4181 ctx
->cb
.decl_map
->put (avar
, iavar
);
4182 if (sctx
->lastlane
== NULL_TREE
)
4183 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
4184 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4185 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4186 TREE_THIS_NOTRAP (*rvar
) = 1;
4188 if (ctx
->scan_exclusive
)
4190 /* And for exclusive scan yet another one, which will
4191 hold the value during the scan phase. */
4192 tree savar
= create_tmp_var_raw (atype
);
4193 if (TREE_ADDRESSABLE (new_var
))
4194 TREE_ADDRESSABLE (savar
) = 1;
4195 DECL_ATTRIBUTES (savar
)
4196 = tree_cons (get_identifier ("omp simd array"), NULL
,
4197 tree_cons (get_identifier ("omp simd inscan "
4199 DECL_ATTRIBUTES (savar
)));
4200 gimple_add_tmp_var (savar
);
4201 ctx
->cb
.decl_map
->put (iavar
, savar
);
4202 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4203 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4204 TREE_THIS_NOTRAP (*rvar2
) = 1;
4207 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4208 NULL_TREE
, NULL_TREE
);
4209 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4210 NULL_TREE
, NULL_TREE
);
4211 TREE_THIS_NOTRAP (ivar
) = 1;
4212 TREE_THIS_NOTRAP (lvar
) = 1;
4214 if (DECL_P (new_var
))
4216 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4217 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4222 /* Helper function of lower_rec_input_clauses. For a reference
4223 in simd reduction, add an underlying variable it will reference. */
4226 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
4228 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4229 if (TREE_CONSTANT (z
))
4231 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4232 get_name (new_vard
));
4233 gimple_add_tmp_var (z
);
4234 TREE_ADDRESSABLE (z
) = 1;
4235 z
= build_fold_addr_expr_loc (loc
, z
);
4236 gimplify_assign (new_vard
, z
, ilist
);
4240 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4241 code to emit (type) (tskred_temp[idx]). */
4244 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
4247 unsigned HOST_WIDE_INT sz
4248 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
4249 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4250 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4252 tree v
= create_tmp_var (pointer_sized_int_node
);
4253 gimple
*g
= gimple_build_assign (v
, r
);
4254 gimple_seq_add_stmt (ilist
, g
);
4255 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4257 v
= create_tmp_var (type
);
4258 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4259 gimple_seq_add_stmt (ilist
, g
);
4264 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4265 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4266 private variables. Initialization statements go in ILIST, while calls
4267 to destructors go in DLIST. */
4270 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4271 omp_context
*ctx
, struct omp_for_data
*fd
)
4273 tree c
, copyin_seq
, x
, ptr
;
4274 bool copyin_by_ref
= false;
4275 bool lastprivate_firstprivate
= false;
4276 bool reduction_omp_orig_ref
= false;
4278 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4279 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4280 omplow_simd_context sctx
= omplow_simd_context ();
4281 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4282 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4283 gimple_seq llist
[4] = { };
4284 tree nonconst_simd_if
= NULL_TREE
;
4287 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4289 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4290 with data sharing clauses referencing variable sized vars. That
4291 is unnecessarily hard to support and very unlikely to result in
4292 vectorized code anyway. */
4294 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4295 switch (OMP_CLAUSE_CODE (c
))
4297 case OMP_CLAUSE_LINEAR
:
4298 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4301 case OMP_CLAUSE_PRIVATE
:
4302 case OMP_CLAUSE_FIRSTPRIVATE
:
4303 case OMP_CLAUSE_LASTPRIVATE
:
4304 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4306 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4308 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4309 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4313 case OMP_CLAUSE_REDUCTION
:
4314 case OMP_CLAUSE_IN_REDUCTION
:
4315 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4316 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4318 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4320 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4321 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4326 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4328 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4329 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4331 case OMP_CLAUSE_SIMDLEN
:
4332 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4335 case OMP_CLAUSE__CONDTEMP_
:
4336 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4344 /* Add a placeholder for simduid. */
4345 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4346 sctx
.simt_eargs
.safe_push (NULL_TREE
);
4348 unsigned task_reduction_cnt
= 0;
4349 unsigned task_reduction_cntorig
= 0;
4350 unsigned task_reduction_cnt_full
= 0;
4351 unsigned task_reduction_cntorig_full
= 0;
4352 unsigned task_reduction_other_cnt
= 0;
4353 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
4354 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
4355 /* Do all the fixed sized types in the first pass, and the variable sized
4356 types in the second pass. This makes sure that the scalar arguments to
4357 the variable sized types are processed before we use them in the
4358 variable sized operations. For task reductions we use 4 passes, in the
4359 first two we ignore them, in the third one gather arguments for
4360 GOMP_task_reduction_remap call and in the last pass actually handle
4361 the task reductions. */
4362 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
4365 if (pass
== 2 && task_reduction_cnt
)
4368 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
4369 + task_reduction_cntorig
);
4370 tskred_avar
= create_tmp_var_raw (tskred_atype
);
4371 gimple_add_tmp_var (tskred_avar
);
4372 TREE_ADDRESSABLE (tskred_avar
) = 1;
4373 task_reduction_cnt_full
= task_reduction_cnt
;
4374 task_reduction_cntorig_full
= task_reduction_cntorig
;
4376 else if (pass
== 3 && task_reduction_cnt
)
4378 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
4380 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
4381 size_int (task_reduction_cntorig
),
4382 build_fold_addr_expr (tskred_avar
));
4383 gimple_seq_add_stmt (ilist
, g
);
4385 if (pass
== 3 && task_reduction_other_cnt
)
4387 /* For reduction clauses, build
4388 tskred_base = (void *) tskred_temp[2]
4389 + omp_get_thread_num () * tskred_temp[1]
4390 or if tskred_temp[1] is known to be constant, that constant
4391 directly. This is the start of the private reduction copy block
4392 for the current thread. */
4393 tree v
= create_tmp_var (integer_type_node
);
4394 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
4395 gimple
*g
= gimple_build_call (x
, 0);
4396 gimple_call_set_lhs (g
, v
);
4397 gimple_seq_add_stmt (ilist
, g
);
4398 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
4399 tskred_temp
= OMP_CLAUSE_DECL (c
);
4400 if (is_taskreg_ctx (ctx
))
4401 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
4402 tree v2
= create_tmp_var (sizetype
);
4403 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
4404 gimple_seq_add_stmt (ilist
, g
);
4405 if (ctx
->task_reductions
[0])
4406 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
4408 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
4409 tree v3
= create_tmp_var (sizetype
);
4410 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
4411 gimple_seq_add_stmt (ilist
, g
);
4412 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
4413 tskred_base
= create_tmp_var (ptr_type_node
);
4414 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
4415 gimple_seq_add_stmt (ilist
, g
);
4417 task_reduction_cnt
= 0;
4418 task_reduction_cntorig
= 0;
4419 task_reduction_other_cnt
= 0;
4420 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4422 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
4425 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4426 bool task_reduction_p
= false;
4427 bool task_reduction_needs_orig_p
= false;
4428 tree cond
= NULL_TREE
;
4432 case OMP_CLAUSE_PRIVATE
:
4433 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
4436 case OMP_CLAUSE_SHARED
:
4437 /* Ignore shared directives in teams construct inside
4438 of target construct. */
4439 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4440 && !is_host_teams_ctx (ctx
))
4442 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
4444 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
4445 || is_global_var (OMP_CLAUSE_DECL (c
)));
4448 case OMP_CLAUSE_FIRSTPRIVATE
:
4449 case OMP_CLAUSE_COPYIN
:
4451 case OMP_CLAUSE_LINEAR
:
4452 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
4453 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4454 lastprivate_firstprivate
= true;
4456 case OMP_CLAUSE_REDUCTION
:
4457 case OMP_CLAUSE_IN_REDUCTION
:
4458 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
4460 task_reduction_p
= true;
4461 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4463 task_reduction_other_cnt
++;
4468 task_reduction_cnt
++;
4469 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4471 var
= OMP_CLAUSE_DECL (c
);
4472 /* If var is a global variable that isn't privatized
4473 in outer contexts, we don't need to look up the
4474 original address, it is always the address of the
4475 global variable itself. */
4477 || omp_is_reference (var
)
4479 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4481 task_reduction_needs_orig_p
= true;
4482 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4483 task_reduction_cntorig
++;
4487 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4488 reduction_omp_orig_ref
= true;
4490 case OMP_CLAUSE__REDUCTEMP_
:
4491 if (!is_taskreg_ctx (ctx
))
4494 case OMP_CLAUSE__LOOPTEMP_
:
4495 /* Handle _looptemp_/_reductemp_ clauses only on
4500 case OMP_CLAUSE_LASTPRIVATE
:
4501 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4503 lastprivate_firstprivate
= true;
4504 if (pass
!= 0 || is_taskloop_ctx (ctx
))
4507 /* Even without corresponding firstprivate, if
4508 decl is Fortran allocatable, it needs outer var
4511 && lang_hooks
.decls
.omp_private_outer_ref
4512 (OMP_CLAUSE_DECL (c
)))
4513 lastprivate_firstprivate
= true;
4515 case OMP_CLAUSE_ALIGNED
:
4518 var
= OMP_CLAUSE_DECL (c
);
4519 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
4520 && !is_global_var (var
))
4522 new_var
= maybe_lookup_decl (var
, ctx
);
4523 if (new_var
== NULL_TREE
)
4524 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4525 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4526 tree alarg
= omp_clause_aligned_alignment (c
);
4527 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4528 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
4529 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4530 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4531 gimplify_and_add (x
, ilist
);
4533 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
4534 && is_global_var (var
))
4536 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
4537 new_var
= lookup_decl (var
, ctx
);
4538 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4539 t
= build_fold_addr_expr_loc (clause_loc
, t
);
4540 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4541 tree alarg
= omp_clause_aligned_alignment (c
);
4542 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4543 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
4544 t
= fold_convert_loc (clause_loc
, ptype
, t
);
4545 x
= create_tmp_var (ptype
);
4546 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
4547 gimplify_and_add (t
, ilist
);
4548 t
= build_simple_mem_ref_loc (clause_loc
, x
);
4549 SET_DECL_VALUE_EXPR (new_var
, t
);
4550 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4553 case OMP_CLAUSE__CONDTEMP_
:
4554 if (is_parallel_ctx (ctx
)
4555 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
4562 if (task_reduction_p
!= (pass
>= 2))
4565 new_var
= var
= OMP_CLAUSE_DECL (c
);
4566 if ((c_kind
== OMP_CLAUSE_REDUCTION
4567 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4568 && TREE_CODE (var
) == MEM_REF
)
4570 var
= TREE_OPERAND (var
, 0);
4571 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
4572 var
= TREE_OPERAND (var
, 0);
4573 if (TREE_CODE (var
) == INDIRECT_REF
4574 || TREE_CODE (var
) == ADDR_EXPR
)
4575 var
= TREE_OPERAND (var
, 0);
4576 if (is_variable_sized (var
))
4578 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
4579 var
= DECL_VALUE_EXPR (var
);
4580 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
4581 var
= TREE_OPERAND (var
, 0);
4582 gcc_assert (DECL_P (var
));
4586 if (c_kind
!= OMP_CLAUSE_COPYIN
)
4587 new_var
= lookup_decl (var
, ctx
);
4589 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
4594 /* C/C++ array section reductions. */
4595 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4596 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4597 && var
!= OMP_CLAUSE_DECL (c
))
4602 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
4603 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
4605 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
4607 tree b
= TREE_OPERAND (orig_var
, 1);
4608 b
= maybe_lookup_decl (b
, ctx
);
4611 b
= TREE_OPERAND (orig_var
, 1);
4612 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
4614 if (integer_zerop (bias
))
4618 bias
= fold_convert_loc (clause_loc
,
4619 TREE_TYPE (b
), bias
);
4620 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4621 TREE_TYPE (b
), b
, bias
);
4623 orig_var
= TREE_OPERAND (orig_var
, 0);
4627 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4628 if (is_global_var (out
)
4629 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
4630 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
4631 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
4636 bool by_ref
= use_pointer_for_field (var
, NULL
);
4637 x
= build_receiver_ref (var
, by_ref
, ctx
);
4638 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
4639 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
4641 x
= build_fold_addr_expr (x
);
4643 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
4644 x
= build_simple_mem_ref (x
);
4645 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
4647 if (var
== TREE_OPERAND (orig_var
, 0))
4648 x
= build_fold_addr_expr (x
);
4650 bias
= fold_convert (sizetype
, bias
);
4651 x
= fold_convert (ptr_type_node
, x
);
4652 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
4653 TREE_TYPE (x
), x
, bias
);
4654 unsigned cnt
= task_reduction_cnt
- 1;
4655 if (!task_reduction_needs_orig_p
)
4656 cnt
+= (task_reduction_cntorig_full
4657 - task_reduction_cntorig
);
4659 cnt
= task_reduction_cntorig
- 1;
4660 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4661 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4662 gimplify_assign (r
, x
, ilist
);
4666 if (TREE_CODE (orig_var
) == INDIRECT_REF
4667 || TREE_CODE (orig_var
) == ADDR_EXPR
)
4668 orig_var
= TREE_OPERAND (orig_var
, 0);
4669 tree d
= OMP_CLAUSE_DECL (c
);
4670 tree type
= TREE_TYPE (d
);
4671 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
4672 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
4673 const char *name
= get_name (orig_var
);
4676 tree xv
= create_tmp_var (ptr_type_node
);
4677 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4679 unsigned cnt
= task_reduction_cnt
- 1;
4680 if (!task_reduction_needs_orig_p
)
4681 cnt
+= (task_reduction_cntorig_full
4682 - task_reduction_cntorig
);
4684 cnt
= task_reduction_cntorig
- 1;
4685 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4686 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4688 gimple
*g
= gimple_build_assign (xv
, x
);
4689 gimple_seq_add_stmt (ilist
, g
);
4693 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4695 if (ctx
->task_reductions
[1 + idx
])
4696 off
= fold_convert (sizetype
,
4697 ctx
->task_reductions
[1 + idx
]);
4699 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4701 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
4703 gimple_seq_add_stmt (ilist
, g
);
4705 x
= fold_convert (build_pointer_type (boolean_type_node
),
4707 if (TREE_CONSTANT (v
))
4708 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
4709 TYPE_SIZE_UNIT (type
));
4712 tree t
= maybe_lookup_decl (v
, ctx
);
4716 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4717 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
4719 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4721 build_int_cst (TREE_TYPE (v
), 1));
4722 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4724 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4725 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4727 cond
= create_tmp_var (TREE_TYPE (x
));
4728 gimplify_assign (cond
, x
, ilist
);
4731 else if (TREE_CONSTANT (v
))
4733 x
= create_tmp_var_raw (type
, name
);
4734 gimple_add_tmp_var (x
);
4735 TREE_ADDRESSABLE (x
) = 1;
4736 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4741 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4742 tree t
= maybe_lookup_decl (v
, ctx
);
4746 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4747 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
4748 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4750 build_int_cst (TREE_TYPE (v
), 1));
4751 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4753 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4754 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
4755 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
4758 tree ptype
= build_pointer_type (TREE_TYPE (type
));
4759 x
= fold_convert_loc (clause_loc
, ptype
, x
);
4760 tree y
= create_tmp_var (ptype
, name
);
4761 gimplify_assign (y
, x
, ilist
);
4765 if (!integer_zerop (bias
))
4767 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4769 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4771 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
4772 pointer_sized_int_node
, yb
, bias
);
4773 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
4774 yb
= create_tmp_var (ptype
, name
);
4775 gimplify_assign (yb
, x
, ilist
);
4779 d
= TREE_OPERAND (d
, 0);
4780 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
4781 d
= TREE_OPERAND (d
, 0);
4782 if (TREE_CODE (d
) == ADDR_EXPR
)
4784 if (orig_var
!= var
)
4786 gcc_assert (is_variable_sized (orig_var
));
4787 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
4789 gimplify_assign (new_var
, x
, ilist
);
4790 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
4791 tree t
= build_fold_indirect_ref (new_var
);
4792 DECL_IGNORED_P (new_var
) = 0;
4793 TREE_THIS_NOTRAP (t
) = 1;
4794 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
4795 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
4799 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
4800 build_int_cst (ptype
, 0));
4801 SET_DECL_VALUE_EXPR (new_var
, x
);
4802 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4807 gcc_assert (orig_var
== var
);
4808 if (TREE_CODE (d
) == INDIRECT_REF
)
4810 x
= create_tmp_var (ptype
, name
);
4811 TREE_ADDRESSABLE (x
) = 1;
4812 gimplify_assign (x
, yb
, ilist
);
4813 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4815 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4816 gimplify_assign (new_var
, x
, ilist
);
4818 /* GOMP_taskgroup_reduction_register memsets the whole
4819 array to zero. If the initializer is zero, we don't
4820 need to initialize it again, just mark it as ever
4821 used unconditionally, i.e. cond = true. */
4823 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
4824 && initializer_zerop (omp_reduction_init (c
,
4827 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
4829 gimple_seq_add_stmt (ilist
, g
);
4832 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4836 if (!is_parallel_ctx (ctx
))
4838 tree condv
= create_tmp_var (boolean_type_node
);
4839 g
= gimple_build_assign (condv
,
4840 build_simple_mem_ref (cond
));
4841 gimple_seq_add_stmt (ilist
, g
);
4842 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
4843 g
= gimple_build_cond (NE_EXPR
, condv
,
4844 boolean_false_node
, end
, lab1
);
4845 gimple_seq_add_stmt (ilist
, g
);
4846 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
4848 g
= gimple_build_assign (build_simple_mem_ref (cond
),
4850 gimple_seq_add_stmt (ilist
, g
);
4853 tree y1
= create_tmp_var (ptype
);
4854 gimplify_assign (y1
, y
, ilist
);
4855 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
4856 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
4857 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
4858 if (task_reduction_needs_orig_p
)
4860 y3
= create_tmp_var (ptype
);
4862 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4863 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4864 size_int (task_reduction_cnt_full
4865 + task_reduction_cntorig
- 1),
4866 NULL_TREE
, NULL_TREE
);
4869 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4870 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
4873 gimplify_assign (y3
, ref
, ilist
);
4875 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
4879 y2
= create_tmp_var (ptype
);
4880 gimplify_assign (y2
, y
, ilist
);
4882 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4884 tree ref
= build_outer_var_ref (var
, ctx
);
4885 /* For ref build_outer_var_ref already performs this. */
4886 if (TREE_CODE (d
) == INDIRECT_REF
)
4887 gcc_assert (omp_is_reference (var
));
4888 else if (TREE_CODE (d
) == ADDR_EXPR
)
4889 ref
= build_fold_addr_expr (ref
);
4890 else if (omp_is_reference (var
))
4891 ref
= build_fold_addr_expr (ref
);
4892 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
4893 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
4894 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4896 y3
= create_tmp_var (ptype
);
4897 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
4901 y4
= create_tmp_var (ptype
);
4902 gimplify_assign (y4
, ref
, dlist
);
4906 tree i
= create_tmp_var (TREE_TYPE (v
));
4907 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
4908 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4909 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
4912 i2
= create_tmp_var (TREE_TYPE (v
));
4913 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
4914 body2
= create_artificial_label (UNKNOWN_LOCATION
);
4915 end2
= create_artificial_label (UNKNOWN_LOCATION
);
4916 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
4918 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4920 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
4921 tree decl_placeholder
4922 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
4923 SET_DECL_VALUE_EXPR (decl_placeholder
,
4924 build_simple_mem_ref (y1
));
4925 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
4926 SET_DECL_VALUE_EXPR (placeholder
,
4927 y3
? build_simple_mem_ref (y3
)
4929 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
4930 x
= lang_hooks
.decls
.omp_clause_default_ctor
4931 (c
, build_simple_mem_ref (y1
),
4932 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
4934 gimplify_and_add (x
, ilist
);
4935 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4937 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4938 lower_omp (&tseq
, ctx
);
4939 gimple_seq_add_seq (ilist
, tseq
);
4941 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4944 SET_DECL_VALUE_EXPR (decl_placeholder
,
4945 build_simple_mem_ref (y2
));
4946 SET_DECL_VALUE_EXPR (placeholder
,
4947 build_simple_mem_ref (y4
));
4948 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4949 lower_omp (&tseq
, ctx
);
4950 gimple_seq_add_seq (dlist
, tseq
);
4951 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4953 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4954 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
4957 x
= lang_hooks
.decls
.omp_clause_dtor
4958 (c
, build_simple_mem_ref (y2
));
4960 gimplify_and_add (x
, dlist
);
4965 x
= omp_reduction_init (c
, TREE_TYPE (type
));
4966 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
4968 /* reduction(-:var) sums up the partial results, so it
4969 acts identically to reduction(+:var). */
4970 if (code
== MINUS_EXPR
)
4973 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
4976 x
= build2 (code
, TREE_TYPE (type
),
4977 build_simple_mem_ref (y4
),
4978 build_simple_mem_ref (y2
));
4979 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
4983 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
4984 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4985 gimple_seq_add_stmt (ilist
, g
);
4988 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
4989 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4990 gimple_seq_add_stmt (ilist
, g
);
4992 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
4993 build_int_cst (TREE_TYPE (i
), 1));
4994 gimple_seq_add_stmt (ilist
, g
);
4995 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
4996 gimple_seq_add_stmt (ilist
, g
);
4997 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
5000 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
5001 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5002 gimple_seq_add_stmt (dlist
, g
);
5005 g
= gimple_build_assign
5006 (y4
, POINTER_PLUS_EXPR
, y4
,
5007 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5008 gimple_seq_add_stmt (dlist
, g
);
5010 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
5011 build_int_cst (TREE_TYPE (i2
), 1));
5012 gimple_seq_add_stmt (dlist
, g
);
5013 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
5014 gimple_seq_add_stmt (dlist
, g
);
5015 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
5021 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
5025 bool by_ref
= use_pointer_for_field (var
, ctx
);
5026 x
= build_receiver_ref (var
, by_ref
, ctx
);
5028 if (!omp_is_reference (var
))
5029 x
= build_fold_addr_expr (x
);
5030 x
= fold_convert (ptr_type_node
, x
);
5031 unsigned cnt
= task_reduction_cnt
- 1;
5032 if (!task_reduction_needs_orig_p
)
5033 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
5035 cnt
= task_reduction_cntorig
- 1;
5036 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5037 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5038 gimplify_assign (r
, x
, ilist
);
5043 tree type
= TREE_TYPE (new_var
);
5044 if (!omp_is_reference (var
))
5045 type
= build_pointer_type (type
);
5046 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5048 unsigned cnt
= task_reduction_cnt
- 1;
5049 if (!task_reduction_needs_orig_p
)
5050 cnt
+= (task_reduction_cntorig_full
5051 - task_reduction_cntorig
);
5053 cnt
= task_reduction_cntorig
- 1;
5054 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5055 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5059 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5061 if (ctx
->task_reductions
[1 + idx
])
5062 off
= fold_convert (sizetype
,
5063 ctx
->task_reductions
[1 + idx
]);
5065 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5067 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
5070 x
= fold_convert (type
, x
);
5072 if (omp_is_reference (var
))
5074 gimplify_assign (new_var
, x
, ilist
);
5076 new_var
= build_simple_mem_ref (new_var
);
5080 t
= create_tmp_var (type
);
5081 gimplify_assign (t
, x
, ilist
);
5082 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
5083 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5085 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
5086 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
5087 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5088 cond
= create_tmp_var (TREE_TYPE (t
));
5089 gimplify_assign (cond
, t
, ilist
);
5091 else if (is_variable_sized (var
))
5093 /* For variable sized types, we need to allocate the
5094 actual storage here. Call alloca and store the
5095 result in the pointer decl that we created elsewhere. */
5099 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
5104 ptr
= DECL_VALUE_EXPR (new_var
);
5105 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
5106 ptr
= TREE_OPERAND (ptr
, 0);
5107 gcc_assert (DECL_P (ptr
));
5108 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5110 /* void *tmp = __builtin_alloca */
5111 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5112 stmt
= gimple_build_call (atmp
, 2, x
,
5113 size_int (DECL_ALIGN (var
)));
5114 tmp
= create_tmp_var_raw (ptr_type_node
);
5115 gimple_add_tmp_var (tmp
);
5116 gimple_call_set_lhs (stmt
, tmp
);
5118 gimple_seq_add_stmt (ilist
, stmt
);
5120 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5121 gimplify_assign (ptr
, x
, ilist
);
5124 else if (omp_is_reference (var
)
5125 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5126 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5128 /* For references that are being privatized for Fortran,
5129 allocate new backing storage for the new pointer
5130 variable. This allows us to avoid changing all the
5131 code that expects a pointer to something that expects
5132 a direct variable. */
5136 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5137 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5139 x
= build_receiver_ref (var
, false, ctx
);
5140 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5142 else if (TREE_CONSTANT (x
))
5144 /* For reduction in SIMD loop, defer adding the
5145 initialization of the reference, because if we decide
5146 to use SIMD array for it, the initilization could cause
5147 expansion ICE. Ditto for other privatization clauses. */
5152 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5154 gimple_add_tmp_var (x
);
5155 TREE_ADDRESSABLE (x
) = 1;
5156 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5162 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5163 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5164 tree al
= size_int (TYPE_ALIGN (rtype
));
5165 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5170 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5171 gimplify_assign (new_var
, x
, ilist
);
5174 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5176 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5177 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5178 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5186 switch (OMP_CLAUSE_CODE (c
))
5188 case OMP_CLAUSE_SHARED
:
5189 /* Ignore shared directives in teams construct inside
5190 target construct. */
5191 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5192 && !is_host_teams_ctx (ctx
))
5194 /* Shared global vars are just accessed directly. */
5195 if (is_global_var (new_var
))
5197 /* For taskloop firstprivate/lastprivate, represented
5198 as firstprivate and shared clause on the task, new_var
5199 is the firstprivate var. */
5200 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5202 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5203 needs to be delayed until after fixup_child_record_type so
5204 that we get the correct type during the dereference. */
5205 by_ref
= use_pointer_for_field (var
, ctx
);
5206 x
= build_receiver_ref (var
, by_ref
, ctx
);
5207 SET_DECL_VALUE_EXPR (new_var
, x
);
5208 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5210 /* ??? If VAR is not passed by reference, and the variable
5211 hasn't been initialized yet, then we'll get a warning for
5212 the store into the omp_data_s structure. Ideally, we'd be
5213 able to notice this and not store anything at all, but
5214 we're generating code too early. Suppress the warning. */
5216 TREE_NO_WARNING (var
) = 1;
5219 case OMP_CLAUSE__CONDTEMP_
:
5220 if (is_parallel_ctx (ctx
))
5222 x
= build_receiver_ref (var
, false, ctx
);
5223 SET_DECL_VALUE_EXPR (new_var
, x
);
5224 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5226 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5228 x
= build_zero_cst (TREE_TYPE (var
));
5233 case OMP_CLAUSE_LASTPRIVATE
:
5234 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5238 case OMP_CLAUSE_PRIVATE
:
5239 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5240 x
= build_outer_var_ref (var
, ctx
);
5241 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5243 if (is_task_ctx (ctx
))
5244 x
= build_receiver_ref (var
, false, ctx
);
5246 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5254 nx
= unshare_expr (new_var
);
5256 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5257 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5260 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5262 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5265 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5266 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5267 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5268 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5269 || (gimple_omp_for_index (ctx
->stmt
, 0)
5271 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5272 || omp_is_reference (var
))
5273 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5276 if (omp_is_reference (var
))
5278 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5279 tree new_vard
= TREE_OPERAND (new_var
, 0);
5280 gcc_assert (DECL_P (new_vard
));
5281 SET_DECL_VALUE_EXPR (new_vard
,
5282 build_fold_addr_expr (lvar
));
5283 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5288 tree iv
= unshare_expr (ivar
);
5290 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
5293 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
5297 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
5299 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
5300 unshare_expr (ivar
), x
);
5304 gimplify_and_add (x
, &llist
[0]);
5305 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5306 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5311 gcc_assert (TREE_CODE (v
) == MEM_REF
);
5312 v
= TREE_OPERAND (v
, 0);
5313 gcc_assert (DECL_P (v
));
5315 v
= *ctx
->lastprivate_conditional_map
->get (v
);
5316 tree t
= create_tmp_var (TREE_TYPE (v
));
5317 tree z
= build_zero_cst (TREE_TYPE (v
));
5319 = build_outer_var_ref (var
, ctx
,
5320 OMP_CLAUSE_LASTPRIVATE
);
5321 gimple_seq_add_stmt (dlist
,
5322 gimple_build_assign (t
, z
));
5323 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
5324 tree civar
= DECL_VALUE_EXPR (v
);
5325 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
5326 civar
= unshare_expr (civar
);
5327 TREE_OPERAND (civar
, 1) = sctx
.idx
;
5328 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
5329 unshare_expr (civar
));
5330 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
5331 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
5332 orig_v
, unshare_expr (ivar
)));
5333 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
5335 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
5337 gimple_seq tseq
= NULL
;
5338 gimplify_and_add (x
, &tseq
);
5340 lower_omp (&tseq
, ctx
->outer
);
5341 gimple_seq_add_seq (&llist
[1], tseq
);
5343 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5344 && ctx
->for_simd_scan_phase
)
5346 x
= unshare_expr (ivar
);
5348 = build_outer_var_ref (var
, ctx
,
5349 OMP_CLAUSE_LASTPRIVATE
);
5350 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5352 gimplify_and_add (x
, &llist
[0]);
5356 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5358 gimplify_and_add (y
, &llist
[1]);
5362 if (omp_is_reference (var
))
5364 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5365 tree new_vard
= TREE_OPERAND (new_var
, 0);
5366 gcc_assert (DECL_P (new_vard
));
5367 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5368 x
= TYPE_SIZE_UNIT (type
);
5369 if (TREE_CONSTANT (x
))
5371 x
= create_tmp_var_raw (type
, get_name (var
));
5372 gimple_add_tmp_var (x
);
5373 TREE_ADDRESSABLE (x
) = 1;
5374 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5375 x
= fold_convert_loc (clause_loc
,
5376 TREE_TYPE (new_vard
), x
);
5377 gimplify_assign (new_vard
, x
, ilist
);
5382 gimplify_and_add (nx
, ilist
);
5383 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5385 && ctx
->for_simd_scan_phase
)
5387 tree orig_v
= build_outer_var_ref (var
, ctx
,
5388 OMP_CLAUSE_LASTPRIVATE
);
5389 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
5391 gimplify_and_add (x
, ilist
);
5396 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5398 gimplify_and_add (x
, dlist
);
5401 case OMP_CLAUSE_LINEAR
:
5402 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
5403 goto do_firstprivate
;
5404 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5407 x
= build_outer_var_ref (var
, ctx
);
5410 case OMP_CLAUSE_FIRSTPRIVATE
:
5411 if (is_task_ctx (ctx
))
5413 if ((omp_is_reference (var
)
5414 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
5415 || is_variable_sized (var
))
5417 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
5419 || use_pointer_for_field (var
, NULL
))
5421 x
= build_receiver_ref (var
, false, ctx
);
5422 SET_DECL_VALUE_EXPR (new_var
, x
);
5423 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5427 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
5428 && omp_is_reference (var
))
5430 x
= build_outer_var_ref (var
, ctx
);
5431 gcc_assert (TREE_CODE (x
) == MEM_REF
5432 && integer_zerop (TREE_OPERAND (x
, 1)));
5433 x
= TREE_OPERAND (x
, 0);
5434 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5435 (c
, unshare_expr (new_var
), x
);
5436 gimplify_and_add (x
, ilist
);
5440 x
= build_outer_var_ref (var
, ctx
);
5443 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5444 && gimple_omp_for_combined_into_p (ctx
->stmt
))
5446 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5447 tree stept
= TREE_TYPE (t
);
5448 tree ct
= omp_find_clause (clauses
,
5449 OMP_CLAUSE__LOOPTEMP_
);
5451 tree l
= OMP_CLAUSE_DECL (ct
);
5452 tree n1
= fd
->loop
.n1
;
5453 tree step
= fd
->loop
.step
;
5454 tree itype
= TREE_TYPE (l
);
5455 if (POINTER_TYPE_P (itype
))
5456 itype
= signed_type_for (itype
);
5457 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
5458 if (TYPE_UNSIGNED (itype
)
5459 && fd
->loop
.cond_code
== GT_EXPR
)
5460 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
5461 fold_build1 (NEGATE_EXPR
, itype
, l
),
5462 fold_build1 (NEGATE_EXPR
,
5465 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
5466 t
= fold_build2 (MULT_EXPR
, stept
,
5467 fold_convert (stept
, l
), t
);
5469 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
5471 if (omp_is_reference (var
))
5473 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5474 tree new_vard
= TREE_OPERAND (new_var
, 0);
5475 gcc_assert (DECL_P (new_vard
));
5476 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5477 nx
= TYPE_SIZE_UNIT (type
);
5478 if (TREE_CONSTANT (nx
))
5480 nx
= create_tmp_var_raw (type
,
5482 gimple_add_tmp_var (nx
);
5483 TREE_ADDRESSABLE (nx
) = 1;
5484 nx
= build_fold_addr_expr_loc (clause_loc
,
5486 nx
= fold_convert_loc (clause_loc
,
5487 TREE_TYPE (new_vard
),
5489 gimplify_assign (new_vard
, nx
, ilist
);
5493 x
= lang_hooks
.decls
.omp_clause_linear_ctor
5495 gimplify_and_add (x
, ilist
);
5499 if (POINTER_TYPE_P (TREE_TYPE (x
)))
5500 x
= fold_build2 (POINTER_PLUS_EXPR
,
5501 TREE_TYPE (x
), x
, t
);
5503 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5506 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
5507 || TREE_ADDRESSABLE (new_var
)
5508 || omp_is_reference (var
))
5509 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5512 if (omp_is_reference (var
))
5514 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5515 tree new_vard
= TREE_OPERAND (new_var
, 0);
5516 gcc_assert (DECL_P (new_vard
));
5517 SET_DECL_VALUE_EXPR (new_vard
,
5518 build_fold_addr_expr (lvar
));
5519 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5521 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
5523 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
5524 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
5525 gimplify_and_add (x
, ilist
);
5526 gimple_stmt_iterator gsi
5527 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5529 = gimple_build_assign (unshare_expr (lvar
), iv
);
5530 gsi_insert_before_without_update (&gsi
, g
,
5532 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5533 enum tree_code code
= PLUS_EXPR
;
5534 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
5535 code
= POINTER_PLUS_EXPR
;
5536 g
= gimple_build_assign (iv
, code
, iv
, t
);
5537 gsi_insert_before_without_update (&gsi
, g
,
5541 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5542 (c
, unshare_expr (ivar
), x
);
5543 gimplify_and_add (x
, &llist
[0]);
5544 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5546 gimplify_and_add (x
, &llist
[1]);
5549 if (omp_is_reference (var
))
5551 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5552 tree new_vard
= TREE_OPERAND (new_var
, 0);
5553 gcc_assert (DECL_P (new_vard
));
5554 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5555 nx
= TYPE_SIZE_UNIT (type
);
5556 if (TREE_CONSTANT (nx
))
5558 nx
= create_tmp_var_raw (type
, get_name (var
));
5559 gimple_add_tmp_var (nx
);
5560 TREE_ADDRESSABLE (nx
) = 1;
5561 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
5562 nx
= fold_convert_loc (clause_loc
,
5563 TREE_TYPE (new_vard
), nx
);
5564 gimplify_assign (new_vard
, nx
, ilist
);
5568 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5569 (c
, unshare_expr (new_var
), x
);
5570 gimplify_and_add (x
, ilist
);
5573 case OMP_CLAUSE__LOOPTEMP_
:
5574 case OMP_CLAUSE__REDUCTEMP_
:
5575 gcc_assert (is_taskreg_ctx (ctx
));
5576 x
= build_outer_var_ref (var
, ctx
);
5577 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5578 gimplify_and_add (x
, ilist
);
5581 case OMP_CLAUSE_COPYIN
:
5582 by_ref
= use_pointer_for_field (var
, NULL
);
5583 x
= build_receiver_ref (var
, by_ref
, ctx
);
5584 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
5585 append_to_statement_list (x
, ©in_seq
);
5586 copyin_by_ref
|= by_ref
;
5589 case OMP_CLAUSE_REDUCTION
:
5590 case OMP_CLAUSE_IN_REDUCTION
:
5591 /* OpenACC reductions are initialized using the
5592 GOACC_REDUCTION internal function. */
5593 if (is_gimple_omp_oacc (ctx
->stmt
))
5595 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5597 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5599 tree ptype
= TREE_TYPE (placeholder
);
5602 x
= error_mark_node
;
5603 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
5604 && !task_reduction_needs_orig_p
)
5606 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5608 tree pptype
= build_pointer_type (ptype
);
5609 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5610 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5611 size_int (task_reduction_cnt_full
5612 + task_reduction_cntorig
- 1),
5613 NULL_TREE
, NULL_TREE
);
5617 = *ctx
->task_reduction_map
->get (c
);
5618 x
= task_reduction_read (ilist
, tskred_temp
,
5619 pptype
, 7 + 3 * idx
);
5621 x
= fold_convert (pptype
, x
);
5622 x
= build_simple_mem_ref (x
);
5627 x
= build_outer_var_ref (var
, ctx
);
5629 if (omp_is_reference (var
)
5630 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
5631 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5633 SET_DECL_VALUE_EXPR (placeholder
, x
);
5634 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5635 tree new_vard
= new_var
;
5636 if (omp_is_reference (var
))
5638 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5639 new_vard
= TREE_OPERAND (new_var
, 0);
5640 gcc_assert (DECL_P (new_vard
));
5642 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5644 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5645 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5648 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5652 if (new_vard
== new_var
)
5654 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
5655 SET_DECL_VALUE_EXPR (new_var
, ivar
);
5659 SET_DECL_VALUE_EXPR (new_vard
,
5660 build_fold_addr_expr (ivar
));
5661 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5663 x
= lang_hooks
.decls
.omp_clause_default_ctor
5664 (c
, unshare_expr (ivar
),
5665 build_outer_var_ref (var
, ctx
));
5666 if (rvarp
&& ctx
->for_simd_scan_phase
)
5669 gimplify_and_add (x
, &llist
[0]);
5670 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5672 gimplify_and_add (x
, &llist
[1]);
5679 gimplify_and_add (x
, &llist
[0]);
5681 tree ivar2
= unshare_expr (lvar
);
5682 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5683 x
= lang_hooks
.decls
.omp_clause_default_ctor
5684 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
5685 gimplify_and_add (x
, &llist
[0]);
5689 x
= lang_hooks
.decls
.omp_clause_default_ctor
5690 (c
, unshare_expr (rvar2
),
5691 build_outer_var_ref (var
, ctx
));
5692 gimplify_and_add (x
, &llist
[0]);
5695 /* For types that need construction, add another
5696 private var which will be default constructed
5697 and optionally initialized with
5698 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5699 loop we want to assign this value instead of
5700 constructing and destructing it in each
5702 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
5703 gimple_add_tmp_var (nv
);
5704 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
5708 x
= lang_hooks
.decls
.omp_clause_default_ctor
5709 (c
, nv
, build_outer_var_ref (var
, ctx
));
5710 gimplify_and_add (x
, ilist
);
5712 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5714 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5715 x
= DECL_VALUE_EXPR (new_vard
);
5717 if (new_vard
!= new_var
)
5718 vexpr
= build_fold_addr_expr (nv
);
5719 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5720 lower_omp (&tseq
, ctx
);
5721 SET_DECL_VALUE_EXPR (new_vard
, x
);
5722 gimple_seq_add_seq (ilist
, tseq
);
5723 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5726 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5728 gimplify_and_add (x
, dlist
);
5731 tree ref
= build_outer_var_ref (var
, ctx
);
5732 x
= unshare_expr (ivar
);
5733 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5735 gimplify_and_add (x
, &llist
[0]);
5737 ref
= build_outer_var_ref (var
, ctx
);
5738 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
5740 gimplify_and_add (x
, &llist
[3]);
5742 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5743 if (new_vard
== new_var
)
5744 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5746 SET_DECL_VALUE_EXPR (new_vard
,
5747 build_fold_addr_expr (lvar
));
5749 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5751 gimplify_and_add (x
, &llist
[1]);
5753 tree ivar2
= unshare_expr (lvar
);
5754 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5755 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
5757 gimplify_and_add (x
, &llist
[1]);
5761 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
5763 gimplify_and_add (x
, &llist
[1]);
5768 gimplify_and_add (x
, &llist
[0]);
5769 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5771 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5772 lower_omp (&tseq
, ctx
);
5773 gimple_seq_add_seq (&llist
[0], tseq
);
5775 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5776 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5777 lower_omp (&tseq
, ctx
);
5778 gimple_seq_add_seq (&llist
[1], tseq
);
5779 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5780 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5781 if (new_vard
== new_var
)
5782 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5784 SET_DECL_VALUE_EXPR (new_vard
,
5785 build_fold_addr_expr (lvar
));
5786 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5788 gimplify_and_add (x
, &llist
[1]);
5791 /* If this is a reference to constant size reduction var
5792 with placeholder, we haven't emitted the initializer
5793 for it because it is undesirable if SIMD arrays are used.
5794 But if they aren't used, we need to emit the deferred
5795 initialization now. */
5796 else if (omp_is_reference (var
) && is_simd
)
5797 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5799 tree lab2
= NULL_TREE
;
5803 if (!is_parallel_ctx (ctx
))
5805 tree condv
= create_tmp_var (boolean_type_node
);
5806 tree m
= build_simple_mem_ref (cond
);
5807 g
= gimple_build_assign (condv
, m
);
5808 gimple_seq_add_stmt (ilist
, g
);
5810 = create_artificial_label (UNKNOWN_LOCATION
);
5811 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5812 g
= gimple_build_cond (NE_EXPR
, condv
,
5815 gimple_seq_add_stmt (ilist
, g
);
5816 gimple_seq_add_stmt (ilist
,
5817 gimple_build_label (lab1
));
5819 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5821 gimple_seq_add_stmt (ilist
, g
);
5823 x
= lang_hooks
.decls
.omp_clause_default_ctor
5824 (c
, unshare_expr (new_var
),
5826 : build_outer_var_ref (var
, ctx
));
5828 gimplify_and_add (x
, ilist
);
5830 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5831 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5833 if (ctx
->for_simd_scan_phase
)
5836 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
5838 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
5839 gimple_add_tmp_var (nv
);
5840 ctx
->cb
.decl_map
->put (new_vard
, nv
);
5841 x
= lang_hooks
.decls
.omp_clause_default_ctor
5842 (c
, nv
, build_outer_var_ref (var
, ctx
));
5844 gimplify_and_add (x
, ilist
);
5845 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5847 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5849 if (new_vard
!= new_var
)
5850 vexpr
= build_fold_addr_expr (nv
);
5851 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5852 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5853 lower_omp (&tseq
, ctx
);
5854 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
5855 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
5856 gimple_seq_add_seq (ilist
, tseq
);
5858 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5859 if (is_simd
&& ctx
->scan_exclusive
)
5862 = create_tmp_var_raw (TREE_TYPE (new_var
));
5863 gimple_add_tmp_var (nv2
);
5864 ctx
->cb
.decl_map
->put (nv
, nv2
);
5865 x
= lang_hooks
.decls
.omp_clause_default_ctor
5866 (c
, nv2
, build_outer_var_ref (var
, ctx
));
5867 gimplify_and_add (x
, ilist
);
5868 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5870 gimplify_and_add (x
, dlist
);
5872 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5874 gimplify_and_add (x
, dlist
);
5877 && ctx
->scan_exclusive
5878 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
5880 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
5881 gimple_add_tmp_var (nv2
);
5882 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
5883 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5885 gimplify_and_add (x
, dlist
);
5887 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5891 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5893 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5894 lower_omp (&tseq
, ctx
);
5895 gimple_seq_add_seq (ilist
, tseq
);
5897 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5900 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5901 lower_omp (&tseq
, ctx
);
5902 gimple_seq_add_seq (dlist
, tseq
);
5903 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5905 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5909 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5916 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
5917 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
5918 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5923 tree lab2
= NULL_TREE
;
5924 /* GOMP_taskgroup_reduction_register memsets the whole
5925 array to zero. If the initializer is zero, we don't
5926 need to initialize it again, just mark it as ever
5927 used unconditionally, i.e. cond = true. */
5928 if (initializer_zerop (x
))
5930 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5932 gimple_seq_add_stmt (ilist
, g
);
5937 if (!cond) { cond = true; new_var = x; } */
5938 if (!is_parallel_ctx (ctx
))
5940 tree condv
= create_tmp_var (boolean_type_node
);
5941 tree m
= build_simple_mem_ref (cond
);
5942 g
= gimple_build_assign (condv
, m
);
5943 gimple_seq_add_stmt (ilist
, g
);
5945 = create_artificial_label (UNKNOWN_LOCATION
);
5946 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5947 g
= gimple_build_cond (NE_EXPR
, condv
,
5950 gimple_seq_add_stmt (ilist
, g
);
5951 gimple_seq_add_stmt (ilist
,
5952 gimple_build_label (lab1
));
5954 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5956 gimple_seq_add_stmt (ilist
, g
);
5957 gimplify_assign (new_var
, x
, ilist
);
5959 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5963 /* reduction(-:var) sums up the partial results, so it
5964 acts identically to reduction(+:var). */
5965 if (code
== MINUS_EXPR
)
5968 tree new_vard
= new_var
;
5969 if (is_simd
&& omp_is_reference (var
))
5971 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5972 new_vard
= TREE_OPERAND (new_var
, 0);
5973 gcc_assert (DECL_P (new_vard
));
5975 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5977 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5978 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5981 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5985 if (new_vard
!= new_var
)
5987 SET_DECL_VALUE_EXPR (new_vard
,
5988 build_fold_addr_expr (lvar
));
5989 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5992 tree ref
= build_outer_var_ref (var
, ctx
);
5996 if (ctx
->for_simd_scan_phase
)
5998 gimplify_assign (ivar
, ref
, &llist
[0]);
5999 ref
= build_outer_var_ref (var
, ctx
);
6000 gimplify_assign (ref
, rvar
, &llist
[3]);
6004 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
6009 simt_lane
= create_tmp_var (unsigned_type_node
);
6010 x
= build_call_expr_internal_loc
6011 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
6012 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
6013 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
6014 gimplify_assign (ivar
, x
, &llist
[2]);
6016 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
6017 ref
= build_outer_var_ref (var
, ctx
);
6018 gimplify_assign (ref
, x
, &llist
[1]);
6023 if (omp_is_reference (var
) && is_simd
)
6024 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6025 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6026 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6028 gimplify_assign (new_var
, x
, ilist
);
6031 tree ref
= build_outer_var_ref (var
, ctx
);
6033 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
6034 ref
= build_outer_var_ref (var
, ctx
);
6035 gimplify_assign (ref
, x
, dlist
);
6048 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
6049 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
6052 if (known_eq (sctx
.max_vf
, 1U))
6054 sctx
.is_simt
= false;
6055 if (ctx
->lastprivate_conditional_map
)
6057 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
6059 /* Signal to lower_omp_1 that it should use parent context. */
6060 ctx
->combined_into_simd_safelen1
= true;
6061 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6062 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6063 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6065 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6066 omp_context
*outer
= ctx
->outer
;
6067 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
6068 outer
= outer
->outer
;
6069 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
6070 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
6071 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
6077 /* When not vectorized, treat lastprivate(conditional:) like
6078 normal lastprivate, as there will be just one simd lane
6079 writing the privatized variable. */
6080 delete ctx
->lastprivate_conditional_map
;
6081 ctx
->lastprivate_conditional_map
= NULL
;
6086 if (nonconst_simd_if
)
6088 if (sctx
.lane
== NULL_TREE
)
6090 sctx
.idx
= create_tmp_var (unsigned_type_node
);
6091 sctx
.lane
= create_tmp_var (unsigned_type_node
);
6093 /* FIXME: For now. */
6094 sctx
.is_simt
= false;
6097 if (sctx
.lane
|| sctx
.is_simt
)
6099 uid
= create_tmp_var (ptr_type_node
, "simduid");
6100 /* Don't want uninit warnings on simduid, it is always uninitialized,
6101 but we use it not for the value, but for the DECL_UID only. */
6102 TREE_NO_WARNING (uid
) = 1;
6103 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6104 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6105 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6106 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6108 /* Emit calls denoting privatized variables and initializing a pointer to
6109 structure that holds private variables as fields after ompdevlow pass. */
6112 sctx
.simt_eargs
[0] = uid
;
6114 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6115 gimple_call_set_lhs (g
, uid
);
6116 gimple_seq_add_stmt (ilist
, g
);
6117 sctx
.simt_eargs
.release ();
6119 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6120 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6121 gimple_call_set_lhs (g
, simtrec
);
6122 gimple_seq_add_stmt (ilist
, g
);
6126 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6127 2 + (nonconst_simd_if
!= NULL
),
6128 uid
, integer_zero_node
,
6130 gimple_call_set_lhs (g
, sctx
.lane
);
6131 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
6132 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6133 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6134 build_int_cst (unsigned_type_node
, 0));
6135 gimple_seq_add_stmt (ilist
, g
);
6138 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6140 gimple_call_set_lhs (g
, sctx
.lastlane
);
6141 gimple_seq_add_stmt (dlist
, g
);
6142 gimple_seq_add_seq (dlist
, llist
[3]);
6144 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6147 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6148 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6149 gimple_call_set_lhs (g
, simt_vf
);
6150 gimple_seq_add_stmt (dlist
, g
);
6152 tree t
= build_int_cst (unsigned_type_node
, 1);
6153 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6154 gimple_seq_add_stmt (dlist
, g
);
6156 t
= build_int_cst (unsigned_type_node
, 0);
6157 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6158 gimple_seq_add_stmt (dlist
, g
);
6160 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6161 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6162 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6163 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6164 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6166 gimple_seq_add_seq (dlist
, llist
[2]);
6168 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6169 gimple_seq_add_stmt (dlist
, g
);
6171 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6172 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6173 gimple_seq_add_stmt (dlist
, g
);
6175 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6177 for (int i
= 0; i
< 2; i
++)
6180 tree vf
= create_tmp_var (unsigned_type_node
);
6181 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
6182 gimple_call_set_lhs (g
, vf
);
6183 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
6184 gimple_seq_add_stmt (seq
, g
);
6185 tree t
= build_int_cst (unsigned_type_node
, 0);
6186 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6187 gimple_seq_add_stmt (seq
, g
);
6188 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6189 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6190 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6191 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
6192 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
6193 gimple_seq_add_seq (seq
, llist
[i
]);
6194 t
= build_int_cst (unsigned_type_node
, 1);
6195 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
6196 gimple_seq_add_stmt (seq
, g
);
6197 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
6198 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
6199 gimple_seq_add_stmt (seq
, g
);
6200 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
6205 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
6207 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
6208 gimple_seq_add_stmt (dlist
, g
);
6211 /* The copyin sequence is not to be executed by the main thread, since
6212 that would result in self-copies. Perhaps not visible to scalars,
6213 but it certainly is to C++ operator=. */
6216 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
6218 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
6219 build_int_cst (TREE_TYPE (x
), 0));
6220 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
6221 gimplify_and_add (x
, ilist
);
6224 /* If any copyin variable is passed by reference, we must ensure the
6225 master thread doesn't modify it before it is copied over in all
6226 threads. Similarly for variables in both firstprivate and
6227 lastprivate clauses we need to ensure the lastprivate copying
6228 happens after firstprivate copying in all threads. And similarly
6229 for UDRs if initializer expression refers to omp_orig. */
6230 if (copyin_by_ref
|| lastprivate_firstprivate
6231 || (reduction_omp_orig_ref
6232 && !ctx
->scan_inclusive
6233 && !ctx
->scan_exclusive
))
6235 /* Don't add any barrier for #pragma omp simd or
6236 #pragma omp distribute. */
6237 if (!is_task_ctx (ctx
)
6238 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
6239 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
6240 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
6243 /* If max_vf is non-zero, then we can use only a vectorization factor
6244 up to the max_vf we chose. So stick it into the safelen clause. */
6245 if (maybe_ne (sctx
.max_vf
, 0U))
6247 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
6248 OMP_CLAUSE_SAFELEN
);
6249 poly_uint64 safe_len
;
6251 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
6252 && maybe_gt (safe_len
, sctx
.max_vf
)))
6254 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
6255 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
6257 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6258 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6263 /* Create temporary variables for lastprivate(conditional:) implementation
6264 in context CTX with CLAUSES. */
6267 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
6269 tree iter_type
= NULL_TREE
;
6270 tree cond_ptr
= NULL_TREE
;
6271 tree iter_var
= NULL_TREE
;
6272 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6273 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
6274 tree next
= *clauses
;
6275 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6276 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6277 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6281 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
6283 if (iter_type
== NULL_TREE
)
6285 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
6286 iter_var
= create_tmp_var_raw (iter_type
);
6287 DECL_CONTEXT (iter_var
) = current_function_decl
;
6288 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6289 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6290 ctx
->block_vars
= iter_var
;
6292 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6293 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6294 OMP_CLAUSE_DECL (c3
) = iter_var
;
6295 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
6297 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6299 next
= OMP_CLAUSE_CHAIN (cc
);
6300 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6301 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
6302 ctx
->lastprivate_conditional_map
->put (o
, v
);
6305 if (iter_type
== NULL
)
6307 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
6309 struct omp_for_data fd
;
6310 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
6312 iter_type
= unsigned_type_for (fd
.iter_type
);
6314 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
6315 iter_type
= unsigned_type_node
;
6316 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
6320 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
6321 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6325 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
6326 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
6327 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
6328 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
6329 ctx
->block_vars
= cond_ptr
;
6330 c2
= build_omp_clause (UNKNOWN_LOCATION
,
6331 OMP_CLAUSE__CONDTEMP_
);
6332 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6333 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
6336 iter_var
= create_tmp_var_raw (iter_type
);
6337 DECL_CONTEXT (iter_var
) = current_function_decl
;
6338 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6339 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6340 ctx
->block_vars
= iter_var
;
6342 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6343 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6344 OMP_CLAUSE_DECL (c3
) = iter_var
;
6345 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
6346 OMP_CLAUSE_CHAIN (c2
) = c3
;
6347 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6349 tree v
= create_tmp_var_raw (iter_type
);
6350 DECL_CONTEXT (v
) = current_function_decl
;
6351 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
6352 DECL_CHAIN (v
) = ctx
->block_vars
;
6353 ctx
->block_vars
= v
;
6354 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6355 ctx
->lastprivate_conditional_map
->put (o
, v
);
6360 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6361 both parallel and workshare constructs. PREDICATE may be NULL if it's
6362 always true. BODY_P is the sequence to insert early initialization
6363 if needed, STMT_LIST is where the non-conditional lastprivate handling
6364 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6368 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
6369 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
6372 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
6373 bool par_clauses
= false;
6374 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
6375 unsigned HOST_WIDE_INT conditional_off
= 0;
6376 gimple_seq post_stmt_list
= NULL
;
6378 /* Early exit if there are no lastprivate or linear clauses. */
6379 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
6380 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
6381 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
6382 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
6384 if (clauses
== NULL
)
6386 /* If this was a workshare clause, see if it had been combined
6387 with its parallel. In that case, look for the clauses on the
6388 parallel statement itself. */
6389 if (is_parallel_ctx (ctx
))
6393 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6396 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6397 OMP_CLAUSE_LASTPRIVATE
);
6398 if (clauses
== NULL
)
6403 bool maybe_simt
= false;
6404 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6405 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
6407 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
6408 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
6410 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
6416 tree label_true
, arm1
, arm2
;
6417 enum tree_code pred_code
= TREE_CODE (predicate
);
6419 label
= create_artificial_label (UNKNOWN_LOCATION
);
6420 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
6421 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
6423 arm1
= TREE_OPERAND (predicate
, 0);
6424 arm2
= TREE_OPERAND (predicate
, 1);
6425 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6426 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6431 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6432 arm2
= boolean_false_node
;
6433 pred_code
= NE_EXPR
;
6437 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
6438 c
= fold_convert (integer_type_node
, c
);
6439 simtcond
= create_tmp_var (integer_type_node
);
6440 gimplify_assign (simtcond
, c
, stmt_list
);
6441 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
6443 c
= create_tmp_var (integer_type_node
);
6444 gimple_call_set_lhs (g
, c
);
6445 gimple_seq_add_stmt (stmt_list
, g
);
6446 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
6450 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
6451 gimple_seq_add_stmt (stmt_list
, stmt
);
6452 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
6455 tree cond_ptr
= NULL_TREE
;
6456 for (c
= clauses
; c
;)
6459 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6460 gimple_seq
*this_stmt_list
= stmt_list
;
6461 tree lab2
= NULL_TREE
;
6463 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6464 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6465 && ctx
->lastprivate_conditional_map
6466 && !ctx
->combined_into_simd_safelen1
)
6468 gcc_assert (body_p
);
6471 if (cond_ptr
== NULL_TREE
)
6473 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
6474 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
6476 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
6477 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6478 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
6479 gimplify_assign (v
, build_zero_cst (type
), body_p
);
6480 this_stmt_list
= cstmt_list
;
6482 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
6484 mem
= build2 (MEM_REF
, type
, cond_ptr
,
6485 build_int_cst (TREE_TYPE (cond_ptr
),
6487 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
6490 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
6491 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
6492 tree mem2
= copy_node (mem
);
6493 gimple_seq seq
= NULL
;
6494 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
6495 gimple_seq_add_seq (this_stmt_list
, seq
);
6496 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
6497 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6498 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
6499 gimple_seq_add_stmt (this_stmt_list
, g
);
6500 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
6501 gimplify_assign (mem2
, v
, this_stmt_list
);
6504 && ctx
->combined_into_simd_safelen1
6505 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6506 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6507 && ctx
->lastprivate_conditional_map
)
6508 this_stmt_list
= &post_stmt_list
;
6510 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6511 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6512 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
6514 var
= OMP_CLAUSE_DECL (c
);
6515 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6516 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
6517 && is_taskloop_ctx (ctx
))
6519 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
6520 new_var
= lookup_decl (var
, ctx
->outer
);
6524 new_var
= lookup_decl (var
, ctx
);
6525 /* Avoid uninitialized warnings for lastprivate and
6526 for linear iterators. */
6528 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6529 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
6530 TREE_NO_WARNING (new_var
) = 1;
6533 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
6535 tree val
= DECL_VALUE_EXPR (new_var
);
6536 if (TREE_CODE (val
) == ARRAY_REF
6537 && VAR_P (TREE_OPERAND (val
, 0))
6538 && lookup_attribute ("omp simd array",
6539 DECL_ATTRIBUTES (TREE_OPERAND (val
,
6542 if (lastlane
== NULL
)
6544 lastlane
= create_tmp_var (unsigned_type_node
);
6546 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6548 TREE_OPERAND (val
, 1));
6549 gimple_call_set_lhs (g
, lastlane
);
6550 gimple_seq_add_stmt (this_stmt_list
, g
);
6552 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
6553 TREE_OPERAND (val
, 0), lastlane
,
6554 NULL_TREE
, NULL_TREE
);
6555 TREE_THIS_NOTRAP (new_var
) = 1;
6558 else if (maybe_simt
)
6560 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
6561 ? DECL_VALUE_EXPR (new_var
)
6563 if (simtlast
== NULL
)
6565 simtlast
= create_tmp_var (unsigned_type_node
);
6566 gcall
*g
= gimple_build_call_internal
6567 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
6568 gimple_call_set_lhs (g
, simtlast
);
6569 gimple_seq_add_stmt (this_stmt_list
, g
);
6571 x
= build_call_expr_internal_loc
6572 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
6573 TREE_TYPE (val
), 2, val
, simtlast
);
6574 new_var
= unshare_expr (new_var
);
6575 gimplify_assign (new_var
, x
, this_stmt_list
);
6576 new_var
= unshare_expr (new_var
);
6579 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6580 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
6582 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
6583 gimple_seq_add_seq (this_stmt_list
,
6584 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
6585 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
6587 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6588 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
6590 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
6591 gimple_seq_add_seq (this_stmt_list
,
6592 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
6593 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
6597 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6598 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)
6599 && is_taskloop_ctx (ctx
))
6601 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
6603 if (is_global_var (ovar
))
6607 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
6608 if (omp_is_reference (var
))
6609 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6610 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
6611 gimplify_and_add (x
, this_stmt_list
);
6614 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
6618 c
= OMP_CLAUSE_CHAIN (c
);
6619 if (c
== NULL
&& !par_clauses
)
6621 /* If this was a workshare clause, see if it had been combined
6622 with its parallel. In that case, continue looking for the
6623 clauses also on the parallel statement itself. */
6624 if (is_parallel_ctx (ctx
))
6628 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6631 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6632 OMP_CLAUSE_LASTPRIVATE
);
6638 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
6639 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
6642 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6643 (which might be a placeholder). INNER is true if this is an inner
6644 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6645 join markers. Generate the before-loop forking sequence in
6646 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6647 general form of these sequences is
6649 GOACC_REDUCTION_SETUP
6651 GOACC_REDUCTION_INIT
6653 GOACC_REDUCTION_FINI
6655 GOACC_REDUCTION_TEARDOWN. */
6658 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
6659 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
6660 gimple_seq
*join_seq
, omp_context
*ctx
)
6662 gimple_seq before_fork
= NULL
;
6663 gimple_seq after_fork
= NULL
;
6664 gimple_seq before_join
= NULL
;
6665 gimple_seq after_join
= NULL
;
6666 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
6667 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
6668 unsigned offset
= 0;
6670 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6671 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
6673 tree orig
= OMP_CLAUSE_DECL (c
);
6674 tree var
= maybe_lookup_decl (orig
, ctx
);
6675 tree ref_to_res
= NULL_TREE
;
6676 tree incoming
, outgoing
, v1
, v2
, v3
;
6677 bool is_private
= false;
6679 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
6680 if (rcode
== MINUS_EXPR
)
6682 else if (rcode
== TRUTH_ANDIF_EXPR
)
6683 rcode
= BIT_AND_EXPR
;
6684 else if (rcode
== TRUTH_ORIF_EXPR
)
6685 rcode
= BIT_IOR_EXPR
;
6686 tree op
= build_int_cst (unsigned_type_node
, rcode
);
6691 incoming
= outgoing
= var
;
6695 /* See if an outer construct also reduces this variable. */
6696 omp_context
*outer
= ctx
;
6698 while (omp_context
*probe
= outer
->outer
)
6700 enum gimple_code type
= gimple_code (probe
->stmt
);
6705 case GIMPLE_OMP_FOR
:
6706 cls
= gimple_omp_for_clauses (probe
->stmt
);
6709 case GIMPLE_OMP_TARGET
:
6710 if ((gimple_omp_target_kind (probe
->stmt
)
6711 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
6712 && (gimple_omp_target_kind (probe
->stmt
)
6713 != GF_OMP_TARGET_KIND_OACC_SERIAL
))
6716 cls
= gimple_omp_target_clauses (probe
->stmt
);
6724 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
6725 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
6726 && orig
== OMP_CLAUSE_DECL (cls
))
6728 incoming
= outgoing
= lookup_decl (orig
, probe
);
6729 goto has_outer_reduction
;
6731 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
6732 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
6733 && orig
== OMP_CLAUSE_DECL (cls
))
6741 /* This is the outermost construct with this reduction,
6742 see if there's a mapping for it. */
6743 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
6744 && maybe_lookup_field (orig
, outer
) && !is_private
)
6746 ref_to_res
= build_receiver_ref (orig
, false, outer
);
6747 if (omp_is_reference (orig
))
6748 ref_to_res
= build_simple_mem_ref (ref_to_res
);
6750 tree type
= TREE_TYPE (var
);
6751 if (POINTER_TYPE_P (type
))
6752 type
= TREE_TYPE (type
);
6755 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
6759 /* Try to look at enclosing contexts for reduction var,
6760 use original if no mapping found. */
6762 omp_context
*c
= ctx
->outer
;
6765 t
= maybe_lookup_decl (orig
, c
);
6768 incoming
= outgoing
= (t
? t
: orig
);
6771 has_outer_reduction
:;
6775 ref_to_res
= integer_zero_node
;
6777 if (omp_is_reference (orig
))
6779 tree type
= TREE_TYPE (var
);
6780 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
6784 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
6785 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
6788 v1
= create_tmp_var (type
, id
);
6789 v2
= create_tmp_var (type
, id
);
6790 v3
= create_tmp_var (type
, id
);
6792 gimplify_assign (v1
, var
, fork_seq
);
6793 gimplify_assign (v2
, var
, fork_seq
);
6794 gimplify_assign (v3
, var
, fork_seq
);
6796 var
= build_simple_mem_ref (var
);
6797 v1
= build_simple_mem_ref (v1
);
6798 v2
= build_simple_mem_ref (v2
);
6799 v3
= build_simple_mem_ref (v3
);
6800 outgoing
= build_simple_mem_ref (outgoing
);
6802 if (!TREE_CONSTANT (incoming
))
6803 incoming
= build_simple_mem_ref (incoming
);
6808 /* Determine position in reduction buffer, which may be used
6809 by target. The parser has ensured that this is not a
6810 variable-sized type. */
6811 fixed_size_mode mode
6812 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
6813 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
6814 offset
= (offset
+ align
- 1) & ~(align
- 1);
6815 tree off
= build_int_cst (sizetype
, offset
);
6816 offset
+= GET_MODE_SIZE (mode
);
6820 init_code
= build_int_cst (integer_type_node
,
6821 IFN_GOACC_REDUCTION_INIT
);
6822 fini_code
= build_int_cst (integer_type_node
,
6823 IFN_GOACC_REDUCTION_FINI
);
6824 setup_code
= build_int_cst (integer_type_node
,
6825 IFN_GOACC_REDUCTION_SETUP
);
6826 teardown_code
= build_int_cst (integer_type_node
,
6827 IFN_GOACC_REDUCTION_TEARDOWN
);
6831 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6832 TREE_TYPE (var
), 6, setup_code
,
6833 unshare_expr (ref_to_res
),
6834 incoming
, level
, op
, off
);
6836 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6837 TREE_TYPE (var
), 6, init_code
,
6838 unshare_expr (ref_to_res
),
6839 v1
, level
, op
, off
);
6841 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6842 TREE_TYPE (var
), 6, fini_code
,
6843 unshare_expr (ref_to_res
),
6844 v2
, level
, op
, off
);
6846 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6847 TREE_TYPE (var
), 6, teardown_code
,
6848 ref_to_res
, v3
, level
, op
, off
);
6850 gimplify_assign (v1
, setup_call
, &before_fork
);
6851 gimplify_assign (v2
, init_call
, &after_fork
);
6852 gimplify_assign (v3
, fini_call
, &before_join
);
6853 gimplify_assign (outgoing
, teardown_call
, &after_join
);
6856 /* Now stitch things together. */
6857 gimple_seq_add_seq (fork_seq
, before_fork
);
6859 gimple_seq_add_stmt (fork_seq
, fork
);
6860 gimple_seq_add_seq (fork_seq
, after_fork
);
6862 gimple_seq_add_seq (join_seq
, before_join
);
6864 gimple_seq_add_stmt (join_seq
, join
);
6865 gimple_seq_add_seq (join_seq
, after_join
);
6868 /* Generate code to implement the REDUCTION clauses, append it
6869 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6870 that should be emitted also inside of the critical section,
6871 in that case clear *CLIST afterwards, otherwise leave it as is
6872 and let the caller emit it itself. */
6875 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
6876 gimple_seq
*clist
, omp_context
*ctx
)
6878 gimple_seq sub_seq
= NULL
;
6883 /* OpenACC loop reductions are handled elsewhere. */
6884 if (is_gimple_omp_oacc (ctx
->stmt
))
6887 /* SIMD reductions are handled in lower_rec_input_clauses. */
6888 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6889 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
6892 /* inscan reductions are handled elsewhere. */
6893 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
6896 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6897 update in that case, otherwise use a lock. */
6898 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
6899 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6900 && !OMP_CLAUSE_REDUCTION_TASK (c
))
6902 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
6903 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6905 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6915 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6917 tree var
, ref
, new_var
, orig_var
;
6918 enum tree_code code
;
6919 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6921 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
6922 || OMP_CLAUSE_REDUCTION_TASK (c
))
6925 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
6926 orig_var
= var
= OMP_CLAUSE_DECL (c
);
6927 if (TREE_CODE (var
) == MEM_REF
)
6929 var
= TREE_OPERAND (var
, 0);
6930 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
6931 var
= TREE_OPERAND (var
, 0);
6932 if (TREE_CODE (var
) == ADDR_EXPR
)
6933 var
= TREE_OPERAND (var
, 0);
6936 /* If this is a pointer or referenced based array
6937 section, the var could be private in the outer
6938 context e.g. on orphaned loop construct. Pretend this
6939 is private variable's outer reference. */
6940 ccode
= OMP_CLAUSE_PRIVATE
;
6941 if (TREE_CODE (var
) == INDIRECT_REF
)
6942 var
= TREE_OPERAND (var
, 0);
6945 if (is_variable_sized (var
))
6947 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
6948 var
= DECL_VALUE_EXPR (var
);
6949 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
6950 var
= TREE_OPERAND (var
, 0);
6951 gcc_assert (DECL_P (var
));
6954 new_var
= lookup_decl (var
, ctx
);
6955 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
6956 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6957 ref
= build_outer_var_ref (var
, ctx
, ccode
);
6958 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6960 /* reduction(-:var) sums up the partial results, so it acts
6961 identically to reduction(+:var). */
6962 if (code
== MINUS_EXPR
)
6967 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
6969 addr
= save_expr (addr
);
6970 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
6971 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (ref
), ref
, new_var
);
6972 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
6973 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
6974 gimplify_and_add (x
, stmt_seqp
);
6977 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6979 tree d
= OMP_CLAUSE_DECL (c
);
6980 tree type
= TREE_TYPE (d
);
6981 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
6982 tree i
= create_tmp_var (TREE_TYPE (v
));
6983 tree ptype
= build_pointer_type (TREE_TYPE (type
));
6984 tree bias
= TREE_OPERAND (d
, 1);
6985 d
= TREE_OPERAND (d
, 0);
6986 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
6988 tree b
= TREE_OPERAND (d
, 1);
6989 b
= maybe_lookup_decl (b
, ctx
);
6992 b
= TREE_OPERAND (d
, 1);
6993 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
6995 if (integer_zerop (bias
))
6999 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
7000 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
7001 TREE_TYPE (b
), b
, bias
);
7003 d
= TREE_OPERAND (d
, 0);
7005 /* For ref build_outer_var_ref already performs this, so
7006 only new_var needs a dereference. */
7007 if (TREE_CODE (d
) == INDIRECT_REF
)
7009 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7010 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
7012 else if (TREE_CODE (d
) == ADDR_EXPR
)
7014 if (orig_var
== var
)
7016 new_var
= build_fold_addr_expr (new_var
);
7017 ref
= build_fold_addr_expr (ref
);
7022 gcc_assert (orig_var
== var
);
7023 if (omp_is_reference (var
))
7024 ref
= build_fold_addr_expr (ref
);
7028 tree t
= maybe_lookup_decl (v
, ctx
);
7032 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7033 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
7035 if (!integer_zerop (bias
))
7037 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
7038 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7039 TREE_TYPE (new_var
), new_var
,
7040 unshare_expr (bias
));
7041 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7042 TREE_TYPE (ref
), ref
, bias
);
7044 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
7045 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
7046 tree m
= create_tmp_var (ptype
);
7047 gimplify_assign (m
, new_var
, stmt_seqp
);
7049 m
= create_tmp_var (ptype
);
7050 gimplify_assign (m
, ref
, stmt_seqp
);
7052 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
7053 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7054 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7055 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
7056 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7057 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
7058 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7060 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7061 tree decl_placeholder
7062 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7063 SET_DECL_VALUE_EXPR (placeholder
, out
);
7064 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7065 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7066 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7067 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7068 gimple_seq_add_seq (&sub_seq
,
7069 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7070 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7071 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7072 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7076 x
= build2 (code
, TREE_TYPE (out
), out
, priv
);
7077 out
= unshare_expr (out
);
7078 gimplify_assign (out
, x
, &sub_seq
);
7080 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7081 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7082 gimple_seq_add_stmt (&sub_seq
, g
);
7083 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7084 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7085 gimple_seq_add_stmt (&sub_seq
, g
);
7086 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7087 build_int_cst (TREE_TYPE (i
), 1));
7088 gimple_seq_add_stmt (&sub_seq
, g
);
7089 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
7090 gimple_seq_add_stmt (&sub_seq
, g
);
7091 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
7093 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7095 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7097 if (omp_is_reference (var
)
7098 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7100 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7101 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7102 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7103 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7104 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7105 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7106 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7110 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
7111 ref
= build_outer_var_ref (var
, ctx
);
7112 gimplify_assign (ref
, x
, &sub_seq
);
7116 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7118 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7120 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7124 gimple_seq_add_seq (stmt_seqp
, *clist
);
7128 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
7130 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7134 /* Generate code to implement the COPYPRIVATE clauses. */
7137 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
7142 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7144 tree var
, new_var
, ref
, x
;
7146 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7148 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
7151 var
= OMP_CLAUSE_DECL (c
);
7152 by_ref
= use_pointer_for_field (var
, NULL
);
7154 ref
= build_sender_ref (var
, ctx
);
7155 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
7158 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
7159 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
7161 gimplify_assign (ref
, x
, slist
);
7163 ref
= build_receiver_ref (var
, false, ctx
);
7166 ref
= fold_convert_loc (clause_loc
,
7167 build_pointer_type (TREE_TYPE (new_var
)),
7169 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
7171 if (omp_is_reference (var
))
7173 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
7174 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
7175 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7177 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
7178 gimplify_and_add (x
, rlist
);
7183 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7184 and REDUCTION from the sender (aka parent) side. */
7187 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
7191 int ignored_looptemp
= 0;
7192 bool is_taskloop
= false;
7194 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7195 by GOMP_taskloop. */
7196 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
7198 ignored_looptemp
= 2;
7202 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7204 tree val
, ref
, x
, var
;
7205 bool by_ref
, do_in
= false, do_out
= false;
7206 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7208 switch (OMP_CLAUSE_CODE (c
))
7210 case OMP_CLAUSE_PRIVATE
:
7211 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7214 case OMP_CLAUSE_FIRSTPRIVATE
:
7215 case OMP_CLAUSE_COPYIN
:
7216 case OMP_CLAUSE_LASTPRIVATE
:
7217 case OMP_CLAUSE_IN_REDUCTION
:
7218 case OMP_CLAUSE__REDUCTEMP_
:
7220 case OMP_CLAUSE_REDUCTION
:
7221 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
7224 case OMP_CLAUSE_SHARED
:
7225 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7228 case OMP_CLAUSE__LOOPTEMP_
:
7229 if (ignored_looptemp
)
7239 val
= OMP_CLAUSE_DECL (c
);
7240 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7241 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
7242 && TREE_CODE (val
) == MEM_REF
)
7244 val
= TREE_OPERAND (val
, 0);
7245 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
7246 val
= TREE_OPERAND (val
, 0);
7247 if (TREE_CODE (val
) == INDIRECT_REF
7248 || TREE_CODE (val
) == ADDR_EXPR
)
7249 val
= TREE_OPERAND (val
, 0);
7250 if (is_variable_sized (val
))
7254 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7255 outer taskloop region. */
7256 omp_context
*ctx_for_o
= ctx
;
7258 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
7259 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7260 ctx_for_o
= ctx
->outer
;
7262 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
7264 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
7265 && is_global_var (var
)
7266 && (val
== OMP_CLAUSE_DECL (c
)
7267 || !is_task_ctx (ctx
)
7268 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
7269 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
7270 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
7271 != POINTER_TYPE
)))))
7274 t
= omp_member_access_dummy_var (var
);
7277 var
= DECL_VALUE_EXPR (var
);
7278 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
7280 var
= unshare_and_remap (var
, t
, o
);
7282 var
= unshare_expr (var
);
7285 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
7287 /* Handle taskloop firstprivate/lastprivate, where the
7288 lastprivate on GIMPLE_OMP_TASK is represented as
7289 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7290 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
7291 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
7292 if (use_pointer_for_field (val
, ctx
))
7293 var
= build_fold_addr_expr (var
);
7294 gimplify_assign (x
, var
, ilist
);
7295 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
7299 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7300 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
7301 || val
== OMP_CLAUSE_DECL (c
))
7302 && is_variable_sized (val
))
7304 by_ref
= use_pointer_for_field (val
, NULL
);
7306 switch (OMP_CLAUSE_CODE (c
))
7308 case OMP_CLAUSE_FIRSTPRIVATE
:
7309 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
7311 && is_task_ctx (ctx
))
7312 TREE_NO_WARNING (var
) = 1;
7316 case OMP_CLAUSE_PRIVATE
:
7317 case OMP_CLAUSE_COPYIN
:
7318 case OMP_CLAUSE__LOOPTEMP_
:
7319 case OMP_CLAUSE__REDUCTEMP_
:
7323 case OMP_CLAUSE_LASTPRIVATE
:
7324 if (by_ref
|| omp_is_reference (val
))
7326 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
7333 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
7338 case OMP_CLAUSE_REDUCTION
:
7339 case OMP_CLAUSE_IN_REDUCTION
:
7341 if (val
== OMP_CLAUSE_DECL (c
))
7343 if (is_task_ctx (ctx
))
7344 by_ref
= use_pointer_for_field (val
, ctx
);
7346 do_out
= !(by_ref
|| omp_is_reference (val
));
7349 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
7358 ref
= build_sender_ref (val
, ctx
);
7359 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
7360 gimplify_assign (ref
, x
, ilist
);
7361 if (is_task_ctx (ctx
))
7362 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
7367 ref
= build_sender_ref (val
, ctx
);
7368 gimplify_assign (var
, ref
, olist
);
7373 /* Generate code to implement SHARED from the sender (aka parent)
7374 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7375 list things that got automatically shared. */
7378 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
7380 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
7382 if (ctx
->record_type
== NULL
)
7385 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
7386 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
7388 ovar
= DECL_ABSTRACT_ORIGIN (f
);
7389 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
7392 nvar
= maybe_lookup_decl (ovar
, ctx
);
7393 if (!nvar
|| !DECL_HAS_VALUE_EXPR_P (nvar
))
7396 /* If CTX is a nested parallel directive. Find the immediately
7397 enclosing parallel or workshare construct that contains a
7398 mapping for OVAR. */
7399 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7401 t
= omp_member_access_dummy_var (var
);
7404 var
= DECL_VALUE_EXPR (var
);
7405 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
7407 var
= unshare_and_remap (var
, t
, o
);
7409 var
= unshare_expr (var
);
7412 if (use_pointer_for_field (ovar
, ctx
))
7414 x
= build_sender_ref (ovar
, ctx
);
7415 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
7416 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
7418 gcc_assert (is_parallel_ctx (ctx
)
7419 && DECL_ARTIFICIAL (ovar
));
7420 /* _condtemp_ clause. */
7421 var
= build_constructor (TREE_TYPE (x
), NULL
);
7424 var
= build_fold_addr_expr (var
);
7425 gimplify_assign (x
, var
, ilist
);
7429 x
= build_sender_ref (ovar
, ctx
);
7430 gimplify_assign (x
, var
, ilist
);
7432 if (!TREE_READONLY (var
)
7433 /* We don't need to receive a new reference to a result
7434 or parm decl. In fact we may not store to it as we will
7435 invalidate any pending RSO and generate wrong gimple
7437 && !((TREE_CODE (var
) == RESULT_DECL
7438 || TREE_CODE (var
) == PARM_DECL
)
7439 && DECL_BY_REFERENCE (var
)))
7441 x
= build_sender_ref (ovar
, ctx
);
7442 gimplify_assign (var
, x
, olist
);
7448 /* Emit an OpenACC head marker call, encapulating the partitioning and
7449 other information that must be processed by the target compiler.
7450 Return the maximum number of dimensions the associated loop might
7451 be partitioned over. */
7454 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
7455 gimple_seq
*seq
, omp_context
*ctx
)
7457 unsigned levels
= 0;
7459 tree gang_static
= NULL_TREE
;
7460 auto_vec
<tree
, 5> args
;
7462 args
.quick_push (build_int_cst
7463 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
7464 args
.quick_push (ddvar
);
7465 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7467 switch (OMP_CLAUSE_CODE (c
))
7469 case OMP_CLAUSE_GANG
:
7470 tag
|= OLF_DIM_GANG
;
7471 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
7472 /* static:* is represented by -1, and we can ignore it, as
7473 scheduling is always static. */
7474 if (gang_static
&& integer_minus_onep (gang_static
))
7475 gang_static
= NULL_TREE
;
7479 case OMP_CLAUSE_WORKER
:
7480 tag
|= OLF_DIM_WORKER
;
7484 case OMP_CLAUSE_VECTOR
:
7485 tag
|= OLF_DIM_VECTOR
;
7489 case OMP_CLAUSE_SEQ
:
7493 case OMP_CLAUSE_AUTO
:
7497 case OMP_CLAUSE_INDEPENDENT
:
7498 tag
|= OLF_INDEPENDENT
;
7501 case OMP_CLAUSE_TILE
:
7512 if (DECL_P (gang_static
))
7513 gang_static
= build_outer_var_ref (gang_static
, ctx
);
7514 tag
|= OLF_GANG_STATIC
;
7517 /* In a parallel region, loops are implicitly INDEPENDENT. */
7518 omp_context
*tgt
= enclosing_target_ctx (ctx
);
7519 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
7520 tag
|= OLF_INDEPENDENT
;
7523 /* Tiling could use all 3 levels. */
7527 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7528 Ensure at least one level, or 2 for possible auto
7530 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
7531 << OLF_DIM_BASE
) | OLF_SEQ
));
7533 if (levels
< 1u + maybe_auto
)
7534 levels
= 1u + maybe_auto
;
7537 args
.quick_push (build_int_cst (integer_type_node
, levels
));
7538 args
.quick_push (build_int_cst (integer_type_node
, tag
));
7540 args
.quick_push (gang_static
);
7542 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
7543 gimple_set_location (call
, loc
);
7544 gimple_set_lhs (call
, ddvar
);
7545 gimple_seq_add_stmt (seq
, call
);
7550 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7551 partitioning level of the enclosed region. */
7554 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
7555 tree tofollow
, gimple_seq
*seq
)
7557 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
7558 : IFN_UNIQUE_OACC_TAIL_MARK
);
7559 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
7560 int nargs
= 2 + (tofollow
!= NULL_TREE
);
7561 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
7562 marker
, ddvar
, tofollow
);
7563 gimple_set_location (call
, loc
);
7564 gimple_set_lhs (call
, ddvar
);
7565 gimple_seq_add_stmt (seq
, call
);
7568 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7569 the loop clauses, from which we extract reductions. Initialize
7573 lower_oacc_head_tail (location_t loc
, tree clauses
,
7574 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
7577 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
7578 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
7580 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
7581 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
7582 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
7585 for (unsigned done
= 1; count
; count
--, done
++)
7587 gimple_seq fork_seq
= NULL
;
7588 gimple_seq join_seq
= NULL
;
7590 tree place
= build_int_cst (integer_type_node
, -1);
7591 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7592 fork_kind
, ddvar
, place
);
7593 gimple_set_location (fork
, loc
);
7594 gimple_set_lhs (fork
, ddvar
);
7596 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7597 join_kind
, ddvar
, place
);
7598 gimple_set_location (join
, loc
);
7599 gimple_set_lhs (join
, ddvar
);
7601 /* Mark the beginning of this level sequence. */
7603 lower_oacc_loop_marker (loc
, ddvar
, true,
7604 build_int_cst (integer_type_node
, count
),
7606 lower_oacc_loop_marker (loc
, ddvar
, false,
7607 build_int_cst (integer_type_node
, done
),
7610 lower_oacc_reductions (loc
, clauses
, place
, inner
,
7611 fork
, join
, &fork_seq
, &join_seq
, ctx
);
7613 /* Append this level to head. */
7614 gimple_seq_add_seq (head
, fork_seq
);
7615 /* Prepend it to tail. */
7616 gimple_seq_add_seq (&join_seq
, *tail
);
7622 /* Mark the end of the sequence. */
7623 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
7624 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
7627 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7628 catch handler and return it. This prevents programs from violating the
7629 structured block semantics with throws. */
7632 maybe_catch_exception (gimple_seq body
)
7637 if (!flag_exceptions
)
7640 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
7641 decl
= lang_hooks
.eh_protect_cleanup_actions ();
7643 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
7645 g
= gimple_build_eh_must_not_throw (decl
);
7646 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
7649 return gimple_seq_alloc_with_stmt (g
);
7653 /* Routines to lower OMP directives into OMP-GIMPLE. */
7655 /* If ctx is a worksharing context inside of a cancellable parallel
7656 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7657 and conditional branch to parallel's cancel_label to handle
7658 cancellation in the implicit barrier. */
7661 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
7664 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
7665 if (gimple_omp_return_nowait_p (omp_return
))
7667 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
7668 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
7669 && outer
->cancellable
)
7671 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
7672 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
7673 tree lhs
= create_tmp_var (c_bool_type
);
7674 gimple_omp_return_set_lhs (omp_return
, lhs
);
7675 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
7676 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
7677 fold_convert (c_bool_type
,
7678 boolean_false_node
),
7679 outer
->cancel_label
, fallthru_label
);
7680 gimple_seq_add_stmt (body
, g
);
7681 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
7683 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
7687 /* Find the first task_reduction or reduction clause or return NULL
7688 if there are none. */
7691 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
7692 enum omp_clause_code ccode
)
7696 clauses
= omp_find_clause (clauses
, ccode
);
7697 if (clauses
== NULL_TREE
)
7699 if (ccode
!= OMP_CLAUSE_REDUCTION
7700 || code
== OMP_TASKLOOP
7701 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
7703 clauses
= OMP_CLAUSE_CHAIN (clauses
);
7707 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
7708 gimple_seq
*, gimple_seq
*);
7710 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7711 CTX is the enclosing OMP context for the current statement. */
7714 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7716 tree block
, control
;
7717 gimple_stmt_iterator tgsi
;
7718 gomp_sections
*stmt
;
7720 gbind
*new_stmt
, *bind
;
7721 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
7723 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
7725 push_gimplify_context ();
7731 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
7732 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
7733 tree rtmp
= NULL_TREE
;
7736 tree type
= build_pointer_type (pointer_sized_int_node
);
7737 tree temp
= create_tmp_var (type
);
7738 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
7739 OMP_CLAUSE_DECL (c
) = temp
;
7740 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
7741 gimple_omp_sections_set_clauses (stmt
, c
);
7742 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
7743 gimple_omp_sections_clauses (stmt
),
7744 &ilist
, &tred_dlist
);
7746 rtmp
= make_ssa_name (type
);
7747 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
7750 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
7751 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
7753 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
7754 &ilist
, &dlist
, ctx
, NULL
);
7756 control
= create_tmp_var (unsigned_type_node
, ".section");
7757 gimple_omp_sections_set_control (stmt
, control
);
7759 new_body
= gimple_omp_body (stmt
);
7760 gimple_omp_set_body (stmt
, NULL
);
7761 tgsi
= gsi_start (new_body
);
7762 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
7767 sec_start
= gsi_stmt (tgsi
);
7768 sctx
= maybe_lookup_ctx (sec_start
);
7771 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
7772 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
7773 GSI_CONTINUE_LINKING
);
7774 gimple_omp_set_body (sec_start
, NULL
);
7776 if (gsi_one_before_end_p (tgsi
))
7778 gimple_seq l
= NULL
;
7779 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
7780 &ilist
, &l
, &clist
, ctx
);
7781 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
7782 gimple_omp_section_set_last (sec_start
);
7785 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
7786 GSI_CONTINUE_LINKING
);
7789 block
= make_node (BLOCK
);
7790 bind
= gimple_build_bind (NULL
, new_body
, block
);
7793 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
7797 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
7798 gcall
*g
= gimple_build_call (fndecl
, 0);
7799 gimple_seq_add_stmt (&olist
, g
);
7800 gimple_seq_add_seq (&olist
, clist
);
7801 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
7802 g
= gimple_build_call (fndecl
, 0);
7803 gimple_seq_add_stmt (&olist
, g
);
7806 block
= make_node (BLOCK
);
7807 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
7808 gsi_replace (gsi_p
, new_stmt
, true);
7810 pop_gimplify_context (new_stmt
);
7811 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
7812 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
7813 if (BLOCK_VARS (block
))
7814 TREE_USED (block
) = 1;
7817 gimple_seq_add_seq (&new_body
, ilist
);
7818 gimple_seq_add_stmt (&new_body
, stmt
);
7819 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
7820 gimple_seq_add_stmt (&new_body
, bind
);
7822 t
= gimple_build_omp_continue (control
, control
);
7823 gimple_seq_add_stmt (&new_body
, t
);
7825 gimple_seq_add_seq (&new_body
, olist
);
7826 if (ctx
->cancellable
)
7827 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
7828 gimple_seq_add_seq (&new_body
, dlist
);
7830 new_body
= maybe_catch_exception (new_body
);
7832 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
7833 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7834 t
= gimple_build_omp_return (nowait
);
7835 gimple_seq_add_stmt (&new_body
, t
);
7836 gimple_seq_add_seq (&new_body
, tred_dlist
);
7837 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
7840 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
7842 gimple_bind_set_body (new_stmt
, new_body
);
7846 /* A subroutine of lower_omp_single. Expand the simple form of
7847 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7849 if (GOMP_single_start ())
7851 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7853 FIXME. It may be better to delay expanding the logic of this until
7854 pass_expand_omp. The expanded logic may make the job more difficult
7855 to a synchronization analysis pass. */
/* A subroutine of lower_omp_single.  Expand SINGLE_STMT without a
   copyprivate clause: emit into *PRE_P a call to GOMP_single_start ()
   and a conditional branch so the construct body runs only in the one
   thread for which that call returned true.
   NOTE(review): the extracted text below is garbled — statements are
   split across lines and some original lines are missing (e.g. the
   declarations of DECL and LHS, the true operand of the condition,
   and the braces).  Tokens are kept exactly as extracted.  */
7858 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
/* Labels for the "execute the body" (tlabel) and "skip it" (flabel)
   targets of the conditional below.  */
7860 location_t loc
= gimple_location (single_stmt
);
7861 tree tlabel
= create_artificial_label (loc
);
7862 tree flabel
= create_artificial_label (loc
);
7863 gimple
*call
, *cond
;
/* Call GOMP_single_start () and capture its result in a temporary.  */
7866 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
7867 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
7868 call
= gimple_build_call (decl
, 0);
7869 gimple_call_set_lhs (call
, lhs
);
7870 gimple_seq_add_stmt (pre_p
, call
);
/* Branch on the result: the second comparison operand and the label
   arguments were dropped by the extraction — presumably the folded
   "true" constant plus tlabel/flabel; TODO confirm against upstream.  */
7872 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
7873 fold_convert_loc (loc
, TREE_TYPE (lhs
),
/* Emit: condition, tlabel, the lowered construct body, then flabel.  */
7876 gimple_seq_add_stmt (pre_p
, cond
);
7877 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
7878 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7879 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
7883 /* A subroutine of lower_omp_single. Expand the simple form of
7884 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7886 #pragma omp single copyprivate (a, b, c)
7888 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7891 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7897 GOMP_single_copy_end (©out);
7908 FIXME. It may be better to delay expanding the logic of this until
7909 pass_expand_omp. The expanded logic may make the job more difficult
7910 to a synchronization analysis pass. */
/* A subroutine of lower_omp_single.  Expand SINGLE_STMT with a
   copyprivate clause: the executing thread fills a sender record
   (".omp_copy_o") and calls GOMP_single_copy_end; the other threads
   receive a pointer to it from GOMP_single_copy_start (".omp_copy_i")
   and copy the values in.
   NOTE(review): extracted text is garbled; some lines (trailing
   parameters, braces) are missing — tokens kept exactly as extracted.  */
7913 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
7916 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
7917 gimple_seq copyin_seq
;
7918 location_t loc
= gimple_location (single_stmt
);
/* Allocate the marshalling record and the pointer through which
   non-executing threads will read it.  */
7920 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
7922 ptr_type
= build_pointer_type (ctx
->record_type
);
7923 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
/* L0 = execute-body path, L1 = copy-in path, L2 = join point.  */
7925 l0
= create_artificial_label (loc
);
7926 l1
= create_artificial_label (loc
);
7927 l2
= create_artificial_label (loc
);
/* receiver_decl = (ptr_type) GOMP_single_copy_start ();  */
7929 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
7930 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
7931 t
= fold_convert_loc (loc
, ptr_type
, t
);
7932 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
/* A NULL return means this thread executes the body (goto L0);
   otherwise it copies the broadcast values in (goto L1).  */
7934 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
7935 build_int_cst (ptr_type
, 0));
7936 t
= build3 (COND_EXPR
, void_type_node
, t
,
7937 build_and_jump (&l0
), build_and_jump (&l1
));
7938 gimplify_and_add (t
, pre_p
);
7940 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
7942 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
/* Generate copy-out and copy-in code for the copyprivate vars;
   the trailing arguments (presumably &copyin_seq and ctx) were
   dropped by the extraction — TODO confirm against upstream.  */
7945 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
/* GOMP_single_copy_end (&sender_record); then jump over the
   copy-in sequence to L2.  */
7948 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
7949 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
7950 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
7951 gimplify_and_add (t
, pre_p
);
7953 t
= build_and_jump (&l2
);
7954 gimplify_and_add (t
, pre_p
);
/* Non-executing threads: run the copy-in sequence, then join at L2.  */
7956 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
7958 gimple_seq_add_seq (pre_p
, copyin_seq
);
7960 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
7964 /* Expand code for an OpenMP single directive. */
/* Expand code for an OpenMP single directive.  Replaces the statement at
   *GSI_P with a GIMPLE_BIND that holds the lowered body, the data-sharing
   clause handling, the GOMP_single_* expansion (simple or copyprivate
   form), the OMP return and the implicit barrier bookkeeping.
   NOTE(review): extracted text is garbled (split statements, missing
   braces/declarations); tokens kept exactly as extracted.  */
7967 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7970 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
7972 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
7974 push_gimplify_context ();
/* Build the replacement GIMPLE_BIND and swap it in at *GSI_P.  */
7976 block
= make_node (BLOCK
);
7977 bind
= gimple_build_bind (NULL
, NULL
, block
);
7978 gsi_replace (gsi_p
, bind
, true);
/* Lower data-sharing clauses (dlist receives the destructor/copy-back
   sequence) and then the construct body itself.  */
7981 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
7982 &bind_body
, &dlist
, ctx
, NULL
);
7983 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
7985 gimple_seq_add_stmt (&bind_body
, single_stmt
);
/* A record type was created only when a copyprivate clause is present;
   pick the matching expansion.  */
7987 if (ctx
->record_type
)
7988 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
7990 lower_omp_single_simple (single_stmt
, &bind_body
);
7992 gimple_omp_set_body (single_stmt
, NULL
);
7994 gimple_seq_add_seq (&bind_body
, dlist
);
7996 bind_body
= maybe_catch_exception (bind_body
);
/* Emit the OMP return (nowait if requested) and cancellation barrier
   handling into a tail sequence.  */
7998 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
7999 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8000 gimple
*g
= gimple_build_omp_return (nowait
);
8001 gimple_seq_add_stmt (&bind_body_tail
, g
);
8002 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
/* Clobber the copyprivate sender record after the return so its
   stack slot can be reused.  */
8003 if (ctx
->record_type
)
8005 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8006 tree clobber
= build_clobber (ctx
->record_type
);
8007 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8008 clobber
), GSI_SAME_STMT
);
8010 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8011 gimple_bind_set_body (bind
, bind_body
);
8013 pop_gimplify_context (bind
);
/* Attach the variables created during lowering to the bind/block.  */
8015 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8016 BLOCK_VARS (block
) = ctx
->block_vars
;
8017 if (BLOCK_VARS (block
))
8018 TREE_USED (block
) = 1;
8022 /* Expand code for an OpenMP master directive. */
/* Expand code for an OpenMP master directive.  Replaces the statement at
   *GSI_P with a GIMPLE_BIND that runs the lowered body only when
   omp_get_thread_num () == 0, jumping past it (to LAB) otherwise.
   NOTE(review): extracted text is garbled (split statements, missing
   braces and some declarations such as BIND and TSEQ); tokens kept
   exactly as extracted.  */
8025 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8027 tree block
, lab
= NULL
, x
, bfn_decl
;
8028 gimple
*stmt
= gsi_stmt (*gsi_p
);
8030 location_t loc
= gimple_location (stmt
);
8033 push_gimplify_context ();
/* Build the replacement GIMPLE_BIND and swap it in at *GSI_P.  */
8035 block
= make_node (BLOCK
);
8036 bind
= gimple_build_bind (NULL
, NULL
, block
);
8037 gsi_replace (gsi_p
, bind
, true);
8038 gimple_bind_add_stmt (bind
, stmt
);
/* if (omp_get_thread_num () != 0) goto lab;  (expressed as an
   EQ_EXPR COND_EXPR with the else-arm jumping to LAB).  */
8040 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8041 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
8042 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
8043 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
8045 gimplify_and_add (x
, &tseq
);
8046 gimple_bind_add_seq (bind
, tseq
);
/* Lower the body, wrap it for EH, and append it, then the skip label.  */
8048 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8049 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8050 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8051 gimple_omp_set_body (stmt
, NULL
);
8053 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
/* Master has no implied barrier: OMP return with nowait == true.  */
8055 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8057 pop_gimplify_context (bind
);
8059 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8060 BLOCK_VARS (block
) = ctx
->block_vars
;
8063 /* Helper function for lower_omp_task_reductions. For a specific PASS
8064 find out the current clause it should be processed, or return false
8065 if all have been processed already. */
/* Helper for lower_omp_task_reductions: advance *C to the next clause of
   kind CCODE to be handled in the given PASS (pass 0 = constant-sized
   types, pass 1 = variable-sized types, per the TREE_CONSTANT test
   below), filling in *DECL, *TYPE and *NEXT for it.
   NOTE(review): the extraction dropped several interior lines here
   (original lines 8081-8086 handling the MEM_REF case, the braces and
   the return statements), so only the surviving tokens are shown.  */
8068 omp_task_reduction_iterate (int pass
, enum tree_code code
,
8069 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
8070 tree
*type
, tree
*next
)
8072 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
/* Plain (non-task) reduction clauses are skipped except on taskloop.  */
8074 if (ccode
== OMP_CLAUSE_REDUCTION
8075 && code
!= OMP_TASKLOOP
8076 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
8078 *decl
= OMP_CLAUSE_DECL (*c
);
8079 *type
= TREE_TYPE (*decl
);
8080 if (TREE_CODE (*decl
) == MEM_REF
)
/* For references, the reduced type is what is pointed to.  */
8087 if (omp_is_reference (*decl
))
8088 *type
= TREE_TYPE (*type
);
/* Select the clause only in the pass that matches its size class.  */
8089 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
8092 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
8101 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8102 OMP_TASKGROUP only with task modifier). Register mapping of those in
8103 START sequence and reducing them and unregister them in the END sequence. */
/* Lower task_reduction (and, except for taskgroup, reduction) clauses for
   the construct CODE: build a record type with one field (plus a bool
   flag field) per reduction, emit code into *START that fills a
   pointer-sized-int descriptor array and registers it with the runtime,
   and emit code into *END that walks every thread's chunk, merges the
   initialized private copies into the original variables, runs
   destructors, and unregisters the array.
   NOTE(review): the extracted text of this function is heavily garbled —
   statements are split across lines and many original lines are missing
   entirely; tokens are kept exactly as extracted, and the comments added
   below describe only what the surviving calls establish.  */
8106 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
8107 gimple_seq
*start
, gimple_seq
*end
)
/* Taskgroup uses task_reduction clauses; other constructs use
   reduction clauses (filtered to task reductions by the iterator).  */
8109 enum omp_clause_code ccode
8110 = (code
== OMP_TASKGROUP
8111 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
8112 tree cancellable
= NULL_TREE
;
8113 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
8114 if (clauses
== NULL_TREE
)
/* For worksharing constructs nested in a cancellable parallel, remember
   that fact so merges can be skipped after cancellation.  */
8116 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8118 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8119 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8120 && outer
->cancellable
)
8122 cancellable
= error_mark_node
;
8125 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
/* Build the per-thread record type; it starts with two bookkeeping
   fields (FIELD/IFIELD) whose types were lost in extraction.  */
8128 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
8129 tree
*last
= &TYPE_FIELDS (record_type
);
8133 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8135 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8138 DECL_CHAIN (field
) = ifield
;
8139 last
= &DECL_CHAIN (ifield
);
8140 DECL_CONTEXT (field
) = record_type
;
8141 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8142 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8143 DECL_CONTEXT (ifield
) = record_type
;
8144 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
8145 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
/* Two passes over the clauses: pass 0 adds fields for constant-sized
   reductions, pass 1 for variable-sized ones (see
   omp_task_reduction_iterate).  Each reduction gets a data field plus
   a bool field (BFIELD) recording whether it was initialized.  */
8147 for (int pass
= 0; pass
< 2; pass
++)
8149 tree decl
, type
, next
;
8150 for (tree c
= clauses
;
8151 omp_task_reduction_iterate (pass
, code
, ccode
,
8152 &c
, &decl
, &type
, &next
); c
= next
)
8155 tree new_type
= type
;
8157 new_type
= remap_type (type
, &ctx
->outer
->cb
);
8159 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
8160 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
8162 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
8164 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
8165 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
8166 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
8169 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
8170 DECL_CONTEXT (field
) = record_type
;
8171 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8172 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8174 last
= &DECL_CHAIN (field
);
8176 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
8178 DECL_CONTEXT (bfield
) = record_type
;
8179 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
8180 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
8182 last
= &DECL_CHAIN (bfield
);
8186 layout_type (record_type
);
8188 /* Build up an array which registers with the runtime all the reductions
8189 and deregisters them at the end. Format documented in libgomp/task.c. */
8190 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
8191 tree avar
= create_tmp_var_raw (atype
);
8192 gimple_add_tmp_var (avar
);
8193 TREE_ADDRESSABLE (avar
) = 1;
/* avar[0] = number of reductions.  */
8194 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
8195 NULL_TREE
, NULL_TREE
);
8196 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
8197 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8198 gimple_seq seq
= NULL
;
/* avar[1] = record size rounded up to a CACHESZ multiple.  */
8199 tree sz
= fold_convert (pointer_sized_int_node
,
8200 TYPE_SIZE_UNIT (record_type
));
8202 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
8203 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
8204 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
8205 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
8206 ctx
->task_reductions
.create (1 + cnt
);
8207 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
8208 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
8210 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
8211 gimple_seq_add_seq (start
, seq
);
8212 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
8213 NULL_TREE
, NULL_TREE
);
8214 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
/* avar[2] = alignment, avar[3] = -1, avar[4] = 0 (runtime-owned slots).  */
8215 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8216 NULL_TREE
, NULL_TREE
);
8217 t
= build_int_cst (pointer_sized_int_node
,
8218 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
8219 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8220 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
8221 NULL_TREE
, NULL_TREE
);
8222 t
= build_int_cst (pointer_sized_int_node
, -1);
8223 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8224 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
8225 NULL_TREE
, NULL_TREE
);
8226 t
= build_int_cst (pointer_sized_int_node
, 0);
8227 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8229 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8230 and for each task reduction checks a bool right after the private variable
8231 within that thread's chunk; if the bool is clear, it hasn't been
8232 initialized and thus isn't going to be reduced nor destructed, otherwise
8233 reduce and destruct it. */
8234 tree idx
= create_tmp_var (size_type_node
);
8235 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
8236 tree num_thr_sz
= create_tmp_var (size_type_node
);
8237 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
8238 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
8239 tree lab3
= NULL_TREE
;
8241 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8243 /* For worksharing constructs, only perform it in the master thread,
8244 with the exception of cancelled implicit barriers - then only handle
8245 the current thread. */
8246 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8247 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8248 tree thr_num
= create_tmp_var (integer_type_node
);
8249 g
= gimple_build_call (t
, 0);
8250 gimple_call_set_lhs (g
, thr_num
);
8251 gimple_seq_add_stmt (end
, g
);
8255 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8256 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8257 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8258 if (code
== OMP_FOR
)
8259 c
= gimple_omp_for_clauses (ctx
->stmt
);
8260 else /* if (code == OMP_SECTIONS) */
8261 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8262 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
8264 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
8266 gimple_seq_add_stmt (end
, g
);
8267 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
/* Cancelled case: handle only the current thread's chunk.  */
8268 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
8269 gimple_seq_add_stmt (end
, g
);
8270 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
8271 build_one_cst (TREE_TYPE (idx
)));
8272 gimple_seq_add_stmt (end
, g
);
8273 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
8274 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
/* Non-master threads skip the whole merge loop (goto lab2).  */
8276 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
8277 gimple_seq_add_stmt (end
, g
);
8278 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8280 if (code
!= OMP_PARALLEL
)
8282 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
8283 tree num_thr
= create_tmp_var (integer_type_node
);
8284 g
= gimple_build_call (t
, 0);
8285 gimple_call_set_lhs (g
, num_thr
);
8286 gimple_seq_add_stmt (end
, g
);
8287 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
8288 gimple_seq_add_stmt (end
, g
);
8290 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
/* For parallel, the thread count comes from the _reductemp_ clause.  */
8294 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
8295 OMP_CLAUSE__REDUCTEMP_
);
8296 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
8297 t
= fold_convert (size_type_node
, t
);
8298 gimplify_assign (num_thr_sz
, t
, end
);
/* DATA walks the per-thread chunks, starting at avar[2] (filled in by
   the runtime with the base address after registration).  */
8300 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8301 NULL_TREE
, NULL_TREE
);
8302 tree data
= create_tmp_var (pointer_sized_int_node
);
8303 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
8304 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
8306 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
8307 ptr
= create_tmp_var (build_pointer_type (record_type
));
8309 ptr
= create_tmp_var (ptr_type_node
);
8310 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
/* Skip the two bookkeeping fields before the per-reduction fields.  */
8312 tree field
= TYPE_FIELDS (record_type
);
8315 field
= DECL_CHAIN (DECL_CHAIN (field
));
8316 for (int pass
= 0; pass
< 2; pass
++)
8318 tree decl
, type
, next
;
8319 for (tree c
= clauses
;
8320 omp_task_reduction_iterate (pass
, code
, ccode
,
8321 &c
, &decl
, &type
, &next
); c
= next
)
/* Strip the MEM_REF/ADDR_EXPR/INDIRECT_REF wrapping to find the
   underlying variable for this reduction.  */
8323 tree var
= decl
, ref
;
8324 if (TREE_CODE (decl
) == MEM_REF
)
8326 var
= TREE_OPERAND (var
, 0);
8327 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
8328 var
= TREE_OPERAND (var
, 0);
8330 if (TREE_CODE (var
) == ADDR_EXPR
)
8331 var
= TREE_OPERAND (var
, 0);
8332 else if (TREE_CODE (var
) == INDIRECT_REF
)
8333 var
= TREE_OPERAND (var
, 0);
8334 tree orig_var
= var
;
8335 if (is_variable_sized (var
))
8337 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
8338 var
= DECL_VALUE_EXPR (var
);
8339 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
8340 var
= TREE_OPERAND (var
, 0);
8341 gcc_assert (DECL_P (var
));
8343 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8344 if (orig_var
!= var
)
8345 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
8346 else if (TREE_CODE (v
) == ADDR_EXPR
)
8347 t
= build_fold_addr_expr (t
);
8348 else if (TREE_CODE (v
) == INDIRECT_REF
)
8349 t
= build_fold_indirect_ref (t
);
8350 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
8352 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
8353 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
8354 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
8356 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
8357 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
8358 fold_convert (size_type_node
,
8359 TREE_OPERAND (decl
, 1)));
8363 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8364 if (!omp_is_reference (decl
))
8365 t
= build_fold_addr_expr (t
);
/* avar[7 + cnt*3] = address of the original variable,
   avar[7 + cnt*3 + 1] = offset of its field within the record.  */
8367 t
= fold_convert (pointer_sized_int_node
, t
);
8369 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8370 gimple_seq_add_seq (start
, seq
);
8371 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8372 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8373 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8374 t
= unshare_expr (byte_position (field
));
8375 t
= fold_convert (pointer_sized_int_node
, t
);
8376 ctx
->task_reduction_map
->put (c
, cnt
);
8377 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
8380 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8381 gimple_seq_add_seq (start
, seq
);
8382 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8383 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
8384 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
/* COND = the per-reduction "was initialized" bool in this chunk.  */
8386 tree bfield
= DECL_CHAIN (field
);
8388 if (code
== OMP_PARALLEL
|| code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8389 /* In parallel or worksharing all threads unconditionally
8390 initialize all their task reduction private variables. */
8391 cond
= boolean_true_node
;
8392 else if (TREE_TYPE (ptr
) == ptr_type_node
)
8394 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8395 unshare_expr (byte_position (bfield
)));
8397 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
8398 gimple_seq_add_seq (end
, seq
);
8399 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
8400 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
8401 build_int_cst (pbool
, 0));
8404 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
8405 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
8406 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8407 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8408 tree condv
= create_tmp_var (boolean_type_node
);
8409 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
8410 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
8412 gimple_seq_add_stmt (end
, g
);
8413 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8414 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
8416 /* If this reduction doesn't need destruction and parallel
8417 has been cancelled, there is nothing to do for this
8418 reduction, so jump around the merge operation. */
8419 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8420 g
= gimple_build_cond (NE_EXPR
, cancellable
,
8421 build_zero_cst (TREE_TYPE (cancellable
)),
8423 gimple_seq_add_stmt (end
, g
);
8424 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
/* NEW_VAR = this thread's private copy inside the chunk.  */
8428 if (TREE_TYPE (ptr
) == ptr_type_node
)
8430 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8431 unshare_expr (byte_position (field
)));
8433 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
8434 gimple_seq_add_seq (end
, seq
);
8435 tree pbool
= build_pointer_type (TREE_TYPE (field
));
8436 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
8437 build_int_cst (pbool
, 0));
8440 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
8441 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
8443 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
8444 if (TREE_CODE (decl
) != MEM_REF
&& omp_is_reference (decl
))
8445 ref
= build_simple_mem_ref (ref
);
8446 /* reduction(-:var) sums up the partial results, so it acts
8447 identically to reduction(+:var). */
8448 if (rcode
== MINUS_EXPR
)
/* Array-section reduction: loop element-by-element over the section,
   merging each private element into the original.  */
8450 if (TREE_CODE (decl
) == MEM_REF
)
8452 tree type
= TREE_TYPE (new_var
);
8453 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8454 tree i
= create_tmp_var (TREE_TYPE (v
));
8455 tree ptype
= build_pointer_type (TREE_TYPE (type
));
8458 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
8459 tree vv
= create_tmp_var (TREE_TYPE (v
));
8460 gimplify_assign (vv
, v
, start
);
8463 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8464 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8465 new_var
= build_fold_addr_expr (new_var
);
8466 new_var
= fold_convert (ptype
, new_var
);
8467 ref
= fold_convert (ptype
, ref
);
8468 tree m
= create_tmp_var (ptype
);
8469 gimplify_assign (m
, new_var
, end
);
8471 m
= create_tmp_var (ptype
);
8472 gimplify_assign (m
, ref
, end
);
8474 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
8475 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
8476 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
8477 gimple_seq_add_stmt (end
, gimple_build_label (body
));
8478 tree priv
= build_simple_mem_ref (new_var
);
8479 tree out
= build_simple_mem_ref (ref
);
8480 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
/* User-defined reduction: splice in the combiner via the
   placeholder value-exprs, then run the destructor.  */
8482 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8483 tree decl_placeholder
8484 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
8485 tree lab6
= NULL_TREE
;
8488 /* If this reduction needs destruction and parallel
8489 has been cancelled, jump around the merge operation
8490 to the destruction. */
8491 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8492 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8493 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8494 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8496 gimple_seq_add_stmt (end
, g
);
8497 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8499 SET_DECL_VALUE_EXPR (placeholder
, out
);
8500 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8501 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
8502 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
8503 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8504 gimple_seq_add_seq (end
,
8505 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8506 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8507 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8509 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8510 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
8513 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8514 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
8517 gimple_seq tseq
= NULL
;
8518 gimplify_stmt (&x
, &tseq
);
8519 gimple_seq_add_seq (end
, tseq
);
/* Built-in combiner: out = out <rcode> priv.  */
8524 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
8525 out
= unshare_expr (out
);
8526 gimplify_assign (out
, x
, end
);
/* Advance both element pointers and the index; loop while i <= v.  */
8529 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
8530 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8531 gimple_seq_add_stmt (end
, g
);
8532 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
8533 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8534 gimple_seq_add_stmt (end
, g
);
8535 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
8536 build_int_cst (TREE_TYPE (i
), 1));
8537 gimple_seq_add_stmt (end
, g
);
8538 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
8539 gimple_seq_add_stmt (end
, g
);
8540 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
/* Scalar user-defined reduction (non-MEM_REF decl with placeholder).  */
8542 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8544 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8545 tree oldv
= NULL_TREE
;
8546 tree lab6
= NULL_TREE
;
8549 /* If this reduction needs destruction and parallel
8550 has been cancelled, jump around the merge operation
8551 to the destruction. */
8552 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8553 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8554 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8555 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8557 gimple_seq_add_stmt (end
, g
);
8558 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8560 if (omp_is_reference (decl
)
8561 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
8563 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8564 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8565 tree refv
= create_tmp_var (TREE_TYPE (ref
));
8566 gimplify_assign (refv
, ref
, end
);
8567 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
8568 SET_DECL_VALUE_EXPR (placeholder
, ref
);
8569 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
/* Temporarily redirect the decl's value-expr at the private copy
   while lowering the merge sequence, restoring it afterwards.  */
8570 tree d
= maybe_lookup_decl (decl
, ctx
);
8572 if (DECL_HAS_VALUE_EXPR_P (d
))
8573 oldv
= DECL_VALUE_EXPR (d
);
8574 if (omp_is_reference (var
))
8576 tree v
= fold_convert (TREE_TYPE (d
),
8577 build_fold_addr_expr (new_var
));
8578 SET_DECL_VALUE_EXPR (d
, v
);
8581 SET_DECL_VALUE_EXPR (d
, new_var
);
8582 DECL_HAS_VALUE_EXPR_P (d
) = 1;
8583 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8585 SET_DECL_VALUE_EXPR (d
, oldv
);
8588 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
8589 DECL_HAS_VALUE_EXPR_P (d
) = 0;
8591 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8592 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8593 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8594 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8596 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8597 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
8600 gimple_seq tseq
= NULL
;
8601 gimplify_stmt (&x
, &tseq
);
8602 gimple_seq_add_seq (end
, tseq
);
/* Built-in scalar combiner: ref = ref <rcode> new_var.  */
8607 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
8608 ref
= unshare_expr (ref
);
8609 gimplify_assign (ref
, x
, end
);
8611 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8613 field
= DECL_CHAIN (bfield
);
/* Register the descriptor array with the runtime: directly for
   taskgroup, via the _reductemp_ clause variable otherwise.  */
8617 if (code
== OMP_TASKGROUP
)
8619 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
8620 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8621 gimple_seq_add_stmt (start
, g
);
8626 if (code
== OMP_FOR
)
8627 c
= gimple_omp_for_clauses (ctx
->stmt
);
8628 else if (code
== OMP_SECTIONS
)
8629 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8631 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
8632 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
8633 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
8634 build_fold_addr_expr (avar
));
8635 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
/* Advance to the next thread's chunk and loop until all are merged.  */
8638 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
8639 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
8641 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
8642 gimple_seq_add_stmt (end
, g
);
8643 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
/* Unregister with the matching runtime entry point, then clobber the
   descriptor array (volatile empty-constructor store).  */
8644 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8646 enum built_in_function bfn
8647 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
8648 t
= builtin_decl_explicit (bfn
);
8649 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
8653 arg
= create_tmp_var (c_bool_type
);
8654 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
8658 arg
= build_int_cst (c_bool_type
, 0);
8659 g
= gimple_build_call (t
, 1, arg
);
8663 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
8664 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8666 gimple_seq_add_stmt (end
, g
);
8667 t
= build_constructor (atype
, NULL
);
8668 TREE_THIS_VOLATILE (t
) = 1;
8669 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
8672 /* Expand code for an OpenMP taskgroup directive. */
/* Expand code for an OpenMP taskgroup directive.  Replaces the statement
   at *GSI_P with a GIMPLE_BIND that calls GOMP_taskgroup_start, registers
   any task reductions, runs the lowered body, and emits the OMP return
   (nowait == true) followed by the reduction teardown sequence DSEQ.
   NOTE(review): extracted text is garbled (split statements, missing
   braces and the declarations of BIND and X, plus the second argument of
   the GOMP_taskgroup_start call); tokens kept exactly as extracted.  */
8675 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8677 gimple
*stmt
= gsi_stmt (*gsi_p
);
8680 gimple_seq dseq
= NULL
;
8681 tree block
= make_node (BLOCK
);
/* Build the replacement GIMPLE_BIND and swap it in at *GSI_P.  */
8683 bind
= gimple_build_bind (NULL
, NULL
, block
);
8684 gsi_replace (gsi_p
, bind
, true);
8685 gimple_bind_add_stmt (bind
, stmt
);
8687 push_gimplify_context ();
8689 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
8691 gimple_bind_add_stmt (bind
, x
);
/* Register task reductions into the bind body; dseq receives the
   merge/unregister code to run after the body.  */
8693 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
8694 gimple_omp_taskgroup_clauses (stmt
),
8695 gimple_bind_body_ptr (bind
), &dseq
);
8697 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8698 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8699 gimple_omp_set_body (stmt
, NULL
);
8701 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8702 gimple_bind_add_seq (bind
, dseq
);
8704 pop_gimplify_context (bind
);
8706 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8707 BLOCK_VARS (block
) = ctx
->block_vars
;
8711 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8714 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
8717 struct omp_for_data fd
;
8718 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
8721 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
8722 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
8723 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
8727 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8728 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
8729 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
8730 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8732 /* Merge depend clauses from multiple adjacent
8733 #pragma omp ordered depend(sink:...) constructs
8734 into one #pragma omp ordered depend(sink:...), so that
8735 we can optimize them together. */
8736 gimple_stmt_iterator gsi
= *gsi_p
;
8738 while (!gsi_end_p (gsi
))
8740 gimple
*stmt
= gsi_stmt (gsi
);
8741 if (is_gimple_debug (stmt
)
8742 || gimple_code (stmt
) == GIMPLE_NOP
)
8747 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
8749 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
8750 c
= gimple_omp_ordered_clauses (ord_stmt2
);
8752 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
8753 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8756 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
8758 gsi_remove (&gsi
, true);
8762 /* Canonicalize sink dependence clauses into one folded clause if
8765 The basic algorithm is to create a sink vector whose first
8766 element is the GCD of all the first elements, and whose remaining
8767 elements are the minimum of the subsequent columns.
8769 We ignore dependence vectors whose first element is zero because
8770 such dependencies are known to be executed by the same thread.
8772 We take into account the direction of the loop, so a minimum
8773 becomes a maximum if the loop is iterating forwards. We also
8774 ignore sink clauses where the loop direction is unknown, or where
8775 the offsets are clearly invalid because they are not a multiple
8776 of the loop increment.
8780 #pragma omp for ordered(2)
8781 for (i=0; i < N; ++i)
8782 for (j=0; j < M; ++j)
8784 #pragma omp ordered \
8785 depend(sink:i-8,j-2) \
8786 depend(sink:i,j-1) \ // Completely ignored because i+0.
8787 depend(sink:i-4,j-3) \
8788 depend(sink:i-6,j-4)
8789 #pragma omp ordered depend(source)
8794 depend(sink:-gcd(8,4,6),-min(2,3,4))
8799 /* FIXME: Computing GCD's where the first element is zero is
8800 non-trivial in the presence of collapsed loops. Do this later. */
8801 if (fd
.collapse
> 1)
8804 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
8806 /* wide_int is not a POD so it must be default-constructed. */
8807 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
8808 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
8810 tree folded_dep
= NULL_TREE
;
8811 /* TRUE if the first dimension's offset is negative. */
8812 bool neg_offset_p
= false;
8814 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
8816 while ((c
= *list_p
) != NULL
)
8818 bool remove
= false;
8820 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
8821 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
8822 goto next_ordered_clause
;
8825 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
8826 vec
&& TREE_CODE (vec
) == TREE_LIST
;
8827 vec
= TREE_CHAIN (vec
), ++i
)
8829 gcc_assert (i
< len
);
8831 /* omp_extract_for_data has canonicalized the condition. */
8832 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
8833 || fd
.loops
[i
].cond_code
== GT_EXPR
);
8834 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
8835 bool maybe_lexically_later
= true;
8837 /* While the committee makes up its mind, bail if we have any
8838 non-constant steps. */
8839 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
8840 goto lower_omp_ordered_ret
;
8842 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
8843 if (POINTER_TYPE_P (itype
))
8845 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
8846 TYPE_PRECISION (itype
),
8849 /* Ignore invalid offsets that are not multiples of the step. */
8850 if (!wi::multiple_of_p (wi::abs (offset
),
8851 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
8854 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
8855 "ignoring sink clause with offset that is not "
8856 "a multiple of the loop step");
8858 goto next_ordered_clause
;
8861 /* Calculate the first dimension. The first dimension of
8862 the folded dependency vector is the GCD of the first
8863 elements, while ignoring any first elements whose offset
8867 /* Ignore dependence vectors whose first dimension is 0. */
8871 goto next_ordered_clause
;
8875 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
8877 error_at (OMP_CLAUSE_LOCATION (c
),
8878 "first offset must be in opposite direction "
8879 "of loop iterations");
8880 goto lower_omp_ordered_ret
;
8884 neg_offset_p
= forward
;
8885 /* Initialize the first time around. */
8886 if (folded_dep
== NULL_TREE
)
8889 folded_deps
[0] = offset
;
8892 folded_deps
[0] = wi::gcd (folded_deps
[0],
8896 /* Calculate minimum for the remaining dimensions. */
8899 folded_deps
[len
+ i
- 1] = offset
;
8900 if (folded_dep
== c
)
8901 folded_deps
[i
] = offset
;
8902 else if (maybe_lexically_later
8903 && !wi::eq_p (folded_deps
[i
], offset
))
8905 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
8909 for (j
= 1; j
<= i
; j
++)
8910 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
8913 maybe_lexically_later
= false;
8917 gcc_assert (i
== len
);
8921 next_ordered_clause
:
8923 *list_p
= OMP_CLAUSE_CHAIN (c
);
8925 list_p
= &OMP_CLAUSE_CHAIN (c
);
8931 folded_deps
[0] = -folded_deps
[0];
8933 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
8934 if (POINTER_TYPE_P (itype
))
8937 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
8938 = wide_int_to_tree (itype
, folded_deps
[0]);
8939 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
8940 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
8943 lower_omp_ordered_ret
:
8945 /* Ordered without clauses is #pragma omp threads, while we want
8946 a nop instead if we remove all clauses. */
8947 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
8948 gsi_replace (gsi_p
, gimple_build_nop (), true);
8952 /* Expand code for an OpenMP ordered directive. */
8955 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8958 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
8959 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
8962 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8964 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8967 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
8968 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8969 OMP_CLAUSE_THREADS
);
8971 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
8974 /* FIXME: This is needs to be moved to the expansion to verify various
8975 conditions only testable on cfg with dominators computed, and also
8976 all the depend clauses to be merged still might need to be available
8977 for the runtime checks. */
8979 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
8983 push_gimplify_context ();
8985 block
= make_node (BLOCK
);
8986 bind
= gimple_build_bind (NULL
, NULL
, block
);
8987 gsi_replace (gsi_p
, bind
, true);
8988 gimple_bind_add_stmt (bind
, stmt
);
8992 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
8993 build_int_cst (NULL_TREE
, threads
));
8994 cfun
->has_simduid_loops
= true;
8997 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
8999 gimple_bind_add_stmt (bind
, x
);
9001 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
9004 counter
= create_tmp_var (integer_type_node
);
9005 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
9006 gimple_call_set_lhs (g
, counter
);
9007 gimple_bind_add_stmt (bind
, g
);
9009 body
= create_artificial_label (UNKNOWN_LOCATION
);
9010 test
= create_artificial_label (UNKNOWN_LOCATION
);
9011 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
9013 tree simt_pred
= create_tmp_var (integer_type_node
);
9014 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
9015 gimple_call_set_lhs (g
, simt_pred
);
9016 gimple_bind_add_stmt (bind
, g
);
9018 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
9019 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
9020 gimple_bind_add_stmt (bind
, g
);
9022 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
9024 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9025 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9026 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9027 gimple_omp_set_body (stmt
, NULL
);
9031 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
9032 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
9033 gimple_bind_add_stmt (bind
, g
);
9035 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
9036 tree nonneg
= create_tmp_var (integer_type_node
);
9037 gimple_seq tseq
= NULL
;
9038 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
9039 gimple_bind_add_seq (bind
, tseq
);
9041 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
9042 gimple_call_set_lhs (g
, nonneg
);
9043 gimple_bind_add_stmt (bind
, g
);
9045 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
9046 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
9047 gimple_bind_add_stmt (bind
, g
);
9049 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
9052 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
9053 build_int_cst (NULL_TREE
, threads
));
9055 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
9057 gimple_bind_add_stmt (bind
, x
);
9059 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9061 pop_gimplify_context (bind
);
9063 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9064 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
9068 /* Expand code for an OpenMP scan directive and the structured block
9069 before the scan directive. */
/* NOTE(review): damaged extraction -- the declarator line, braces and a
   number of interior lines are missing, and original line numbers are
   fused into the text.  Tokens preserved verbatim; comments only added.  */
9072 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9074 gimple *stmt = gsi_stmt (*gsi_p);
/* has_clauses: whether this GIMPLE_OMP_SCAN carries inclusive/exclusive
   clauses (its declarator line is missing from this extraction).  */
9076 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9077 tree lane = NULL_TREE;
/* BEFORE collects statements to emit ahead of the scan body.  */
9078 gimple_seq before = NULL;
9079 omp_context *octx = ctx->outer;
9081 if (octx->scan_exclusive && !has_clauses)
9083 gimple_stmt_iterator gsi2 = *gsi_p;
9085 gimple *stmt2 = gsi_stmt (gsi2);
9086 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9087 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9088 the one with exclusive clause(s), comes first. */
9090 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9091 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9093 gsi_remove (gsi_p, false);
9094 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9095 ctx = maybe_lookup_ctx (stmt2);
/* Recurse on the clause-carrying scan now in first position.  */
9097 lower_omp_scan (gsi_p, ctx);
/* Classify the enclosing worksharing construct.  */
9102 bool input_phase = has_clauses ^ octx->scan_inclusive;
9103 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9104 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9105 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9106 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9107 && !gimple_omp_for_combined_p (octx->stmt));
9108 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9109 if (is_for_simd && octx->for_simd_scan_phase)
/* simd: obtain a lane number via IFN_GOMP_SIMD_LANE keyed on the loop's
   _simduid_; the integer constant selects the scan phase kind
   (the first operand line of the conditional is missing here).  */
9112 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9113 OMP_CLAUSE__SIMDUID_))
9115 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9116 lane = create_tmp_var (unsigned_type_node);
9117 tree t = build_int_cst (integer_type_node,
9119 : octx->scan_inclusive ? 2 : 3);
9121 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9122 gimple_call_set_lhs (g, lane);
9123 gimple_seq_add_stmt (&before, g);
/* Walk every inscan reduction clause on the enclosing loop.  */
9126 if (is_simd || is_for)
9128 for (tree c = gimple_omp_for_clauses (octx->stmt);
9129 c; c = OMP_CLAUSE_CHAIN (c))
9130 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9131 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9133 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9134 tree var = OMP_CLAUSE_DECL (c);
9135 tree new_var = lookup_decl (var, octx);
9137 tree var2 = NULL_TREE;
9138 tree var3 = NULL_TREE;
9139 tree var4 = NULL_TREE;
9140 tree lane0 = NULL_TREE;
9141 tree new_vard = new_var;
9142 if (omp_is_reference (var))
9144 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
/* If the privatized var has a value-expr, peel it back to the
   underlying "omp simd array" element so it can be re-indexed by LANE
   (saving the original index in LANE0).  */
9147 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9149 val = DECL_VALUE_EXPR (new_vard);
9150 if (new_vard != new_var)
9152 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9153 val = TREE_OPERAND (val, 0);
9155 if (TREE_CODE (val) == ARRAY_REF
9156 && VAR_P (TREE_OPERAND (val, 0)))
9158 tree v = TREE_OPERAND (val, 0);
9159 if (lookup_attribute ("omp simd array",
9160 DECL_ATTRIBUTES (v)))
9162 val = unshare_expr (val);
9163 lane0 = TREE_OPERAND (val, 1);
9164 TREE_OPERAND (val, 1) = lane;
9165 var2 = lookup_decl (v, octx);
9166 if (octx->scan_exclusive)
9167 var4 = lookup_decl (var2, octx);
9169 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9170 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
/* Re-index the simd arrays by LANE.  */
9173 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9174 var2, lane, NULL_TREE, NULL_TREE);
9175 TREE_THIS_NOTRAP (var2) = 1;
9176 if (octx->scan_exclusive)
9178 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9179 var4, lane, NULL_TREE,
9181 TREE_THIS_NOTRAP (var4) = 1;
/* Non-simd-array fallback: use the outer var reference directly.  */
9192 var2 = build_outer_var_ref (var, octx);
9193 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9195 var3 = maybe_lookup_decl (new_vard, octx);
9196 if (var3 == new_vard || var3 == NULL_TREE)
9198 else if (is_simd && octx->scan_exclusive && !input_phase)
9200 var4 = maybe_lookup_decl (var3, octx);
9201 if (var4 == var3 || var4 == NULL_TREE)
9203 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9214 && octx->scan_exclusive
9216 && var4 == NULL_TREE)
9217 var4 = create_tmp_var (TREE_TYPE (val));
/* User-defined reductions (placeholder present).  */
9219 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9221 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9226 /* If we've added a separate identity element
9227 variable, copy it over into val. */
9228 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9230 gimplify_and_add (x, &before);
9232 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9234 /* Otherwise, assign to it the identity element. */
/* Run the UDR initializer with PLACEHOLDER bound to the outer ref
   and the privatized decl's value-expr temporarily redirected.  */
9235 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9237 tseq = copy_gimple_seq_and_replace_locals (tseq);
9238 tree ref = build_outer_var_ref (var, octx);
9239 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9240 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9243 if (new_vard != new_var)
9244 val = build_fold_addr_expr_loc (clause_loc, val);
9245 SET_DECL_VALUE_EXPR (new_vard, val);
9247 SET_DECL_VALUE_EXPR (placeholder, ref);
9248 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9249 lower_omp (&tseq, octx);
9251 SET_DECL_VALUE_EXPR (new_vard, x);
9252 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9253 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9254 gimple_seq_add_seq (&before, tseq);
9256 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
/* Scan phase for UDRs: for exclusive scan first save the previous
   value (var2 into var4), then run the user's merge sequence with
   PLACEHOLDER bound to var2.  */
9262 if (octx->scan_exclusive)
9264 tree v4 = unshare_expr (var4);
9265 tree v2 = unshare_expr (var2);
9266 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9267 gimplify_and_add (x, &before);
9269 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9270 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9271 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9273 if (x && new_vard != new_var)
9274 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9276 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9277 SET_DECL_VALUE_EXPR (placeholder, var2);
9278 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9279 lower_omp (&tseq, octx);
9280 gimple_seq_add_seq (&before, tseq);
9281 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9283 SET_DECL_VALUE_EXPR (new_vard, x);
9284 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9285 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9286 if (octx->scan_inclusive)
9288 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9290 gimplify_and_add (x, &before);
9292 else if (lane0 == NULL_TREE)
9294 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9296 gimplify_and_add (x, &before);
/* Plain (non-UDR) reductions.  */
9304 /* input phase. Set val to initializer before
9306 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9307 gimplify_assign (val, x, &before);
9312 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
/* MINUS_EXPR reductions are combined with PLUS semantics; the
   adjusting line is missing from this extraction -- TODO confirm.  */
9313 if (code == MINUS_EXPR)
9316 tree x = build2 (code, TREE_TYPE (var2),
9317 unshare_expr (var2), unshare_expr (val));
9318 if (octx->scan_inclusive)
9320 gimplify_assign (unshare_expr (var2), x, &before);
9321 gimplify_assign (val, var2, &before);
/* Exclusive scan: publish the pre-update value.  */
9325 gimplify_assign (unshare_expr (var4),
9326 unshare_expr (var2), &before);
9327 gimplify_assign (var2, x, &before);
9328 if (lane0 == NULL_TREE)
9329 gimplify_assign (val, var4, &before);
/* For exclusive scan on simd arrays, redirect the value-expr back to
   the original LANE0 element.  */
9333 if (octx->scan_exclusive && !input_phase && lane0)
9335 tree vexpr = unshare_expr (var4);
9336 TREE_OPERAND (vexpr, 1) = lane0;
9337 if (new_vard != new_var)
9338 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9339 SET_DECL_VALUE_EXPR (new_vard, vexpr);
/* Emit: for standalone simd replace the scan stmt with BEFORE followed
   by its body; otherwise lower the body in place and prepend BEFORE.  */
9343 if (is_simd && !is_for_simd)
9345 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9346 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9347 gsi_replace (gsi_p, gimple_build_nop (), true);
9350 lower_omp (gimple_omp_body_ptr (stmt), octx);
9353 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9354 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9359 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9360 substitution of a couple of function calls. But in the NAMED case,
9361 requires that languages coordinate a symbol name. It is therefore
9362 best put here in common code. */
/* Map from critical-section name to its lazily created lock symbol,
   GC-rooted so it survives across functions.  */
9364 static GTY(()) hash_map <tree, tree> *critical_name_mutexes;
/* NOTE(review): damaged extraction -- the declarator line, braces and
   some interior lines are missing and original file line numbers are
   fused into the text.  Tokens preserved verbatim; comments only added.  */
9367 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9370 tree name, lock, unlock;
9371 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9373 location_t loc = gimple_location (stmt);
9376 name = gimple_omp_critical_name (stmt);
/* Named critical: look up or create the per-name mutex symbol
   ".gomp_critical_user_<name>" and use the *_NAME_* builtins on it.  */
9381 if (!critical_name_mutexes)
9382 critical_name_mutexes = hash_map <tree, tree>::create_ggc (10);
9384 tree *n = critical_name_mutexes->get (name);
9389 decl = create_tmp_var_raw (ptr_type_node);
9391 new_str = ACONCAT ((".gomp_critical_user_",
9392 IDENTIFIER_POINTER (name), NULL));
9393 DECL_NAME (decl) = get_identifier (new_str);
/* Public + common so every TU using the same critical name shares a
   single lock object at link time.  */
9394 TREE_PUBLIC (decl) = 1;
9395 TREE_STATIC (decl) = 1;
9396 DECL_COMMON (decl) = 1;
9397 DECL_ARTIFICIAL (decl) = 1;
9398 DECL_IGNORED_P (decl) = 1;
9400 varpool_node::finalize_decl (decl);
9402 critical_name_mutexes->put (name, decl);
9407 /* If '#pragma omp critical' is inside offloaded region or
9408 inside function marked as offloadable, the symbol must be
9409 marked as offloadable too. */
9411 if (cgraph_node::get (current_function_decl)->offloadable)
9412 varpool_node::get_create (decl)->offloadable = 1;
9414 for (octx = ctx->outer; octx; octx = octx->outer)
9415 if (is_gimple_omp_offloaded (octx->stmt))
9417 varpool_node::get_create (decl)->offloadable = 1;
/* Build the named lock/unlock call expressions.  */
9421 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9422 lock = build_call_expr_loc (loc, lock, 1,
9423 build_fold_addr_expr_loc (loc, decl));
9425 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9426 unlock = build_call_expr_loc (loc, unlock, 1,
9427 build_fold_addr_expr_loc (loc, decl));
/* Unnamed critical: global GOMP_critical_start/end pair.  */
9431 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9432 lock = build_call_expr_loc (loc, lock, 0);
9434 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9435 unlock = build_call_expr_loc (loc, unlock, 0);
/* Wrap the lowered body as lock; body; unlock; inside a new BIND.  */
9438 push_gimplify_context ();
9440 block = make_node (BLOCK);
9441 bind = gimple_build_bind (NULL, NULL, block);
9442 gsi_replace (gsi_p, bind, true);
9443 gimple_bind_add_stmt (bind, stmt);
9445 tbody = gimple_bind_body (bind);
9446 gimplify_and_add (lock, &tbody);
9447 gimple_bind_set_body (bind, tbody);
9449 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9450 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9451 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9452 gimple_omp_set_body (stmt, NULL);
9454 tbody = gimple_bind_body (bind);
9455 gimplify_and_add (unlock, &tbody);
9456 gimple_bind_set_body (bind, tbody);
9458 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9460 pop_gimplify_context (bind);
9461 gimple_bind_append_vars (bind, ctx->block_vars);
9462 BLOCK_VARS (block) = gimple_bind_vars (bind);
9465 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9466 for a lastprivate clause. Given a loop control predicate of (V
9467 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9468 is appended to *DLIST, iterator initialization is appended to
9469 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9470 to be emitted in a critical section. */
/* NOTE(review): damaged extraction -- the declarator line, braces and
   some interior lines are missing and original file line numbers are
   fused into the text.  Tokens preserved verbatim; comments only added.  */
9473 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9474 gimple_seq *dlist, gimple_seq *clist,
9475 struct omp_context *ctx)
9477 tree clauses, cond, vinit;
9478 enum tree_code cond_code;
/* Invert the loop condition: lastprivate fires once the loop is done.  */
9481 cond_code = fd->loop.cond_code;
9482 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9484 /* When possible, use a strict equality expression. This can let VRP
9485 type optimizations deduce the value and remove a copy. */
9486 if (tree_fits_shwi_p (fd->loop.step))
9488 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9489 if (step == 1 || step == -1)
9490 cond_code = EQ_EXPR;
/* For collapsed loops combined into an outer construct, the real end
   value lives in the outer taskreg construct's _looptemp_ clauses.  */
9493 tree n2 = fd->loop.n2;
9494 if (fd->collapse > 1
9495 && TREE_CODE (n2) != INTEGER_CST
9496 && gimple_omp_for_combined_into_p (fd->for_stmt))
9498 struct omp_context *taskreg_ctx = NULL;
9499 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9501 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9502 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9503 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
/* Parallel + worksharing/distribute combination: the taskreg ctx is
   two levels up.  */
9505 if (gimple_omp_for_combined_into_p (gfor))
9507 gcc_assert (ctx->outer->outer
9508 && is_parallel_ctx (ctx->outer->outer));
9509 taskreg_ctx = ctx->outer->outer;
/* Otherwise take the end value straight from the outer loop's data.  */
9513 struct omp_for_data outer_fd;
9514 omp_extract_for_data (gfor, &outer_fd, NULL);
9515 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9518 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9519 taskreg_ctx = ctx->outer->outer;
9521 else if (is_taskreg_ctx (ctx->outer))
9522 taskreg_ctx = ctx->outer;
9526 tree taskreg_clauses
9527 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9528 tree innerc = omp_find_clause (taskreg_clauses,
9529 OMP_CLAUSE__LOOPTEMP_);
9530 gcc_assert (innerc);
/* Skip the _looptemp_ clauses of the collapsed dimensions to reach the
   one that carries the end value.  */
9531 for (i = 0; i < fd->collapse; i++)
9533 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9534 OMP_CLAUSE__LOOPTEMP_);
9535 gcc_assert (innerc);
9537 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9538 OMP_CLAUSE__LOOPTEMP_);
9540 n2 = fold_convert (TREE_TYPE (n2),
9541 lookup_decl (OMP_CLAUSE_DECL (innerc),
/* The gating predicate: !(V cond N2) in its inverted form.  */
9545 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9547 clauses = gimple_omp_for_clauses (fd->for_stmt);
9549 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9550 if (!gimple_seq_empty_p (stmts))
9552 gimple_seq_add_seq (&stmts, *dlist);
9555 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9556 vinit = fd->loop.n1;
9557 if (cond_code == EQ_EXPR
9558 && tree_fits_shwi_p (fd->loop.n2)
9559 && ! integer_zerop (fd->loop.n2))
9560 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9562 vinit = unshare_expr (vinit);
9564 /* Initialize the iterator variable, so that threads that don't execute
9565 any iterations don't execute the lastprivate clauses by accident. */
9566 gimplify_assign (fd->loop.v, vinit, body_p);
9570 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
/* On finding a GIMPLE_OMP_SCAN, store its iterator into WI->info (which
   points at a gimple_stmt_iterator) and return non-NULL to stop the
   walk.  Combined simd loops are descended into; every other statement
   is treated as handled without recursion.
   NOTE(review): damaged extraction -- declarator line, braces and the
   break/default lines are missing; original file line numbers are fused
   into the text.  Tokens preserved verbatim; comments only added.  */
9573 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9574 struct walk_stmt_info *wi)
9576 gimple *stmt = gsi_stmt (*gsi_p);
9578 *handled_ops_p = true;
9579 switch (gimple_code (stmt))
9583 case GIMPLE_OMP_FOR:
/* Allow the walker to descend into simd loops combined into the
   enclosing worksharing construct.  */
9584 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9585 && gimple_omp_for_combined_into_p (stmt))
9586 *handled_ops_p = false;
9589 case GIMPLE_OMP_SCAN:
/* Record the scan's position and terminate the walk.  */
9590 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9591 return integer_zero_node;
9598 /* Helper function for lower_omp_for, add transformations for a worksharing
9599 loop with scan directives inside of it.
9600 For worksharing loop not combined with simd, transform:
9601 #pragma omp for reduction(inscan,+:r) private(i)
9602 for (i = 0; i < n; i = i + 1)
9607 #pragma omp scan inclusive(r)
9613 into two worksharing loops + code to merge results:
9615 num_threads = omp_get_num_threads ();
9616 thread_num = omp_get_thread_num ();
9617 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9622 // For UDRs this is UDR init, or if ctors are needed, copy from
9623 // var3 that has been constructed to contain the neutral element.
9627 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9628 // a shared array with num_threads elements and rprivb to a local array
9629 // number of elements equal to the number of (contiguous) iterations the
9630 // current thread will perform. controlb and controlp variables are
9631 // temporaries to handle deallocation of rprivb at the end of second
9633 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9634 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9635 for (i = 0; i < n; i = i + 1)
9638 // For UDRs this is UDR init or copy from var3.
9640 // This is the input phase from user code.
9644 // For UDRs this is UDR merge.
9646 // Rather than handing it over to the user, save to local thread's
9648 rprivb[ivar] = var2;
9649 // For exclusive scan, the above two statements are swapped.
9653 // And remember the final value from this thread's into the shared
9655 rpriva[(sizetype) thread_num] = var2;
9656 // If more than one thread, compute using Work-Efficient prefix sum
9657 // the inclusive parallel scan of the rpriva array.
9658 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9663 num_threadsu = (unsigned int) num_threads;
9664 thread_numup1 = (unsigned int) thread_num + 1;
9667 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9671 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9676 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9677 mul = REALPART_EXPR <cplx>;
9678 ovf = IMAGPART_EXPR <cplx>;
9679 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9682 andvm1 = andv + 4294967295;
9684 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9686 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9687 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9688 rpriva[l] = rpriva[l - k] + rpriva[l];
9690 if (down == 0) goto <D.2121>; else goto <D.2122>;
9698 if (k != 0) goto <D.2108>; else goto <D.2103>;
9700 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9702 // For UDRs this is UDR init or copy from var3.
9706 var2 = rpriva[thread_num - 1];
9709 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9710 reduction(inscan,+:r) private(i)
9711 for (i = 0; i < n; i = i + 1)
9714 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9715 r = var2 + rprivb[ivar];
9718 // This is the scan phase from user code.
9720 // Plus a bump of the iterator.
9726 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
9727 struct omp_for_data
*fd
, omp_context
*ctx
)
9729 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
9730 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
9732 gimple_seq body
= gimple_omp_body (stmt
);
9733 gimple_stmt_iterator input1_gsi
= gsi_none ();
9734 struct walk_stmt_info wi
;
9735 memset (&wi
, 0, sizeof (wi
));
9737 wi
.info
= (void *) &input1_gsi
;
9738 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
9739 gcc_assert (!gsi_end_p (input1_gsi
));
9741 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
9742 gimple_stmt_iterator gsi
= input1_gsi
;
9744 gimple_stmt_iterator scan1_gsi
= gsi
;
9745 gimple
*scan_stmt1
= gsi_stmt (gsi
);
9746 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
9748 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
9749 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
9750 gimple_omp_set_body (input_stmt1
, NULL
);
9751 gimple_omp_set_body (scan_stmt1
, NULL
);
9752 gimple_omp_set_body (stmt
, NULL
);
9754 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
9755 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
9756 gimple_omp_set_body (stmt
, body
);
9757 gimple_omp_set_body (input_stmt1
, input_body
);
9759 gimple_stmt_iterator input2_gsi
= gsi_none ();
9760 memset (&wi
, 0, sizeof (wi
));
9762 wi
.info
= (void *) &input2_gsi
;
9763 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
9764 gcc_assert (!gsi_end_p (input2_gsi
));
9766 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
9769 gimple_stmt_iterator scan2_gsi
= gsi
;
9770 gimple
*scan_stmt2
= gsi_stmt (gsi
);
9771 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
9772 gimple_omp_set_body (scan_stmt2
, scan_body
);
9774 gimple_stmt_iterator input3_gsi
= gsi_none ();
9775 gimple_stmt_iterator scan3_gsi
= gsi_none ();
9776 gimple_stmt_iterator input4_gsi
= gsi_none ();
9777 gimple_stmt_iterator scan4_gsi
= gsi_none ();
9778 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
9779 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
9780 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
9783 memset (&wi
, 0, sizeof (wi
));
9785 wi
.info
= (void *) &input3_gsi
;
9786 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
9787 gcc_assert (!gsi_end_p (input3_gsi
));
9789 input_stmt3
= gsi_stmt (input3_gsi
);
9793 scan_stmt3
= gsi_stmt (gsi
);
9794 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
9796 memset (&wi
, 0, sizeof (wi
));
9798 wi
.info
= (void *) &input4_gsi
;
9799 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
9800 gcc_assert (!gsi_end_p (input4_gsi
));
9802 input_stmt4
= gsi_stmt (input4_gsi
);
9806 scan_stmt4
= gsi_stmt (gsi
);
9807 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
9809 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
9810 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
9813 tree num_threads
= create_tmp_var (integer_type_node
);
9814 tree thread_num
= create_tmp_var (integer_type_node
);
9815 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
9816 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9817 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
9818 gimple_call_set_lhs (g
, num_threads
);
9819 gimple_seq_add_stmt (body_p
, g
);
9820 g
= gimple_build_call (threadnum_decl
, 0);
9821 gimple_call_set_lhs (g
, thread_num
);
9822 gimple_seq_add_stmt (body_p
, g
);
9824 tree ivar
= create_tmp_var (sizetype
);
9825 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
9826 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
9827 tree k
= create_tmp_var (unsigned_type_node
);
9828 tree l
= create_tmp_var (unsigned_type_node
);
9830 gimple_seq clist
= NULL
, mdlist
= NULL
;
9831 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
9832 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
9833 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
9834 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
9835 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9836 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9837 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
9839 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9840 tree var
= OMP_CLAUSE_DECL (c
);
9841 tree new_var
= lookup_decl (var
, ctx
);
9842 tree var3
= NULL_TREE
;
9843 tree new_vard
= new_var
;
9844 if (omp_is_reference (var
))
9845 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
9846 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9848 var3
= maybe_lookup_decl (new_vard
, ctx
);
9849 if (var3
== new_vard
)
9853 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
9854 tree rpriva
= create_tmp_var (ptype
);
9855 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
9856 OMP_CLAUSE_DECL (nc
) = rpriva
;
9858 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9860 tree rprivb
= create_tmp_var (ptype
);
9861 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
9862 OMP_CLAUSE_DECL (nc
) = rprivb
;
9863 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
9865 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9867 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
9868 if (new_vard
!= new_var
)
9869 TREE_ADDRESSABLE (var2
) = 1;
9870 gimple_add_tmp_var (var2
);
9872 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
9873 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9874 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9875 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9876 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9878 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
9879 thread_num
, integer_minus_one_node
);
9880 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
9881 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9882 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9883 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9884 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9886 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
9887 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9888 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9889 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9890 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9892 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
9893 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
9894 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9895 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9896 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9897 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9899 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
9900 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9901 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
9902 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9904 tree var4
= is_for_simd
? new_var
: var2
;
9905 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
9908 var5
= lookup_decl (var
, input_simd_ctx
);
9909 var6
= lookup_decl (var
, scan_simd_ctx
);
9910 if (new_vard
!= new_var
)
9912 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
9913 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
9916 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9918 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9921 x
= lang_hooks
.decls
.omp_clause_default_ctor
9922 (c
, var2
, build_outer_var_ref (var
, ctx
));
9924 gimplify_and_add (x
, &clist
);
9926 x
= build_outer_var_ref (var
, ctx
);
9927 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
9929 gimplify_and_add (x
, &thr01_list
);
9931 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9932 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9935 x
= unshare_expr (var4
);
9936 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
9937 gimplify_and_add (x
, &thrn1_list
);
9938 x
= unshare_expr (var4
);
9939 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
9940 gimplify_and_add (x
, &thr02_list
);
9942 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
9944 /* Otherwise, assign to it the identity element. */
9945 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9946 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9949 if (new_vard
!= new_var
)
9950 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9951 SET_DECL_VALUE_EXPR (new_vard
, val
);
9952 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
9954 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
9955 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9956 lower_omp (&tseq
, ctx
);
9957 gimple_seq_add_seq (&thrn1_list
, tseq
);
9958 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9959 lower_omp (&tseq
, ctx
);
9960 gimple_seq_add_seq (&thr02_list
, tseq
);
9961 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9962 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9963 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9965 SET_DECL_VALUE_EXPR (new_vard
, y
);
9968 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
9969 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
9973 x
= unshare_expr (var4
);
9974 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
9975 gimplify_and_add (x
, &thrn2_list
);
9979 x
= unshare_expr (rprivb_ref
);
9980 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
9981 gimplify_and_add (x
, &scan1_list
);
9985 if (ctx
->scan_exclusive
)
9987 x
= unshare_expr (rprivb_ref
);
9988 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
9989 gimplify_and_add (x
, &scan1_list
);
9992 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9993 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9994 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9995 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9996 lower_omp (&tseq
, ctx
);
9997 gimple_seq_add_seq (&scan1_list
, tseq
);
9999 if (ctx
->scan_inclusive
)
10001 x
= unshare_expr (rprivb_ref
);
10002 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
10003 gimplify_and_add (x
, &scan1_list
);
10007 x
= unshare_expr (rpriva_ref
);
10008 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
10009 unshare_expr (var4
));
10010 gimplify_and_add (x
, &mdlist
);
10012 x
= unshare_expr (is_for_simd
? var6
: new_var
);
10013 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
10014 gimplify_and_add (x
, &input2_list
);
10017 if (new_vard
!= new_var
)
10018 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10020 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10021 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10022 SET_DECL_VALUE_EXPR (new_vard
, val
);
10023 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10026 SET_DECL_VALUE_EXPR (placeholder
, var6
);
10027 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10030 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10031 lower_omp (&tseq
, ctx
);
10033 SET_DECL_VALUE_EXPR (new_vard
, y
);
10036 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10037 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10041 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
10042 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10043 lower_omp (&tseq
, ctx
);
10045 gimple_seq_add_seq (&input2_list
, tseq
);
10047 x
= build_outer_var_ref (var
, ctx
);
10048 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
10049 gimplify_and_add (x
, &last_list
);
10051 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
10052 gimplify_and_add (x
, &reduc_list
);
10053 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10054 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10056 if (new_vard
!= new_var
)
10057 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10058 SET_DECL_VALUE_EXPR (new_vard
, val
);
10059 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10060 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10061 lower_omp (&tseq
, ctx
);
10062 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10063 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10064 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10066 SET_DECL_VALUE_EXPR (new_vard
, y
);
10069 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10070 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10072 gimple_seq_add_seq (&reduc_list
, tseq
);
10073 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
10074 gimplify_and_add (x
, &reduc_list
);
10076 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
10078 gimplify_and_add (x
, dlist
);
10082 x
= build_outer_var_ref (var
, ctx
);
10083 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
10085 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
10086 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
10088 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
10090 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
10092 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
10093 if (code
== MINUS_EXPR
)
10097 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
10100 if (ctx
->scan_exclusive
)
10101 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
10103 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
10104 gimplify_assign (var2
, x
, &scan1_list
);
10105 if (ctx
->scan_inclusive
)
10106 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
10110 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
10113 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
10114 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
10116 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
10119 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
10120 unshare_expr (rprival_ref
));
10121 gimplify_assign (rprival_ref
, x
, &reduc_list
);
10125 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10126 gimple_seq_add_stmt (&scan1_list
, g
);
10127 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10128 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10129 ? scan_stmt4
: scan_stmt2
), g
);
10131 tree controlb
= create_tmp_var (boolean_type_node
);
10132 tree controlp
= create_tmp_var (ptr_type_node
);
10133 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10134 OMP_CLAUSE_DECL (nc
) = controlb
;
10135 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10137 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10138 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10139 OMP_CLAUSE_DECL (nc
) = controlp
;
10140 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10142 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10143 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10144 OMP_CLAUSE_DECL (nc
) = controlb
;
10145 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10147 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10148 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10149 OMP_CLAUSE_DECL (nc
) = controlp
;
10150 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10152 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10154 *cp1
= gimple_omp_for_clauses (stmt
);
10155 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
10156 *cp2
= gimple_omp_for_clauses (new_stmt
);
10157 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
10161 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
10162 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
10164 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
10166 gsi_remove (&input3_gsi
, true);
10167 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
10169 gsi_remove (&scan3_gsi
, true);
10170 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
10172 gsi_remove (&input4_gsi
, true);
10173 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
10175 gsi_remove (&scan4_gsi
, true);
10179 gimple_omp_set_body (scan_stmt1
, scan1_list
);
10180 gimple_omp_set_body (input_stmt2
, input2_list
);
10183 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
10185 gsi_remove (&input1_gsi
, true);
10186 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
10188 gsi_remove (&scan1_gsi
, true);
10189 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
10191 gsi_remove (&input2_gsi
, true);
10192 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
10194 gsi_remove (&scan2_gsi
, true);
10196 gimple_seq_add_seq (body_p
, clist
);
10198 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10199 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10200 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10201 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10202 gimple_seq_add_stmt (body_p
, g
);
10203 g
= gimple_build_label (lab1
);
10204 gimple_seq_add_stmt (body_p
, g
);
10205 gimple_seq_add_seq (body_p
, thr01_list
);
10206 g
= gimple_build_goto (lab3
);
10207 gimple_seq_add_stmt (body_p
, g
);
10208 g
= gimple_build_label (lab2
);
10209 gimple_seq_add_stmt (body_p
, g
);
10210 gimple_seq_add_seq (body_p
, thrn1_list
);
10211 g
= gimple_build_label (lab3
);
10212 gimple_seq_add_stmt (body_p
, g
);
10214 g
= gimple_build_assign (ivar
, size_zero_node
);
10215 gimple_seq_add_stmt (body_p
, g
);
10217 gimple_seq_add_stmt (body_p
, stmt
);
10218 gimple_seq_add_seq (body_p
, body
);
10219 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
10222 g
= gimple_build_omp_return (true);
10223 gimple_seq_add_stmt (body_p
, g
);
10224 gimple_seq_add_seq (body_p
, mdlist
);
10226 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10227 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10228 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
10229 gimple_seq_add_stmt (body_p
, g
);
10230 g
= gimple_build_label (lab1
);
10231 gimple_seq_add_stmt (body_p
, g
);
10233 g
= omp_build_barrier (NULL
);
10234 gimple_seq_add_stmt (body_p
, g
);
10236 tree down
= create_tmp_var (unsigned_type_node
);
10237 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
10238 gimple_seq_add_stmt (body_p
, g
);
10240 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
10241 gimple_seq_add_stmt (body_p
, g
);
10243 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
10244 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
10245 gimple_seq_add_stmt (body_p
, g
);
10247 tree thread_numu
= create_tmp_var (unsigned_type_node
);
10248 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
10249 gimple_seq_add_stmt (body_p
, g
);
10251 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
10252 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
10253 build_int_cst (unsigned_type_node
, 1));
10254 gimple_seq_add_stmt (body_p
, g
);
10256 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10257 g
= gimple_build_label (lab3
);
10258 gimple_seq_add_stmt (body_p
, g
);
10260 tree twok
= create_tmp_var (unsigned_type_node
);
10261 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
10262 gimple_seq_add_stmt (body_p
, g
);
10264 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
10265 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
10266 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
10267 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
10268 gimple_seq_add_stmt (body_p
, g
);
10269 g
= gimple_build_label (lab4
);
10270 gimple_seq_add_stmt (body_p
, g
);
10271 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
10272 gimple_seq_add_stmt (body_p
, g
);
10273 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10274 gimple_seq_add_stmt (body_p
, g
);
10276 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
10277 gimple_seq_add_stmt (body_p
, g
);
10278 g
= gimple_build_label (lab6
);
10279 gimple_seq_add_stmt (body_p
, g
);
10281 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10282 gimple_seq_add_stmt (body_p
, g
);
10284 g
= gimple_build_label (lab5
);
10285 gimple_seq_add_stmt (body_p
, g
);
10287 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
10288 gimple_seq_add_stmt (body_p
, g
);
10290 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
10291 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
10292 gimple_call_set_lhs (g
, cplx
);
10293 gimple_seq_add_stmt (body_p
, g
);
10294 tree mul
= create_tmp_var (unsigned_type_node
);
10295 g
= gimple_build_assign (mul
, REALPART_EXPR
,
10296 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
10297 gimple_seq_add_stmt (body_p
, g
);
10298 tree ovf
= create_tmp_var (unsigned_type_node
);
10299 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
10300 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
10301 gimple_seq_add_stmt (body_p
, g
);
10303 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
10304 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
10305 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
10307 gimple_seq_add_stmt (body_p
, g
);
10308 g
= gimple_build_label (lab7
);
10309 gimple_seq_add_stmt (body_p
, g
);
10311 tree andv
= create_tmp_var (unsigned_type_node
);
10312 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
10313 gimple_seq_add_stmt (body_p
, g
);
10314 tree andvm1
= create_tmp_var (unsigned_type_node
);
10315 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
10316 build_minus_one_cst (unsigned_type_node
));
10317 gimple_seq_add_stmt (body_p
, g
);
10319 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
10320 gimple_seq_add_stmt (body_p
, g
);
10322 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
10323 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
10324 gimple_seq_add_stmt (body_p
, g
);
10325 g
= gimple_build_label (lab9
);
10326 gimple_seq_add_stmt (body_p
, g
);
10327 gimple_seq_add_seq (body_p
, reduc_list
);
10328 g
= gimple_build_label (lab8
);
10329 gimple_seq_add_stmt (body_p
, g
);
10331 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
10332 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
10333 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
10334 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
10336 gimple_seq_add_stmt (body_p
, g
);
10337 g
= gimple_build_label (lab10
);
10338 gimple_seq_add_stmt (body_p
, g
);
10339 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
10340 gimple_seq_add_stmt (body_p
, g
);
10341 g
= gimple_build_goto (lab12
);
10342 gimple_seq_add_stmt (body_p
, g
);
10343 g
= gimple_build_label (lab11
);
10344 gimple_seq_add_stmt (body_p
, g
);
10345 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10346 gimple_seq_add_stmt (body_p
, g
);
10347 g
= gimple_build_label (lab12
);
10348 gimple_seq_add_stmt (body_p
, g
);
10350 g
= omp_build_barrier (NULL
);
10351 gimple_seq_add_stmt (body_p
, g
);
10353 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
10355 gimple_seq_add_stmt (body_p
, g
);
10357 g
= gimple_build_label (lab2
);
10358 gimple_seq_add_stmt (body_p
, g
);
10360 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10361 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10362 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10363 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10364 gimple_seq_add_stmt (body_p
, g
);
10365 g
= gimple_build_label (lab1
);
10366 gimple_seq_add_stmt (body_p
, g
);
10367 gimple_seq_add_seq (body_p
, thr02_list
);
10368 g
= gimple_build_goto (lab3
);
10369 gimple_seq_add_stmt (body_p
, g
);
10370 g
= gimple_build_label (lab2
);
10371 gimple_seq_add_stmt (body_p
, g
);
10372 gimple_seq_add_seq (body_p
, thrn2_list
);
10373 g
= gimple_build_label (lab3
);
10374 gimple_seq_add_stmt (body_p
, g
);
10376 g
= gimple_build_assign (ivar
, size_zero_node
);
10377 gimple_seq_add_stmt (body_p
, g
);
10378 gimple_seq_add_stmt (body_p
, new_stmt
);
10379 gimple_seq_add_seq (body_p
, new_body
);
10381 gimple_seq new_dlist
= NULL
;
10382 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10383 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10384 tree num_threadsm1
= create_tmp_var (integer_type_node
);
10385 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
10386 integer_minus_one_node
);
10387 gimple_seq_add_stmt (&new_dlist
, g
);
10388 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
10389 gimple_seq_add_stmt (&new_dlist
, g
);
10390 g
= gimple_build_label (lab1
);
10391 gimple_seq_add_stmt (&new_dlist
, g
);
10392 gimple_seq_add_seq (&new_dlist
, last_list
);
10393 g
= gimple_build_label (lab2
);
10394 gimple_seq_add_stmt (&new_dlist
, g
);
10395 gimple_seq_add_seq (&new_dlist
, *dlist
);
10396 *dlist
= new_dlist
;
/* NOTE(review): this region is an extraction of GCC's omp-low.c in which
   lines were split at token boundaries, original line numbers were fused
   into the text, and many structural lines (braces, some declarations and
   continuation arguments) were dropped.  The comments added below annotate
   the visible fragments only; they do not restore missing code.  */
10399 /* Lower code for an OMP loop directive. */
10402 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10404 tree
*rhs_p
, block
;
10405 struct omp_for_data fd
, *fdp
= NULL
;
10406 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
10408 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
10409 gimple_seq cnt_list
= NULL
, clist
= NULL
;
10410 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
/* NOTE(review): several locals used below (new_stmt, count, i, temp, simtc,
   rclauses, inner_bind, ...) have no visible declaration here -- their
   declaration lines were dropped by the extraction; confirm against the
   full source.  */
/* The lowered construct accumulates into BODY; a gimplification context is
   pushed here and popped near the end via pop_gimplify_context.  */
10413 push_gimplify_context ();
/* Lower the loop pre-body first, in this context.  */
10415 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
/* Build a fresh GIMPLE_BIND with its own BLOCK and put it in place of STMT
   at GSI right away (see original comment below).  */
10417 block
= make_node (BLOCK
);
10418 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
10419 /* Replace at gsi right away, so that 'stmt' is no member
10420 of a sequence anymore as we're going to add to a different
10422 gsi_replace (gsi_p
, new_stmt
, true);
10424 /* Move declaration of temporaries in the loop body before we make
10426 omp_for_body
= gimple_omp_body (stmt
);
/* If the body starts with a GIMPLE_BIND, hoist its vars onto NEW_STMT so
   they are declared ahead of the lowered loop, and clear them from the
   inner bind/block to avoid duplication.  */
10427 if (!gimple_seq_empty_p (omp_for_body
)
10428 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
10431 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
10432 tree vars
= gimple_bind_vars (inner_bind
);
10433 gimple_bind_append_vars (new_stmt
, vars
);
10434 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10435 keep them on the inner_bind and it's block. */
10436 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
10437 if (gimple_bind_block (inner_bind
))
10438 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
/* For loops combined into an enclosing construct, thread _looptemp_
   clauses through so istart/iend (and countN temporaries) are shared with
   the outer statement.  */
10441 if (gimple_omp_for_combined_into_p (stmt
))
10443 omp_extract_for_data (stmt
, &fd
, NULL
);
10446 /* We need two temporaries with fd.loop.v type (istart/iend)
10447 and then (fd.collapse - 1) temporaries with the same
10448 type for count2 ... countN-1 vars if not constant. */
10450 tree type
= fd
.iter_type
;
10451 if (fd
.collapse
> 1
10452 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
10453 count
+= fd
.collapse
- 1;
10455 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
10456 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
10457 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
10459 tree clauses
= *pc
;
/* Look up matching _looptemp_ clauses on the enclosing taskreg / SIMT
   statement; for taskreg kinds the decls come from the outer context.  */
10462 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
10463 OMP_CLAUSE__LOOPTEMP_
);
10464 if (ctx
->simt_stmt
)
10465 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
10466 OMP_CLAUSE__LOOPTEMP_
);
10467 for (i
= 0; i
< count
; i
++)
10472 gcc_assert (outerc
);
10473 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
10474 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
10475 OMP_CLAUSE__LOOPTEMP_
);
10479 /* If there are 2 adjacent SIMD stmts, one with _simt_
10480 clause, another without, make sure they have the same
10481 decls in _looptemp_ clauses, because the outer stmt
10482 they are combined into will look up just one inner_stmt. */
10483 if (ctx
->simt_stmt
)
10484 temp
= OMP_CLAUSE_DECL (simtc
);
10486 temp
= create_tmp_var (type
);
10487 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
/* Chain a new _looptemp_ clause holding TEMP onto this loop.  */
10489 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
10490 OMP_CLAUSE_DECL (*pc
) = temp
;
10491 pc
= &OMP_CLAUSE_CHAIN (*pc
);
10492 if (ctx
->simt_stmt
)
10493 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
10494 OMP_CLAUSE__LOOPTEMP_
);
10499 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
/* Task reductions: if present, add a _reductemp_ clause with a fresh
   pointer temporary and lower the reduction bookkeeping into
   tred_ilist/tred_dlist; RTMP carries the runtime descriptor.  */
10503 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
10504 OMP_CLAUSE_REDUCTION
);
10505 tree rtmp
= NULL_TREE
;
10508 tree type
= build_pointer_type (pointer_sized_int_node
);
10509 tree temp
= create_tmp_var (type
);
10510 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
10511 OMP_CLAUSE_DECL (c
) = temp
;
10512 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
10513 gimple_omp_for_set_clauses (stmt
, c
);
10514 lower_omp_task_reductions (ctx
, OMP_FOR
,
10515 gimple_omp_for_clauses (stmt
),
10516 &tred_ilist
, &tred_dlist
);
10518 rtmp
= make_ssa_name (type
);
10519 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
/* Lower data-sharing clauses; the pre-body goes before the lowered loop
   (into tred_ilist when task reductions wrap it, else straight into BODY).
   NOTE(review): the trailing arguments of the two calls below were dropped
   by the extraction.  */
10522 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
10525 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
10527 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
10528 gimple_omp_for_pre_body (stmt
));
10530 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10532 /* Lower the header expressions. At this point, we can assume that
10533 the header is of the form:
10535 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10537 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10538 using the .omp_data_s mapping, if needed. */
/* TREE_VEC initial/final values (non-rectangular loops) have their two
   operand slots gimplified into CNT_LIST individually.  */
10539 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
10541 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
10542 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
10544 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
10545 TREE_VEC_ELT (*rhs_p
, 1)
10546 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
10547 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
10548 TREE_VEC_ELT (*rhs_p
, 2)
10549 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
10551 else if (!is_gimple_min_invariant (*rhs_p
))
10552 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10553 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10554 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10556 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
10557 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
10559 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
10560 TREE_VEC_ELT (*rhs_p
, 1)
10561 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
10562 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
10563 TREE_VEC_ELT (*rhs_p
, 2)
10564 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
10566 else if (!is_gimple_min_invariant (*rhs_p
))
10567 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10568 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10569 recompute_tree_invariant_for_addr_expr (*rhs_p
);
/* The increment amount (operand 1 of the incr expression) is gimplified
   the same way.  */
10571 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
10572 if (!is_gimple_min_invariant (*rhs_p
))
10573 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10576 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
10578 gimple_seq_add_seq (&body
, cnt_list
);
10580 /* Once lowered, extract the bounds and clauses. */
10581 omp_extract_for_data (stmt
, &fd
, NULL
);
/* OpenACC (outside kernels regions): compute head/tail marker sequences
   that bracket the loop.  */
10583 if (is_gimple_omp_oacc (ctx
->stmt
)
10584 && !ctx_in_oacc_kernels_region (ctx
))
10585 lower_oacc_head_tail (gimple_location (stmt
),
10586 gimple_omp_for_clauses (stmt
),
10587 &oacc_head
, &oacc_tail
, ctx
);
10589 /* Add OpenACC partitioning and reduction markers just before the loop. */
10591 gimple_seq_add_seq (&body
, oacc_head
);
10593 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
/* Worksharing loops: remap LINEAR clause decls (and DECL steps) into this
   context so later expansion sees the privatized copies.  */
10595 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10596 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10597 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10598 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10600 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
10601 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
10602 OMP_CLAUSE_LINEAR_STEP (c
)
10603 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
/* Inclusive/exclusive scan on a worksharing loop gets its own two-phase
   lowering (lower_omp_for_scan); otherwise emit the loop directly.  */
10607 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
10608 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10609 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
/* Emit the lowered GIMPLE_OMP_FOR, its body, and the continue marker.  */
10612 gimple_seq_add_stmt (&body
, stmt
);
10613 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
10616 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
10619 /* After the loop, add exit clauses. */
10620 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
/* Reduction combiners collected in CLIST are bracketed by
   GOMP_atomic_start/GOMP_atomic_end runtime calls.  */
10624 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
10625 gcall
*g
= gimple_build_call (fndecl
, 0);
10626 gimple_seq_add_stmt (&body
, g
);
10627 gimple_seq_add_seq (&body
, clist
);
10628 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
10629 g
= gimple_build_call (fndecl
, 0);
10630 gimple_seq_add_stmt (&body
, g
);
/* Cancellable constructs need the cancel label emitted before cleanups.  */
10633 if (ctx
->cancellable
)
10634 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
10636 gimple_seq_add_seq (&body
, dlist
);
10640 gimple_seq_add_seq (&tred_ilist
, body
);
10644 body
= maybe_catch_exception (body
);
10646 /* Region exit marker goes at the end of the loop body. */
10647 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
10648 gimple_seq_add_stmt (&body
, g
);
10650 gimple_seq_add_seq (&body
, tred_dlist
);
10652 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
/* Publish the task-reduction descriptor on the clause, if any.  */
10655 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
10657 /* Add OpenACC joining and reduction markers just after the loop. */
10659 gimple_seq_add_seq (&body
, oacc_tail
);
10661 pop_gimplify_context (new_stmt
);
/* Final bookkeeping: move accumulated context vars onto the bind/block,
   and detach body/pre-body from STMT -- BODY now owns the statements.  */
10663 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
10664 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
10665 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
10666 if (BLOCK_VARS (block
))
10667 TREE_USED (block
) = 1;
10669 gimple_bind_set_body (new_stmt
, body
);
10670 gimple_omp_set_body (stmt
, NULL
);
10671 gimple_omp_for_set_pre_body (stmt
, NULL
);
10674 /* Callback for walk_stmts. Check if the current statement only contains
10675 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
/* *WI->info is the accumulator: 0 = nothing seen yet, 1 = exactly one
   workshare seen, -1 = not a single-workshare body.  NOTE(review): the
   extraction drops the WALK_SUBSTMTS/default arms and the return --
   confirm against the full source.  */
10678 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
10679 bool *handled_ops_p
,
10680 struct walk_stmt_info
*wi
)
10682 int *info
= (int *) wi
->info
;
10683 gimple
*stmt
= gsi_stmt (*gsi_p
);
/* Claim handling of every statement so the walker does not recurse into
   operands.  */
10685 *handled_ops_p
= true;
10686 switch (gimple_code (stmt
))
/* First workshare flips the accumulator 0 -> 1; any subsequent one makes
   it -1.  */
10692 case GIMPLE_OMP_FOR
:
10693 case GIMPLE_OMP_SECTIONS
:
10694 *info
= *info
== 0 ? 1 : -1;
/* Context passed to the task copy-function remapping callbacks below.
   NOTE(review): the struct's braces and field declarations were dropped by
   the extraction; only the tag and the first field's comment are visible.  */
10703 struct omp_taskcopy_context
10705 /* This field must be at the beginning, as we do "inheritance": Some
10706 callback functions for tree-inline.c (e.g., omp_copy_decl)
10707 receive a copy_body_data pointer that is up-casted to an
10708 omp_context pointer. */
/* copy_body_data::copy_decl callback for the task copyfn: VAR gets a fresh
   temporary of the same type when it appears in the context's sender-field
   map (tcctx->ctx->sfield_map).  NOTE(review): the extraction drops the
   fallthrough that presumably returns VAR unchanged -- confirm against the
   full source.  */
10714 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
/* CB is really an omp_taskcopy_context (see the struct comment above its
   definition): the copy_body_data is its first member.  */
10716 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
10718 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
10719 return create_tmp_var (TREE_TYPE (var
));
/* Build a new RECORD_TYPE mirroring ORIG_TYPE for the task copyfn, with
   every field's type, size and offset remapped through TCCTX->cb.  Each
   original field F is recorded in the decl map as F -> NEW_F so later
   remapping finds the copies.  NOTE(review): the extraction drops the
   closing arguments of the DECL_FIELD_OFFSET walk_tree call and the
   trailing 'return type;' -- confirm against the full source.  */
10725 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
10727 tree name
, new_fields
= NULL
, type
, f
;
/* Create the record shell and give it a TYPE_DECL named after the
   original, located at the task statement.  */
10729 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
10730 name
= DECL_NAME (TYPE_NAME (orig_type
));
10731 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
10732 TYPE_DECL
, name
, type
);
10733 TYPE_NAME (type
) = name
;
/* Copy each field, remapping its type and any trees referenced by its
   size/offset expressions; fields are prepended and reversed at the end.  */
10735 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
10737 tree new_f
= copy_node (f
);
10738 DECL_CONTEXT (new_f
) = type
;
10739 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
10740 TREE_CHAIN (new_f
) = new_fields
;
10741 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10742 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10743 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
10745 new_fields
= new_f
;
10746 tcctx
->cb
.decl_map
->put (f
, new_f
);
/* Restore declaration order and compute the new record's layout.  */
10748 TYPE_FIELDS (type
) = nreverse (new_fields
);
10749 layout_type (type
);
10753 /* Create task copyfn. */
10756 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
10758 struct function
*child_cfun
;
10759 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
10760 tree record_type
, srecord_type
, bind
, list
;
10761 bool record_needs_remap
= false, srecord_needs_remap
= false;
10763 struct omp_taskcopy_context tcctx
;
10764 location_t loc
= gimple_location (task_stmt
);
10765 size_t looptempno
= 0;
10767 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
10768 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
10769 gcc_assert (child_cfun
->cfg
== NULL
);
10770 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
10772 /* Reset DECL_CONTEXT on function arguments. */
10773 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
10774 DECL_CONTEXT (t
) = child_fn
;
10776 /* Populate the function. */
10777 push_gimplify_context ();
10778 push_cfun (child_cfun
);
10780 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
10781 TREE_SIDE_EFFECTS (bind
) = 1;
10783 DECL_SAVED_TREE (child_fn
) = bind
;
10784 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
10786 /* Remap src and dst argument types if needed. */
10787 record_type
= ctx
->record_type
;
10788 srecord_type
= ctx
->srecord_type
;
10789 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
10790 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10792 record_needs_remap
= true;
10795 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
10796 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10798 srecord_needs_remap
= true;
10802 if (record_needs_remap
|| srecord_needs_remap
)
10804 memset (&tcctx
, '\0', sizeof (tcctx
));
10805 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
10806 tcctx
.cb
.dst_fn
= child_fn
;
10807 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
10808 gcc_checking_assert (tcctx
.cb
.src_node
);
10809 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
10810 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
10811 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
10812 tcctx
.cb
.eh_lp_nr
= 0;
10813 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
10814 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
10817 if (record_needs_remap
)
10818 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
10819 if (srecord_needs_remap
)
10820 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
10823 tcctx
.cb
.decl_map
= NULL
;
10825 arg
= DECL_ARGUMENTS (child_fn
);
10826 TREE_TYPE (arg
) = build_pointer_type (record_type
);
10827 sarg
= DECL_CHAIN (arg
);
10828 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
10830 /* First pass: initialize temporaries used in record_type and srecord_type
10831 sizes and field offsets. */
10832 if (tcctx
.cb
.decl_map
)
10833 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10834 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10838 decl
= OMP_CLAUSE_DECL (c
);
10839 p
= tcctx
.cb
.decl_map
->get (decl
);
10842 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10843 sf
= (tree
) n
->value
;
10844 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10845 src
= build_simple_mem_ref_loc (loc
, sarg
);
10846 src
= omp_build_component_ref (src
, sf
);
10847 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
10848 append_to_statement_list (t
, &list
);
10851 /* Second pass: copy shared var pointers and copy construct non-VLA
10852 firstprivate vars. */
10853 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10854 switch (OMP_CLAUSE_CODE (c
))
10856 splay_tree_key key
;
10857 case OMP_CLAUSE_SHARED
:
10858 decl
= OMP_CLAUSE_DECL (c
);
10859 key
= (splay_tree_key
) decl
;
10860 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
10861 key
= (splay_tree_key
) &DECL_UID (decl
);
10862 n
= splay_tree_lookup (ctx
->field_map
, key
);
10865 f
= (tree
) n
->value
;
10866 if (tcctx
.cb
.decl_map
)
10867 f
= *tcctx
.cb
.decl_map
->get (f
);
10868 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10869 sf
= (tree
) n
->value
;
10870 if (tcctx
.cb
.decl_map
)
10871 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10872 src
= build_simple_mem_ref_loc (loc
, sarg
);
10873 src
= omp_build_component_ref (src
, sf
);
10874 dst
= build_simple_mem_ref_loc (loc
, arg
);
10875 dst
= omp_build_component_ref (dst
, f
);
10876 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10877 append_to_statement_list (t
, &list
);
10879 case OMP_CLAUSE_REDUCTION
:
10880 case OMP_CLAUSE_IN_REDUCTION
:
10881 decl
= OMP_CLAUSE_DECL (c
);
10882 if (TREE_CODE (decl
) == MEM_REF
)
10884 decl
= TREE_OPERAND (decl
, 0);
10885 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
10886 decl
= TREE_OPERAND (decl
, 0);
10887 if (TREE_CODE (decl
) == INDIRECT_REF
10888 || TREE_CODE (decl
) == ADDR_EXPR
)
10889 decl
= TREE_OPERAND (decl
, 0);
10891 key
= (splay_tree_key
) decl
;
10892 n
= splay_tree_lookup (ctx
->field_map
, key
);
10895 f
= (tree
) n
->value
;
10896 if (tcctx
.cb
.decl_map
)
10897 f
= *tcctx
.cb
.decl_map
->get (f
);
10898 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10899 sf
= (tree
) n
->value
;
10900 if (tcctx
.cb
.decl_map
)
10901 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10902 src
= build_simple_mem_ref_loc (loc
, sarg
);
10903 src
= omp_build_component_ref (src
, sf
);
10904 if (decl
!= OMP_CLAUSE_DECL (c
)
10905 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
10906 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
10907 src
= build_simple_mem_ref_loc (loc
, src
);
10908 dst
= build_simple_mem_ref_loc (loc
, arg
);
10909 dst
= omp_build_component_ref (dst
, f
);
10910 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10911 append_to_statement_list (t
, &list
);
10913 case OMP_CLAUSE__LOOPTEMP_
:
10914 /* Fields for first two _looptemp_ clauses are initialized by
10915 GOMP_taskloop*, the rest are handled like firstprivate. */
10916 if (looptempno
< 2)
10922 case OMP_CLAUSE__REDUCTEMP_
:
10923 case OMP_CLAUSE_FIRSTPRIVATE
:
10924 decl
= OMP_CLAUSE_DECL (c
);
10925 if (is_variable_sized (decl
))
10927 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10930 f
= (tree
) n
->value
;
10931 if (tcctx
.cb
.decl_map
)
10932 f
= *tcctx
.cb
.decl_map
->get (f
);
10933 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10936 sf
= (tree
) n
->value
;
10937 if (tcctx
.cb
.decl_map
)
10938 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10939 src
= build_simple_mem_ref_loc (loc
, sarg
);
10940 src
= omp_build_component_ref (src
, sf
);
10941 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
10942 src
= build_simple_mem_ref_loc (loc
, src
);
10946 dst
= build_simple_mem_ref_loc (loc
, arg
);
10947 dst
= omp_build_component_ref (dst
, f
);
10948 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
10949 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10951 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
10952 append_to_statement_list (t
, &list
);
10954 case OMP_CLAUSE_PRIVATE
:
10955 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
10957 decl
= OMP_CLAUSE_DECL (c
);
10958 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10959 f
= (tree
) n
->value
;
10960 if (tcctx
.cb
.decl_map
)
10961 f
= *tcctx
.cb
.decl_map
->get (f
);
10962 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10965 sf
= (tree
) n
->value
;
10966 if (tcctx
.cb
.decl_map
)
10967 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10968 src
= build_simple_mem_ref_loc (loc
, sarg
);
10969 src
= omp_build_component_ref (src
, sf
);
10970 if (use_pointer_for_field (decl
, NULL
))
10971 src
= build_simple_mem_ref_loc (loc
, src
);
10975 dst
= build_simple_mem_ref_loc (loc
, arg
);
10976 dst
= omp_build_component_ref (dst
, f
);
10977 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10978 append_to_statement_list (t
, &list
);
10984 /* Last pass: handle VLA firstprivates. */
10985 if (tcctx
.cb
.decl_map
)
10986 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10987 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10991 decl
= OMP_CLAUSE_DECL (c
);
10992 if (!is_variable_sized (decl
))
10994 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10997 f
= (tree
) n
->value
;
10998 f
= *tcctx
.cb
.decl_map
->get (f
);
10999 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
11000 ind
= DECL_VALUE_EXPR (decl
);
11001 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
11002 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
11003 n
= splay_tree_lookup (ctx
->sfield_map
,
11004 (splay_tree_key
) TREE_OPERAND (ind
, 0));
11005 sf
= (tree
) n
->value
;
11006 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11007 src
= build_simple_mem_ref_loc (loc
, sarg
);
11008 src
= omp_build_component_ref (src
, sf
);
11009 src
= build_simple_mem_ref_loc (loc
, src
);
11010 dst
= build_simple_mem_ref_loc (loc
, arg
);
11011 dst
= omp_build_component_ref (dst
, f
);
11012 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
11013 append_to_statement_list (t
, &list
);
11014 n
= splay_tree_lookup (ctx
->field_map
,
11015 (splay_tree_key
) TREE_OPERAND (ind
, 0));
11016 df
= (tree
) n
->value
;
11017 df
= *tcctx
.cb
.decl_map
->get (df
);
11018 ptr
= build_simple_mem_ref_loc (loc
, arg
);
11019 ptr
= omp_build_component_ref (ptr
, df
);
11020 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
11021 build_fold_addr_expr_loc (loc
, dst
));
11022 append_to_statement_list (t
, &list
);
11025 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
11026 append_to_statement_list (t
, &list
);
11028 if (tcctx
.cb
.decl_map
)
11029 delete tcctx
.cb
.decl_map
;
11030 pop_gimplify_context (NULL
);
11031 BIND_EXPR_BODY (bind
) = list
;
/* Lower OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into the flattened
   pointer-array form handed to the libgomp task runtime: a temporary
   array whose leading slots hold counts and whose remaining slots hold
   the addresses of the dependence objects.  Setup statements are emitted
   into *ISEQ and the trailing clobber of the array into *OSEQ.
   NOTE(review): this extraction has dropped physical lines (the embedded
   original line numbers jump, e.g. 11046 -> 11048); the cnt[] increments
   and idx adjustment inside the first switch, plus several braces and
   breaks, are not visible here.  Do not treat this text as a compilable
   reconstruction -- confirm against the original omp-low.c.  */
11036 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
/* cnt[0..3] count depend clauses by kind; idx starts at 2 because the
   plain array layout reserves two header slots (see the slot-0/slot-1
   stores below).  Presumably idx is raised to 5 for the extended header
   in elided code -- TODO confirm.  */
11040 size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
11042 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
11043 gcc_assert (clauses
);
/* First pass: classify each depend clause by its kind.  The per-case
   counting statements are elided in this dump.  */
11044 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11045 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
11046 switch (OMP_CLAUSE_DEPEND_KIND (c
))
11048 case OMP_CLAUSE_DEPEND_LAST
:
11049 /* Lowering already done at gimplification. */
11051 case OMP_CLAUSE_DEPEND_IN
:
11054 case OMP_CLAUSE_DEPEND_OUT
:
11055 case OMP_CLAUSE_DEPEND_INOUT
:
11058 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
11061 case OMP_CLAUSE_DEPEND_DEPOBJ
:
11064 case OMP_CLAUSE_DEPEND_SOURCE
:
11065 case OMP_CLAUSE_DEPEND_SINK
:
11068 gcc_unreachable ();
/* cnt[1] / cnt[3] non-zero selects the longer header variant; the code
   that reacts to this condition is elided here (original lines 11070+).  */
11070 if (cnt
[1] || cnt
[3])
/* Build the dependence array: TOTAL payload slots plus IDX header
   slots, as a stack temporary whose address is passed to the runtime.  */
11072 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
11073 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
11074 tree array
= create_tmp_var (type
);
11075 TREE_ADDRESSABLE (array
) = 1;
/* Header slot 0 is stored as 0 and slot 1 as TOTAL; the trailing
   argument of this build4 call is on an elided line.  */
11076 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
11080 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
11081 gimple_seq_add_stmt (iseq
, g
);
11082 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
11085 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
11086 gimple_seq_add_stmt (iseq
, g
);
/* With the extended header (idx == 5) three per-kind counts are stored;
   with the plain header only one.  */
11087 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
11089 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
11090 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
11091 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
11092 gimple_seq_add_stmt (iseq
, g
);
/* Second pass: revisit the clauses once per kind bucket and store each
   dependence address into the next array slot.  The case bodies that
   filter the current bucket are elided in this dump.  */
11094 for (i
= 0; i
< 4; i
++)
11098 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11099 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
11103 switch (OMP_CLAUSE_DEPEND_KIND (c
))
11105 case OMP_CLAUSE_DEPEND_IN
:
11109 case OMP_CLAUSE_DEPEND_OUT
:
11110 case OMP_CLAUSE_DEPEND_INOUT
:
11114 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
11118 case OMP_CLAUSE_DEPEND_DEPOBJ
:
11123 gcc_unreachable ();
/* Gimplify the clause's decl to a pointer rvalue and store it at the
   running index IDX.  */
11125 tree t
= OMP_CLAUSE_DECL (c
);
11126 t
= fold_convert (ptr_type_node
, t
);
11127 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
11128 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
11129 NULL_TREE
, NULL_TREE
);
11130 g
= gimple_build_assign (r
, t
);
11131 gimple_seq_add_stmt (iseq
, g
);
/* Replace the depend clauses with a single OMP_CLAUSE_DEPEND_LAST
   clause whose decl is the address of the freshly-built array, chained
   in front of the remaining clauses.  */
11134 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
11135 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
11136 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
11137 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
/* Clobber the array after the task region so its stack slot can be
   reused.  */
11139 tree clobber
= build_clobber (type
);
11140 g
= gimple_build_assign (array
, clobber
);
11141 gimple_seq_add_stmt (oseq
, g
);
11144 /* Lower the OpenMP parallel or task directive in the current statement
11145 in GSI_P. CTX holds context information for the directive. */
/* Lower a GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK statement at *GSI_P,
   using the scan-phase information recorded in CTX.  Emits the data
   marshalling (sender/receiver records), depend-clause lowering, task
   reductions, and rebuilds the construct's body, finally replacing the
   statement with a GIMPLE_BIND wrapping it all.
   NOTE(review): this extraction has dropped physical lines (embedded
   original line numbers jump, e.g. 11159 -> 11167, 11204 -> 11209); the
   return type line, several braces, and whole statements (e.g. the
   taskwait-only early path) are missing -- confirm against the original
   omp-low.c before relying on this text.  */
11148 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11152 gimple
*stmt
= gsi_stmt (*gsi_p
);
11153 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
11154 gimple_seq par_body
;
11155 location_t loc
= gimple_location (stmt
);
11157 clauses
= gimple_omp_taskreg_clauses (stmt
);
/* A task with the taskwait flag gets special handling; the body of this
   branch (original lines 11160-11166) is elided in this dump.  */
11158 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11159 && gimple_omp_task_taskwait_p (stmt
))
/* The construct body's first statement is its GIMPLE_BIND; extract the
   real body from it.  */
11167 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
11168 par_body
= gimple_bind_body (par_bind
);
11170 child_fn
= ctx
->cb
.dst_fn
;
/* For a parallel not already marked combined, walk its body with
   check_combined_parallel to detect the combined parallel-workshare
   idiom; the condition that consumes wi's result (between 11180 and
   11182) is elided here.  */
11171 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
11172 && !gimple_omp_parallel_combined_p (stmt
))
11174 struct walk_stmt_info wi
;
11177 memset (&wi
, 0, sizeof (wi
));
11179 wi
.val_only
= true;
11180 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
11182 gimple_omp_parallel_set_combined_p (stmt
, true);
/* Lower any depend clauses of a task into the runtime array form inside
   an enclosing bind (dep_bind).  */
11184 gimple_seq dep_ilist
= NULL
;
11185 gimple_seq dep_olist
= NULL
;
11186 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11187 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11189 push_gimplify_context ();
11190 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11191 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
11192 &dep_ilist
, &dep_olist
);
/* Taskwait-with-depend: emit just dep_ilist / stmt / dep_olist into
   dep_bind and return early (lines 11197-11199 and the return are
   elided here).  */
11195 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
11196 && gimple_omp_task_taskwait_p (stmt
))
11200 gsi_replace (gsi_p
, dep_bind
, true);
11201 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11202 gimple_bind_add_stmt (dep_bind
, stmt
);
11203 gimple_bind_add_seq (dep_bind
, dep_olist
);
11204 pop_gimplify_context (dep_bind
);
/* A task with an srecord_type needs a child copy function to copy in
   firstprivate data.  */
11209 if (ctx
->srecord_type
)
11210 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
/* Taskloop reductions and parallel _reductemp_ clauses are lowered into
   sequences wrapped around the construct; reuse/create dep_bind as the
   outer bind for them.  */
11212 gimple_seq tskred_ilist
= NULL
;
11213 gimple_seq tskred_olist
= NULL
;
11214 if ((is_task_ctx (ctx
)
11215 && gimple_omp_task_taskloop_p (ctx
->stmt
)
11216 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
11217 OMP_CLAUSE_REDUCTION
))
11218 || (is_parallel_ctx (ctx
)
11219 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
11220 OMP_CLAUSE__REDUCTEMP_
)))
11222 if (dep_bind
== NULL
)
11224 push_gimplify_context ();
11225 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
/* The OMP_PARALLEL alternative of this conditional argument (original
   line 11228) is elided in this dump.  */
11227 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
11229 gimple_omp_taskreg_clauses (ctx
->stmt
),
11230 &tskred_ilist
, &tskred_olist
);
11233 push_gimplify_context ();
/* Lower the construct's clauses and body.  Reductions are lowered here
   only for parallel; tasks handle them differently.  */
11235 gimple_seq par_olist
= NULL
;
11236 gimple_seq par_ilist
= NULL
;
11237 gimple_seq par_rlist
= NULL
;
11238 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
11239 lower_omp (&par_body
, ctx
);
11240 if (gimple_code (stmt
) != GIMPLE_OMP_TASK
)
11241 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
11243 /* Declare all the variables created by mapping and the variables
11244 declared in the scope of the parallel body. */
11245 record_vars_into (ctx
->block_vars
, child_fn
)
;
11246 maybe_remove_omp_member_access_dummy_vars (par_bind
);
11247 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
/* Create the outgoing data record (.omp_data_o) that the parent fills
   in and passes to the child function; the assigned lvalue on original
   lines 11250-11251 is elided here.  */
11249 if (ctx
->record_type
)
11252 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
11253 : ctx
->record_type
, ".omp_data_o");
11254 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11255 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11256 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
/* Emit the sends of clause values and shared variables into the data
   record (ilist before the construct, olist after).  */
11259 gimple_seq olist
= NULL
;
11260 gimple_seq ilist
= NULL
;
11261 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
11262 lower_send_shared_vars (&ilist
, &olist
, ctx
);
/* Clobber the sender record after the construct; the second argument of
   this gimple_build_assign (the clobber, original lines 11268+) is
   elided here.  */
11264 if (ctx
->record_type
)
11266 tree clobber
= build_clobber (TREE_TYPE (ctx
->sender_decl
));
11267 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
11271 /* Once all the expansions are done, sequence all the different
11272 fragments inside gimple_omp_body. */
11274 gimple_seq new_body
= NULL
;
11276 if (ctx
->record_type
)
/* Pass the address of the sender record to the child via its
   receiver_decl.  */
11278 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
11279 /* fixup_child_record_type might have changed receiver_decl's type. */
11280 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
11281 gimple_seq_add_stmt (&new_body
,
11282 gimple_build_assign (ctx
->receiver_decl
, t
));
/* Assemble: input-clause code, the lowered body, reduction code, an
   optional cancellation label, then output-clause code.  */
11285 gimple_seq_add_seq (&new_body
, par_ilist
);
11286 gimple_seq_add_seq (&new_body
, par_body
);
11287 gimple_seq_add_seq (&new_body
, par_rlist
);
11288 if (ctx
->cancellable
)
11289 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
11290 gimple_seq_add_seq (&new_body
, par_olist
);
11291 new_body
= maybe_catch_exception (new_body
);
11292 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
11293 gimple_seq_add_stmt (&new_body
,
11294 gimple_build_omp_continue (integer_zero_node
,
11295 integer_zero_node
));
11296 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
11297 gimple_omp_set_body (stmt
, new_body
);
/* Wrap the statement in a fresh bind (the else arm separating 11300
   from 11302 is elided here) and splice it into the statement stream,
   nesting inside dep_bind when one was created.  */
11299 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
11300 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11302 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
11303 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
11304 gimple_bind_add_seq (bind
, ilist
);
11305 gimple_bind_add_stmt (bind
, stmt
);
11306 gimple_bind_add_seq (bind
, olist
);
11308 pop_gimplify_context (NULL
);
/* When dep_bind exists, layer the depend and task-reduction sequences
   around the inner bind (guarding if and its brace are elided between
   11308 and 11312).  */
11312 gimple_bind_add_seq (dep_bind
, dep_ilist
);
11313 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
11314 gimple_bind_add_stmt (dep_bind
, bind
);
11315 gimple_bind_add_seq (dep_bind
, tskred_olist
);
11316 gimple_bind_add_seq (dep_bind
, dep_olist
);
11317 pop_gimplify_context (dep_bind
);
11321 /* Lower the GIMPLE_OMP_TARGET in the current statement
11322 in GSI_P. CTX holds context information for the directive. */
11325 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11328 tree child_fn
, t
, c
;
11329 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
11330 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
11331 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
11332 location_t loc
= gimple_location (stmt
);
11333 bool offloaded
, data_region
;
11334 unsigned int map_cnt
= 0;
11336 offloaded
= is_gimple_omp_offloaded (stmt
);
11337 switch (gimple_omp_target_kind (stmt
))
11339 case GF_OMP_TARGET_KIND_REGION
:
11340 case GF_OMP_TARGET_KIND_UPDATE
:
11341 case GF_OMP_TARGET_KIND_ENTER_DATA
:
11342 case GF_OMP_TARGET_KIND_EXIT_DATA
:
11343 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
11344 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
11345 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
11346 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
11347 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
11348 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
11349 data_region
= false;
11351 case GF_OMP_TARGET_KIND_DATA
:
11352 case GF_OMP_TARGET_KIND_OACC_DATA
:
11353 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
11354 data_region
= true;
11357 gcc_unreachable ();
11360 clauses
= gimple_omp_target_clauses (stmt
);
11362 gimple_seq dep_ilist
= NULL
;
11363 gimple_seq dep_olist
= NULL
;
11364 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11366 push_gimplify_context ();
11367 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11368 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
11369 &dep_ilist
, &dep_olist
);
11376 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
11377 tgt_body
= gimple_bind_body (tgt_bind
);
11379 else if (data_region
)
11380 tgt_body
= gimple_omp_body (stmt
);
11381 child_fn
= ctx
->cb
.dst_fn
;
11383 push_gimplify_context ();
11386 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11387 switch (OMP_CLAUSE_CODE (c
))
11393 case OMP_CLAUSE_MAP
:
11395 /* First check what we're prepared to handle in the following. */
11396 switch (OMP_CLAUSE_MAP_KIND (c
))
11398 case GOMP_MAP_ALLOC
:
11400 case GOMP_MAP_FROM
:
11401 case GOMP_MAP_TOFROM
:
11402 case GOMP_MAP_POINTER
:
11403 case GOMP_MAP_TO_PSET
:
11404 case GOMP_MAP_DELETE
:
11405 case GOMP_MAP_RELEASE
:
11406 case GOMP_MAP_ALWAYS_TO
:
11407 case GOMP_MAP_ALWAYS_FROM
:
11408 case GOMP_MAP_ALWAYS_TOFROM
:
11409 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
11410 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
11411 case GOMP_MAP_STRUCT
:
11412 case GOMP_MAP_ALWAYS_POINTER
:
11414 case GOMP_MAP_IF_PRESENT
:
11415 case GOMP_MAP_FORCE_ALLOC
:
11416 case GOMP_MAP_FORCE_TO
:
11417 case GOMP_MAP_FORCE_FROM
:
11418 case GOMP_MAP_FORCE_TOFROM
:
11419 case GOMP_MAP_FORCE_PRESENT
:
11420 case GOMP_MAP_FORCE_DEVICEPTR
:
11421 case GOMP_MAP_DEVICE_RESIDENT
:
11422 case GOMP_MAP_LINK
:
11423 case GOMP_MAP_ATTACH
:
11424 case GOMP_MAP_DETACH
:
11425 case GOMP_MAP_FORCE_DETACH
:
11426 gcc_assert (is_gimple_omp_oacc (stmt
));
11429 gcc_unreachable ();
11433 case OMP_CLAUSE_TO
:
11434 case OMP_CLAUSE_FROM
:
11436 var
= OMP_CLAUSE_DECL (c
);
11439 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
11440 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11441 && (OMP_CLAUSE_MAP_KIND (c
)
11442 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
11447 if (DECL_SIZE (var
)
11448 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
11450 tree var2
= DECL_VALUE_EXPR (var
);
11451 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
11452 var2
= TREE_OPERAND (var2
, 0);
11453 gcc_assert (DECL_P (var2
));
11458 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11459 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11460 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
11462 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11464 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
11465 && varpool_node::get_create (var
)->offloadable
)
11468 tree type
= build_pointer_type (TREE_TYPE (var
));
11469 tree new_var
= lookup_decl (var
, ctx
);
11470 x
= create_tmp_var_raw (type
, get_name (new_var
));
11471 gimple_add_tmp_var (x
);
11472 x
= build_simple_mem_ref (x
);
11473 SET_DECL_VALUE_EXPR (new_var
, x
);
11474 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11479 if (!maybe_lookup_field (var
, ctx
))
11482 /* Don't remap compute constructs' reduction variables, because the
11483 intermediate result must be local to each gang. */
11484 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11485 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
11487 x
= build_receiver_ref (var
, true, ctx
);
11488 tree new_var
= lookup_decl (var
, ctx
);
11490 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11491 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11492 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11493 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11494 x
= build_simple_mem_ref (x
);
11495 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11497 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11498 if (omp_is_reference (new_var
)
11499 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
11500 || DECL_BY_REFERENCE (var
)))
11502 /* Create a local object to hold the instance
11504 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
11505 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
11506 tree inst
= create_tmp_var (type
, id
);
11507 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
11508 x
= build_fold_addr_expr (inst
);
11510 gimplify_assign (new_var
, x
, &fplist
);
11512 else if (DECL_P (new_var
))
11514 SET_DECL_VALUE_EXPR (new_var
, x
);
11515 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11518 gcc_unreachable ();
11523 case OMP_CLAUSE_FIRSTPRIVATE
:
11524 if (is_oacc_parallel_or_serial (ctx
))
11525 goto oacc_firstprivate
;
11527 var
= OMP_CLAUSE_DECL (c
);
11528 if (!omp_is_reference (var
)
11529 && !is_gimple_reg_type (TREE_TYPE (var
)))
11531 tree new_var
= lookup_decl (var
, ctx
);
11532 if (is_variable_sized (var
))
11534 tree pvar
= DECL_VALUE_EXPR (var
);
11535 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11536 pvar
= TREE_OPERAND (pvar
, 0);
11537 gcc_assert (DECL_P (pvar
));
11538 tree new_pvar
= lookup_decl (pvar
, ctx
);
11539 x
= build_fold_indirect_ref (new_pvar
);
11540 TREE_THIS_NOTRAP (x
) = 1;
11543 x
= build_receiver_ref (var
, true, ctx
);
11544 SET_DECL_VALUE_EXPR (new_var
, x
);
11545 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11549 case OMP_CLAUSE_PRIVATE
:
11550 if (is_gimple_omp_oacc (ctx
->stmt
))
11552 var
= OMP_CLAUSE_DECL (c
);
11553 if (is_variable_sized (var
))
11555 tree new_var
= lookup_decl (var
, ctx
);
11556 tree pvar
= DECL_VALUE_EXPR (var
);
11557 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11558 pvar
= TREE_OPERAND (pvar
, 0);
11559 gcc_assert (DECL_P (pvar
));
11560 tree new_pvar
= lookup_decl (pvar
, ctx
);
11561 x
= build_fold_indirect_ref (new_pvar
);
11562 TREE_THIS_NOTRAP (x
) = 1;
11563 SET_DECL_VALUE_EXPR (new_var
, x
);
11564 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11568 case OMP_CLAUSE_USE_DEVICE_PTR
:
11569 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11570 case OMP_CLAUSE_IS_DEVICE_PTR
:
11571 var
= OMP_CLAUSE_DECL (c
);
11573 if (is_variable_sized (var
))
11575 tree new_var
= lookup_decl (var
, ctx
);
11576 tree pvar
= DECL_VALUE_EXPR (var
);
11577 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11578 pvar
= TREE_OPERAND (pvar
, 0);
11579 gcc_assert (DECL_P (pvar
));
11580 tree new_pvar
= lookup_decl (pvar
, ctx
);
11581 x
= build_fold_indirect_ref (new_pvar
);
11582 TREE_THIS_NOTRAP (x
) = 1;
11583 SET_DECL_VALUE_EXPR (new_var
, x
);
11584 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11586 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
11587 && !omp_is_reference (var
)
11588 && !omp_is_allocatable_or_ptr (var
)
11589 && !lang_hooks
.decls
.omp_array_data (var
, true))
11590 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11592 tree new_var
= lookup_decl (var
, ctx
);
11593 tree type
= build_pointer_type (TREE_TYPE (var
));
11594 x
= create_tmp_var_raw (type
, get_name (new_var
));
11595 gimple_add_tmp_var (x
);
11596 x
= build_simple_mem_ref (x
);
11597 SET_DECL_VALUE_EXPR (new_var
, x
);
11598 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11602 tree new_var
= lookup_decl (var
, ctx
);
11603 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
11604 gimple_add_tmp_var (x
);
11605 SET_DECL_VALUE_EXPR (new_var
, x
);
11606 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11613 target_nesting_level
++;
11614 lower_omp (&tgt_body
, ctx
);
11615 target_nesting_level
--;
11617 else if (data_region
)
11618 lower_omp (&tgt_body
, ctx
);
11622 /* Declare all the variables created by mapping and the variables
11623 declared in the scope of the target body. */
11624 record_vars_into (ctx
->block_vars
, child_fn
);
11625 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
11626 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
11631 if (ctx
->record_type
)
11634 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
11635 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11636 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11637 t
= make_tree_vec (3);
11638 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
11639 TREE_VEC_ELT (t
, 1)
11640 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
11641 ".omp_data_sizes");
11642 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
11643 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
11644 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
11645 tree tkind_type
= short_unsigned_type_node
;
11646 int talign_shift
= 8;
11647 TREE_VEC_ELT (t
, 2)
11648 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
11649 ".omp_data_kinds");
11650 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
11651 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
11652 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
11653 gimple_omp_target_set_data_arg (stmt
, t
);
11655 vec
<constructor_elt
, va_gc
> *vsize
;
11656 vec
<constructor_elt
, va_gc
> *vkind
;
11657 vec_alloc (vsize
, map_cnt
);
11658 vec_alloc (vkind
, map_cnt
);
11659 unsigned int map_idx
= 0;
11661 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11662 switch (OMP_CLAUSE_CODE (c
))
11664 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
11665 unsigned int talign
;
11670 case OMP_CLAUSE_MAP
:
11671 case OMP_CLAUSE_TO
:
11672 case OMP_CLAUSE_FROM
:
11673 oacc_firstprivate_map
:
11675 ovar
= OMP_CLAUSE_DECL (c
);
11676 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11677 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11678 || (OMP_CLAUSE_MAP_KIND (c
)
11679 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
11681 if (!DECL_P (ovar
))
11683 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11684 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
11686 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
11687 == get_base_address (ovar
));
11688 nc
= OMP_CLAUSE_CHAIN (c
);
11689 ovar
= OMP_CLAUSE_DECL (nc
);
11693 tree x
= build_sender_ref (ovar
, ctx
);
11695 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
11696 gimplify_assign (x
, v
, &ilist
);
11702 if (DECL_SIZE (ovar
)
11703 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
11705 tree ovar2
= DECL_VALUE_EXPR (ovar
);
11706 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
11707 ovar2
= TREE_OPERAND (ovar2
, 0);
11708 gcc_assert (DECL_P (ovar2
));
11711 if (!maybe_lookup_field (ovar
, ctx
))
11715 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
11716 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
11717 talign
= DECL_ALIGN_UNIT (ovar
);
11720 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11721 x
= build_sender_ref (ovar
, ctx
);
11723 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11724 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11725 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11726 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
11728 gcc_assert (offloaded
);
11730 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
11731 mark_addressable (avar
);
11732 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
11733 talign
= DECL_ALIGN_UNIT (avar
);
11734 avar
= build_fold_addr_expr (avar
);
11735 gimplify_assign (x
, avar
, &ilist
);
11737 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11739 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11740 if (!omp_is_reference (var
))
11742 if (is_gimple_reg (var
)
11743 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11744 TREE_NO_WARNING (var
) = 1;
11745 var
= build_fold_addr_expr (var
);
11748 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11749 gimplify_assign (x
, var
, &ilist
);
11751 else if (is_gimple_reg (var
))
11753 gcc_assert (offloaded
);
11754 tree avar
= create_tmp_var (TREE_TYPE (var
));
11755 mark_addressable (avar
);
11756 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
11757 if (GOMP_MAP_COPY_TO_P (map_kind
)
11758 || map_kind
== GOMP_MAP_POINTER
11759 || map_kind
== GOMP_MAP_TO_PSET
11760 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11762 /* If we need to initialize a temporary
11763 with VAR because it is not addressable, and
11764 the variable hasn't been initialized yet, then
11765 we'll get a warning for the store to avar.
11766 Don't warn in that case, the mapping might
11768 TREE_NO_WARNING (var
) = 1;
11769 gimplify_assign (avar
, var
, &ilist
);
11771 avar
= build_fold_addr_expr (avar
);
11772 gimplify_assign (x
, avar
, &ilist
);
11773 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
11774 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11775 && !TYPE_READONLY (TREE_TYPE (var
)))
11777 x
= unshare_expr (x
);
11778 x
= build_simple_mem_ref (x
);
11779 gimplify_assign (var
, x
, &olist
);
11784 /* While MAP is handled explicitly by the FE,
11785 for 'target update', only the identified is passed. */
11786 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
11787 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
11788 && (omp_is_allocatable_or_ptr (var
)
11789 && omp_check_optional_argument (var
, false)))
11790 var
= build_fold_indirect_ref (var
);
11791 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
11792 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
11793 || (!omp_is_allocatable_or_ptr (var
)
11794 && !omp_check_optional_argument (var
, false)))
11795 var
= build_fold_addr_expr (var
);
11796 gimplify_assign (x
, var
, &ilist
);
11800 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11802 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11803 s
= TREE_TYPE (ovar
);
11804 if (TREE_CODE (s
) == REFERENCE_TYPE
11805 || omp_check_optional_argument (ovar
, false))
11807 s
= TYPE_SIZE_UNIT (s
);
11810 s
= OMP_CLAUSE_SIZE (c
);
11811 if (s
== NULL_TREE
)
11812 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11813 s
= fold_convert (size_type_node
, s
);
11814 purpose
= size_int (map_idx
++);
11815 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11816 if (TREE_CODE (s
) != INTEGER_CST
)
11817 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11819 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
11820 switch (OMP_CLAUSE_CODE (c
))
11822 case OMP_CLAUSE_MAP
:
11823 tkind
= OMP_CLAUSE_MAP_KIND (c
);
11824 tkind_zero
= tkind
;
11825 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
11828 case GOMP_MAP_ALLOC
:
11829 case GOMP_MAP_IF_PRESENT
:
11831 case GOMP_MAP_FROM
:
11832 case GOMP_MAP_TOFROM
:
11833 case GOMP_MAP_ALWAYS_TO
:
11834 case GOMP_MAP_ALWAYS_FROM
:
11835 case GOMP_MAP_ALWAYS_TOFROM
:
11836 case GOMP_MAP_RELEASE
:
11837 case GOMP_MAP_FORCE_TO
:
11838 case GOMP_MAP_FORCE_FROM
:
11839 case GOMP_MAP_FORCE_TOFROM
:
11840 case GOMP_MAP_FORCE_PRESENT
:
11841 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
11843 case GOMP_MAP_DELETE
:
11844 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
11848 if (tkind_zero
!= tkind
)
11850 if (integer_zerop (s
))
11851 tkind
= tkind_zero
;
11852 else if (integer_nonzerop (s
))
11853 tkind_zero
= tkind
;
11856 case OMP_CLAUSE_FIRSTPRIVATE
:
11857 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11858 tkind
= GOMP_MAP_TO
;
11859 tkind_zero
= tkind
;
11861 case OMP_CLAUSE_TO
:
11862 tkind
= GOMP_MAP_TO
;
11863 tkind_zero
= tkind
;
11865 case OMP_CLAUSE_FROM
:
11866 tkind
= GOMP_MAP_FROM
;
11867 tkind_zero
= tkind
;
11870 gcc_unreachable ();
11872 gcc_checking_assert (tkind
11873 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11874 gcc_checking_assert (tkind_zero
11875 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11876 talign
= ceil_log2 (talign
);
11877 tkind
|= talign
<< talign_shift
;
11878 tkind_zero
|= talign
<< talign_shift
;
11879 gcc_checking_assert (tkind
11880 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11881 gcc_checking_assert (tkind_zero
11882 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11883 if (tkind
== tkind_zero
)
11884 x
= build_int_cstu (tkind_type
, tkind
);
11887 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
11888 x
= build3 (COND_EXPR
, tkind_type
,
11889 fold_build2 (EQ_EXPR
, boolean_type_node
,
11890 unshare_expr (s
), size_zero_node
),
11891 build_int_cstu (tkind_type
, tkind_zero
),
11892 build_int_cstu (tkind_type
, tkind
));
11894 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
11899 case OMP_CLAUSE_FIRSTPRIVATE
:
11900 if (is_oacc_parallel_or_serial (ctx
))
11901 goto oacc_firstprivate_map
;
11902 ovar
= OMP_CLAUSE_DECL (c
);
11903 if (omp_is_reference (ovar
))
11904 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11906 talign
= DECL_ALIGN_UNIT (ovar
);
11907 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11908 x
= build_sender_ref (ovar
, ctx
);
11909 tkind
= GOMP_MAP_FIRSTPRIVATE
;
11910 type
= TREE_TYPE (ovar
);
11911 if (omp_is_reference (ovar
))
11912 type
= TREE_TYPE (type
);
11913 if ((INTEGRAL_TYPE_P (type
)
11914 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
11915 || TREE_CODE (type
) == POINTER_TYPE
)
11917 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
11919 if (omp_is_reference (var
))
11920 t
= build_simple_mem_ref (var
);
11921 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11922 TREE_NO_WARNING (var
) = 1;
11923 if (TREE_CODE (type
) != POINTER_TYPE
)
11924 t
= fold_convert (pointer_sized_int_node
, t
);
11925 t
= fold_convert (TREE_TYPE (x
), t
);
11926 gimplify_assign (x
, t
, &ilist
);
11928 else if (omp_is_reference (var
))
11929 gimplify_assign (x
, var
, &ilist
);
11930 else if (is_gimple_reg (var
))
11932 tree avar
= create_tmp_var (TREE_TYPE (var
));
11933 mark_addressable (avar
);
11934 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11935 TREE_NO_WARNING (var
) = 1;
11936 gimplify_assign (avar
, var
, &ilist
);
11937 avar
= build_fold_addr_expr (avar
);
11938 gimplify_assign (x
, avar
, &ilist
);
11942 var
= build_fold_addr_expr (var
);
11943 gimplify_assign (x
, var
, &ilist
);
11945 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
11947 else if (omp_is_reference (ovar
))
11948 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11950 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11951 s
= fold_convert (size_type_node
, s
);
11952 purpose
= size_int (map_idx
++);
11953 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11954 if (TREE_CODE (s
) != INTEGER_CST
)
11955 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11957 gcc_checking_assert (tkind
11958 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11959 talign
= ceil_log2 (talign
);
11960 tkind
|= talign
<< talign_shift
;
11961 gcc_checking_assert (tkind
11962 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11963 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
11964 build_int_cstu (tkind_type
, tkind
));
11967 case OMP_CLAUSE_USE_DEVICE_PTR
:
11968 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11969 case OMP_CLAUSE_IS_DEVICE_PTR
:
11970 ovar
= OMP_CLAUSE_DECL (c
);
11971 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11973 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
11975 tkind
= (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
11976 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
11977 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
11979 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
11981 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
11982 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
11986 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
11987 x
= build_sender_ref (ovar
, ctx
);
11990 if (is_gimple_omp_oacc (ctx
->stmt
))
11992 gcc_assert (tkind
== GOMP_MAP_USE_DEVICE_PTR
);
11994 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
))
11995 tkind
= GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT
;
11998 type
= TREE_TYPE (ovar
);
11999 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
12000 var
= lang_hooks
.decls
.omp_array_data (ovar
, false);
12001 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12002 && !omp_is_reference (ovar
)
12003 && !omp_is_allocatable_or_ptr (ovar
))
12004 || TREE_CODE (type
) == ARRAY_TYPE
)
12005 var
= build_fold_addr_expr (var
);
12008 if (omp_is_reference (ovar
)
12009 || omp_check_optional_argument (ovar
, false)
12010 || omp_is_allocatable_or_ptr (ovar
))
12012 type
= TREE_TYPE (type
);
12013 if (TREE_CODE (type
) != ARRAY_TYPE
12014 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
12015 && !omp_is_allocatable_or_ptr (ovar
))
12016 || (omp_is_reference (ovar
)
12017 && omp_is_allocatable_or_ptr (ovar
))))
12018 var
= build_simple_mem_ref (var
);
12019 var
= fold_convert (TREE_TYPE (x
), var
);
12023 present
= omp_check_optional_argument (ovar
, true);
12026 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
12027 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
12028 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
12029 tree new_x
= unshare_expr (x
);
12030 gimplify_expr (&present
, &ilist
, NULL
, is_gimple_val
,
12032 gcond
*cond
= gimple_build_cond_from_tree (present
,
12035 gimple_seq_add_stmt (&ilist
, cond
);
12036 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_label
));
12037 gimplify_assign (new_x
, null_pointer_node
, &ilist
);
12038 gimple_seq_add_stmt (&ilist
, gimple_build_goto (opt_arg_label
));
12039 gimple_seq_add_stmt (&ilist
,
12040 gimple_build_label (notnull_label
));
12041 gimplify_assign (x
, var
, &ilist
);
12042 gimple_seq_add_stmt (&ilist
,
12043 gimple_build_label (opt_arg_label
));
12046 gimplify_assign (x
, var
, &ilist
);
12048 purpose
= size_int (map_idx
++);
12049 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
12050 gcc_checking_assert (tkind
12051 < (HOST_WIDE_INT_C (1U) << talign_shift
));
12052 gcc_checking_assert (tkind
12053 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
12054 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
12055 build_int_cstu (tkind_type
, tkind
));
12059 gcc_assert (map_idx
== map_cnt
);
12061 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
12062 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
12063 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
12064 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
12065 for (int i
= 1; i
<= 2; i
++)
12066 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
12068 gimple_seq initlist
= NULL
;
12069 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
12070 TREE_VEC_ELT (t
, i
)),
12071 &initlist
, true, NULL_TREE
);
12072 gimple_seq_add_seq (&ilist
, initlist
);
12074 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
12075 gimple_seq_add_stmt (&olist
,
12076 gimple_build_assign (TREE_VEC_ELT (t
, i
),
12080 tree clobber
= build_clobber (ctx
->record_type
);
12081 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
12085 /* Once all the expansions are done, sequence all the different
12086 fragments inside gimple_omp_body. */
12091 && ctx
->record_type
)
12093 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
12094 /* fixup_child_record_type might have changed receiver_decl's type. */
12095 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
12096 gimple_seq_add_stmt (&new_body
,
12097 gimple_build_assign (ctx
->receiver_decl
, t
));
12099 gimple_seq_add_seq (&new_body
, fplist
);
12101 if (offloaded
|| data_region
)
12103 tree prev
= NULL_TREE
;
12104 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12105 switch (OMP_CLAUSE_CODE (c
))
12110 case OMP_CLAUSE_FIRSTPRIVATE
:
12111 if (is_gimple_omp_oacc (ctx
->stmt
))
12113 var
= OMP_CLAUSE_DECL (c
);
12114 if (omp_is_reference (var
)
12115 || is_gimple_reg_type (TREE_TYPE (var
)))
12117 tree new_var
= lookup_decl (var
, ctx
);
12119 type
= TREE_TYPE (var
);
12120 if (omp_is_reference (var
))
12121 type
= TREE_TYPE (type
);
12122 if ((INTEGRAL_TYPE_P (type
)
12123 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
12124 || TREE_CODE (type
) == POINTER_TYPE
)
12126 x
= build_receiver_ref (var
, false, ctx
);
12127 if (TREE_CODE (type
) != POINTER_TYPE
)
12128 x
= fold_convert (pointer_sized_int_node
, x
);
12129 x
= fold_convert (type
, x
);
12130 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12132 if (omp_is_reference (var
))
12134 tree v
= create_tmp_var_raw (type
, get_name (var
));
12135 gimple_add_tmp_var (v
);
12136 TREE_ADDRESSABLE (v
) = 1;
12137 gimple_seq_add_stmt (&new_body
,
12138 gimple_build_assign (v
, x
));
12139 x
= build_fold_addr_expr (v
);
12141 gimple_seq_add_stmt (&new_body
,
12142 gimple_build_assign (new_var
, x
));
12146 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
12147 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12149 gimple_seq_add_stmt (&new_body
,
12150 gimple_build_assign (new_var
, x
));
12153 else if (is_variable_sized (var
))
12155 tree pvar
= DECL_VALUE_EXPR (var
);
12156 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12157 pvar
= TREE_OPERAND (pvar
, 0);
12158 gcc_assert (DECL_P (pvar
));
12159 tree new_var
= lookup_decl (pvar
, ctx
);
12160 x
= build_receiver_ref (var
, false, ctx
);
12161 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12162 gimple_seq_add_stmt (&new_body
,
12163 gimple_build_assign (new_var
, x
));
12166 case OMP_CLAUSE_PRIVATE
:
12167 if (is_gimple_omp_oacc (ctx
->stmt
))
12169 var
= OMP_CLAUSE_DECL (c
);
12170 if (omp_is_reference (var
))
12172 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12173 tree new_var
= lookup_decl (var
, ctx
);
12174 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
12175 if (TREE_CONSTANT (x
))
12177 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
12179 gimple_add_tmp_var (x
);
12180 TREE_ADDRESSABLE (x
) = 1;
12181 x
= build_fold_addr_expr_loc (clause_loc
, x
);
12186 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12187 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12188 gimple_seq_add_stmt (&new_body
,
12189 gimple_build_assign (new_var
, x
));
12192 case OMP_CLAUSE_USE_DEVICE_PTR
:
12193 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12194 case OMP_CLAUSE_IS_DEVICE_PTR
:
12196 gimple_seq assign_body
;
12197 bool is_array_data
;
12198 bool do_optional_check
;
12199 assign_body
= NULL
;
12200 do_optional_check
= false;
12201 var
= OMP_CLAUSE_DECL (c
);
12202 is_array_data
= lang_hooks
.decls
.omp_array_data (var
, true) != NULL
;
12204 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
12205 x
= build_sender_ref (is_array_data
12206 ? (splay_tree_key
) &DECL_NAME (var
)
12207 : (splay_tree_key
) &DECL_UID (var
), ctx
);
12209 x
= build_receiver_ref (var
, false, ctx
);
12213 bool is_ref
= omp_is_reference (var
);
12214 do_optional_check
= true;
12215 /* First, we copy the descriptor data from the host; then
12216 we update its data to point to the target address. */
12217 new_var
= lookup_decl (var
, ctx
);
12218 new_var
= DECL_VALUE_EXPR (new_var
);
12223 var
= build_fold_indirect_ref (var
);
12224 gimplify_expr (&var
, &assign_body
, NULL
, is_gimple_val
,
12226 v
= create_tmp_var_raw (TREE_TYPE (var
), get_name (var
));
12227 gimple_add_tmp_var (v
);
12228 TREE_ADDRESSABLE (v
) = 1;
12229 gimple_seq_add_stmt (&assign_body
,
12230 gimple_build_assign (v
, var
));
12231 tree rhs
= build_fold_addr_expr (v
);
12232 gimple_seq_add_stmt (&assign_body
,
12233 gimple_build_assign (new_var
, rhs
));
12236 gimple_seq_add_stmt (&assign_body
,
12237 gimple_build_assign (new_var
, var
));
12239 tree v2
= lang_hooks
.decls
.omp_array_data (unshare_expr (v
), false);
12241 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12242 gimple_seq_add_stmt (&assign_body
,
12243 gimple_build_assign (v2
, x
));
12245 else if (is_variable_sized (var
))
12247 tree pvar
= DECL_VALUE_EXPR (var
);
12248 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12249 pvar
= TREE_OPERAND (pvar
, 0);
12250 gcc_assert (DECL_P (pvar
));
12251 new_var
= lookup_decl (pvar
, ctx
);
12252 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12253 gimple_seq_add_stmt (&assign_body
,
12254 gimple_build_assign (new_var
, x
));
12256 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12257 && !omp_is_reference (var
)
12258 && !omp_is_allocatable_or_ptr (var
))
12259 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12261 new_var
= lookup_decl (var
, ctx
);
12262 new_var
= DECL_VALUE_EXPR (new_var
);
12263 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
12264 new_var
= TREE_OPERAND (new_var
, 0);
12265 gcc_assert (DECL_P (new_var
));
12266 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12267 gimple_seq_add_stmt (&assign_body
,
12268 gimple_build_assign (new_var
, x
));
12272 tree type
= TREE_TYPE (var
);
12273 new_var
= lookup_decl (var
, ctx
);
12274 if (omp_is_reference (var
))
12276 type
= TREE_TYPE (type
);
12277 if (TREE_CODE (type
) != ARRAY_TYPE
12278 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
12279 || (omp_is_reference (var
)
12280 && omp_is_allocatable_or_ptr (var
))))
12282 tree v
= create_tmp_var_raw (type
, get_name (var
));
12283 gimple_add_tmp_var (v
);
12284 TREE_ADDRESSABLE (v
) = 1;
12285 x
= fold_convert (type
, x
);
12286 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
,
12288 gimple_seq_add_stmt (&assign_body
,
12289 gimple_build_assign (v
, x
));
12290 x
= build_fold_addr_expr (v
);
12291 do_optional_check
= true;
12294 new_var
= DECL_VALUE_EXPR (new_var
);
12295 x
= fold_convert (TREE_TYPE (new_var
), x
);
12296 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
12297 gimple_seq_add_stmt (&assign_body
,
12298 gimple_build_assign (new_var
, x
));
12301 present
= (do_optional_check
12302 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c
), true)
12306 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
12307 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
12308 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
12309 glabel
*null_glabel
= gimple_build_label (null_label
);
12310 glabel
*notnull_glabel
= gimple_build_label (notnull_label
);
12311 ggoto
*opt_arg_ggoto
= gimple_build_goto (opt_arg_label
);
12312 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
12314 gimplify_expr (&present
, &new_body
, NULL
, is_gimple_val
,
12316 gcond
*cond
= gimple_build_cond_from_tree (present
,
12319 gimple_seq_add_stmt (&new_body
, cond
);
12320 gimple_seq_add_stmt (&new_body
, null_glabel
);
12321 gimplify_assign (new_var
, null_pointer_node
, &new_body
);
12322 gimple_seq_add_stmt (&new_body
, opt_arg_ggoto
);
12323 gimple_seq_add_stmt (&new_body
, notnull_glabel
);
12324 gimple_seq_add_seq (&new_body
, assign_body
);
12325 gimple_seq_add_stmt (&new_body
,
12326 gimple_build_label (opt_arg_label
));
12329 gimple_seq_add_seq (&new_body
, assign_body
);
12332 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12333 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12334 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12335 or references to VLAs. */
12336 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12337 switch (OMP_CLAUSE_CODE (c
))
12342 case OMP_CLAUSE_MAP
:
12343 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12344 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
12346 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12347 poly_int64 offset
= 0;
12349 var
= OMP_CLAUSE_DECL (c
);
12351 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
12352 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
12354 && varpool_node::get_create (var
)->offloadable
)
12356 if (TREE_CODE (var
) == INDIRECT_REF
12357 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
12358 var
= TREE_OPERAND (var
, 0);
12359 if (TREE_CODE (var
) == COMPONENT_REF
)
12361 var
= get_addr_base_and_unit_offset (var
, &offset
);
12362 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
12364 else if (DECL_SIZE (var
)
12365 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12367 tree var2
= DECL_VALUE_EXPR (var
);
12368 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12369 var2
= TREE_OPERAND (var2
, 0);
12370 gcc_assert (DECL_P (var2
));
12373 tree new_var
= lookup_decl (var
, ctx
), x
;
12374 tree type
= TREE_TYPE (new_var
);
12376 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
12377 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
12380 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
12382 new_var
= build2 (MEM_REF
, type
,
12383 build_fold_addr_expr (new_var
),
12384 build_int_cst (build_pointer_type (type
),
12387 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
12389 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
12390 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
12391 new_var
= build2 (MEM_REF
, type
,
12392 build_fold_addr_expr (new_var
),
12393 build_int_cst (build_pointer_type (type
),
12397 is_ref
= omp_is_reference (var
);
12398 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
12400 bool ref_to_array
= false;
12403 type
= TREE_TYPE (type
);
12404 if (TREE_CODE (type
) == ARRAY_TYPE
)
12406 type
= build_pointer_type (type
);
12407 ref_to_array
= true;
12410 else if (TREE_CODE (type
) == ARRAY_TYPE
)
12412 tree decl2
= DECL_VALUE_EXPR (new_var
);
12413 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
12414 decl2
= TREE_OPERAND (decl2
, 0);
12415 gcc_assert (DECL_P (decl2
));
12417 type
= TREE_TYPE (new_var
);
12419 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
12420 x
= fold_convert_loc (clause_loc
, type
, x
);
12421 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
12423 tree bias
= OMP_CLAUSE_SIZE (c
);
12425 bias
= lookup_decl (bias
, ctx
);
12426 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
12427 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
12429 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
12430 TREE_TYPE (x
), x
, bias
);
12433 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12434 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12435 if (is_ref
&& !ref_to_array
)
12437 tree t
= create_tmp_var_raw (type
, get_name (var
));
12438 gimple_add_tmp_var (t
);
12439 TREE_ADDRESSABLE (t
) = 1;
12440 gimple_seq_add_stmt (&new_body
,
12441 gimple_build_assign (t
, x
));
12442 x
= build_fold_addr_expr_loc (clause_loc
, t
);
12444 gimple_seq_add_stmt (&new_body
,
12445 gimple_build_assign (new_var
, x
));
12448 else if (OMP_CLAUSE_CHAIN (c
)
12449 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
12451 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
12452 == GOMP_MAP_FIRSTPRIVATE_POINTER
12453 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
12454 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
12457 case OMP_CLAUSE_PRIVATE
:
12458 var
= OMP_CLAUSE_DECL (c
);
12459 if (is_variable_sized (var
))
12461 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12462 tree new_var
= lookup_decl (var
, ctx
);
12463 tree pvar
= DECL_VALUE_EXPR (var
);
12464 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12465 pvar
= TREE_OPERAND (pvar
, 0);
12466 gcc_assert (DECL_P (pvar
));
12467 tree new_pvar
= lookup_decl (pvar
, ctx
);
12468 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12469 tree al
= size_int (DECL_ALIGN (var
));
12470 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
12471 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12472 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
12473 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12474 gimple_seq_add_stmt (&new_body
,
12475 gimple_build_assign (new_pvar
, x
));
12477 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
12479 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12480 tree new_var
= lookup_decl (var
, ctx
);
12481 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
12482 if (TREE_CONSTANT (x
))
12487 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12488 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
12489 tree al
= size_int (TYPE_ALIGN (rtype
));
12490 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12493 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12494 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12495 gimple_seq_add_stmt (&new_body
,
12496 gimple_build_assign (new_var
, x
));
12501 gimple_seq fork_seq
= NULL
;
12502 gimple_seq join_seq
= NULL
;
12504 if (is_oacc_parallel_or_serial (ctx
))
12506 /* If there are reductions on the offloaded region itself, treat
12507 them as a dummy GANG loop. */
12508 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
12510 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
12511 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
12514 gimple_seq_add_seq (&new_body
, fork_seq
);
12515 gimple_seq_add_seq (&new_body
, tgt_body
);
12516 gimple_seq_add_seq (&new_body
, join_seq
);
12519 new_body
= maybe_catch_exception (new_body
);
12521 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12522 gimple_omp_set_body (stmt
, new_body
);
12525 bind
= gimple_build_bind (NULL
, NULL
,
12526 tgt_bind
? gimple_bind_block (tgt_bind
)
12528 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12529 gimple_bind_add_seq (bind
, ilist
);
12530 gimple_bind_add_stmt (bind
, stmt
);
12531 gimple_bind_add_seq (bind
, olist
);
12533 pop_gimplify_context (NULL
);
12537 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12538 gimple_bind_add_stmt (dep_bind
, bind
);
12539 gimple_bind_add_seq (dep_bind
, dep_olist
);
12540 pop_gimplify_context (dep_bind
);
12544 /* Expand code for an OpenMP teams directive. */
12547 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12549 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
12550 push_gimplify_context ();
12552 tree block
= make_node (BLOCK
);
12553 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
12554 gsi_replace (gsi_p
, bind
, true);
12555 gimple_seq bind_body
= NULL
;
12556 gimple_seq dlist
= NULL
;
12557 gimple_seq olist
= NULL
;
12559 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
12560 OMP_CLAUSE_NUM_TEAMS
);
12561 if (num_teams
== NULL_TREE
)
12562 num_teams
= build_int_cst (unsigned_type_node
, 0);
12565 num_teams
= OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams
);
12566 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
12567 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
12569 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
12570 OMP_CLAUSE_THREAD_LIMIT
);
12571 if (thread_limit
== NULL_TREE
)
12572 thread_limit
= build_int_cst (unsigned_type_node
, 0);
12575 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
12576 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
12577 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
12581 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
12582 &bind_body
, &dlist
, ctx
, NULL
);
12583 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
12584 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
,
12586 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
12588 location_t loc
= gimple_location (teams_stmt
);
12589 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS
);
12590 gimple
*call
= gimple_build_call (decl
, 2, num_teams
, thread_limit
);
12591 gimple_set_location (call
, loc
);
12592 gimple_seq_add_stmt (&bind_body
, call
);
12594 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
12595 gimple_omp_set_body (teams_stmt
, NULL
);
12596 gimple_seq_add_seq (&bind_body
, olist
);
12597 gimple_seq_add_seq (&bind_body
, dlist
);
12598 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
12599 gimple_bind_set_body (bind
, bind_body
);
12601 pop_gimplify_context (bind
);
12603 gimple_bind_append_vars (bind
, ctx
->block_vars
);
12604 BLOCK_VARS (block
) = ctx
->block_vars
;
12605 if (BLOCK_VARS (block
))
12606 TREE_USED (block
) = 1;
12609 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12610 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12611 of OMP context, but with task_shared_vars set. */
12614 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
12619 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12620 if (VAR_P (t
) && data
== NULL
&& DECL_HAS_VALUE_EXPR_P (t
))
12623 if (task_shared_vars
12625 && bitmap_bit_p (task_shared_vars
, DECL_UID (t
)))
12628 /* If a global variable has been privatized, TREE_CONSTANT on
12629 ADDR_EXPR might be wrong. */
12630 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
12631 recompute_tree_invariant_for_addr_expr (t
);
12633 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
12637 /* Data to be communicated between lower_omp_regimplify_operands and
12638 lower_omp_regimplify_operands_p. */
12640 struct lower_omp_regimplify_operands_data
12646 /* Helper function for lower_omp_regimplify_operands. Find
12647 omp_member_access_dummy_var vars and adjust temporarily their
12648 DECL_VALUE_EXPRs if needed. */
12651 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
12654 tree t
= omp_member_access_dummy_var (*tp
);
12657 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
12658 lower_omp_regimplify_operands_data
*ldata
12659 = (lower_omp_regimplify_operands_data
*) wi
->info
;
12660 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
12663 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
12664 ldata
->decls
->safe_push (*tp
);
12665 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
12666 SET_DECL_VALUE_EXPR (*tp
, v
);
12669 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
12673 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12674 of omp_member_access_dummy_var vars during regimplification. */
12677 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
12678 gimple_stmt_iterator
*gsi_p
)
12680 auto_vec
<tree
, 10> decls
;
12683 struct walk_stmt_info wi
;
12684 memset (&wi
, '\0', sizeof (wi
));
12685 struct lower_omp_regimplify_operands_data data
;
12687 data
.decls
= &decls
;
12689 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
12691 gimple_regimplify_operands (stmt
, gsi_p
);
12692 while (!decls
.is_empty ())
12694 tree t
= decls
.pop ();
12695 tree v
= decls
.pop ();
12696 SET_DECL_VALUE_EXPR (t
, v
);
12701 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12703 gimple
*stmt
= gsi_stmt (*gsi_p
);
12704 struct walk_stmt_info wi
;
12707 if (gimple_has_location (stmt
))
12708 input_location
= gimple_location (stmt
);
12710 if (task_shared_vars
)
12711 memset (&wi
, '\0', sizeof (wi
));
12713 /* If we have issued syntax errors, avoid doing any heavy lifting.
12714 Just replace the OMP directives with a NOP to avoid
12715 confusing RTL expansion. */
12716 if (seen_error () && is_gimple_omp (stmt
))
12718 gsi_replace (gsi_p
, gimple_build_nop (), true);
12722 switch (gimple_code (stmt
))
12726 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
12727 if ((ctx
|| task_shared_vars
)
12728 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
12729 lower_omp_regimplify_p
,
12730 ctx
? NULL
: &wi
, NULL
)
12731 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
12732 lower_omp_regimplify_p
,
12733 ctx
? NULL
: &wi
, NULL
)))
12734 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
12738 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
12740 case GIMPLE_EH_FILTER
:
12741 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
12744 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
12745 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
12747 case GIMPLE_TRANSACTION
:
12748 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
12752 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
12753 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
12755 case GIMPLE_OMP_PARALLEL
:
12756 case GIMPLE_OMP_TASK
:
12757 ctx
= maybe_lookup_ctx (stmt
);
12759 if (ctx
->cancellable
)
12760 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12761 lower_omp_taskreg (gsi_p
, ctx
);
12763 case GIMPLE_OMP_FOR
:
12764 ctx
= maybe_lookup_ctx (stmt
);
12766 if (ctx
->cancellable
)
12767 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12768 lower_omp_for (gsi_p
, ctx
);
12770 case GIMPLE_OMP_SECTIONS
:
12771 ctx
= maybe_lookup_ctx (stmt
);
12773 if (ctx
->cancellable
)
12774 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12775 lower_omp_sections (gsi_p
, ctx
);
12777 case GIMPLE_OMP_SINGLE
:
12778 ctx
= maybe_lookup_ctx (stmt
);
12780 lower_omp_single (gsi_p
, ctx
);
12782 case GIMPLE_OMP_MASTER
:
12783 ctx
= maybe_lookup_ctx (stmt
);
12785 lower_omp_master (gsi_p
, ctx
);
12787 case GIMPLE_OMP_TASKGROUP
:
12788 ctx
= maybe_lookup_ctx (stmt
);
12790 lower_omp_taskgroup (gsi_p
, ctx
);
12792 case GIMPLE_OMP_ORDERED
:
12793 ctx
= maybe_lookup_ctx (stmt
);
12795 lower_omp_ordered (gsi_p
, ctx
);
12797 case GIMPLE_OMP_SCAN
:
12798 ctx
= maybe_lookup_ctx (stmt
);
12800 lower_omp_scan (gsi_p
, ctx
);
12802 case GIMPLE_OMP_CRITICAL
:
12803 ctx
= maybe_lookup_ctx (stmt
);
12805 lower_omp_critical (gsi_p
, ctx
);
12807 case GIMPLE_OMP_ATOMIC_LOAD
:
12808 if ((ctx
|| task_shared_vars
)
12809 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12810 as_a
<gomp_atomic_load
*> (stmt
)),
12811 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
12812 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
12814 case GIMPLE_OMP_TARGET
:
12815 ctx
= maybe_lookup_ctx (stmt
);
12817 lower_omp_target (gsi_p
, ctx
);
12819 case GIMPLE_OMP_TEAMS
:
12820 ctx
= maybe_lookup_ctx (stmt
);
12822 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
12823 lower_omp_taskreg (gsi_p
, ctx
);
12825 lower_omp_teams (gsi_p
, ctx
);
12829 call_stmt
= as_a
<gcall
*> (stmt
);
12830 fndecl
= gimple_call_fndecl (call_stmt
);
12832 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
12833 switch (DECL_FUNCTION_CODE (fndecl
))
12835 case BUILT_IN_GOMP_BARRIER
:
12839 case BUILT_IN_GOMP_CANCEL
:
12840 case BUILT_IN_GOMP_CANCELLATION_POINT
:
12843 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
12844 cctx
= cctx
->outer
;
12845 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
12846 if (!cctx
->cancellable
)
12848 if (DECL_FUNCTION_CODE (fndecl
)
12849 == BUILT_IN_GOMP_CANCELLATION_POINT
)
12851 stmt
= gimple_build_nop ();
12852 gsi_replace (gsi_p
, stmt
, false);
12856 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
12858 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
12859 gimple_call_set_fndecl (call_stmt
, fndecl
);
12860 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
12863 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
12864 gimple_call_set_lhs (call_stmt
, lhs
);
12865 tree fallthru_label
;
12866 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
12868 g
= gimple_build_label (fallthru_label
);
12869 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12870 g
= gimple_build_cond (NE_EXPR
, lhs
,
12871 fold_convert (TREE_TYPE (lhs
),
12872 boolean_false_node
),
12873 cctx
->cancel_label
, fallthru_label
);
12874 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12881 case GIMPLE_ASSIGN
:
12882 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
12884 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
12885 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
12886 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
12887 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
12888 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
12889 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
12890 && (gimple_omp_target_kind (up
->stmt
)
12891 == GF_OMP_TARGET_KIND_DATA
)))
12893 else if (!up
->lastprivate_conditional_map
)
12895 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
12896 if (TREE_CODE (lhs
) == MEM_REF
12897 && DECL_P (TREE_OPERAND (lhs
, 0))
12898 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
12899 0))) == REFERENCE_TYPE
)
12900 lhs
= TREE_OPERAND (lhs
, 0);
12902 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
12905 if (up
->combined_into_simd_safelen1
)
12908 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
12911 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
12912 clauses
= gimple_omp_for_clauses (up
->stmt
);
12914 clauses
= gimple_omp_sections_clauses (up
->stmt
);
12915 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
12916 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
12917 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
12918 OMP_CLAUSE__CONDTEMP_
);
12919 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
12920 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
12921 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12928 if ((ctx
|| task_shared_vars
)
12929 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
12932 /* Just remove clobbers, this should happen only if we have
12933 "privatized" local addressable variables in SIMD regions,
12934 the clobber isn't needed in that case and gimplifying address
12935 of the ARRAY_REF into a pointer and creating MEM_REF based
12936 clobber would create worse code than we get with the clobber
12938 if (gimple_clobber_p (stmt
))
12940 gsi_replace (gsi_p
, gimple_build_nop (), true);
12943 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
12950 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
12952 location_t saved_location
= input_location
;
12953 gimple_stmt_iterator gsi
;
12954 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
12955 lower_omp_1 (&gsi
, ctx
);
12956 /* During gimplification, we haven't folded statments inside offloading
12957 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12958 if (target_nesting_level
|| taskreg_nesting_level
)
12959 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
12961 input_location
= saved_location
;
/* Main entry point.  Scans the function body for OMP/OpenACC constructs,
   then lowers them to explicit runtime calls and data-sharing code.
   Always returns 0 (no extra TODO flags).  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  /* Map from OMP statements to their omp_context; filled in by the scan
     phase below and torn down before the pass returns.  */
  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* Phase 1: build contexts and record variables needing remapping.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Phase 2: lower the constructs, but only if the scan found any.  */
  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));

  return 0;
}
/* Pass descriptor for the "omplower" pass; consumed by the pass manager.
   Note it provides both PROP_gimple_lomp and PROP_gimple_lomp_dev.  */

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass wrapper for execute_lower_omp; no gate, the early-out for
   non-OMP compilations lives inside execute_lower_omp itself.  */

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp
13046 } // anon namespace
13049 make_pass_lower_omp (gcc::context
*ctxt
)
13051 return new pass_lower_omp (ctxt
);
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Maps each label to the innermost OMP construct enclosing it (or NULL).
   Built by diagnose_sb_1, queried by diagnose_sb_2, and destroyed at the
   end of diagnose_omp_structured_block_errors.  */
static splay_tree all_labels;
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX and LABEL_CTX are the
   innermost OMP constructs enclosing the branch at *GSI_P and its
   destination label, respectively (NULL when outside any construct).  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* Branch and label live in the same innermost context: no violation.  */
  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    /* Otherwise, be vague and lazy, but efficient.  */
    error ("invalid branch to/from %s structured block", kind);

  /* Replace the offending branch with a no-op so later passes do not
     trip over it after the error has been reported.  */
  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  /* WI->info carries the innermost enclosing OMP construct (NULL at the
     outermost level); it is saved and restored around recursive walks.  */
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Record the innermost OMP construct this label appears in; pass 2
	 compares it against the context of each branch to the label.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  /* WI->info carries the innermost enclosing OMP construct, exactly as
     maintained by diagnose_sb_1 during pass 1.  */
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  /* Check both edges of the conditional against the context
	     recorded for their destination labels in pass 1.  */
	  tree lab = gimple_cond_true_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	  lab = gimple_cond_false_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	}
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos have non-LABEL_DECL destinations; skip them.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	/* One diagnostic per switch is enough; stop at the first error.  */
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return from inside an OMP construct exits it; label context
	 is NULL since the return leaves every construct.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
13300 static unsigned int
13301 diagnose_omp_structured_block_errors (void)
13303 struct walk_stmt_info wi
;
13304 gimple_seq body
= gimple_body (current_function_decl
);
13306 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
13308 memset (&wi
, 0, sizeof (wi
));
13309 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
13311 memset (&wi
, 0, sizeof (wi
));
13312 wi
.want_locations
= true;
13313 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
13315 gimple_set_body (current_function_decl
, body
);
13317 splay_tree_delete (all_labels
);
/* Pass descriptor for the structured-block diagnostic pass; the leading
   '*' in the name keeps it out of -fdump-tree-all dumps.  */

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass wrapper for diagnose_omp_structured_block_errors; gated on any of
   the OpenACC/OpenMP/OpenMP-simd options being enabled.  */

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks
13357 } // anon namespace
13360 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
13362 return new pass_diagnose_omp_blocks (ctxt
);
13366 #include "gt-omp-low.h"