/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2022 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
62 #include "omp-offload.h"
/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* A hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
130 /* And a hash map from the allocate variables to their corresponding
132 hash_map
<tree
, tree
> *allocate_map
;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses
;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses
;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
151 /* True if this parallel directive is nested within another. */
154 /* True if this construct can be cancelled. */
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
159 bool combined_into_simd_safelen1
;
161 /* True if there is nested scan context with inclusive clause. */
164 /* True if there is nested scan context with exclusive clause. */
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase
;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent
;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p
;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec
<tree
> oacc_privatization_candidates
;
188 static splay_tree all_contexts
;
189 static int taskreg_nesting_level
;
190 static int target_nesting_level
;
191 static bitmap task_shared_vars
;
192 static bitmap global_nonaddressable_vars
;
193 static vec
<omp_context
*> taskreg_contexts
;
195 static void scan_omp (gimple_seq
*, omp_context
*);
196 static tree
scan_omp_1_op (tree
*, int *, void *);
198 #define WALK_SUBSTMTS \
202 case GIMPLE_EH_FILTER: \
203 case GIMPLE_TRANSACTION: \
204 /* The sub-statements for these should be walked. */ \
205 *handled_ops_p = false; \
208 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
209 (This doesn't include OpenACC 'kernels' decomposed parts.) */
212 is_oacc_parallel_or_serial (omp_context
*ctx
)
214 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
215 return ((outer_type
== GIMPLE_OMP_TARGET
)
216 && ((gimple_omp_target_kind (ctx
->stmt
)
217 == GF_OMP_TARGET_KIND_OACC_PARALLEL
)
218 || (gimple_omp_target_kind (ctx
->stmt
)
219 == GF_OMP_TARGET_KIND_OACC_SERIAL
)));
222 /* Return whether CTX represents an OpenACC 'kernels' construct.
223 (This doesn't include OpenACC 'kernels' decomposed parts.) */
226 is_oacc_kernels (omp_context
*ctx
)
228 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
229 return ((outer_type
== GIMPLE_OMP_TARGET
)
230 && (gimple_omp_target_kind (ctx
->stmt
)
231 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
234 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
237 is_oacc_kernels_decomposed_part (omp_context
*ctx
)
239 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
240 return ((outer_type
== GIMPLE_OMP_TARGET
)
241 && ((gimple_omp_target_kind (ctx
->stmt
)
242 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
)
243 || (gimple_omp_target_kind (ctx
->stmt
)
244 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
)
245 || (gimple_omp_target_kind (ctx
->stmt
)
246 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
)));
249 /* Return true if STMT corresponds to an OpenMP target region. */
251 is_omp_target (gimple
*stmt
)
253 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
)
255 int kind
= gimple_omp_target_kind (stmt
);
256 return (kind
== GF_OMP_TARGET_KIND_REGION
257 || kind
== GF_OMP_TARGET_KIND_DATA
258 || kind
== GF_OMP_TARGET_KIND_ENTER_DATA
259 || kind
== GF_OMP_TARGET_KIND_EXIT_DATA
);
264 /* If DECL is the artificial dummy VAR_DECL created for non-static
265 data member privatization, return the underlying "this" parameter,
266 otherwise return NULL. */
269 omp_member_access_dummy_var (tree decl
)
272 || !DECL_ARTIFICIAL (decl
)
273 || !DECL_IGNORED_P (decl
)
274 || !DECL_HAS_VALUE_EXPR_P (decl
)
275 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
278 tree v
= DECL_VALUE_EXPR (decl
);
279 if (TREE_CODE (v
) != COMPONENT_REF
)
283 switch (TREE_CODE (v
))
289 case POINTER_PLUS_EXPR
:
290 v
= TREE_OPERAND (v
, 0);
293 if (DECL_CONTEXT (v
) == current_function_decl
294 && DECL_ARTIFICIAL (v
)
295 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
303 /* Helper for unshare_and_remap, called through walk_tree. */
306 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
308 tree
*pair
= (tree
*) data
;
311 *tp
= unshare_expr (pair
[1]);
314 else if (IS_TYPE_OR_DECL_P (*tp
))
319 /* Return unshare_expr (X) with all occurrences of FROM
323 unshare_and_remap (tree x
, tree from
, tree to
)
325 tree pair
[2] = { from
, to
};
326 x
= unshare_expr (x
);
327 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
331 /* Convenience function for calling scan_omp_1_op on tree operands. */
334 scan_omp_op (tree
*tp
, omp_context
*ctx
)
336 struct walk_stmt_info wi
;
338 memset (&wi
, 0, sizeof (wi
));
340 wi
.want_locations
= true;
342 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
345 static void lower_omp (gimple_seq
*, omp_context
*);
346 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
347 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
349 /* Return true if CTX is for an omp parallel. */
352 is_parallel_ctx (omp_context
*ctx
)
354 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
358 /* Return true if CTX is for an omp task. */
361 is_task_ctx (omp_context
*ctx
)
363 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
367 /* Return true if CTX is for an omp taskloop. */
370 is_taskloop_ctx (omp_context
*ctx
)
372 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
373 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
377 /* Return true if CTX is for a host omp teams. */
380 is_host_teams_ctx (omp_context
*ctx
)
382 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
383 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
386 /* Return true if CTX is for an omp parallel or omp task or host omp teams
387 (the last one is strictly not a task region in OpenMP speak, but we
388 need to treat it similarly). */
391 is_taskreg_ctx (omp_context
*ctx
)
393 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
396 /* Return true if EXPR is variable sized. */
399 is_variable_sized (const_tree expr
)
401 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
404 /* Lookup variables. The "maybe" form
405 allows for the variable form to not have been entered, otherwise we
406 assert that the variable must have been entered. */
409 lookup_decl (tree var
, omp_context
*ctx
)
411 tree
*n
= ctx
->cb
.decl_map
->get (var
);
416 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
418 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
419 return n
? *n
: NULL_TREE
;
423 lookup_field (tree var
, omp_context
*ctx
)
426 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
427 return (tree
) n
->value
;
431 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
434 n
= splay_tree_lookup (ctx
->sfield_map
435 ? ctx
->sfield_map
: ctx
->field_map
, key
);
436 return (tree
) n
->value
;
440 lookup_sfield (tree var
, omp_context
*ctx
)
442 return lookup_sfield ((splay_tree_key
) var
, ctx
);
446 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
449 n
= splay_tree_lookup (ctx
->field_map
, key
);
450 return n
? (tree
) n
->value
: NULL_TREE
;
454 maybe_lookup_field (tree var
, omp_context
*ctx
)
456 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
459 /* Return true if DECL should be copied by pointer. SHARED_CTX is
460 the parallel context if DECL is to be shared. */
463 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
465 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
466 || TYPE_ATOMIC (TREE_TYPE (decl
)))
469 /* We can only use copy-in/copy-out semantics for shared variables
470 when we know the value is not accessible from an outer scope. */
473 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
475 /* ??? Trivially accessible from anywhere. But why would we even
476 be passing an address in this case? Should we simply assert
477 this to be false, or should we have a cleanup pass that removes
478 these from the list of mappings? */
479 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
482 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
483 without analyzing the expression whether or not its location
484 is accessible to anyone else. In the case of nested parallel
485 regions it certainly may be. */
486 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
489 /* Do not use copy-in/copy-out for variables that have their
491 if (is_global_var (decl
))
493 /* For file scope vars, track whether we've seen them as
494 non-addressable initially and in that case, keep the same
495 answer for the duration of the pass, even when they are made
496 addressable later on e.g. through reduction expansion. Global
497 variables which weren't addressable before the pass will not
498 have their privatized copies address taken. See PR91216. */
499 if (!TREE_ADDRESSABLE (decl
))
501 if (!global_nonaddressable_vars
)
502 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
503 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
505 else if (!global_nonaddressable_vars
506 || !bitmap_bit_p (global_nonaddressable_vars
,
510 else if (TREE_ADDRESSABLE (decl
))
513 /* lower_send_shared_vars only uses copy-in, but not copy-out
515 if (TREE_READONLY (decl
)
516 || ((TREE_CODE (decl
) == RESULT_DECL
517 || TREE_CODE (decl
) == PARM_DECL
)
518 && DECL_BY_REFERENCE (decl
)))
521 /* Disallow copy-in/out in nested parallel if
522 decl is shared in outer parallel, otherwise
523 each thread could store the shared variable
524 in its own copy-in location, making the
525 variable no longer really shared. */
526 if (shared_ctx
->is_nested
)
530 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
531 if ((is_taskreg_ctx (up
)
532 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
533 && is_gimple_omp_offloaded (up
->stmt
)))
534 && maybe_lookup_decl (decl
, up
))
541 if (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
)
543 for (c
= gimple_omp_target_clauses (up
->stmt
);
544 c
; c
= OMP_CLAUSE_CHAIN (c
))
545 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
546 && OMP_CLAUSE_DECL (c
) == decl
)
550 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
551 c
; c
= OMP_CLAUSE_CHAIN (c
))
552 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
553 && OMP_CLAUSE_DECL (c
) == decl
)
557 goto maybe_mark_addressable_and_ret
;
561 /* For tasks avoid using copy-in/out. As tasks can be
562 deferred or executed in different thread, when GOMP_task
563 returns, the task hasn't necessarily terminated. */
564 if (is_task_ctx (shared_ctx
))
567 maybe_mark_addressable_and_ret
:
568 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
569 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
571 /* Taking address of OUTER in lower_send_shared_vars
572 might need regimplification of everything that uses the
574 if (!task_shared_vars
)
575 task_shared_vars
= BITMAP_ALLOC (NULL
);
576 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
577 TREE_ADDRESSABLE (outer
) = 1;
586 /* Construct a new automatic decl similar to VAR. */
589 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
591 tree copy
= copy_var_decl (var
, name
, type
);
593 DECL_CONTEXT (copy
) = current_function_decl
;
597 DECL_CHAIN (copy
) = ctx
->block_vars
;
598 ctx
->block_vars
= copy
;
603 /* If VAR is listed in task_shared_vars, it means it wasn't
604 originally addressable and is just because task needs to take
605 it's address. But we don't need to take address of privatizations
607 if (TREE_ADDRESSABLE (var
)
608 && ((task_shared_vars
609 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
610 || (global_nonaddressable_vars
611 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
612 TREE_ADDRESSABLE (copy
) = 0;
618 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
620 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
623 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
625 /* See also 'gcc/omp-oacc-neuter-broadcast.cc:oacc_build_component_ref'. */
628 omp_build_component_ref (tree obj
, tree field
)
630 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
631 if (TREE_THIS_VOLATILE (field
))
632 TREE_THIS_VOLATILE (ret
) |= 1;
633 if (TREE_READONLY (field
))
634 TREE_READONLY (ret
) |= 1;
638 /* Build tree nodes to access the field for VAR on the receiver side. */
641 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
643 tree x
, field
= lookup_field (var
, ctx
);
645 /* If the receiver record type was remapped in the child function,
646 remap the field into the new record type. */
647 x
= maybe_lookup_field (field
, ctx
);
651 x
= build_simple_mem_ref (ctx
->receiver_decl
);
652 TREE_THIS_NOTRAP (x
) = 1;
653 x
= omp_build_component_ref (x
, field
);
656 x
= build_simple_mem_ref (x
);
657 TREE_THIS_NOTRAP (x
) = 1;
663 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
664 of a parallel, this is a component reference; for workshare constructs
665 this is some variable. */
668 build_outer_var_ref (tree var
, omp_context
*ctx
,
669 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
672 omp_context
*outer
= ctx
->outer
;
673 for (; outer
; outer
= outer
->outer
)
675 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
677 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCOPE
678 && !maybe_lookup_decl (var
, outer
))
683 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
685 else if (is_variable_sized (var
))
687 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
688 x
= build_outer_var_ref (x
, ctx
, code
);
689 x
= build_simple_mem_ref (x
);
691 else if (is_taskreg_ctx (ctx
))
693 bool by_ref
= use_pointer_for_field (var
, NULL
);
694 x
= build_receiver_ref (var
, by_ref
, ctx
);
696 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
697 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
699 || (code
== OMP_CLAUSE_PRIVATE
700 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
701 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
702 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
704 /* #pragma omp simd isn't a worksharing construct, and can reference
705 even private vars in its linear etc. clauses.
706 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
707 to private vars in all worksharing constructs. */
709 if (outer
&& is_taskreg_ctx (outer
))
710 x
= lookup_decl (var
, outer
);
712 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
716 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
720 = splay_tree_lookup (outer
->field_map
,
721 (splay_tree_key
) &DECL_UID (var
));
724 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
727 x
= lookup_decl (var
, outer
);
731 tree field
= (tree
) n
->value
;
732 /* If the receiver record type was remapped in the child function,
733 remap the field into the new record type. */
734 x
= maybe_lookup_field (field
, outer
);
738 x
= build_simple_mem_ref (outer
->receiver_decl
);
739 x
= omp_build_component_ref (x
, field
);
740 if (use_pointer_for_field (var
, outer
))
741 x
= build_simple_mem_ref (x
);
745 x
= lookup_decl (var
, outer
);
746 else if (omp_privatize_by_reference (var
))
747 /* This can happen with orphaned constructs. If var is reference, it is
748 possible it is shared and as such valid. */
750 else if (omp_member_access_dummy_var (var
))
757 tree t
= omp_member_access_dummy_var (var
);
760 x
= DECL_VALUE_EXPR (var
);
761 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
763 x
= unshare_and_remap (x
, t
, o
);
765 x
= unshare_expr (x
);
769 if (omp_privatize_by_reference (var
))
770 x
= build_simple_mem_ref (x
);
775 /* Build tree nodes to access the field for VAR on the sender side. */
778 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
780 tree field
= lookup_sfield (key
, ctx
);
781 return omp_build_component_ref (ctx
->sender_decl
, field
);
785 build_sender_ref (tree var
, omp_context
*ctx
)
787 return build_sender_ref ((splay_tree_key
) var
, ctx
);
790 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
791 BASE_POINTERS_RESTRICT, declare the field with restrict. */
794 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
796 tree field
, type
, sfield
= NULL_TREE
;
797 splay_tree_key key
= (splay_tree_key
) var
;
799 if ((mask
& 16) != 0)
801 key
= (splay_tree_key
) &DECL_NAME (var
);
802 gcc_checking_assert (key
!= (splay_tree_key
) var
);
806 key
= (splay_tree_key
) &DECL_UID (var
);
807 gcc_checking_assert (key
!= (splay_tree_key
) var
);
809 gcc_assert ((mask
& 1) == 0
810 || !splay_tree_lookup (ctx
->field_map
, key
));
811 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
812 || !splay_tree_lookup (ctx
->sfield_map
, key
));
813 gcc_assert ((mask
& 3) == 3
814 || !is_gimple_omp_oacc (ctx
->stmt
));
816 type
= TREE_TYPE (var
);
817 if ((mask
& 16) != 0)
818 type
= lang_hooks
.decls
.omp_array_data (var
, true);
820 /* Prevent redeclaring the var in the split-off function with a restrict
821 pointer type. Note that we only clear type itself, restrict qualifiers in
822 the pointed-to type will be ignored by points-to analysis. */
823 if (POINTER_TYPE_P (type
)
824 && TYPE_RESTRICT (type
))
825 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
829 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
830 type
= build_pointer_type (build_pointer_type (type
));
833 type
= build_pointer_type (type
);
834 else if ((mask
& (32 | 3)) == 1
835 && omp_privatize_by_reference (var
))
836 type
= TREE_TYPE (type
);
838 field
= build_decl (DECL_SOURCE_LOCATION (var
),
839 FIELD_DECL
, DECL_NAME (var
), type
);
841 /* Remember what variable this field was created for. This does have a
842 side effect of making dwarf2out ignore this member, so for helpful
843 debugging we clear it later in delete_omp_context. */
844 DECL_ABSTRACT_ORIGIN (field
) = var
;
845 if ((mask
& 16) == 0 && type
== TREE_TYPE (var
))
847 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
848 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
849 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
852 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
856 insert_field_into_struct (ctx
->record_type
, field
);
857 if (ctx
->srecord_type
)
859 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
860 FIELD_DECL
, DECL_NAME (var
), type
);
861 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
862 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
863 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
864 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
865 insert_field_into_struct (ctx
->srecord_type
, sfield
);
870 if (ctx
->srecord_type
== NULL_TREE
)
874 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
875 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
876 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
878 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
879 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
880 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
881 insert_field_into_struct (ctx
->srecord_type
, sfield
);
882 splay_tree_insert (ctx
->sfield_map
,
883 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
884 (splay_tree_value
) sfield
);
888 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
889 : ctx
->srecord_type
, field
);
893 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
894 if ((mask
& 2) && ctx
->sfield_map
)
895 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
899 install_var_local (tree var
, omp_context
*ctx
)
901 tree new_var
= omp_copy_decl_1 (var
, ctx
);
902 insert_decl_map (&ctx
->cb
, var
, new_var
);
906 /* Adjust the replacement for DECL in CTX for the new context. This means
907 copying the DECL_VALUE_EXPR, and fixing up the type. */
910 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
914 new_decl
= lookup_decl (decl
, ctx
);
916 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
918 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
919 && DECL_HAS_VALUE_EXPR_P (decl
))
921 tree ve
= DECL_VALUE_EXPR (decl
);
922 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
923 SET_DECL_VALUE_EXPR (new_decl
, ve
);
924 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
927 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
929 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
930 if (size
== error_mark_node
)
931 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
932 DECL_SIZE (new_decl
) = size
;
934 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
935 if (size
== error_mark_node
)
936 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
937 DECL_SIZE_UNIT (new_decl
) = size
;
941 /* The callback for remap_decl. Search all containing contexts for a
942 mapping of the variable; this avoids having to duplicate the splay
943 tree ahead of time. We know a mapping doesn't already exist in the
944 given context. Create new mappings to implement default semantics. */
947 omp_copy_decl (tree var
, copy_body_data
*cb
)
949 omp_context
*ctx
= (omp_context
*) cb
;
952 if (TREE_CODE (var
) == LABEL_DECL
)
954 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
956 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
957 DECL_CONTEXT (new_var
) = current_function_decl
;
958 insert_decl_map (&ctx
->cb
, var
, new_var
);
962 while (!is_taskreg_ctx (ctx
))
967 new_var
= maybe_lookup_decl (var
, ctx
);
972 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
975 return error_mark_node
;
978 /* Create a new context, with OUTER_CTX being the surrounding context. */
981 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
983 omp_context
*ctx
= XCNEW (omp_context
);
985 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
986 (splay_tree_value
) ctx
);
991 ctx
->outer
= outer_ctx
;
992 ctx
->cb
= outer_ctx
->cb
;
993 ctx
->cb
.block
= NULL
;
994 ctx
->depth
= outer_ctx
->depth
+ 1;
998 ctx
->cb
.src_fn
= current_function_decl
;
999 ctx
->cb
.dst_fn
= current_function_decl
;
1000 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
1001 gcc_checking_assert (ctx
->cb
.src_node
);
1002 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
1003 ctx
->cb
.src_cfun
= cfun
;
1004 ctx
->cb
.copy_decl
= omp_copy_decl
;
1005 ctx
->cb
.eh_lp_nr
= 0;
1006 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
1007 ctx
->cb
.adjust_array_error_bounds
= true;
1008 ctx
->cb
.dont_remap_vla_if_no_change
= true;
1012 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
1017 static gimple_seq
maybe_catch_exception (gimple_seq
);
1019 /* Finalize task copyfn. */
1022 finalize_task_copyfn (gomp_task
*task_stmt
)
1024 struct function
*child_cfun
;
1026 gimple_seq seq
= NULL
, new_seq
;
1029 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
1030 if (child_fn
== NULL_TREE
)
1033 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
1034 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
1036 push_cfun (child_cfun
);
1037 bind
= gimplify_body (child_fn
, false);
1038 gimple_seq_add_stmt (&seq
, bind
);
1039 new_seq
= maybe_catch_exception (seq
);
1042 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
1044 gimple_seq_add_stmt (&seq
, bind
);
1046 gimple_set_body (child_fn
, seq
);
1049 /* Inform the callgraph about the new function. */
1050 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
1051 node
->parallelized_function
= 1;
1052 cgraph_node::add_new_function (child_fn
, false);
1055 /* Destroy a omp_context data structures. Called through the splay tree
1056 value delete callback. */
1059 delete_omp_context (splay_tree_value value
)
1061 omp_context
*ctx
= (omp_context
*) value
;
1063 delete ctx
->cb
.decl_map
;
1066 splay_tree_delete (ctx
->field_map
);
1067 if (ctx
->sfield_map
)
1068 splay_tree_delete (ctx
->sfield_map
);
1070 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1071 it produces corrupt debug information. */
1072 if (ctx
->record_type
)
1075 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
1076 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1078 if (ctx
->srecord_type
)
1081 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
1082 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1085 if (is_task_ctx (ctx
))
1086 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
1088 if (ctx
->task_reduction_map
)
1090 ctx
->task_reductions
.release ();
1091 delete ctx
->task_reduction_map
;
1094 delete ctx
->lastprivate_conditional_map
;
1095 delete ctx
->allocate_map
;
1100 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1104 fixup_child_record_type (omp_context
*ctx
)
1106 tree f
, type
= ctx
->record_type
;
1108 if (!ctx
->receiver_decl
)
1110 /* ??? It isn't sufficient to just call remap_type here, because
1111 variably_modified_type_p doesn't work the way we expect for
1112 record types. Testing each field for whether it needs remapping
1113 and creating a new record by hand works, however. */
1114 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1115 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1119 tree name
, new_fields
= NULL
;
1121 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1122 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1123 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1124 TYPE_DECL
, name
, type
);
1125 TYPE_NAME (type
) = name
;
1127 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1129 tree new_f
= copy_node (f
);
1130 DECL_CONTEXT (new_f
) = type
;
1131 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1132 DECL_CHAIN (new_f
) = new_fields
;
1133 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1134 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1136 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1140 /* Arrange to be able to look up the receiver field
1141 given the sender field. */
1142 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1143 (splay_tree_value
) new_f
);
1145 TYPE_FIELDS (type
) = nreverse (new_fields
);
1149 /* In a target region we never modify any of the pointers in *.omp_data_i,
1150 so attempt to help the optimizers. */
1151 if (is_gimple_omp_offloaded (ctx
->stmt
))
1152 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1154 TREE_TYPE (ctx
->receiver_decl
)
1155 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1158 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1159 specified by CLAUSES. */
1162 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1165 bool scan_array_reductions
= false;
1167 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1168 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
1169 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
1170 /* omp_default_mem_alloc is 1 */
1171 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
1172 || OMP_CLAUSE_ALLOCATE_ALIGN (c
) != NULL_TREE
))
1174 if (ctx
->allocate_map
== NULL
)
1175 ctx
->allocate_map
= new hash_map
<tree
, tree
>;
1176 tree val
= integer_zero_node
;
1177 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
1178 val
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
1179 if (OMP_CLAUSE_ALLOCATE_ALIGN (c
))
1180 val
= build_tree_list (val
, OMP_CLAUSE_ALLOCATE_ALIGN (c
));
1181 ctx
->allocate_map
->put (OMP_CLAUSE_DECL (c
), val
);
1184 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1188 switch (OMP_CLAUSE_CODE (c
))
1190 case OMP_CLAUSE_PRIVATE
:
1191 decl
= OMP_CLAUSE_DECL (c
);
1192 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1194 else if (!is_variable_sized (decl
))
1195 install_var_local (decl
, ctx
);
1198 case OMP_CLAUSE_SHARED
:
1199 decl
= OMP_CLAUSE_DECL (c
);
1200 if (ctx
->allocate_map
&& ctx
->allocate_map
->get (decl
))
1201 ctx
->allocate_map
->remove (decl
);
1202 /* Ignore shared directives in teams construct inside of
1203 target construct. */
1204 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1205 && !is_host_teams_ctx (ctx
))
1207 /* Global variables don't need to be copied,
1208 the receiver side will use them directly. */
1209 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1210 if (is_global_var (odecl
))
1212 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1215 gcc_assert (is_taskreg_ctx (ctx
));
1216 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1217 || !is_variable_sized (decl
));
1218 /* Global variables don't need to be copied,
1219 the receiver side will use them directly. */
1220 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1222 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1224 use_pointer_for_field (decl
, ctx
);
1227 by_ref
= use_pointer_for_field (decl
, NULL
);
1228 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1229 || TREE_ADDRESSABLE (decl
)
1231 || omp_privatize_by_reference (decl
))
1233 by_ref
= use_pointer_for_field (decl
, ctx
);
1234 install_var_field (decl
, by_ref
, 3, ctx
);
1235 install_var_local (decl
, ctx
);
1238 /* We don't need to copy const scalar vars back. */
1239 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1242 case OMP_CLAUSE_REDUCTION
:
1243 /* Collect 'reduction' clauses on OpenACC compute construct. */
1244 if (is_gimple_omp_oacc (ctx
->stmt
)
1245 && is_gimple_omp_offloaded (ctx
->stmt
))
1247 /* No 'reduction' clauses on OpenACC 'kernels'. */
1248 gcc_checking_assert (!is_oacc_kernels (ctx
));
1249 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1250 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
1252 ctx
->local_reduction_clauses
1253 = tree_cons (NULL
, c
, ctx
->local_reduction_clauses
);
1257 case OMP_CLAUSE_IN_REDUCTION
:
1258 decl
= OMP_CLAUSE_DECL (c
);
1259 if (ctx
->allocate_map
1260 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1261 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
1262 || OMP_CLAUSE_REDUCTION_TASK (c
)))
1263 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1264 || is_task_ctx (ctx
)))
1267 if (ctx
->allocate_map
->get (decl
))
1268 ctx
->allocate_map
->remove (decl
);
1270 if (TREE_CODE (decl
) == MEM_REF
)
1272 tree t
= TREE_OPERAND (decl
, 0);
1273 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1274 t
= TREE_OPERAND (t
, 0);
1275 if (TREE_CODE (t
) == INDIRECT_REF
1276 || TREE_CODE (t
) == ADDR_EXPR
)
1277 t
= TREE_OPERAND (t
, 0);
1278 if (is_omp_target (ctx
->stmt
))
1280 if (is_variable_sized (t
))
1282 gcc_assert (DECL_HAS_VALUE_EXPR_P (t
));
1283 t
= DECL_VALUE_EXPR (t
);
1284 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
1285 t
= TREE_OPERAND (t
, 0);
1286 gcc_assert (DECL_P (t
));
1290 scan_omp_op (&at
, ctx
->outer
);
1291 tree nt
= omp_copy_decl_1 (at
, ctx
->outer
);
1292 splay_tree_insert (ctx
->field_map
,
1293 (splay_tree_key
) &DECL_CONTEXT (t
),
1294 (splay_tree_value
) nt
);
1296 splay_tree_insert (ctx
->field_map
,
1297 (splay_tree_key
) &DECL_CONTEXT (at
),
1298 (splay_tree_value
) nt
);
1301 install_var_local (t
, ctx
);
1302 if (is_taskreg_ctx (ctx
)
1303 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1304 || (is_task_ctx (ctx
)
1305 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1306 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1307 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1308 == POINTER_TYPE
)))))
1309 && !is_variable_sized (t
)
1310 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1311 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1312 && !is_task_ctx (ctx
))))
1314 by_ref
= use_pointer_for_field (t
, NULL
);
1315 if (is_task_ctx (ctx
)
1316 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1317 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1319 install_var_field (t
, false, 1, ctx
);
1320 install_var_field (t
, by_ref
, 2, ctx
);
1323 install_var_field (t
, by_ref
, 3, ctx
);
1327 if (is_omp_target (ctx
->stmt
))
1331 scan_omp_op (&at
, ctx
->outer
);
1332 tree nt
= omp_copy_decl_1 (at
, ctx
->outer
);
1333 splay_tree_insert (ctx
->field_map
,
1334 (splay_tree_key
) &DECL_CONTEXT (decl
),
1335 (splay_tree_value
) nt
);
1337 splay_tree_insert (ctx
->field_map
,
1338 (splay_tree_key
) &DECL_CONTEXT (at
),
1339 (splay_tree_value
) nt
);
1342 if (is_task_ctx (ctx
)
1343 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1344 && OMP_CLAUSE_REDUCTION_TASK (c
)
1345 && is_parallel_ctx (ctx
)))
1347 /* Global variables don't need to be copied,
1348 the receiver side will use them directly. */
1349 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1351 by_ref
= use_pointer_for_field (decl
, ctx
);
1352 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1353 install_var_field (decl
, by_ref
, 3, ctx
);
1355 install_var_local (decl
, ctx
);
1358 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1359 && OMP_CLAUSE_REDUCTION_TASK (c
))
1361 install_var_local (decl
, ctx
);
1366 case OMP_CLAUSE_LASTPRIVATE
:
1367 /* Let the corresponding firstprivate clause create
1369 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1373 case OMP_CLAUSE_FIRSTPRIVATE
:
1374 case OMP_CLAUSE_LINEAR
:
1375 decl
= OMP_CLAUSE_DECL (c
);
1377 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1378 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1379 && is_gimple_omp_offloaded (ctx
->stmt
))
1381 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1383 by_ref
= !omp_privatize_by_reference (decl
);
1384 install_var_field (decl
, by_ref
, 3, ctx
);
1386 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1387 install_var_field (decl
, true, 3, ctx
);
1389 install_var_field (decl
, false, 3, ctx
);
1391 if (is_variable_sized (decl
))
1393 if (is_task_ctx (ctx
))
1395 if (ctx
->allocate_map
1396 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1399 if (ctx
->allocate_map
->get (decl
))
1400 ctx
->allocate_map
->remove (decl
);
1402 install_var_field (decl
, false, 1, ctx
);
1406 else if (is_taskreg_ctx (ctx
))
1409 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1410 by_ref
= use_pointer_for_field (decl
, NULL
);
1412 if (is_task_ctx (ctx
)
1413 && (global
|| by_ref
|| omp_privatize_by_reference (decl
)))
1415 if (ctx
->allocate_map
1416 && ctx
->allocate_map
->get (decl
))
1417 install_var_field (decl
, by_ref
, 32 | 1, ctx
);
1419 install_var_field (decl
, false, 1, ctx
);
1421 install_var_field (decl
, by_ref
, 2, ctx
);
1424 install_var_field (decl
, by_ref
, 3, ctx
);
1426 install_var_local (decl
, ctx
);
1429 case OMP_CLAUSE_USE_DEVICE_PTR
:
1430 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1431 decl
= OMP_CLAUSE_DECL (c
);
1433 /* Fortran array descriptors. */
1434 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1435 install_var_field (decl
, false, 19, ctx
);
1436 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1437 && !omp_privatize_by_reference (decl
)
1438 && !omp_is_allocatable_or_ptr (decl
))
1439 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1440 install_var_field (decl
, true, 11, ctx
);
1442 install_var_field (decl
, false, 11, ctx
);
1443 if (DECL_SIZE (decl
)
1444 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1446 tree decl2
= DECL_VALUE_EXPR (decl
);
1447 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1448 decl2
= TREE_OPERAND (decl2
, 0);
1449 gcc_assert (DECL_P (decl2
));
1450 install_var_local (decl2
, ctx
);
1452 install_var_local (decl
, ctx
);
1455 case OMP_CLAUSE_IS_DEVICE_PTR
:
1456 decl
= OMP_CLAUSE_DECL (c
);
1459 case OMP_CLAUSE__LOOPTEMP_
:
1460 case OMP_CLAUSE__REDUCTEMP_
:
1461 gcc_assert (is_taskreg_ctx (ctx
));
1462 decl
= OMP_CLAUSE_DECL (c
);
1463 install_var_field (decl
, false, 3, ctx
);
1464 install_var_local (decl
, ctx
);
1467 case OMP_CLAUSE_COPYPRIVATE
:
1468 case OMP_CLAUSE_COPYIN
:
1469 decl
= OMP_CLAUSE_DECL (c
);
1470 by_ref
= use_pointer_for_field (decl
, NULL
);
1471 install_var_field (decl
, by_ref
, 3, ctx
);
1474 case OMP_CLAUSE_FINAL
:
1476 case OMP_CLAUSE_NUM_THREADS
:
1477 case OMP_CLAUSE_NUM_TEAMS
:
1478 case OMP_CLAUSE_THREAD_LIMIT
:
1479 case OMP_CLAUSE_DEVICE
:
1480 case OMP_CLAUSE_SCHEDULE
:
1481 case OMP_CLAUSE_DIST_SCHEDULE
:
1482 case OMP_CLAUSE_DEPEND
:
1483 case OMP_CLAUSE_PRIORITY
:
1484 case OMP_CLAUSE_GRAINSIZE
:
1485 case OMP_CLAUSE_NUM_TASKS
:
1486 case OMP_CLAUSE_NUM_GANGS
:
1487 case OMP_CLAUSE_NUM_WORKERS
:
1488 case OMP_CLAUSE_VECTOR_LENGTH
:
1489 case OMP_CLAUSE_DETACH
:
1490 case OMP_CLAUSE_FILTER
:
1492 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1496 case OMP_CLAUSE_FROM
:
1497 case OMP_CLAUSE_MAP
:
1499 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1500 decl
= OMP_CLAUSE_DECL (c
);
1501 /* Global variables with "omp declare target" attribute
1502 don't need to be copied, the receiver side will use them
1503 directly. However, global variables with "omp declare target link"
1504 attribute need to be copied. Or when ALWAYS modifier is used. */
1505 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1507 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1508 && (OMP_CLAUSE_MAP_KIND (c
)
1509 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
1510 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
1511 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
)
1512 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1513 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1514 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1515 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1516 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
1517 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1518 && varpool_node::get_create (decl
)->offloadable
1519 && !lookup_attribute ("omp declare target link",
1520 DECL_ATTRIBUTES (decl
)))
1522 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1523 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1525 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1526 not offloaded; there is nothing to map for those. */
1527 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1528 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1529 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1532 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1534 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1535 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1536 && is_omp_target (ctx
->stmt
))
1538 /* If this is an offloaded region, an attach operation should
1539 only exist when the pointer variable is mapped in a prior
1541 if (is_gimple_omp_offloaded (ctx
->stmt
))
1543 (maybe_lookup_decl (decl
, ctx
)
1544 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1545 && lookup_attribute ("omp declare target",
1546 DECL_ATTRIBUTES (decl
))));
1548 /* By itself, attach/detach is generated as part of pointer
1549 variable mapping and should not create new variables in the
1550 offloaded region, however sender refs for it must be created
1551 for its address to be passed to the runtime. */
1553 = build_decl (OMP_CLAUSE_LOCATION (c
),
1554 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1555 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1556 insert_field_into_struct (ctx
->record_type
, field
);
1557 /* To not clash with a map of the pointer variable itself,
1558 attach/detach maps have their field looked up by the *clause*
1559 tree expression, not the decl. */
1560 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1561 (splay_tree_key
) c
));
1562 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) c
,
1563 (splay_tree_value
) field
);
1566 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1567 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1568 || (OMP_CLAUSE_MAP_KIND (c
)
1569 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1571 if (TREE_CODE (decl
) == COMPONENT_REF
1572 || (TREE_CODE (decl
) == INDIRECT_REF
1573 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1574 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1575 == REFERENCE_TYPE
)))
1577 if (DECL_SIZE (decl
)
1578 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1580 tree decl2
= DECL_VALUE_EXPR (decl
);
1581 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1582 decl2
= TREE_OPERAND (decl2
, 0);
1583 gcc_assert (DECL_P (decl2
));
1584 install_var_local (decl2
, ctx
);
1586 install_var_local (decl
, ctx
);
1591 if (DECL_SIZE (decl
)
1592 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1594 tree decl2
= DECL_VALUE_EXPR (decl
);
1595 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1596 decl2
= TREE_OPERAND (decl2
, 0);
1597 gcc_assert (DECL_P (decl2
));
1598 install_var_field (decl2
, true, 3, ctx
);
1599 install_var_local (decl2
, ctx
);
1600 install_var_local (decl
, ctx
);
1604 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1605 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1606 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1607 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1608 install_var_field (decl
, true, 7, ctx
);
1610 install_var_field (decl
, true, 3, ctx
);
1611 if (is_gimple_omp_offloaded (ctx
->stmt
)
1612 && !(is_gimple_omp_oacc (ctx
->stmt
)
1613 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
1614 install_var_local (decl
, ctx
);
1619 tree base
= get_base_address (decl
);
1620 tree nc
= OMP_CLAUSE_CHAIN (c
);
1623 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1624 && OMP_CLAUSE_DECL (nc
) == base
1625 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1626 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1628 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1629 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1635 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1636 decl
= OMP_CLAUSE_DECL (c
);
1638 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1639 (splay_tree_key
) decl
));
1641 = build_decl (OMP_CLAUSE_LOCATION (c
),
1642 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1643 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1644 insert_field_into_struct (ctx
->record_type
, field
);
1645 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1646 (splay_tree_value
) field
);
1651 case OMP_CLAUSE_ORDER
:
1652 ctx
->order_concurrent
= true;
1655 case OMP_CLAUSE_BIND
:
1659 case OMP_CLAUSE_NOWAIT
:
1660 case OMP_CLAUSE_ORDERED
:
1661 case OMP_CLAUSE_COLLAPSE
:
1662 case OMP_CLAUSE_UNTIED
:
1663 case OMP_CLAUSE_MERGEABLE
:
1664 case OMP_CLAUSE_PROC_BIND
:
1665 case OMP_CLAUSE_SAFELEN
:
1666 case OMP_CLAUSE_SIMDLEN
:
1667 case OMP_CLAUSE_THREADS
:
1668 case OMP_CLAUSE_SIMD
:
1669 case OMP_CLAUSE_NOGROUP
:
1670 case OMP_CLAUSE_DEFAULTMAP
:
1671 case OMP_CLAUSE_ASYNC
:
1672 case OMP_CLAUSE_WAIT
:
1673 case OMP_CLAUSE_GANG
:
1674 case OMP_CLAUSE_WORKER
:
1675 case OMP_CLAUSE_VECTOR
:
1676 case OMP_CLAUSE_INDEPENDENT
:
1677 case OMP_CLAUSE_AUTO
:
1678 case OMP_CLAUSE_SEQ
:
1679 case OMP_CLAUSE_TILE
:
1680 case OMP_CLAUSE__SIMT_
:
1681 case OMP_CLAUSE_DEFAULT
:
1682 case OMP_CLAUSE_NONTEMPORAL
:
1683 case OMP_CLAUSE_IF_PRESENT
:
1684 case OMP_CLAUSE_FINALIZE
:
1685 case OMP_CLAUSE_TASK_REDUCTION
:
1686 case OMP_CLAUSE_ALLOCATE
:
1689 case OMP_CLAUSE_ALIGNED
:
1690 decl
= OMP_CLAUSE_DECL (c
);
1691 if (is_global_var (decl
)
1692 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1693 install_var_local (decl
, ctx
);
1696 case OMP_CLAUSE__CONDTEMP_
:
1697 decl
= OMP_CLAUSE_DECL (c
);
1698 if (is_parallel_ctx (ctx
))
1700 install_var_field (decl
, false, 3, ctx
);
1701 install_var_local (decl
, ctx
);
1703 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1704 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1705 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1706 install_var_local (decl
, ctx
);
1709 case OMP_CLAUSE__CACHE_
:
1710 case OMP_CLAUSE_NOHOST
:
1716 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1718 switch (OMP_CLAUSE_CODE (c
))
1720 case OMP_CLAUSE_LASTPRIVATE
:
1721 /* Let the corresponding firstprivate clause create
1723 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1724 scan_array_reductions
= true;
1725 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1729 case OMP_CLAUSE_FIRSTPRIVATE
:
1730 case OMP_CLAUSE_PRIVATE
:
1731 case OMP_CLAUSE_LINEAR
:
1732 case OMP_CLAUSE_IS_DEVICE_PTR
:
1733 decl
= OMP_CLAUSE_DECL (c
);
1734 if (is_variable_sized (decl
))
1736 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1737 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1738 && is_gimple_omp_offloaded (ctx
->stmt
))
1740 tree decl2
= DECL_VALUE_EXPR (decl
);
1741 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1742 decl2
= TREE_OPERAND (decl2
, 0);
1743 gcc_assert (DECL_P (decl2
));
1744 install_var_local (decl2
, ctx
);
1745 fixup_remapped_decl (decl2
, ctx
, false);
1747 install_var_local (decl
, ctx
);
1749 fixup_remapped_decl (decl
, ctx
,
1750 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1751 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1752 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1753 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1754 scan_array_reductions
= true;
1757 case OMP_CLAUSE_REDUCTION
:
1758 case OMP_CLAUSE_IN_REDUCTION
:
1759 decl
= OMP_CLAUSE_DECL (c
);
1760 if (TREE_CODE (decl
) != MEM_REF
&& !is_omp_target (ctx
->stmt
))
1762 if (is_variable_sized (decl
))
1763 install_var_local (decl
, ctx
);
1764 fixup_remapped_decl (decl
, ctx
, false);
1766 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1767 scan_array_reductions
= true;
1770 case OMP_CLAUSE_TASK_REDUCTION
:
1771 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1772 scan_array_reductions
= true;
1775 case OMP_CLAUSE_SHARED
:
1776 /* Ignore shared directives in teams construct inside of
1777 target construct. */
1778 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1779 && !is_host_teams_ctx (ctx
))
1781 decl
= OMP_CLAUSE_DECL (c
);
1782 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1784 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1786 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1789 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1790 install_var_field (decl
, by_ref
, 11, ctx
);
1793 fixup_remapped_decl (decl
, ctx
, false);
1796 case OMP_CLAUSE_MAP
:
1797 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1799 decl
= OMP_CLAUSE_DECL (c
);
1801 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1802 && (OMP_CLAUSE_MAP_KIND (c
)
1803 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1804 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1805 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1806 && varpool_node::get_create (decl
)->offloadable
)
1808 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1809 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1810 && is_omp_target (ctx
->stmt
)
1811 && !is_gimple_omp_offloaded (ctx
->stmt
))
1815 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1816 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1817 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1818 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1820 tree new_decl
= lookup_decl (decl
, ctx
);
1821 TREE_TYPE (new_decl
)
1822 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1824 else if (DECL_SIZE (decl
)
1825 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1827 tree decl2
= DECL_VALUE_EXPR (decl
);
1828 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1829 decl2
= TREE_OPERAND (decl2
, 0);
1830 gcc_assert (DECL_P (decl2
));
1831 fixup_remapped_decl (decl2
, ctx
, false);
1832 fixup_remapped_decl (decl
, ctx
, true);
1835 fixup_remapped_decl (decl
, ctx
, false);
1839 case OMP_CLAUSE_COPYPRIVATE
:
1840 case OMP_CLAUSE_COPYIN
:
1841 case OMP_CLAUSE_DEFAULT
:
1843 case OMP_CLAUSE_NUM_THREADS
:
1844 case OMP_CLAUSE_NUM_TEAMS
:
1845 case OMP_CLAUSE_THREAD_LIMIT
:
1846 case OMP_CLAUSE_DEVICE
:
1847 case OMP_CLAUSE_SCHEDULE
:
1848 case OMP_CLAUSE_DIST_SCHEDULE
:
1849 case OMP_CLAUSE_NOWAIT
:
1850 case OMP_CLAUSE_ORDERED
:
1851 case OMP_CLAUSE_COLLAPSE
:
1852 case OMP_CLAUSE_UNTIED
:
1853 case OMP_CLAUSE_FINAL
:
1854 case OMP_CLAUSE_MERGEABLE
:
1855 case OMP_CLAUSE_PROC_BIND
:
1856 case OMP_CLAUSE_SAFELEN
:
1857 case OMP_CLAUSE_SIMDLEN
:
1858 case OMP_CLAUSE_ALIGNED
:
1859 case OMP_CLAUSE_DEPEND
:
1860 case OMP_CLAUSE_DETACH
:
1861 case OMP_CLAUSE_ALLOCATE
:
1862 case OMP_CLAUSE__LOOPTEMP_
:
1863 case OMP_CLAUSE__REDUCTEMP_
:
1865 case OMP_CLAUSE_FROM
:
1866 case OMP_CLAUSE_PRIORITY
:
1867 case OMP_CLAUSE_GRAINSIZE
:
1868 case OMP_CLAUSE_NUM_TASKS
:
1869 case OMP_CLAUSE_THREADS
:
1870 case OMP_CLAUSE_SIMD
:
1871 case OMP_CLAUSE_NOGROUP
:
1872 case OMP_CLAUSE_DEFAULTMAP
:
1873 case OMP_CLAUSE_ORDER
:
1874 case OMP_CLAUSE_BIND
:
1875 case OMP_CLAUSE_USE_DEVICE_PTR
:
1876 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1877 case OMP_CLAUSE_NONTEMPORAL
:
1878 case OMP_CLAUSE_ASYNC
:
1879 case OMP_CLAUSE_WAIT
:
1880 case OMP_CLAUSE_NUM_GANGS
:
1881 case OMP_CLAUSE_NUM_WORKERS
:
1882 case OMP_CLAUSE_VECTOR_LENGTH
:
1883 case OMP_CLAUSE_GANG
:
1884 case OMP_CLAUSE_WORKER
:
1885 case OMP_CLAUSE_VECTOR
:
1886 case OMP_CLAUSE_INDEPENDENT
:
1887 case OMP_CLAUSE_AUTO
:
1888 case OMP_CLAUSE_SEQ
:
1889 case OMP_CLAUSE_TILE
:
1890 case OMP_CLAUSE__SIMT_
:
1891 case OMP_CLAUSE_IF_PRESENT
:
1892 case OMP_CLAUSE_FINALIZE
:
1893 case OMP_CLAUSE_FILTER
:
1894 case OMP_CLAUSE__CONDTEMP_
:
1897 case OMP_CLAUSE__CACHE_
:
1898 case OMP_CLAUSE_NOHOST
:
1904 gcc_checking_assert (!scan_array_reductions
1905 || !is_gimple_omp_oacc (ctx
->stmt
));
1906 if (scan_array_reductions
)
1908 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1909 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1910 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1911 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1912 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1914 omp_context
*rctx
= ctx
;
1915 if (is_omp_target (ctx
->stmt
))
1917 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), rctx
);
1918 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), rctx
);
1920 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1921 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1922 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1923 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1924 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1925 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1929 /* Create a new name for omp child function. Returns an identifier. */
1932 create_omp_child_function_name (bool task_copy
)
1934 return clone_function_name_numbered (current_function_decl
,
1935 task_copy
? "_omp_cpyfn" : "_omp_fn");
1938 /* Return true if CTX may belong to offloaded code: either if current function
1939 is offloaded, or any enclosing context corresponds to a target region. */
1942 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1944 if (cgraph_node::get (current_function_decl
)->offloadable
)
1946 for (; ctx
; ctx
= ctx
->outer
)
1947 if (is_gimple_omp_offloaded (ctx
->stmt
))
1952 /* Build a decl for the omp child function. It'll not contain a body
1953 yet, just the bare decl. */
1956 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1958 tree decl
, type
, name
, t
;
1960 name
= create_omp_child_function_name (task_copy
);
1962 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1963 ptr_type_node
, NULL_TREE
);
1965 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1967 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1969 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1972 ctx
->cb
.dst_fn
= decl
;
1974 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1976 TREE_STATIC (decl
) = 1;
1977 TREE_USED (decl
) = 1;
1978 DECL_ARTIFICIAL (decl
) = 1;
1979 DECL_IGNORED_P (decl
) = 0;
1980 TREE_PUBLIC (decl
) = 0;
1981 DECL_UNINLINABLE (decl
) = 1;
1982 DECL_EXTERNAL (decl
) = 0;
1983 DECL_CONTEXT (decl
) = NULL_TREE
;
1984 DECL_INITIAL (decl
) = make_node (BLOCK
);
1985 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1986 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1987 /* Remove omp declare simd attribute from the new attributes. */
1988 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1990 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1993 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1994 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1995 *p
= TREE_CHAIN (*p
);
1998 tree chain
= TREE_CHAIN (*p
);
1999 *p
= copy_node (*p
);
2000 p
= &TREE_CHAIN (*p
);
2004 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
2005 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
2006 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
2007 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
2008 DECL_FUNCTION_VERSIONED (decl
)
2009 = DECL_FUNCTION_VERSIONED (current_function_decl
);
2011 if (omp_maybe_offloaded_ctx (ctx
))
2013 cgraph_node::get_create (decl
)->offloadable
= 1;
2014 if (ENABLE_OFFLOADING
)
2015 g
->have_offload
= true;
2018 if (cgraph_node::get_create (decl
)->offloadable
)
2020 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
2021 ? "omp target entrypoint"
2022 : "omp declare target");
2023 if (lookup_attribute ("omp declare target",
2024 DECL_ATTRIBUTES (current_function_decl
)))
2026 if (is_gimple_omp_offloaded (ctx
->stmt
))
2027 DECL_ATTRIBUTES (decl
)
2028 = remove_attribute ("omp declare target",
2029 copy_list (DECL_ATTRIBUTES (decl
)));
2034 DECL_ATTRIBUTES (decl
)
2035 = tree_cons (get_identifier (target_attr
),
2036 NULL_TREE
, DECL_ATTRIBUTES (decl
));
2039 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2040 RESULT_DECL
, NULL_TREE
, void_type_node
);
2041 DECL_ARTIFICIAL (t
) = 1;
2042 DECL_IGNORED_P (t
) = 1;
2043 DECL_CONTEXT (t
) = decl
;
2044 DECL_RESULT (decl
) = t
;
2046 tree data_name
= get_identifier (".omp_data_i");
2047 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
2049 DECL_ARTIFICIAL (t
) = 1;
2050 DECL_NAMELESS (t
) = 1;
2051 DECL_ARG_TYPE (t
) = ptr_type_node
;
2052 DECL_CONTEXT (t
) = current_function_decl
;
2054 TREE_READONLY (t
) = 1;
2055 DECL_ARGUMENTS (decl
) = t
;
2057 ctx
->receiver_decl
= t
;
2060 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2061 PARM_DECL
, get_identifier (".omp_data_o"),
2063 DECL_ARTIFICIAL (t
) = 1;
2064 DECL_NAMELESS (t
) = 1;
2065 DECL_ARG_TYPE (t
) = ptr_type_node
;
2066 DECL_CONTEXT (t
) = current_function_decl
;
2068 TREE_ADDRESSABLE (t
) = 1;
2069 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
2070 DECL_ARGUMENTS (decl
) = t
;
2073 /* Allocate memory for the function structure. The call to
2074 allocate_struct_function clobbers CFUN, so we need to restore
2076 push_struct_function (decl
);
2077 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
2078 init_tree_ssa (cfun
);
2082 /* Callback for walk_gimple_seq. Check if combined parallel
2083 contains gimple_omp_for_combined_into_p OMP_FOR. */
2086 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
2087 bool *handled_ops_p
,
2088 struct walk_stmt_info
*wi
)
2090 gimple
*stmt
= gsi_stmt (*gsi_p
);
2092 *handled_ops_p
= true;
2093 switch (gimple_code (stmt
))
2097 case GIMPLE_OMP_FOR
:
2098 if (gimple_omp_for_combined_into_p (stmt
)
2099 && gimple_omp_for_kind (stmt
)
2100 == *(const enum gf_mask
*) (wi
->info
))
2103 return integer_zero_node
;
2112 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2115 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
2116 omp_context
*outer_ctx
)
2118 struct walk_stmt_info wi
;
2120 memset (&wi
, 0, sizeof (wi
));
2122 wi
.info
= (void *) &msk
;
2123 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
2124 if (wi
.info
!= (void *) &msk
)
2126 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
2127 struct omp_for_data fd
;
2128 omp_extract_for_data (for_stmt
, &fd
, NULL
);
2129 /* We need two temporaries with fd.loop.v type (istart/iend)
2130 and then (fd.collapse - 1) temporaries with the same
2131 type for count2 ... countN-1 vars if not constant. */
2132 size_t count
= 2, i
;
2133 tree type
= fd
.iter_type
;
2135 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
2137 count
+= fd
.collapse
- 1;
2138 /* If there are lastprivate clauses on the inner
2139 GIMPLE_OMP_FOR, add one more temporaries for the total number
2140 of iterations (product of count1 ... countN-1). */
2141 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
2142 OMP_CLAUSE_LASTPRIVATE
)
2143 || (msk
== GF_OMP_FOR_KIND_FOR
2144 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2145 OMP_CLAUSE_LASTPRIVATE
)))
2147 tree temp
= create_tmp_var (type
);
2148 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2149 OMP_CLAUSE__LOOPTEMP_
);
2150 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2151 OMP_CLAUSE_DECL (c
) = temp
;
2152 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2153 gimple_omp_taskreg_set_clauses (stmt
, c
);
2156 && fd
.last_nonrect
== fd
.first_nonrect
+ 1)
2157 if (tree v
= gimple_omp_for_index (for_stmt
, fd
.last_nonrect
))
2158 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
2160 v
= gimple_omp_for_index (for_stmt
, fd
.first_nonrect
);
2161 tree type2
= TREE_TYPE (v
);
2163 for (i
= 0; i
< 3; i
++)
2165 tree temp
= create_tmp_var (type2
);
2166 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2167 OMP_CLAUSE__LOOPTEMP_
);
2168 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2169 OMP_CLAUSE_DECL (c
) = temp
;
2170 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2171 gimple_omp_taskreg_set_clauses (stmt
, c
);
2175 for (i
= 0; i
< count
; i
++)
2177 tree temp
= create_tmp_var (type
);
2178 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
2179 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2180 OMP_CLAUSE_DECL (c
) = temp
;
2181 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2182 gimple_omp_taskreg_set_clauses (stmt
, c
);
2185 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
2186 && omp_find_clause (gimple_omp_task_clauses (stmt
),
2187 OMP_CLAUSE_REDUCTION
))
2189 tree type
= build_pointer_type (pointer_sized_int_node
);
2190 tree temp
= create_tmp_var (type
);
2191 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2192 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2193 OMP_CLAUSE_DECL (c
) = temp
;
2194 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
2195 gimple_omp_task_set_clauses (stmt
, c
);
2199 /* Scan an OpenMP parallel directive. */
/* NOTE(review): this chunk is a fragmentary extraction; original line numbers
   are embedded in the text and several statements (function header, braces,
   some conditions) are elided.  Comments below annotate only visible logic.  */
2202 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
/* Fetch the GIMPLE_OMP_PARALLEL statement at the iterator.  */
2206 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
2208 /* Ignore parallel directives with empty bodies, unless there
2209 are copyin clauses. */
2211 && empty_body_p (gimple_omp_body (stmt
))
2212 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2213 OMP_CLAUSE_COPYIN
) == NULL
)
/* Empty parallel with no copyin: replace with a no-op statement.  */
2215 gsi_replace (gsi
, gimple_build_nop (), false);
2219 if (gimple_omp_parallel_combined_p (stmt
))
2220 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
/* Walk reduction clauses; for a task reduction, prepend an artificial
   _reductemp_ clause holding a pointer-sized temporary.  */
2221 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2222 OMP_CLAUSE_REDUCTION
);
2223 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
2224 if (OMP_CLAUSE_REDUCTION_TASK (c
))
2226 tree type
= build_pointer_type (pointer_sized_int_node
);
2227 tree temp
= create_tmp_var (type
);
2228 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
/* Map the temporary to itself in the outer context's copy body data
   (presumably guarded by an elided 'if (outer_ctx)' — confirm).  */
2230 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2231 OMP_CLAUSE_DECL (c
) = temp
;
2232 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
2233 gimple_omp_parallel_set_clauses (stmt
, c
);
2236 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
/* Create a new scanning context and register it for later
   finish_taskreg_scan processing.  */
2239 ctx
= new_omp_context (stmt
, outer_ctx
);
2240 taskreg_contexts
.safe_push (ctx
);
2241 if (taskreg_nesting_level
> 1)
2242 ctx
->is_nested
= true;
2243 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
/* Build the .omp_data_s record that carries shared data to the child fn.  */
2244 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2245 name
= create_tmp_var_name (".omp_data_s");
2246 name
= build_decl (gimple_location (stmt
),
2247 TYPE_DECL
, name
, ctx
->record_type
);
2248 DECL_ARTIFICIAL (name
) = 1;
2249 DECL_NAMELESS (name
) = 1;
2250 TYPE_NAME (ctx
->record_type
) = name
;
2251 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
/* Outline the parallel body into a child function.  */
2252 create_omp_child_function (ctx
, false);
2253 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2255 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
2256 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* Nothing ended up in the record: drop it and the receiver decl.  */
2258 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2259 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2262 /* Scan an OpenMP task directive. */
/* NOTE(review): fragmentary extraction; function header, local decls and
   several control-flow lines are elided.  Annotations cover visible code.  */
2265 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2269 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
2271 /* Ignore task directives with empty bodies, unless they have depend
2274 && gimple_omp_body (stmt
)
2275 && empty_body_p (gimple_omp_body (stmt
))
2276 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
/* Empty task with no depend clause: replace it with a no-op.  */
2278 gsi_replace (gsi
, gimple_build_nop (), false);
2282 if (gimple_omp_task_taskloop_p (stmt
))
2283 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
2285 ctx
= new_omp_context (stmt
, outer_ctx
);
/* A taskwait-with-depend form only needs its clauses scanned; presumably
   an elided early return follows — confirm against upstream.  */
2287 if (gimple_omp_task_taskwait_p (stmt
))
2289 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2293 taskreg_contexts
.safe_push (ctx
);
2294 if (taskreg_nesting_level
> 1)
2295 ctx
->is_nested
= true;
2296 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
/* Build the .omp_data_s record type for data passed to the task child fn.  */
2297 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2298 name
= create_tmp_var_name (".omp_data_s");
2299 name
= build_decl (gimple_location (stmt
),
2300 TYPE_DECL
, name
, ctx
->record_type
);
2301 DECL_ARTIFICIAL (name
) = 1;
2302 DECL_NAMELESS (name
) = 1;
2303 TYPE_NAME (ctx
->record_type
) = name
;
2304 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2305 create_omp_child_function (ctx
, false);
2306 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2308 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
/* If scanning created a sender-side record (.omp_data_a), name it and
   outline a second (sender) child function.  */
2310 if (ctx
->srecord_type
)
2312 name
= create_tmp_var_name (".omp_data_a");
2313 name
= build_decl (gimple_location (stmt
),
2314 TYPE_DECL
, name
, ctx
->srecord_type
);
2315 DECL_ARTIFICIAL (name
) = 1;
2316 DECL_NAMELESS (name
) = 1;
2317 TYPE_NAME (ctx
->srecord_type
) = name
;
2318 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2319 create_omp_child_function (ctx
, true);
2322 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* Empty record: drop it and set trivial size (0) and alignment (1)
   arguments for the GOMP_task call.  */
2324 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2326 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2327 t
= build_int_cst (long_integer_type_node
, 0);
2328 gimple_omp_task_set_arg_size (stmt
, t
);
2329 t
= build_int_cst (long_integer_type_node
, 1);
2330 gimple_omp_task_set_arg_align (stmt
, t
);
2334 /* Helper function for finish_taskreg_scan, called through walk_tree.
2335 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2336 tree, replace it in the expression. */
/* NOTE(review): walk_tree callback signature; the replacement and return
   statements are elided in this extraction — only lookups are visible.  */
2339 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
/* DATA is the omp_context passed by the caller (finish_taskreg_scan).  */
2343 omp_context
*ctx
= (omp_context
*) data
;
2344 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
/* Prefer the decl's DEBUG/VALUE expression if one is attached.  */
2347 if (DECL_HAS_VALUE_EXPR_P (t
))
2348 t
= unshare_expr (DECL_VALUE_EXPR (t
));
/* Types and decls need no sub-walk (presumably *walk_subtrees = 0
   in an elided line — confirm).  */
2353 else if (IS_TYPE_OR_DECL_P (*tp
))
2358 /* If any decls have been made addressable during scan_omp,
2359 adjust their fields if needed, and layout record types
2360 of parallel/task constructs. */
/* NOTE(review): fragmentary extraction; braces, early returns and some
   conditions are elided.  Comments annotate only the visible logic.  */
2363 finish_taskreg_scan (omp_context
*ctx
)
/* Nothing to lay out if no record was created for this context.  */
2365 if (ctx
->record_type
== NULL_TREE
)
2368 /* If any task_shared_vars were needed, verify all
2369 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2370 statements if use_pointer_for_field hasn't changed
2371 because of that. If it did, update field types now. */
2372 if (task_shared_vars
)
2376 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2377 c
; c
= OMP_CLAUSE_CHAIN (c
))
2378 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2379 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2381 tree decl
= OMP_CLAUSE_DECL (c
);
2383 /* Global variables don't need to be copied,
2384 the receiver side will use them directly. */
2385 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2387 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2388 || !use_pointer_for_field (decl
, ctx
))
/* The decl became addressable: turn its field into a pointer and
   reset volatility/alignment accordingly.  */
2390 tree field
= lookup_field (decl
, ctx
);
2391 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2392 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2394 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2395 TREE_THIS_VOLATILE (field
) = 0;
2396 DECL_USER_ALIGN (field
) = 0;
2397 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2398 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2399 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
/* Keep the sender-side record's field in sync with the receiver's.  */
2400 if (ctx
->srecord_type
)
2402 tree sfield
= lookup_sfield (decl
, ctx
);
2403 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2404 TREE_THIS_VOLATILE (sfield
) = 0;
2405 DECL_USER_ALIGN (sfield
) = 0;
2406 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2407 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2408 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
/* PARALLEL: possibly reorder the _reductemp_ field to offset 0, then
   lay out the record.  */
2413 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2415 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2416 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2419 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2420 expects to find it at the start of data. */
2421 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2422 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
/* Unlink F from wherever it sits in the field chain...  */
2426 *p
= DECL_CHAIN (*p
);
2430 p
= &DECL_CHAIN (*p
);
/* ...and re-link it at the head of TYPE_FIELDS.  */
2431 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2432 TYPE_FIELDS (ctx
->record_type
) = f
;
2434 layout_type (ctx
->record_type
);
2435 fixup_child_record_type (ctx
);
/* TEAMS: no reordering needed, just lay out.  */
2437 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2439 layout_type (ctx
->record_type
);
2440 fixup_child_record_type (ctx
);
/* Otherwise: a TASK context (presumably — the discriminating branch is
   elided).  */
2444 location_t loc
= gimple_location (ctx
->stmt
);
2445 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
/* Elided lookup of the detach clause from the task's clause list.  */
2447 = omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
2449 /* Move VLA fields to the end. */
2450 p
= &TYPE_FIELDS (ctx
->record_type
);
/* A field whose type has no constant size is a VLA field: splice it
   onto the VLA_FIELDS list kept through Q.  */
2452 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2453 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2456 *p
= TREE_CHAIN (*p
);
2457 TREE_CHAIN (*q
) = NULL_TREE
;
2458 q
= &TREE_CHAIN (*q
);
2461 p
= &DECL_CHAIN (*p
);
2463 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2465 /* Move fields corresponding to first and second _looptemp_
2466 clause first. There are filled by GOMP_taskloop
2467 and thus need to be in specific positions. */
2468 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2469 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2470 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2471 OMP_CLAUSE__LOOPTEMP_
);
2472 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2473 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2474 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2475 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
/* Unlink f1/f2/f3 from the receiver record's field chain.  */
2476 p
= &TYPE_FIELDS (ctx
->record_type
);
2478 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2479 *p
= DECL_CHAIN (*p
);
2481 p
= &DECL_CHAIN (*p
);
/* Re-link them at the head in order f1, f2 [, f3].  */
2482 DECL_CHAIN (f1
) = f2
;
2485 DECL_CHAIN (f2
) = f3
;
2486 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2489 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2490 TYPE_FIELDS (ctx
->record_type
) = f1
;
/* Mirror the same reordering in the sender-side record, if any.  */
2491 if (ctx
->srecord_type
)
2493 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2494 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2496 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2497 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2499 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2500 *p
= DECL_CHAIN (*p
);
2502 p
= &DECL_CHAIN (*p
);
2503 DECL_CHAIN (f1
) = f2
;
2504 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2507 DECL_CHAIN (f2
) = f3
;
2508 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2511 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2512 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2519 /* Look for a firstprivate clause with the detach event handle. */
2520 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2521 c
; c
= OMP_CLAUSE_CHAIN (c
))
2523 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
2525 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c
), ctx
)
2526 == OMP_CLAUSE_DECL (detach_clause
))
2531 field
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2533 /* Move field corresponding to the detach clause first.
2534 This is filled by GOMP_task and needs to be in a
2535 specific position. */
2536 p
= &TYPE_FIELDS (ctx
->record_type
);
2539 *p
= DECL_CHAIN (*p
);
2541 p
= &DECL_CHAIN (*p
);
2542 DECL_CHAIN (field
) = TYPE_FIELDS (ctx
->record_type
);
2543 TYPE_FIELDS (ctx
->record_type
) = field
;
2544 if (ctx
->srecord_type
)
2546 field
= lookup_sfield (OMP_CLAUSE_DECL (c
), ctx
);
2547 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2550 *p
= DECL_CHAIN (*p
);
2552 p
= &DECL_CHAIN (*p
);
2553 DECL_CHAIN (field
) = TYPE_FIELDS (ctx
->srecord_type
);
2554 TYPE_FIELDS (ctx
->srecord_type
) = field
;
/* Final layout of both records, then compute the size/align arguments
   for GOMP_task.  */
2557 layout_type (ctx
->record_type
);
2558 fixup_child_record_type (ctx
);
2559 if (ctx
->srecord_type
)
2560 layout_type (ctx
->srecord_type
);
2561 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2562 TYPE_SIZE_UNIT (ctx
->record_type
));
/* A non-constant size (VLA fields) must be remapped into the outer
   context before it can be evaluated there.  */
2563 if (TREE_CODE (t
) != INTEGER_CST
)
2565 t
= unshare_expr (t
);
2566 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2568 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2569 t
= build_int_cst (long_integer_type_node
,
2570 TYPE_ALIGN_UNIT (ctx
->record_type
));
2571 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2575 /* Find the enclosing offload context. */
/* Walk outward through the context chain; return the first context whose
   statement is a GIMPLE_OMP_TARGET (the return itself is elided here).  */
2577 static omp_context
*
2578 enclosing_target_ctx (omp_context
*ctx
)
2580 for (; ctx
; ctx
= ctx
->outer
)
2581 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2587 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2589 (This doesn't include OpenACC 'kernels' decomposed parts.) */
/* Walk outward looking for a GIMPLE_OMP_TARGET of OACC_KERNELS kind; the
   return statements are elided in this extraction.  */
2592 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2594 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2596 gimple
*stmt
= ctx
->stmt
;
2597 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2598 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2605 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2606 (This doesn't include OpenACC 'kernels' decomposed parts.)
2607 Until kernels handling moves to use the same loop indirection
2608 scheme as parallel, we need to do this checking early. */
/* NOTE(review): recursive check — called with STMT == NULL when recursing
   on outer contexts; `checking` then presumably gets cleared in an elided
   line so diagnostics are only emitted at the innermost level.  */
2611 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2613 bool checking
= true;
2614 unsigned outer_mask
= 0;
2615 unsigned this_mask
= 0;
2616 bool has_seq
= false, has_auto
= false;
/* Accumulate the parallelism mask used by enclosing loops.  */
2619 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2623 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2625 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
/* Collect gang/worker/vector/seq/auto specifiers on this loop.  */
2628 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2630 switch (OMP_CLAUSE_CODE (c
))
2632 case OMP_CLAUSE_GANG
:
2633 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2635 case OMP_CLAUSE_WORKER
:
2636 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2638 case OMP_CLAUSE_VECTOR
:
2639 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2641 case OMP_CLAUSE_SEQ
:
2644 case OMP_CLAUSE_AUTO
:
/* seq excludes all other specifiers; auto excludes explicit g/w/v.  */
2654 if (has_seq
&& (this_mask
|| has_auto
))
2655 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2656 " OpenACC loop specifiers");
2657 else if (has_auto
&& this_mask
)
2658 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2659 " OpenACC loop specifiers");
/* An inner loop may not reuse a parallelism axis already claimed by an
   enclosing loop.  */
2661 if (this_mask
& outer_mask
)
2662 error_at (gimple_location (stmt
), "inner loop uses same"
2663 " OpenACC parallelism as containing loop");
2666 return outer_mask
| this_mask
;
2669 /* Scan a GIMPLE_OMP_FOR. */
/* NOTE(review): fragmentary extraction; braces, some conditions and the
   return of the new context are elided.  Annotations cover visible code.  */
2671 static omp_context
*
2672 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2676 tree clauses
= gimple_omp_for_clauses (stmt
);
2678 ctx
= new_omp_context (stmt
, outer_ctx
);
/* OpenACC loop: outside a 'kernels' region, arguments on gang/worker/
   vector clauses are diagnosed as errors.  */
2680 if (is_gimple_omp_oacc (stmt
))
2682 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2684 if (!(tgt
&& is_oacc_kernels (tgt
)))
2685 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2688 switch (OMP_CLAUSE_CODE (c
))
2690 case OMP_CLAUSE_GANG
:
2691 c_op0
= OMP_CLAUSE_GANG_EXPR (c
);
2694 case OMP_CLAUSE_WORKER
:
2695 c_op0
= OMP_CLAUSE_WORKER_EXPR (c
);
2698 case OMP_CLAUSE_VECTOR
:
2699 c_op0
= OMP_CLAUSE_VECTOR_EXPR (c
);
2708 /* By construction, this is impossible for OpenACC 'kernels'
2709 decomposed parts. */
2710 gcc_assert (!(tgt
&& is_oacc_kernels_decomposed_part (tgt
)));
2712 error_at (OMP_CLAUSE_LOCATION (c
),
2713 "argument not permitted on %qs clause",
2714 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
2716 inform (gimple_location (tgt
->stmt
),
2717 "enclosing parent compute construct");
2718 else if (oacc_get_fn_attrib (current_function_decl
))
2719 inform (DECL_SOURCE_LOCATION (current_function_decl
),
2720 "enclosing routine");
/* Inside a 'kernels' region check gang/worker/vector nesting instead.  */
2726 if (tgt
&& is_oacc_kernels (tgt
))
2727 check_oacc_kernel_gwv (stmt
, ctx
);
2729 /* Collect all variables named in reductions on this loop. Ensure
2730 that, if this loop has a reduction on some variable v, and there is
2731 a reduction on v somewhere in an outer context, then there is a
2732 reduction on v on all intervening loops as well. */
2733 tree local_reduction_clauses
= NULL
;
2734 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2736 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
2737 local_reduction_clauses
2738 = tree_cons (NULL
, c
, local_reduction_clauses
);
/* Lazily compute the transitive set of outer reduction clauses.  */
2740 if (ctx
->outer_reduction_clauses
== NULL
&& ctx
->outer
!= NULL
)
2741 ctx
->outer_reduction_clauses
2742 = chainon (unshare_expr (ctx
->outer
->local_reduction_clauses
),
2743 ctx
->outer
->outer_reduction_clauses
);
2744 tree outer_reduction_clauses
= ctx
->outer_reduction_clauses
;
2745 tree local_iter
= local_reduction_clauses
;
/* Cross-check each local reduction against outer reductions.  */
2746 for (; local_iter
; local_iter
= TREE_CHAIN (local_iter
))
2748 tree local_clause
= TREE_VALUE (local_iter
);
2749 tree local_var
= OMP_CLAUSE_DECL (local_clause
);
2750 tree_code local_op
= OMP_CLAUSE_REDUCTION_CODE (local_clause
);
2751 bool have_outer_reduction
= false;
2752 tree ctx_iter
= outer_reduction_clauses
;
2753 for (; ctx_iter
; ctx_iter
= TREE_CHAIN (ctx_iter
))
2755 tree outer_clause
= TREE_VALUE (ctx_iter
);
2756 tree outer_var
= OMP_CLAUSE_DECL (outer_clause
);
2757 tree_code outer_op
= OMP_CLAUSE_REDUCTION_CODE (outer_clause
);
/* Same variable reduced with a different operation: warn.  */
2758 if (outer_var
== local_var
&& outer_op
!= local_op
)
2760 warning_at (OMP_CLAUSE_LOCATION (local_clause
), 0,
2761 "conflicting reduction operations for %qE",
2763 inform (OMP_CLAUSE_LOCATION (outer_clause
),
2764 "location of the previous reduction for %qE",
2767 if (outer_var
== local_var
)
2769 have_outer_reduction
= true;
2773 if (have_outer_reduction
)
2775 /* There is a reduction on outer_var both on this loop and on
2776 some enclosing loop. Walk up the context tree until such a
2777 loop with a reduction on outer_var is found, and complain
2778 about all intervening loops that do not have such a
2780 struct omp_context
*curr_loop
= ctx
->outer
;
2782 while (curr_loop
!= NULL
)
2784 tree curr_iter
= curr_loop
->local_reduction_clauses
;
2785 for (; curr_iter
; curr_iter
= TREE_CHAIN (curr_iter
))
2787 tree curr_clause
= TREE_VALUE (curr_iter
);
2788 tree curr_var
= OMP_CLAUSE_DECL (curr_clause
);
2789 if (curr_var
== local_var
)
2796 warning_at (gimple_location (curr_loop
->stmt
), 0,
2797 "nested loop in reduction needs "
2798 "reduction clause for %qE",
2802 curr_loop
= curr_loop
->outer
;
/* Record this loop's reduction sets for inner loops to consult.  */
2806 ctx
->local_reduction_clauses
= local_reduction_clauses
;
2807 ctx
->outer_reduction_clauses
2808 = chainon (unshare_expr (ctx
->local_reduction_clauses
),
2809 ctx
->outer_reduction_clauses
);
2811 if (tgt
&& is_oacc_kernels (tgt
))
2813 /* Strip out reductions, as they are not handled yet. */
2814 tree
*prev_ptr
= &clauses
;
2816 while (tree probe
= *prev_ptr
)
2818 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2820 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2821 *prev_ptr
= *next_ptr
;
2823 prev_ptr
= next_ptr
;
2826 gimple_omp_for_set_clauses (stmt
, clauses
);
/* Standard scan: clauses, pre-body, per-dimension control operands, body.  */
2830 scan_sharing_clauses (clauses
, ctx
);
2832 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2833 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2835 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2836 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2837 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2838 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2840 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2844 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
/* Builds: if (GOMP_USE_SIMT ()) { SIMT copy of the loop } else { original
   loop }, wrapped in a GIMPLE bind, then scans both copies.  */
2847 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2848 omp_context
*outer_ctx
)
2850 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2851 gsi_replace (gsi
, bind
, false);
2852 gimple_seq seq
= NULL
;
/* COND holds the runtime result of the IFN_GOMP_USE_SIMT query.  */
2853 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2854 tree cond
= create_tmp_var_raw (integer_type_node
);
2855 DECL_CONTEXT (cond
) = current_function_decl
;
2856 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2857 gimple_bind_set_vars (bind
, cond
);
2858 gimple_call_set_lhs (g
, cond
);
2859 gimple_seq_add_stmt (&seq
, g
);
/* lab1 = SIMT branch, lab2 = SIMD branch, lab3 = join.  */
2860 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2861 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2862 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2863 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2864 gimple_seq_add_stmt (&seq
, g
);
2865 g
= gimple_build_label (lab1
);
2866 gimple_seq_add_stmt (&seq
, g
);
/* Deep-copy the loop for the SIMT variant and tag it with _simt_.  */
2867 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2868 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2869 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2870 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2871 gimple_omp_for_set_clauses (new_stmt
, clause
);
2872 gimple_seq_add_stmt (&seq
, new_stmt
);
2873 g
= gimple_build_goto (lab3
);
2874 gimple_seq_add_stmt (&seq
, g
);
2875 g
= gimple_build_label (lab2
);
2876 gimple_seq_add_stmt (&seq
, g
);
/* The original loop becomes the non-SIMT branch.  */
2877 gimple_seq_add_stmt (&seq
, stmt
);
2878 g
= gimple_build_label (lab3
);
2879 gimple_seq_add_stmt (&seq
, g
);
2880 gimple_bind_set_body (bind
, seq
);
/* Scan both variants; remember the SIMT twin on the original's context.  */
2882 scan_omp_for (new_stmt
, outer_ctx
);
2883 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2886 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2887 struct walk_stmt_info
*);
2888 static omp_context
*maybe_lookup_ctx (gimple
*);
2890 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2891 for scan phase loop. */
/* NOTE(review): fragmentary extraction; some statements are elided.
   Overall shape: wrap the original loop in a GIMPLE_OMP_SCAN input phase,
   a deep copy in a GIMPLE_OMP_SCAN scan phase, then scan both.  */
2894 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2895 omp_context
*outer_ctx
)
2897 /* The only change between inclusive and exclusive scan will be
2898 within the first simd loop, so just use inclusive in the
2899 worksharing loop. */
2900 outer_ctx
->scan_inclusive
= true;
2901 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2902 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
2904 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2905 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2906 gsi_replace (gsi
, input_stmt
, false);
2907 gimple_seq input_body
= NULL
;
2908 gimple_seq_add_stmt (&input_body
, stmt
);
2909 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
/* Locate the inner GIMPLE_OMP_SCAN separator inside the original loop
   body via the omp_find_scan walker.  */
2911 gimple_stmt_iterator input1_gsi
= gsi_none ();
2912 struct walk_stmt_info wi
;
2913 memset (&wi
, 0, sizeof (wi
));
2915 wi
.info
= (void *) &input1_gsi
;
2916 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2917 gcc_assert (!gsi_end_p (input1_gsi
));
2919 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
2920 gsi_next (&input1_gsi
);
2921 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
2922 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
/* For exclusive scans the two halves are swapped.  */
2923 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
2924 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2925 std::swap (input_stmt1
, scan_stmt1
);
/* Temporarily detach the input-phase body so the deep copy below does
   not duplicate it.  */
2927 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
2928 gimple_omp_set_body (input_stmt1
, NULL
);
2930 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
2931 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
2933 gimple_omp_set_body (input_stmt1
, input_body1
);
2934 gimple_omp_set_body (scan_stmt1
, NULL
);
/* Repeat the scan-separator lookup inside the copied loop.  */
2936 gimple_stmt_iterator input2_gsi
= gsi_none ();
2937 memset (&wi
, 0, sizeof (wi
));
2939 wi
.info
= (void *) &input2_gsi
;
2940 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
2942 gcc_assert (!gsi_end_p (input2_gsi
));
2944 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
2945 gsi_next (&input2_gsi
);
2946 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
2947 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
2948 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2949 std::swap (input_stmt2
, scan_stmt2
);
2951 gimple_omp_set_body (input_stmt2
, NULL
);
/* Attach the original loop as the input phase and the copy as the scan
   phase, then scan both under fresh contexts.  */
2953 gimple_omp_set_body (input_stmt
, input_body
);
2954 gimple_omp_set_body (scan_stmt
, scan_body
);
2956 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
2957 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
2959 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
2960 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
2962 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
2965 /* Scan an OpenMP sections directive. */
/* Simple case: new context, scan clauses, scan body.  Function header and
   braces are elided in this extraction.  */
2968 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2972 ctx
= new_omp_context (stmt
, outer_ctx
);
2973 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2974 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2977 /* Scan an OpenMP single directive. */
/* Builds a .omp_copy_s record used by copyprivate; if the record ends up
   empty it is dropped, otherwise laid out (branch keywords elided).  */
2980 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2985 ctx
= new_omp_context (stmt
, outer_ctx
);
2986 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2987 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2988 name
= create_tmp_var_name (".omp_copy_s");
2989 name
= build_decl (gimple_location (stmt
),
2990 TYPE_DECL
, name
, ctx
->record_type
);
2991 TYPE_NAME (ctx
->record_type
) = name
;
2993 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2994 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2996 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2997 ctx
->record_type
= NULL
;
2999 layout_type (ctx
->record_type
);
3002 /* Scan a GIMPLE_OMP_TARGET. */
/* NOTE(review): fragmentary extraction; several conditions/braces elided.
   Builds the .omp_data_t record describing mapped data, outlines a child
   function for offloaded regions, and validates teams nesting.  */
3005 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
3009 bool offloaded
= is_gimple_omp_offloaded (stmt
);
3010 tree clauses
= gimple_omp_target_clauses (stmt
);
3012 ctx
= new_omp_context (stmt
, outer_ctx
);
3013 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3014 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3015 name
= create_tmp_var_name (".omp_data_t");
3016 name
= build_decl (gimple_location (stmt
),
3017 TYPE_DECL
, name
, ctx
->record_type
);
3018 DECL_ARTIFICIAL (name
) = 1;
3019 DECL_NAMELESS (name
) = 1;
3020 TYPE_NAME (ctx
->record_type
) = name
;
3021 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
/* Presumably guarded by 'if (offloaded)' (elided) — confirm.  */
3025 create_omp_child_function (ctx
, false);
3026 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
3029 scan_sharing_clauses (clauses
, ctx
);
3030 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3032 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3033 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
/* Fields were pushed in reverse; restore declaration order before
   layout.  */
3036 TYPE_FIELDS (ctx
->record_type
)
3037 = nreverse (TYPE_FIELDS (ctx
->record_type
));
/* Checking-only: all mapped fields are expected to share one alignment
   (presumably under ENABLE_CHECKING/flag_checking — elided).  */
3040 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
3041 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
3043 field
= DECL_CHAIN (field
))
3044 gcc_assert (DECL_ALIGN (field
) == align
);
3046 layout_type (ctx
->record_type
);
3048 fixup_child_record_type (ctx
);
/* A target region mixing a nested teams with statements outside it is
   invalid; diagnose and discard the body.  */
3051 if (ctx
->teams_nested_p
&& ctx
->nonteams_nested_p
)
3053 error_at (gimple_location (stmt
),
3054 "%<target%> construct with nested %<teams%> construct "
3055 "contains directives outside of the %<teams%> construct");
3056 gimple_omp_set_body (stmt
, gimple_build_bind (NULL
, NULL
, NULL
));
3060 /* Scan an OpenMP teams directive. */
/* Non-host teams are scanned directly; host teams additionally behave like
   a taskreg construct with a .omp_data_s record and outlined child fn
   (the early return after the non-host branch is elided here).  */
3063 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
3065 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
3067 if (!gimple_omp_teams_host (stmt
))
3069 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
3070 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* Host teams: treated like parallel/task for data-sharing layout.  */
3073 taskreg_contexts
.safe_push (ctx
);
3074 gcc_assert (taskreg_nesting_level
== 1);
3075 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3076 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3077 tree name
= create_tmp_var_name (".omp_data_s");
3078 name
= build_decl (gimple_location (stmt
),
3079 TYPE_DECL
, name
, ctx
->record_type
);
3080 DECL_ARTIFICIAL (name
) = 1;
3081 DECL_NAMELESS (name
) = 1;
3082 TYPE_NAME (ctx
->record_type
) = name
;
3083 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
3084 create_omp_child_function (ctx
, false);
3085 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
3087 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
3088 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3090 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3091 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
3094 /* Check nesting restrictions. */
3096 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
3100 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3101 inside an OpenACC CTX. */
3102 if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3103 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
)
3104 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3106 else if (!(is_gimple_omp (stmt
)
3107 && is_gimple_omp_oacc (stmt
)))
3109 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3111 error_at (gimple_location (stmt
),
3112 "non-OpenACC construct inside of OpenACC routine");
3116 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
3117 if (is_gimple_omp (octx
->stmt
)
3118 && is_gimple_omp_oacc (octx
->stmt
))
3120 error_at (gimple_location (stmt
),
3121 "non-OpenACC construct inside of OpenACC region");
3128 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
3129 && gimple_omp_target_kind (ctx
->stmt
) == GF_OMP_TARGET_KIND_REGION
)
3131 c
= omp_find_clause (gimple_omp_target_clauses (ctx
->stmt
),
3133 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
3135 error_at (gimple_location (stmt
),
3136 "OpenMP constructs are not allowed in target region "
3137 "with %<ancestor%>");
3141 if (gimple_code (stmt
) == GIMPLE_OMP_TEAMS
&& !ctx
->teams_nested_p
)
3142 ctx
->teams_nested_p
= true;
3144 ctx
->nonteams_nested_p
= true;
3146 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
3148 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
3150 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3151 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3155 if (ctx
->order_concurrent
3156 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
3157 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3158 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
3160 error_at (gimple_location (stmt
),
3161 "OpenMP constructs other than %<parallel%>, %<loop%>"
3162 " or %<simd%> may not be nested inside a region with"
3163 " the %<order(concurrent)%> clause");
3166 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
3168 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3169 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3171 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
3172 && (ctx
->outer
== NULL
3173 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
3174 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
3175 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
3176 != GF_OMP_FOR_KIND_FOR
)
3177 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
3179 error_at (gimple_location (stmt
),
3180 "%<ordered simd threads%> must be closely "
3181 "nested inside of %<%s simd%> region",
3182 lang_GNU_Fortran () ? "do" : "for");
3188 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3189 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
3190 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
3192 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
3193 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
3195 error_at (gimple_location (stmt
),
3196 "OpenMP constructs other than "
3197 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3198 "not be nested inside %<simd%> region");
3201 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
3203 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
3204 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
3205 && omp_find_clause (gimple_omp_for_clauses (stmt
),
3206 OMP_CLAUSE_BIND
) == NULL_TREE
))
3207 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
3209 error_at (gimple_location (stmt
),
3210 "only %<distribute%>, %<parallel%> or %<loop%> "
3211 "regions are allowed to be strictly nested inside "
3212 "%<teams%> region");
3216 else if (ctx
->order_concurrent
3217 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
3218 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
3219 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
3220 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
3223 error_at (gimple_location (stmt
),
3224 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3225 "%<simd%> may not be nested inside a %<loop%> region");
3227 error_at (gimple_location (stmt
),
3228 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3229 "%<simd%> may not be nested inside a region with "
3230 "the %<order(concurrent)%> clause");
3234 switch (gimple_code (stmt
))
3236 case GIMPLE_OMP_FOR
:
3237 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
3239 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
3241 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
3243 error_at (gimple_location (stmt
),
3244 "%<distribute%> region must be strictly nested "
3245 "inside %<teams%> construct");
3250 /* We split taskloop into task and nested taskloop in it. */
3251 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3253 /* For now, hope this will change and loop bind(parallel) will not
3254 be allowed in lots of contexts. */
3255 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
3256 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
3258 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
3263 switch (gimple_code (ctx
->stmt
))
3265 case GIMPLE_OMP_FOR
:
3266 ok
= (gimple_omp_for_kind (ctx
->stmt
)
3267 == GF_OMP_FOR_KIND_OACC_LOOP
);
3270 case GIMPLE_OMP_TARGET
:
3271 switch (gimple_omp_target_kind (ctx
->stmt
))
3273 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3274 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3275 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3276 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3277 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3288 else if (oacc_get_fn_attrib (current_function_decl
))
3292 error_at (gimple_location (stmt
),
3293 "OpenACC loop directive must be associated with"
3294 " an OpenACC compute region");
3300 if (is_gimple_call (stmt
)
3301 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3302 == BUILT_IN_GOMP_CANCEL
3303 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3304 == BUILT_IN_GOMP_CANCELLATION_POINT
))
3306 const char *bad
= NULL
;
3307 const char *kind
= NULL
;
3308 const char *construct
3309 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3310 == BUILT_IN_GOMP_CANCEL
)
3312 : "cancellation point";
3315 error_at (gimple_location (stmt
), "orphaned %qs construct",
3319 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
3320 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
3324 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
3326 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3327 == BUILT_IN_GOMP_CANCEL
3328 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3329 ctx
->cancellable
= true;
3333 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3334 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
3336 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3337 == BUILT_IN_GOMP_CANCEL
3338 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3340 ctx
->cancellable
= true;
3341 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3343 warning_at (gimple_location (stmt
), 0,
3344 "%<cancel for%> inside "
3345 "%<nowait%> for construct");
3346 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3347 OMP_CLAUSE_ORDERED
))
3348 warning_at (gimple_location (stmt
), 0,
3349 "%<cancel for%> inside "
3350 "%<ordered%> for construct");
3355 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
3356 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
3358 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3359 == BUILT_IN_GOMP_CANCEL
3360 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3362 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
3364 ctx
->cancellable
= true;
3365 if (omp_find_clause (gimple_omp_sections_clauses
3368 warning_at (gimple_location (stmt
), 0,
3369 "%<cancel sections%> inside "
3370 "%<nowait%> sections construct");
3374 gcc_assert (ctx
->outer
3375 && gimple_code (ctx
->outer
->stmt
)
3376 == GIMPLE_OMP_SECTIONS
);
3377 ctx
->outer
->cancellable
= true;
3378 if (omp_find_clause (gimple_omp_sections_clauses
3381 warning_at (gimple_location (stmt
), 0,
3382 "%<cancel sections%> inside "
3383 "%<nowait%> sections construct");
3389 if (!is_task_ctx (ctx
)
3390 && (!is_taskloop_ctx (ctx
)
3391 || ctx
->outer
== NULL
3392 || !is_task_ctx (ctx
->outer
)))
3396 for (omp_context
*octx
= ctx
->outer
;
3397 octx
; octx
= octx
->outer
)
3399 switch (gimple_code (octx
->stmt
))
3401 case GIMPLE_OMP_TASKGROUP
:
3403 case GIMPLE_OMP_TARGET
:
3404 if (gimple_omp_target_kind (octx
->stmt
)
3405 != GF_OMP_TARGET_KIND_REGION
)
3408 case GIMPLE_OMP_PARALLEL
:
3409 case GIMPLE_OMP_TEAMS
:
3410 error_at (gimple_location (stmt
),
3411 "%<%s taskgroup%> construct not closely "
3412 "nested inside of %<taskgroup%> region",
3415 case GIMPLE_OMP_TASK
:
3416 if (gimple_omp_task_taskloop_p (octx
->stmt
)
3418 && is_taskloop_ctx (octx
->outer
))
3421 = gimple_omp_for_clauses (octx
->outer
->stmt
);
3422 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
3431 ctx
->cancellable
= true;
3436 error_at (gimple_location (stmt
), "invalid arguments");
3441 error_at (gimple_location (stmt
),
3442 "%<%s %s%> construct not closely nested inside of %qs",
3443 construct
, kind
, bad
);
3448 case GIMPLE_OMP_SECTIONS
:
3449 case GIMPLE_OMP_SINGLE
:
3450 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3451 switch (gimple_code (ctx
->stmt
))
3453 case GIMPLE_OMP_FOR
:
3454 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3455 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3458 case GIMPLE_OMP_SECTIONS
:
3459 case GIMPLE_OMP_SINGLE
:
3460 case GIMPLE_OMP_ORDERED
:
3461 case GIMPLE_OMP_MASTER
:
3462 case GIMPLE_OMP_MASKED
:
3463 case GIMPLE_OMP_TASK
:
3464 case GIMPLE_OMP_CRITICAL
:
3465 if (is_gimple_call (stmt
))
3467 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3468 != BUILT_IN_GOMP_BARRIER
)
3470 error_at (gimple_location (stmt
),
3471 "barrier region may not be closely nested inside "
3472 "of work-sharing, %<loop%>, %<critical%>, "
3473 "%<ordered%>, %<master%>, %<masked%>, explicit "
3474 "%<task%> or %<taskloop%> region");
3477 error_at (gimple_location (stmt
),
3478 "work-sharing region may not be closely nested inside "
3479 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3480 "%<master%>, %<masked%>, explicit %<task%> or "
3481 "%<taskloop%> region");
3483 case GIMPLE_OMP_PARALLEL
:
3484 case GIMPLE_OMP_TEAMS
:
3486 case GIMPLE_OMP_TARGET
:
3487 if (gimple_omp_target_kind (ctx
->stmt
)
3488 == GF_OMP_TARGET_KIND_REGION
)
3495 case GIMPLE_OMP_MASTER
:
3496 case GIMPLE_OMP_MASKED
:
3497 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3498 switch (gimple_code (ctx
->stmt
))
3500 case GIMPLE_OMP_FOR
:
3501 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3502 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3505 case GIMPLE_OMP_SECTIONS
:
3506 case GIMPLE_OMP_SINGLE
:
3507 case GIMPLE_OMP_TASK
:
3508 error_at (gimple_location (stmt
),
3509 "%qs region may not be closely nested inside "
3510 "of work-sharing, %<loop%>, explicit %<task%> or "
3511 "%<taskloop%> region",
3512 gimple_code (stmt
) == GIMPLE_OMP_MASTER
3513 ? "master" : "masked");
3515 case GIMPLE_OMP_PARALLEL
:
3516 case GIMPLE_OMP_TEAMS
:
3518 case GIMPLE_OMP_TARGET
:
3519 if (gimple_omp_target_kind (ctx
->stmt
)
3520 == GF_OMP_TARGET_KIND_REGION
)
3527 case GIMPLE_OMP_SCOPE
:
3528 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3529 switch (gimple_code (ctx
->stmt
))
3531 case GIMPLE_OMP_FOR
:
3532 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3533 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3536 case GIMPLE_OMP_SECTIONS
:
3537 case GIMPLE_OMP_SINGLE
:
3538 case GIMPLE_OMP_TASK
:
3539 case GIMPLE_OMP_CRITICAL
:
3540 case GIMPLE_OMP_ORDERED
:
3541 case GIMPLE_OMP_MASTER
:
3542 case GIMPLE_OMP_MASKED
:
3543 error_at (gimple_location (stmt
),
3544 "%<scope%> region may not be closely nested inside "
3545 "of work-sharing, %<loop%>, explicit %<task%>, "
3546 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3547 "or %<masked%> region");
3549 case GIMPLE_OMP_PARALLEL
:
3550 case GIMPLE_OMP_TEAMS
:
3552 case GIMPLE_OMP_TARGET
:
3553 if (gimple_omp_target_kind (ctx
->stmt
)
3554 == GF_OMP_TARGET_KIND_REGION
)
3561 case GIMPLE_OMP_TASK
:
3562 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3563 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3564 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3565 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3567 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3568 error_at (OMP_CLAUSE_LOCATION (c
),
3569 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3570 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3574 case GIMPLE_OMP_ORDERED
:
3575 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3576 c
; c
= OMP_CLAUSE_CHAIN (c
))
3578 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
3580 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3581 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3584 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3585 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
3586 || kind
== OMP_CLAUSE_DEPEND_SINK
)
3589 /* Look for containing ordered(N) loop. */
3591 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3593 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3594 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3596 error_at (OMP_CLAUSE_LOCATION (c
),
3597 "%<ordered%> construct with %<depend%> clause "
3598 "must be closely nested inside an %<ordered%> "
3602 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
3604 error_at (OMP_CLAUSE_LOCATION (c
),
3605 "%<ordered%> construct with %<depend%> clause "
3606 "must be closely nested inside a loop with "
3607 "%<ordered%> clause with a parameter");
3613 error_at (OMP_CLAUSE_LOCATION (c
),
3614 "invalid depend kind in omp %<ordered%> %<depend%>");
3618 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3619 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3621 /* ordered simd must be closely nested inside of simd region,
3622 and simd region must not encounter constructs other than
3623 ordered simd, therefore ordered simd may be either orphaned,
3624 or ctx->stmt must be simd. The latter case is handled already
3628 error_at (gimple_location (stmt
),
3629 "%<ordered%> %<simd%> must be closely nested inside "
3634 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3635 switch (gimple_code (ctx
->stmt
))
3637 case GIMPLE_OMP_CRITICAL
:
3638 case GIMPLE_OMP_TASK
:
3639 case GIMPLE_OMP_ORDERED
:
3640 ordered_in_taskloop
:
3641 error_at (gimple_location (stmt
),
3642 "%<ordered%> region may not be closely nested inside "
3643 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3644 "%<taskloop%> region");
3646 case GIMPLE_OMP_FOR
:
3647 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3648 goto ordered_in_taskloop
;
3650 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3651 OMP_CLAUSE_ORDERED
);
3654 error_at (gimple_location (stmt
),
3655 "%<ordered%> region must be closely nested inside "
3656 "a loop region with an %<ordered%> clause");
3659 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
3660 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
3662 error_at (gimple_location (stmt
),
3663 "%<ordered%> region without %<depend%> clause may "
3664 "not be closely nested inside a loop region with "
3665 "an %<ordered%> clause with a parameter");
3669 case GIMPLE_OMP_TARGET
:
3670 if (gimple_omp_target_kind (ctx
->stmt
)
3671 != GF_OMP_TARGET_KIND_REGION
)
3674 case GIMPLE_OMP_PARALLEL
:
3675 case GIMPLE_OMP_TEAMS
:
3676 error_at (gimple_location (stmt
),
3677 "%<ordered%> region must be closely nested inside "
3678 "a loop region with an %<ordered%> clause");
3684 case GIMPLE_OMP_CRITICAL
:
3687 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3688 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3689 if (gomp_critical
*other_crit
3690 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3691 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3693 error_at (gimple_location (stmt
),
3694 "%<critical%> region may not be nested inside "
3695 "a %<critical%> region with the same name");
3700 case GIMPLE_OMP_TEAMS
:
3703 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3704 || (gimple_omp_target_kind (ctx
->stmt
)
3705 != GF_OMP_TARGET_KIND_REGION
))
3707 /* Teams construct can appear either strictly nested inside of
3708 target construct with no intervening stmts, or can be encountered
3709 only by initial task (so must not appear inside any OpenMP
3711 error_at (gimple_location (stmt
),
3712 "%<teams%> construct must be closely nested inside of "
3713 "%<target%> construct or not nested in any OpenMP "
3718 case GIMPLE_OMP_TARGET
:
3719 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3720 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3721 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3722 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3724 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3725 error_at (OMP_CLAUSE_LOCATION (c
),
3726 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3727 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3730 if (is_gimple_omp_offloaded (stmt
)
3731 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3733 error_at (gimple_location (stmt
),
3734 "OpenACC region inside of OpenACC routine, nested "
3735 "parallelism not supported yet");
3738 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3740 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3742 if (is_gimple_omp (stmt
)
3743 && is_gimple_omp_oacc (stmt
)
3744 && is_gimple_omp (ctx
->stmt
))
3746 error_at (gimple_location (stmt
),
3747 "OpenACC construct inside of non-OpenACC region");
3753 const char *stmt_name
, *ctx_stmt_name
;
3754 switch (gimple_omp_target_kind (stmt
))
3756 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3757 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3758 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3759 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3760 stmt_name
= "target enter data"; break;
3761 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3762 stmt_name
= "target exit data"; break;
3763 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3764 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3765 case GF_OMP_TARGET_KIND_OACC_SERIAL
: stmt_name
= "serial"; break;
3766 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3767 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3768 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
3769 stmt_name
= "enter data"; break;
3770 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
3771 stmt_name
= "exit data"; break;
3772 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3773 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3775 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3776 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3777 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3778 /* OpenACC 'kernels' decomposed parts. */
3779 stmt_name
= "kernels"; break;
3780 default: gcc_unreachable ();
3782 switch (gimple_omp_target_kind (ctx
->stmt
))
3784 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3785 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3786 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3787 ctx_stmt_name
= "parallel"; break;
3788 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3789 ctx_stmt_name
= "kernels"; break;
3790 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3791 ctx_stmt_name
= "serial"; break;
3792 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3793 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3794 ctx_stmt_name
= "host_data"; break;
3795 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3796 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3797 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3798 /* OpenACC 'kernels' decomposed parts. */
3799 ctx_stmt_name
= "kernels"; break;
3800 default: gcc_unreachable ();
3803 /* OpenACC/OpenMP mismatch? */
3804 if (is_gimple_omp_oacc (stmt
)
3805 != is_gimple_omp_oacc (ctx
->stmt
))
3807 error_at (gimple_location (stmt
),
3808 "%s %qs construct inside of %s %qs region",
3809 (is_gimple_omp_oacc (stmt
)
3810 ? "OpenACC" : "OpenMP"), stmt_name
,
3811 (is_gimple_omp_oacc (ctx
->stmt
)
3812 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3815 if (is_gimple_omp_offloaded (ctx
->stmt
))
3817 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3818 if (is_gimple_omp_oacc (ctx
->stmt
))
3820 error_at (gimple_location (stmt
),
3821 "%qs construct inside of %qs region",
3822 stmt_name
, ctx_stmt_name
);
3827 warning_at (gimple_location (stmt
), 0,
3828 "%qs construct inside of %qs region",
3829 stmt_name
, ctx_stmt_name
);
3841 /* Helper function scan_omp.
3843 Callback for walk_tree or operators in walk_gimple_stmt used to
3844 scan for OMP directives in TP.  */
/* NOTE(review): this listing is a lossy extract -- original lines between
   the numbered statements (braces, case labels, returns) are missing.  */
3847 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
/* DATA is the generic gimple-walker state; its info slot carries the
   current omp_context (stashed by scan_omp).  */
3849 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3850 omp_context *ctx = (omp_context *) wi->info;
3853 switch (TREE_CODE (t))
/* Decl operand: replace it by its remapped copy for this context.  */
3861 tree repl = remap_decl (t, &ctx->cb);
3862 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
/* Type operands are remapped directly; other non-decl operands get
   their TREE_TYPE remapped instead.  */
3868 if (ctx && TYPE_P (t))
3869 *tp = remap_type (t, &ctx->cb);
3870 else if (!DECL_P (t))
3875 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3876 if (tem != TREE_TYPE (t))
/* For an INTEGER_CST build a fresh constant in the remapped type
   (presumably because constants may be shared -- TODO confirm);
   otherwise rewrite TREE_TYPE in place.  */
3878 if (TREE_CODE (t) == INTEGER_CST)
3879 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3881 TREE_TYPE (t) = tem;
3891 /* Return true if FNDECL is a setjmp or a longjmp.  */
/* NOTE(review): lossy extract -- some original lines between the numbered
   statements are missing.  */
3894 setjmp_or_longjmp_p (const_tree fndecl)
/* Recognize the recorded builtins first.  */
3896 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3897 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3900 tree declname = DECL_NAME (fndecl);
/* Only a public function declared at translation-unit scope can be the
   real setjmp/longjmp; nested or non-public decls are rejected.  */
3902 || (DECL_CONTEXT (fndecl) != NULL_TREE
3903 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3904 || !TREE_PUBLIC (fndecl))
/* Otherwise fall back to matching by name.  */
3907 const char *name = IDENTIFIER_POINTER (declname);
3908 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3911 /* Return true if FNDECL is an omp_* runtime API call.  */
/* NOTE(review): lossy extract -- many table entries and the early-return
   lines are missing from this listing.  */
3914 omp_runtime_api_call (const_tree fndecl)
3916 tree declname = DECL_NAME (fndecl);
/* Same screening as setjmp_or_longjmp_p: only public decls at
   translation-unit scope can be the OpenMP runtime entry points.  */
3918 || (DECL_CONTEXT (fndecl) != NULL_TREE
3919 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3920 || !TREE_PUBLIC (fndecl))
3923 const char *name = IDENTIFIER_POINTER (declname);
/* All runtime API entry points share the "omp_" prefix.  */
3924 if (!startswith (name, "omp_"))
/* Table of known entry points, stored without the "omp_" prefix.  */
3927 static const char *omp_runtime_apis [] =
3929 /* This array has 3 sections.  First omp_* calls that don't
3930 have any suffixes.  */
3938 "target_associate_ptr",
3939 "target_disassociate_ptr",
3941 "target_is_present",
3943 "target_memcpy_rect",
3945 /* Now omp_* calls that are available as omp_* and omp_*_; however, the
3946 DECL_NAME is always omp_* without trailing underscore.  */
3948 "destroy_allocator",
3950 "destroy_nest_lock",
3954 "get_affinity_format",
3956 "get_default_allocator",
3957 "get_default_device",
3960 "get_initial_device",
3962 "get_max_active_levels",
3963 "get_max_task_priority",
3972 "get_partition_num_places",
3975 "get_supported_active_levels",
3977 "get_teams_thread_limit",
3986 "is_initial_device",
3988 "pause_resource_all",
3989 "set_affinity_format",
3990 "set_default_allocator",
3998 /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
3999 as DECL_NAME only omp_* and omp_*_8 appear.  */
4001 "get_ancestor_thread_num",
4003 "get_partition_place_nums",
4004 "get_place_num_procs",
4005 "get_place_proc_ids",
4008 "set_default_device",
4010 "set_max_active_levels",
4015 "set_teams_thread_limit"
/* Scan the table; NULL entries apparently separate the three sections
   (mode tracks which section we are in -- TODO confirm, the updates to
   mode are among the missing lines).  */
4019 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
4021 if (omp_runtime_apis [i] == NULL)
4026 size_t len = strlen (omp_runtime_apis [i]);
/* Match "omp_<entry>" exactly, or "omp_<entry>_8" in the sections that
   provide a 64-bit variant (mode > 1).  */
4027 if (strncmp (name + 4, omp_runtime_apis [i], len) == 0
4028 && (name [4 + len] == '\0'
4029 || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
4035 /* Helper function for scan_omp.
4037 Callback for walk_gimple_stmt used to scan for OMP directives in
4038 the current statement in GSI.  */
/* NOTE(review): lossy extract -- original lines between the numbered
   statements (braces, `break`s, some conditions) are missing.  */
4041 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4042 struct walk_stmt_info *wi)
4044 gimple *stmt = gsi_stmt (*gsi);
4045 omp_context *ctx = (omp_context *) wi->info;
/* Point diagnostics at the statement being scanned.  */
4047 if (gimple_has_location (stmt))
4048 input_location = gimple_location (stmt);
4050 /* Check the nesting restrictions.  */
4051 bool remove = false;
4052 if (is_gimple_omp (stmt))
4053 remove = !check_omp_nesting_restrictions (stmt, ctx);
4054 else if (is_gimple_call (stmt))
4056 tree fndecl = gimple_call_fndecl (stmt);
/* setjmp/longjmp are rejected inside simd regions.  */
4060 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4061 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4062 && setjmp_or_longjmp_p (fndecl)
4066 error_at (gimple_location (stmt),
4067 "setjmp/longjmp inside %<simd%> construct");
/* GOMP synchronization builtins obey the same nesting rules as the
   directives they implement.  */
4069 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4070 switch (DECL_FUNCTION_CODE (fndecl))
4072 case BUILT_IN_GOMP_BARRIER:
4073 case BUILT_IN_GOMP_CANCEL:
4074 case BUILT_IN_GOMP_CANCELLATION_POINT:
4075 case BUILT_IN_GOMP_TASKYIELD:
4076 case BUILT_IN_GOMP_TASKWAIT:
4077 case BUILT_IN_GOMP_TASKGROUP_START:
4078 case BUILT_IN_GOMP_TASKGROUP_END:
4079 remove = !check_omp_nesting_restrictions (stmt, ctx);
/* For the runtime-API diagnostics below, look through a GIMPLE_OMP_SCAN
   wrapper to its enclosing context.  */
4086 omp_context *octx = ctx;
4087 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4089 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4092 error_at (gimple_location (stmt),
4093 "OpenMP runtime API call %qD in a region with "
4094 "%<order(concurrent)%> clause", fndecl);
/* Inside a teams region only omp_get_num_teams/omp_get_team_num may be
   called; the length checks avoid strcmp on obviously different names.  */
4096 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4097 && omp_runtime_api_call (fndecl)
4098 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4099 != strlen ("omp_get_num_teams"))
4100 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4101 "omp_get_num_teams") != 0)
4102 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4103 != strlen ("omp_get_team_num"))
4104 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4105 "omp_get_team_num") != 0))
4108 error_at (gimple_location (stmt),
4109 "OpenMP runtime API call %qD strictly nested in a "
4110 "%<teams%> region", fndecl);
/* Runtime API calls are also rejected in target regions that carry a
   device(ancestor) clause.  */
4112 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4113 && (gimple_omp_target_kind (ctx->stmt)
4114 == GF_OMP_TARGET_KIND_REGION)
4115 && omp_runtime_api_call (fndecl))
4117 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4118 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4119 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4120 error_at (gimple_location (stmt),
4121 "OpenMP runtime API call %qD in a region with "
4122 "%<device(ancestor)%> clause", fndecl);
/* A statement that violated the nesting rules is replaced by a nop so
   later passes never see it.  */
4129 stmt = gimple_build_nop ();
4130 gsi_replace (gsi, stmt, false);
4133 *handled_ops_p = true;
/* Dispatch on the directive kind; each case builds/scans the nested
   region.  */
4135 switch (gimple_code (stmt))
4137 case GIMPLE_OMP_PARALLEL:
/* taskreg_nesting_level brackets regions that create task/parallel
   contexts.  */
4138 taskreg_nesting_level++;
4139 scan_omp_parallel (gsi, ctx);
4140 taskreg_nesting_level--;
4143 case GIMPLE_OMP_TASK:
4144 taskreg_nesting_level++;
4145 scan_omp_task (gsi, ctx);
4146 taskreg_nesting_level--;
4149 case GIMPLE_OMP_FOR:
/* A combined simd with an inscan reduction gets the dedicated
   scan_omp_simd_scan treatment (unless inside GIMPLE_OMP_SCAN).  */
4150 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4151 == GF_OMP_FOR_KIND_SIMD)
4152 && gimple_omp_for_combined_into_p (stmt)
4153 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4155 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4156 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4157 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4159 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
/* Non-collapsed simd in a possibly-offloaded context with SIMT
   support takes the SIMT path.  */
4163 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4164 == GF_OMP_FOR_KIND_SIMD)
4165 && omp_maybe_offloaded_ctx (ctx)
4166 && omp_max_simt_vf ()
4167 && gimple_omp_for_collapse (stmt) == 1)
4168 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4170 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4173 case GIMPLE_OMP_SCOPE:
4174 ctx = new_omp_context (stmt, ctx);
4175 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4176 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4179 case GIMPLE_OMP_SECTIONS:
4180 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4183 case GIMPLE_OMP_SINGLE:
4184 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4187 case GIMPLE_OMP_SCAN:
/* Record in the parent context whether the scan is inclusive or
   exclusive.  */
4188 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4190 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4191 ctx->scan_inclusive = true;
4192 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4193 ctx->scan_exclusive = true;
/* Directives with a body but no sharing clauses to scan.  */
4196 case GIMPLE_OMP_SECTION:
4197 case GIMPLE_OMP_MASTER:
4198 case GIMPLE_OMP_ORDERED:
4199 case GIMPLE_OMP_CRITICAL:
4200 ctx = new_omp_context (stmt, ctx);
4201 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4204 case GIMPLE_OMP_MASKED:
4205 ctx = new_omp_context (stmt, ctx);
4206 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4207 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4210 case GIMPLE_OMP_TASKGROUP:
4211 ctx = new_omp_context (stmt, ctx);
4212 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4213 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4216 case GIMPLE_OMP_TARGET:
/* Offloaded targets behave like task/parallel regions for nesting
   accounting; host-side data/update directives do not.  */
4217 if (is_gimple_omp_offloaded (stmt))
4219 taskreg_nesting_level++;
4220 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4221 taskreg_nesting_level--;
4224 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4227 case GIMPLE_OMP_TEAMS:
/* Host teams likewise bump the nesting level.  */
4228 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4230 taskreg_nesting_level++;
4231 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4232 taskreg_nesting_level--;
4235 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
/* GIMPLE_BIND (presumably -- the case label is among the missing
   lines): record its vars identity-mapped so later remapping leaves
   them alone, and let the walker descend.  */
4242 *handled_ops_p = false;
4244 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4246 var = DECL_CHAIN (var))
4247 insert_decl_map (&ctx->cb, var, var);
/* Default: nothing OMP-specific; let the generic walk continue.  */
4251 *handled_ops_p = false;
4259 /* Scan all the statements starting at the current statement.  CTX
4260 contains context information about the OMP directives and
4261 clauses found during the scan.  */
4264 scan_omp (gimple_seq *body_p, omp_context *ctx)
4266 location_t saved_location;
4267 struct walk_stmt_info wi;
/* Zero the walker state; the line that stashes CTX into wi.info is
   presumably among the lines missing from this extract (the callbacks
   read it back) -- TODO confirm.  */
4269 memset (&wi, 0, sizeof (wi));
4271 wi.want_locations = true;
/* The per-statement callback updates input_location for diagnostics;
   restore the caller's location when the walk is done.  */
4273 saved_location = input_location;
4274 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4275 input_location = saved_location;
4278 /* Re-gimplification and code generation routines. */
4280 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4281 of BIND if in a method.  */
/* NOTE(review): lossy extract -- the type check completing the condition
   at line 4288 is missing here.  */
4284 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
/* Detect a method: an artificial first argument ("this") whose type
   matches the (missing) check on line 4288.  */
4286 if (DECL_ARGUMENTS (current_function_decl)
4287 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4288 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
/* Unlink every dummy member-access var from the bind's var chain,
   using a pointer-to-pointer walk so removal needs no special cases.  */
4291 tree vars = gimple_bind_vars (bind);
4292 for (tree *pvar = &vars; *pvar; )
4293 if (omp_member_access_dummy_var (*pvar))
4294 *pvar = DECL_CHAIN (*pvar);
4296 pvar = &DECL_CHAIN (*pvar);
4297 gimple_bind_set_vars (bind, vars);
4301 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4302 block and its subblocks.  */
4305 remove_member_access_dummy_vars (tree block)
/* Unlink dummy vars in place via a pointer-to-pointer walk over the
   BLOCK_VARS chain.  */
4307 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4308 if (omp_member_access_dummy_var (*pvar))
4309 *pvar = DECL_CHAIN (*pvar);
4311 pvar = &DECL_CHAIN (*pvar);
/* Recurse into each subblock.  */
4313 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4314 remove_member_access_dummy_vars (block);
4317 /* If a context was created for STMT when it was scanned, return it.  */
4319 static omp_context *
4320 maybe_lookup_ctx (gimple *stmt)
/* all_contexts maps each scanned OMP statement to its omp_context via
   a splay tree; absence means no context was created for STMT.  */
4323 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4324 return n ? (omp_context *) n->value : NULL;
4328 /* Find the mapping for DECL in CTX or the immediately enclosing
4329 context that has a mapping for DECL.
4331 If CTX is a nested parallel directive, we may have to use the decl
4332 mappings created in CTX's parent context. Suppose that we have the
4333 following parallel nesting (variable UIDs showed for clarity):
4336 #omp parallel shared(iD.1562) -> outer parallel
4337 iD.1562 = iD.1562 + 1;
4339 #omp parallel shared (iD.1562) -> inner parallel
4340 iD.1562 = iD.1562 - 1;
4342 Each parallel structure will create a distinct .omp_data_s structure
4343 for copying iD.1562 in/out of the directive:
4345 outer parallel .omp_data_s.1.i -> iD.1562
4346 inner parallel .omp_data_s.2.i -> iD.1562
4348 A shared variable mapping will produce a copy-out operation before
4349 the parallel directive and a copy-in operation after it. So, in
4350 this case we would have:
4353 .omp_data_o.1.i = iD.1562;
4354 #omp parallel shared(iD.1562) -> outer parallel
4355 .omp_data_i.1 = &.omp_data_o.1
4356 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4358 .omp_data_o.2.i = iD.1562; -> **
4359 #omp parallel shared(iD.1562) -> inner parallel
4360 .omp_data_i.2 = &.omp_data_o.2
4361 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4364 ** This is a problem. The symbol iD.1562 cannot be referenced
4365 inside the body of the outer parallel region. But since we are
4366 emitting this copy operation while expanding the inner parallel
4367 directive, we need to access the CTX structure of the outer
4368 parallel directive to get the correct mapping:
4370 .omp_data_o.2.i = .omp_data_i.1->i
4372 Since there may be other workshare or parallel directives enclosing
4373 the parallel directive, it may be necessary to walk up the context
4374 parent chain. This is not a problem in general because nested
4375 parallelism happens only rarely. */
/* See the long comment above for why the enclosing contexts must be
   consulted.  Returns the mapping for DECL in the nearest enclosing
   context that has one, or DECL itself if none does.  */
4378 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
/* Walk up the context parent chain until a mapping is found.  */
4383 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4384 t = maybe_lookup_decl (decl, up);
/* In a nested region, every non-global decl must have been mapped
   somewhere up the chain.  */
4386 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4388 return t ? t : decl;
4392 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4393 in outer contexts.  */
4396 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
/* Same parent-chain walk as lookup_decl_in_outer_ctx, but without the
   assertion: an unmapped DECL is simply returned unchanged.  */
4401 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4402 t = maybe_lookup_decl (decl, up);
4404 return t ? t : decl;
4408 /* Construct the initialization value for reduction operation OP.  */
/* NOTE(review): lossy extract -- the switch head, several case labels and
   returns are missing between the numbered lines.  */
4411 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
/* Logical-or / xor (and, per the missing labels, presumably plus etc.):
   the identity is zero.  */
4420 case TRUTH_ORIF_EXPR:
4421 case TRUTH_XOR_EXPR:
4423 return build_zero_cst (type);
/* Logical-and (and presumably mult): identity one.  */
4426 case TRUTH_AND_EXPR:
4427 case TRUTH_ANDIF_EXPR:
4429 return fold_convert_loc (loc, type, integer_one_node);
/* Bit-and identity: all-ones.  */
4432 return fold_convert_loc (loc, type, integer_minus_one_node);
/* MAX reduction (per the HONOR_INFINITIES/-inf handling below):
   initialize with the smallest representable value.  */
4435 if (SCALAR_FLOAT_TYPE_P (type))
4437 REAL_VALUE_TYPE max, min;
/* Use -inf when the type honors infinities, else negated max.  */
4438 if (HONOR_INFINITIES (type))
4441 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4444 real_maxval (&min, 1, TYPE_MODE (type));
4445 return build_real (type, min);
4447 else if (POINTER_TYPE_P (type))
4450 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4451 return wide_int_to_tree (type, min);
4455 gcc_assert (INTEGRAL_TYPE_P (type));
4456 return TYPE_MIN_VALUE (type);
/* MIN reduction: symmetric -- largest representable value.  */
4460 if (SCALAR_FLOAT_TYPE_P (type))
4462 REAL_VALUE_TYPE max;
4463 if (HONOR_INFINITIES (type))
4466 real_maxval (&max, 0, TYPE_MODE (type));
4467 return build_real (type, max);
4469 else if (POINTER_TYPE_P (type))
4472 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4473 return wide_int_to_tree (type, max);
4477 gcc_assert (INTEGRAL_TYPE_P (type));
4478 return TYPE_MAX_VALUE (type);
4486 /* Construct the initialization value for reduction CLAUSE.  */
4489 omp_reduction_init (tree clause, tree type)
/* Convenience wrapper: pull location and reduction code out of the
   clause and delegate to omp_reduction_init_op.  */
4491 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4492 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4495 /* Return alignment to be assumed for var in CLAUSE, which should be
4496 OMP_CLAUSE_ALIGNED.  */
/* NOTE(review): lossy extract -- `continue`/`break` lines inside the
   loops are missing.  */
4499 omp_clause_aligned_alignment (tree clause)
/* An explicit alignment on the clause wins.  */
4501 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4502 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4504 /* Otherwise return implementation defined alignment: the largest
   alignment of any vector type the target would auto-vectorize with.  */
4505 unsigned int al = 1;
4506 opt_scalar_mode mode_iter;
4507 auto_vector_modes modes;
4508 targetm.vectorize.autovectorize_vector_modes (&modes, true);
/* Pairs of (scalar class, corresponding vector class); i += 2 below
   visits only the scalar members.  */
4509 static enum mode_class classes []
4510 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4511 for (int i = 0; i < 4; i += 2)
4512 /* The for loop above dictates that we only walk through scalar classes.  */
4513 FOR_EACH_MODE_IN_CLASS (mode_iter, classes [i])
4515 scalar_mode mode = mode_iter.require ();
4516 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
/* Skip scalar modes the target won't vectorize into the paired
   vector class.  */
4517 if (GET_MODE_CLASS (vmode) != classes [i + 1])
/* Prefer a related vector mode at least as wide as the preferred one
   (presumably VMODE is replaced when one is found -- the assignment
   line is missing).  */
4519 machine_mode alt_vmode;
4520 for (unsigned int j = 0; j < modes.length (); ++j)
4521 if (related_vector_mode (modes [j], mode).exists (&alt_vmode)
4522 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
/* Build the vector type for this mode and record its alignment if it
   is the largest seen so far.  */
4525 tree type = lang_hooks.types.type_for_mode (mode, 1);
4526 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4528 type = build_vector_type_for_mode (type, vmode);
4529 if (TYPE_MODE (type) != vmode)
4531 if (TYPE_ALIGN_UNIT (type) > al)
4532 al = TYPE_ALIGN_UNIT (type);
4534 return build_int_cst (integer_type_node, al);
4538 /* This structure is part of the interface between lower_rec_simd_input_clauses
4539 and lower_rec_input_clauses.  */
/* NOTE(review): lossy extract -- several members (e.g. idx/lane/is_simt
   referenced by lower_rec_simd_input_clauses below) are missing here.  */
4541 class omplow_simd_context {
/* Zero-initialize all members on construction.  */
4543 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
/* Extra arguments accumulated for the SIMT entry call -- presumably
   GOMP_SIMT_ENTER; TODO confirm against lower_rec_input_clauses.  */
4547 vec <tree, va_heap> simt_eargs;
/* Destructor/cleanup statements for SIMT privatized vars.  */
4548 gimple_seq simt_dlist;
/* Maximum vectorization factor, possibly non-constant (poly_uint64).  */
4549 poly_uint64_pod max_vf;
4553 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
/* NOTE(review): this extraction is missing several original lines (braces,
   early returns, the rvar2 parameter declaration); comments below describe
   only what the visible statements establish.  */
4557 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4558 omplow_simd_context
*sctx
, tree
&ivar
,
4559 tree
&lvar
, tree
*rvar
= NULL
,
/* Lazily compute the maximum vectorization factor on first use:
   SIMT and non-SIMT loops have different upper bounds.  */
4562 if (known_eq (sctx
->max_vf
, 0U))
4564 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
/* Clamp max_vf by a constant, positive safelen clause when present.  */
4565 if (maybe_gt (sctx
->max_vf
, 1U))
4567 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4568 OMP_CLAUSE_SAFELEN
);
4571 poly_uint64 safe_len
;
4572 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4573 || maybe_lt (safe_len
, 1U))
4576 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
/* For SIMT, walk the reduction clauses; UDR reductions and boolean
   reductions on non-integral types are not supported (the fallback
   action itself is on lines elided from this extraction — confirm
   against the full source).  */
4579 if (sctx
->is_simt
&& !known_eq (sctx
->max_vf
, 1U))
4581 for (tree c
= gimple_omp_for_clauses (ctx
->stmt
); c
;
4582 c
= OMP_CLAUSE_CHAIN (c
))
4584 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4587 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4589 /* UDR reductions are not supported yet for SIMT, disable
4595 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c
))
4596 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var
)))
4598 /* Doing boolean operations on non-integral types is
4599 for conformance only, it's not worth supporting this
/* If any vectorization is possible, create the lane index variables
   shared by all privatized vars of this loop.  */
4606 if (maybe_gt (sctx
->max_vf
, 1U))
4608 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4609 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4612 if (known_eq (sctx
->max_vf
, 1U))
/* A gimple register needs no separate per-lane storage.  */
4617 if (is_gimple_reg (new_var
))
4619 ivar
= lvar
= new_var
;
/* SIMT path: privatize into a temporary carrying the
   "omp simt private" attribute and pass its address to the SIMT
   entry; clobber it on the exit sequence.  */
4622 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4623 ivar
= lvar
= create_tmp_var (type
);
4624 TREE_ADDRESSABLE (ivar
) = 1;
4625 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4626 NULL
, DECL_ATTRIBUTES (ivar
));
4627 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4628 tree clobber
= build_clobber (type
);
4629 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4630 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
/* General simd path: privatize NEW_VAR into an "omp simd array" of
   max_vf elements; IVAR/LVAR become per-lane ARRAY_REFs into it.  */
4634 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4635 tree avar
= create_tmp_var_raw (atype
);
4636 if (TREE_ADDRESSABLE (new_var
))
4637 TREE_ADDRESSABLE (avar
) = 1;
4638 DECL_ATTRIBUTES (avar
)
4639 = tree_cons (get_identifier ("omp simd array"), NULL
,
4640 DECL_ATTRIBUTES (avar
));
4641 gimple_add_tmp_var (avar
);
4643 if (rvar
&& !ctx
->for_simd_scan_phase
)
4645 /* For inscan reductions, create another array temporary,
4646 which will hold the reduced value. */
4647 iavar
= create_tmp_var_raw (atype
);
4648 if (TREE_ADDRESSABLE (new_var
))
4649 TREE_ADDRESSABLE (iavar
) = 1;
4650 DECL_ATTRIBUTES (iavar
)
4651 = tree_cons (get_identifier ("omp simd array"), NULL
,
4652 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4653 DECL_ATTRIBUTES (iavar
)));
4654 gimple_add_tmp_var (iavar
);
/* Remember the avar -> iavar mapping for later remapping.  */
4655 ctx
->cb
.decl_map
->put (avar
, iavar
);
4656 if (sctx
->lastlane
== NULL_TREE
)
4657 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
/* *RVAR reads the reduced value from the last active lane.  */
4658 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4659 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4660 TREE_THIS_NOTRAP (*rvar
) = 1;
4662 if (ctx
->scan_exclusive
)
4664 /* And for exclusive scan yet another one, which will
4665 hold the value during the scan phase. */
4666 tree savar
= create_tmp_var_raw (atype
);
4667 if (TREE_ADDRESSABLE (new_var
))
4668 TREE_ADDRESSABLE (savar
) = 1;
4669 DECL_ATTRIBUTES (savar
)
4670 = tree_cons (get_identifier ("omp simd array"), NULL
,
4671 tree_cons (get_identifier ("omp simd inscan "
4673 DECL_ATTRIBUTES (savar
)));
4674 gimple_add_tmp_var (savar
);
4675 ctx
->cb
.decl_map
->put (iavar
, savar
);
4676 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4677 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4678 TREE_THIS_NOTRAP (*rvar2
) = 1;
/* Per-lane accesses: IVAR indexes by the current lane counter (idx),
   LVAR by the "lane" variable used for last-value handling.  */
4681 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4682 NULL_TREE
, NULL_TREE
);
4683 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4684 NULL_TREE
, NULL_TREE
);
4685 TREE_THIS_NOTRAP (ivar
) = 1;
4686 TREE_THIS_NOTRAP (lvar
) = 1;
/* Redirect remaining uses of NEW_VAR to the per-lane element.  */
4688 if (DECL_P (new_var
))
4690 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4691 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4696 /* Helper function of lower_rec_input_clauses. For a reference
4697 in simd reduction, add an underlying variable it will reference. */
4700 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
/* Size of the referenced type; only the constant-size case is handled.  */
4702 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4703 if (TREE_CONSTANT (z
))
/* Create the underlying object, make it addressable, and initialize
   the reference NEW_VARD with its address (appended to *ILIST).  */
4705 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4706 get_name (new_vard
));
4707 gimple_add_tmp_var (z
);
4708 TREE_ADDRESSABLE (z
) = 1;
4709 z
= build_fold_addr_expr_loc (loc
, z
);
4710 gimplify_assign (new_vard
, z
, ilist
);
4714 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4715 code to emit (type) (tskred_temp[idx]). */
4718 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
/* Byte size of one pointer-sized slot in the TSKRED_TEMP array; used to
   scale the element index into a byte offset (the offset expression
   itself is on a line elided from this extraction).  */
4721 unsigned HOST_WIDE_INT sz
4722 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
/* Load one pointer-sized element out of TSKRED_TEMP.  */
4723 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4724 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4726 tree v
= create_tmp_var (pointer_sized_int_node
);
4727 gimple
*g
= gimple_build_assign (v
, r
);
4728 gimple_seq_add_stmt (ilist
, g
);
/* Convert to TYPE when it differs from the pointer-sized integer type.  */
4729 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4731 v
= create_tmp_var (type
);
4732 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4733 gimple_seq_add_stmt (ilist
, g
);
4738 /* Lower early initialization of privatized variable NEW_VAR
4739 if it needs an allocator (has allocate clause). */
4742 lower_private_allocate (tree var
, tree new_var
, tree
&allocator
,
4743 tree
&allocate_ptr
, gimple_seq
*ilist
,
4744 omp_context
*ctx
, bool is_ref
, tree size
)
/* Caller must pass ALLOCATE_PTR in, not yet set.  */
4748 gcc_assert (allocate_ptr
== NULL_TREE
);
/* Find the allocator recorded for VAR by an allocate () clause, if any
   (the early-return when none applies is on lines elided here).  */
4749 if (ctx
->allocate_map
4750 && (DECL_P (new_var
) || (TYPE_P (new_var
) && size
)))
4751 if (tree
*allocatorp
= ctx
->allocate_map
->get (var
))
4752 allocator
= *allocatorp
;
4753 if (allocator
== NULL_TREE
)
/* References are handled by the caller via the is_ref path; reset
   the allocator in that case.  */
4755 if (!is_ref
&& omp_privatize_by_reference (var
))
4757 allocator
= NULL_TREE
;
/* A TREE_LIST allocator carries an align modifier in TREE_VALUE and
   the allocator proper in TREE_PURPOSE.  */
4761 unsigned HOST_WIDE_INT ialign
= 0;
4762 if (TREE_CODE (allocator
) == TREE_LIST
)
4764 ialign
= tree_to_uhwi (TREE_VALUE (allocator
));
4765 allocator
= TREE_PURPOSE (allocator
);
/* A non-constant allocator is a variable; reference it through the
   outer context and convert to the pointer-sized integer type.  */
4767 if (TREE_CODE (allocator
) != INTEGER_CST
)
4768 allocator
= build_outer_var_ref (allocator
, ctx
);
4769 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
4770 if (TREE_CODE (allocator
) != INTEGER_CST
)
/* Evaluate the allocator expression once into a temporary.  */
4772 tree var
= create_tmp_var (TREE_TYPE (allocator
));
4773 gimplify_assign (var
, allocator
, ilist
);
/* Compute the pointer type, alignment and allocation size, depending on
   whether NEW_VAR is a bare type, a reference, or a plain decl.  */
4777 tree ptr_type
, align
, sz
= size
;
4778 if (TYPE_P (new_var
))
4780 ptr_type
= build_pointer_type (new_var
);
4781 ialign
= MAX (ialign
, TYPE_ALIGN_UNIT (new_var
));
4785 ptr_type
= build_pointer_type (TREE_TYPE (TREE_TYPE (new_var
)));
4786 ialign
= MAX (ialign
, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type
)));
4790 ptr_type
= build_pointer_type (TREE_TYPE (new_var
));
4791 ialign
= MAX (ialign
, DECL_ALIGN_UNIT (new_var
));
4792 if (sz
== NULL_TREE
)
4793 sz
= fold_convert (size_type_node
, DECL_SIZE_UNIT (new_var
));
4795 align
= build_int_cst (size_type_node
, ialign
);
/* Gimplify a non-constant size into a temporary first.  */
4796 if (TREE_CODE (sz
) != INTEGER_CST
)
4798 tree szvar
= create_tmp_var (size_type_node
);
4799 gimplify_assign (szvar
, sz
, ilist
);
/* Emit: allocate_ptr = __builtin_GOMP_alloc (align, sz, allocator);  */
4802 allocate_ptr
= create_tmp_var (ptr_type
);
4803 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
4804 gimple
*g
= gimple_build_call (a
, 3, align
, sz
, allocator
);
4805 gimple_call_set_lhs (g
, allocate_ptr
);
4806 gimple_seq_add_stmt (ilist
, g
);
/* Redirect uses of NEW_VAR to *allocate_ptr via DECL_VALUE_EXPR.  */
4809 tree x
= build_simple_mem_ref (allocate_ptr
);
4810 TREE_THIS_NOTRAP (x
) = 1;
4811 SET_DECL_VALUE_EXPR (new_var
, x
);
4812 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4817 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4818 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4819 private variables. Initialization statements go in ILIST, while calls
4820 to destructors go in DLIST. */
4823 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4824 omp_context
*ctx
, struct omp_for_data
*fd
)
4826 tree c
, copyin_seq
, x
, ptr
;
4827 bool copyin_by_ref
= false;
4828 bool lastprivate_firstprivate
= false;
4829 bool reduction_omp_orig_ref
= false;
4831 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4832 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4833 omplow_simd_context sctx
= omplow_simd_context ();
4834 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4835 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4836 gimple_seq llist
[4] = { };
4837 tree nonconst_simd_if
= NULL_TREE
;
4840 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4842 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4843 with data sharing clauses referencing variable sized vars. That
4844 is unnecessarily hard to support and very unlikely to result in
4845 vectorized code anyway. */
4847 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4848 switch (OMP_CLAUSE_CODE (c
))
4850 case OMP_CLAUSE_LINEAR
:
4851 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4854 case OMP_CLAUSE_PRIVATE
:
4855 case OMP_CLAUSE_FIRSTPRIVATE
:
4856 case OMP_CLAUSE_LASTPRIVATE
:
4857 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4859 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c
)))
4861 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4862 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4866 case OMP_CLAUSE_REDUCTION
:
4867 case OMP_CLAUSE_IN_REDUCTION
:
4868 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4869 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4871 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c
)))
4873 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4874 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4879 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4881 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4882 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4884 case OMP_CLAUSE_SIMDLEN
:
4885 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4888 case OMP_CLAUSE__CONDTEMP_
:
4889 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4897 /* Add a placeholder for simduid. */
4898 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4899 sctx
.simt_eargs
.safe_push (NULL_TREE
);
4901 unsigned task_reduction_cnt
= 0;
4902 unsigned task_reduction_cntorig
= 0;
4903 unsigned task_reduction_cnt_full
= 0;
4904 unsigned task_reduction_cntorig_full
= 0;
4905 unsigned task_reduction_other_cnt
= 0;
4906 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
4907 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
4908 /* Do all the fixed sized types in the first pass, and the variable sized
4909 types in the second pass. This makes sure that the scalar arguments to
4910 the variable sized types are processed before we use them in the
4911 variable sized operations. For task reductions we use 4 passes, in the
4912 first two we ignore them, in the third one gather arguments for
4913 GOMP_task_reduction_remap call and in the last pass actually handle
4914 the task reductions. */
4915 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
4918 if (pass
== 2 && task_reduction_cnt
)
4921 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
4922 + task_reduction_cntorig
);
4923 tskred_avar
= create_tmp_var_raw (tskred_atype
);
4924 gimple_add_tmp_var (tskred_avar
);
4925 TREE_ADDRESSABLE (tskred_avar
) = 1;
4926 task_reduction_cnt_full
= task_reduction_cnt
;
4927 task_reduction_cntorig_full
= task_reduction_cntorig
;
4929 else if (pass
== 3 && task_reduction_cnt
)
4931 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
4933 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
4934 size_int (task_reduction_cntorig
),
4935 build_fold_addr_expr (tskred_avar
));
4936 gimple_seq_add_stmt (ilist
, g
);
4938 if (pass
== 3 && task_reduction_other_cnt
)
4940 /* For reduction clauses, build
4941 tskred_base = (void *) tskred_temp[2]
4942 + omp_get_thread_num () * tskred_temp[1]
4943 or if tskred_temp[1] is known to be constant, that constant
4944 directly. This is the start of the private reduction copy block
4945 for the current thread. */
4946 tree v
= create_tmp_var (integer_type_node
);
4947 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
4948 gimple
*g
= gimple_build_call (x
, 0);
4949 gimple_call_set_lhs (g
, v
);
4950 gimple_seq_add_stmt (ilist
, g
);
4951 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
4952 tskred_temp
= OMP_CLAUSE_DECL (c
);
4953 if (is_taskreg_ctx (ctx
))
4954 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
4955 tree v2
= create_tmp_var (sizetype
);
4956 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
4957 gimple_seq_add_stmt (ilist
, g
);
4958 if (ctx
->task_reductions
[0])
4959 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
4961 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
4962 tree v3
= create_tmp_var (sizetype
);
4963 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
4964 gimple_seq_add_stmt (ilist
, g
);
4965 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
4966 tskred_base
= create_tmp_var (ptr_type_node
);
4967 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
4968 gimple_seq_add_stmt (ilist
, g
);
4970 task_reduction_cnt
= 0;
4971 task_reduction_cntorig
= 0;
4972 task_reduction_other_cnt
= 0;
4973 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4975 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
4978 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4979 bool task_reduction_p
= false;
4980 bool task_reduction_needs_orig_p
= false;
4981 tree cond
= NULL_TREE
;
4982 tree allocator
, allocate_ptr
;
4986 case OMP_CLAUSE_PRIVATE
:
4987 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
4990 case OMP_CLAUSE_SHARED
:
4991 /* Ignore shared directives in teams construct inside
4992 of target construct. */
4993 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4994 && !is_host_teams_ctx (ctx
))
4996 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
4998 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
4999 || is_global_var (OMP_CLAUSE_DECL (c
)));
5002 case OMP_CLAUSE_FIRSTPRIVATE
:
5003 case OMP_CLAUSE_COPYIN
:
5005 case OMP_CLAUSE_LINEAR
:
5006 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
5007 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5008 lastprivate_firstprivate
= true;
5010 case OMP_CLAUSE_REDUCTION
:
5011 case OMP_CLAUSE_IN_REDUCTION
:
5012 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
5013 || is_task_ctx (ctx
)
5014 || OMP_CLAUSE_REDUCTION_TASK (c
))
5016 task_reduction_p
= true;
5017 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
5019 task_reduction_other_cnt
++;
5024 task_reduction_cnt
++;
5025 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5027 var
= OMP_CLAUSE_DECL (c
);
5028 /* If var is a global variable that isn't privatized
5029 in outer contexts, we don't need to look up the
5030 original address, it is always the address of the
5031 global variable itself. */
5033 || omp_privatize_by_reference (var
)
5035 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
5037 task_reduction_needs_orig_p
= true;
5038 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5039 task_reduction_cntorig
++;
5043 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5044 reduction_omp_orig_ref
= true;
5046 case OMP_CLAUSE__REDUCTEMP_
:
5047 if (!is_taskreg_ctx (ctx
))
5050 case OMP_CLAUSE__LOOPTEMP_
:
5051 /* Handle _looptemp_/_reductemp_ clauses only on
5056 case OMP_CLAUSE_LASTPRIVATE
:
5057 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5059 lastprivate_firstprivate
= true;
5060 if (pass
!= 0 || is_taskloop_ctx (ctx
))
5063 /* Even without corresponding firstprivate, if
5064 decl is Fortran allocatable, it needs outer var
5067 && lang_hooks
.decls
.omp_private_outer_ref
5068 (OMP_CLAUSE_DECL (c
)))
5069 lastprivate_firstprivate
= true;
5071 case OMP_CLAUSE_ALIGNED
:
5074 var
= OMP_CLAUSE_DECL (c
);
5075 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
5076 && !is_global_var (var
))
5078 new_var
= maybe_lookup_decl (var
, ctx
);
5079 if (new_var
== NULL_TREE
)
5080 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5081 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
5082 tree alarg
= omp_clause_aligned_alignment (c
);
5083 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
5084 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
5085 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5086 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5087 gimplify_and_add (x
, ilist
);
5089 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
5090 && is_global_var (var
))
5092 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
5093 new_var
= lookup_decl (var
, ctx
);
5094 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5095 t
= build_fold_addr_expr_loc (clause_loc
, t
);
5096 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
5097 tree alarg
= omp_clause_aligned_alignment (c
);
5098 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
5099 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
5100 t
= fold_convert_loc (clause_loc
, ptype
, t
);
5101 x
= create_tmp_var (ptype
);
5102 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
5103 gimplify_and_add (t
, ilist
);
5104 t
= build_simple_mem_ref_loc (clause_loc
, x
);
5105 SET_DECL_VALUE_EXPR (new_var
, t
);
5106 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5109 case OMP_CLAUSE__CONDTEMP_
:
5110 if (is_parallel_ctx (ctx
)
5111 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
5118 if (task_reduction_p
!= (pass
>= 2))
5121 allocator
= NULL_TREE
;
5122 allocate_ptr
= NULL_TREE
;
5123 new_var
= var
= OMP_CLAUSE_DECL (c
);
5124 if ((c_kind
== OMP_CLAUSE_REDUCTION
5125 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5126 && TREE_CODE (var
) == MEM_REF
)
5128 var
= TREE_OPERAND (var
, 0);
5129 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
5130 var
= TREE_OPERAND (var
, 0);
5131 if (TREE_CODE (var
) == INDIRECT_REF
5132 || TREE_CODE (var
) == ADDR_EXPR
)
5133 var
= TREE_OPERAND (var
, 0);
5134 if (is_variable_sized (var
))
5136 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
5137 var
= DECL_VALUE_EXPR (var
);
5138 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
5139 var
= TREE_OPERAND (var
, 0);
5140 gcc_assert (DECL_P (var
));
5144 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
&& is_omp_target (ctx
->stmt
))
5146 splay_tree_key key
= (splay_tree_key
) &DECL_CONTEXT (var
);
5147 new_var
= (tree
) splay_tree_lookup (ctx
->field_map
, key
)->value
;
5149 else if (c_kind
!= OMP_CLAUSE_COPYIN
)
5150 new_var
= lookup_decl (var
, ctx
);
5152 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
5157 /* C/C++ array section reductions. */
5158 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5159 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5160 && var
!= OMP_CLAUSE_DECL (c
))
5165 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
5166 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
5168 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
5170 tree b
= TREE_OPERAND (orig_var
, 1);
5171 if (is_omp_target (ctx
->stmt
))
5174 b
= maybe_lookup_decl (b
, ctx
);
5177 b
= TREE_OPERAND (orig_var
, 1);
5178 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
5180 if (integer_zerop (bias
))
5184 bias
= fold_convert_loc (clause_loc
,
5185 TREE_TYPE (b
), bias
);
5186 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5187 TREE_TYPE (b
), b
, bias
);
5189 orig_var
= TREE_OPERAND (orig_var
, 0);
5193 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5194 if (is_global_var (out
)
5195 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
5196 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
5197 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
5200 else if (is_omp_target (ctx
->stmt
))
5204 bool by_ref
= use_pointer_for_field (var
, NULL
);
5205 x
= build_receiver_ref (var
, by_ref
, ctx
);
5206 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
5207 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
5209 x
= build_fold_addr_expr (x
);
5211 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
5212 x
= build_simple_mem_ref (x
);
5213 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
5215 if (var
== TREE_OPERAND (orig_var
, 0))
5216 x
= build_fold_addr_expr (x
);
5218 bias
= fold_convert (sizetype
, bias
);
5219 x
= fold_convert (ptr_type_node
, x
);
5220 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5221 TREE_TYPE (x
), x
, bias
);
5222 unsigned cnt
= task_reduction_cnt
- 1;
5223 if (!task_reduction_needs_orig_p
)
5224 cnt
+= (task_reduction_cntorig_full
5225 - task_reduction_cntorig
);
5227 cnt
= task_reduction_cntorig
- 1;
5228 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5229 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5230 gimplify_assign (r
, x
, ilist
);
5234 if (TREE_CODE (orig_var
) == INDIRECT_REF
5235 || TREE_CODE (orig_var
) == ADDR_EXPR
)
5236 orig_var
= TREE_OPERAND (orig_var
, 0);
5237 tree d
= OMP_CLAUSE_DECL (c
);
5238 tree type
= TREE_TYPE (d
);
5239 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
5240 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
5242 const char *name
= get_name (orig_var
);
5243 if (pass
!= 3 && !TREE_CONSTANT (v
))
5246 if (is_omp_target (ctx
->stmt
))
5249 t
= maybe_lookup_decl (v
, ctx
);
5253 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5254 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
5255 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5257 build_int_cst (TREE_TYPE (v
), 1));
5258 sz
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5260 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5264 tree xv
= create_tmp_var (ptr_type_node
);
5265 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5267 unsigned cnt
= task_reduction_cnt
- 1;
5268 if (!task_reduction_needs_orig_p
)
5269 cnt
+= (task_reduction_cntorig_full
5270 - task_reduction_cntorig
);
5272 cnt
= task_reduction_cntorig
- 1;
5273 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5274 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5276 gimple
*g
= gimple_build_assign (xv
, x
);
5277 gimple_seq_add_stmt (ilist
, g
);
5281 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5283 if (ctx
->task_reductions
[1 + idx
])
5284 off
= fold_convert (sizetype
,
5285 ctx
->task_reductions
[1 + idx
]);
5287 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5289 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
5291 gimple_seq_add_stmt (ilist
, g
);
5293 x
= fold_convert (build_pointer_type (boolean_type_node
),
5295 if (TREE_CONSTANT (v
))
5296 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
5297 TYPE_SIZE_UNIT (type
));
5301 if (is_omp_target (ctx
->stmt
))
5304 t
= maybe_lookup_decl (v
, ctx
);
5308 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5309 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
5311 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5313 build_int_cst (TREE_TYPE (v
), 1));
5314 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5316 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5317 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5319 cond
= create_tmp_var (TREE_TYPE (x
));
5320 gimplify_assign (cond
, x
, ilist
);
5323 else if (lower_private_allocate (var
, type
, allocator
,
5324 allocate_ptr
, ilist
, ctx
,
5327 ? TYPE_SIZE_UNIT (type
)
5330 else if (TREE_CONSTANT (v
))
5332 x
= create_tmp_var_raw (type
, name
);
5333 gimple_add_tmp_var (x
);
5334 TREE_ADDRESSABLE (x
) = 1;
5335 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5340 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5341 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
5342 x
= build_call_expr_loc (clause_loc
, atmp
, 2, sz
, al
);
5345 tree ptype
= build_pointer_type (TREE_TYPE (type
));
5346 x
= fold_convert_loc (clause_loc
, ptype
, x
);
5347 tree y
= create_tmp_var (ptype
, name
);
5348 gimplify_assign (y
, x
, ilist
);
5352 if (!integer_zerop (bias
))
5354 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5356 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5358 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
5359 pointer_sized_int_node
, yb
, bias
);
5360 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
5361 yb
= create_tmp_var (ptype
, name
);
5362 gimplify_assign (yb
, x
, ilist
);
5366 d
= TREE_OPERAND (d
, 0);
5367 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
5368 d
= TREE_OPERAND (d
, 0);
5369 if (TREE_CODE (d
) == ADDR_EXPR
)
5371 if (orig_var
!= var
)
5373 gcc_assert (is_variable_sized (orig_var
));
5374 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
5376 gimplify_assign (new_var
, x
, ilist
);
5377 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
5378 tree t
= build_fold_indirect_ref (new_var
);
5379 DECL_IGNORED_P (new_var
) = 0;
5380 TREE_THIS_NOTRAP (t
) = 1;
5381 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
5382 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
5386 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
5387 build_int_cst (ptype
, 0));
5388 SET_DECL_VALUE_EXPR (new_var
, x
);
5389 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5394 gcc_assert (orig_var
== var
);
5395 if (TREE_CODE (d
) == INDIRECT_REF
)
5397 x
= create_tmp_var (ptype
, name
);
5398 TREE_ADDRESSABLE (x
) = 1;
5399 gimplify_assign (x
, yb
, ilist
);
5400 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5402 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5403 gimplify_assign (new_var
, x
, ilist
);
5405 /* GOMP_taskgroup_reduction_register memsets the whole
5406 array to zero. If the initializer is zero, we don't
5407 need to initialize it again, just mark it as ever
5408 used unconditionally, i.e. cond = true. */
5410 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
5411 && initializer_zerop (omp_reduction_init (c
,
5414 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
5416 gimple_seq_add_stmt (ilist
, g
);
5419 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5423 if (!is_parallel_ctx (ctx
))
5425 tree condv
= create_tmp_var (boolean_type_node
);
5426 g
= gimple_build_assign (condv
,
5427 build_simple_mem_ref (cond
));
5428 gimple_seq_add_stmt (ilist
, g
);
5429 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
5430 g
= gimple_build_cond (NE_EXPR
, condv
,
5431 boolean_false_node
, end
, lab1
);
5432 gimple_seq_add_stmt (ilist
, g
);
5433 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
5435 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5437 gimple_seq_add_stmt (ilist
, g
);
5440 tree y1
= create_tmp_var (ptype
);
5441 gimplify_assign (y1
, y
, ilist
);
5442 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
5443 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
5444 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
5445 if (task_reduction_needs_orig_p
)
5447 y3
= create_tmp_var (ptype
);
5449 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5450 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5451 size_int (task_reduction_cnt_full
5452 + task_reduction_cntorig
- 1),
5453 NULL_TREE
, NULL_TREE
);
5456 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5457 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
5460 gimplify_assign (y3
, ref
, ilist
);
5462 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
5466 y2
= create_tmp_var (ptype
);
5467 gimplify_assign (y2
, y
, ilist
);
5469 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5471 tree ref
= build_outer_var_ref (var
, ctx
);
5472 /* For ref build_outer_var_ref already performs this. */
5473 if (TREE_CODE (d
) == INDIRECT_REF
)
5474 gcc_assert (omp_privatize_by_reference (var
));
5475 else if (TREE_CODE (d
) == ADDR_EXPR
)
5476 ref
= build_fold_addr_expr (ref
);
5477 else if (omp_privatize_by_reference (var
))
5478 ref
= build_fold_addr_expr (ref
);
5479 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
5480 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
5481 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5483 y3
= create_tmp_var (ptype
);
5484 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
5488 y4
= create_tmp_var (ptype
);
5489 gimplify_assign (y4
, ref
, dlist
);
5493 tree i
= create_tmp_var (TREE_TYPE (v
));
5494 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
5495 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5496 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
5499 i2
= create_tmp_var (TREE_TYPE (v
));
5500 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
5501 body2
= create_artificial_label (UNKNOWN_LOCATION
);
5502 end2
= create_artificial_label (UNKNOWN_LOCATION
);
5503 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
5505 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5507 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5508 tree decl_placeholder
5509 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
5510 SET_DECL_VALUE_EXPR (decl_placeholder
,
5511 build_simple_mem_ref (y1
));
5512 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
5513 SET_DECL_VALUE_EXPR (placeholder
,
5514 y3
? build_simple_mem_ref (y3
)
5516 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5517 x
= lang_hooks
.decls
.omp_clause_default_ctor
5518 (c
, build_simple_mem_ref (y1
),
5519 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
5521 gimplify_and_add (x
, ilist
);
5522 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5524 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5525 lower_omp (&tseq
, ctx
);
5526 gimple_seq_add_seq (ilist
, tseq
);
5528 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5531 SET_DECL_VALUE_EXPR (decl_placeholder
,
5532 build_simple_mem_ref (y2
));
5533 SET_DECL_VALUE_EXPR (placeholder
,
5534 build_simple_mem_ref (y4
));
5535 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5536 lower_omp (&tseq
, ctx
);
5537 gimple_seq_add_seq (dlist
, tseq
);
5538 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5540 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5541 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
5544 x
= lang_hooks
.decls
.omp_clause_dtor
5545 (c
, build_simple_mem_ref (y2
));
5547 gimplify_and_add (x
, dlist
);
5552 x
= omp_reduction_init (c
, TREE_TYPE (type
));
5553 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5555 /* reduction(-:var) sums up the partial results, so it
5556 acts identically to reduction(+:var). */
5557 if (code
== MINUS_EXPR
)
5560 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
5563 x
= build2 (code
, TREE_TYPE (type
),
5564 build_simple_mem_ref (y4
),
5565 build_simple_mem_ref (y2
));
5566 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
5570 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
5571 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5572 gimple_seq_add_stmt (ilist
, g
);
5575 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
5576 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5577 gimple_seq_add_stmt (ilist
, g
);
5579 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
5580 build_int_cst (TREE_TYPE (i
), 1));
5581 gimple_seq_add_stmt (ilist
, g
);
5582 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
5583 gimple_seq_add_stmt (ilist
, g
);
5584 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
5587 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
5588 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5589 gimple_seq_add_stmt (dlist
, g
);
5592 g
= gimple_build_assign
5593 (y4
, POINTER_PLUS_EXPR
, y4
,
5594 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5595 gimple_seq_add_stmt (dlist
, g
);
5597 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
5598 build_int_cst (TREE_TYPE (i2
), 1));
5599 gimple_seq_add_stmt (dlist
, g
);
5600 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
5601 gimple_seq_add_stmt (dlist
, g
);
5602 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
5606 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
5607 g
= gimple_build_call (f
, 2, allocate_ptr
, allocator
);
5608 gimple_seq_add_stmt (dlist
, g
);
5614 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5615 if (is_global_var (out
))
5617 else if (is_omp_target (ctx
->stmt
))
5621 bool by_ref
= use_pointer_for_field (var
, ctx
);
5622 x
= build_receiver_ref (var
, by_ref
, ctx
);
5624 if (!omp_privatize_by_reference (var
))
5625 x
= build_fold_addr_expr (x
);
5626 x
= fold_convert (ptr_type_node
, x
);
5627 unsigned cnt
= task_reduction_cnt
- 1;
5628 if (!task_reduction_needs_orig_p
)
5629 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
5631 cnt
= task_reduction_cntorig
- 1;
5632 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5633 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5634 gimplify_assign (r
, x
, ilist
);
5639 tree type
= TREE_TYPE (new_var
);
5640 if (!omp_privatize_by_reference (var
))
5641 type
= build_pointer_type (type
);
5642 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5644 unsigned cnt
= task_reduction_cnt
- 1;
5645 if (!task_reduction_needs_orig_p
)
5646 cnt
+= (task_reduction_cntorig_full
5647 - task_reduction_cntorig
);
5649 cnt
= task_reduction_cntorig
- 1;
5650 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5651 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5655 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5657 if (ctx
->task_reductions
[1 + idx
])
5658 off
= fold_convert (sizetype
,
5659 ctx
->task_reductions
[1 + idx
]);
5661 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5663 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
5666 x
= fold_convert (type
, x
);
5668 if (omp_privatize_by_reference (var
))
5670 gimplify_assign (new_var
, x
, ilist
);
5672 new_var
= build_simple_mem_ref (new_var
);
5676 t
= create_tmp_var (type
);
5677 gimplify_assign (t
, x
, ilist
);
5678 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
5679 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5681 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
5682 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
5683 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5684 cond
= create_tmp_var (TREE_TYPE (t
));
5685 gimplify_assign (cond
, t
, ilist
);
5687 else if (is_variable_sized (var
))
5689 /* For variable sized types, we need to allocate the
5690 actual storage here. Call alloca and store the
5691 result in the pointer decl that we created elsewhere. */
5695 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
5699 ptr
= DECL_VALUE_EXPR (new_var
);
5700 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
5701 ptr
= TREE_OPERAND (ptr
, 0);
5702 gcc_assert (DECL_P (ptr
));
5703 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5705 if (lower_private_allocate (var
, new_var
, allocator
,
5706 allocate_ptr
, ilist
, ctx
,
5711 /* void *tmp = __builtin_alloca */
5713 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5715 = gimple_build_call (atmp
, 2, x
,
5716 size_int (DECL_ALIGN (var
)));
5717 cfun
->calls_alloca
= 1;
5718 tmp
= create_tmp_var_raw (ptr_type_node
);
5719 gimple_add_tmp_var (tmp
);
5720 gimple_call_set_lhs (stmt
, tmp
);
5722 gimple_seq_add_stmt (ilist
, stmt
);
5725 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5726 gimplify_assign (ptr
, x
, ilist
);
5729 else if (omp_privatize_by_reference (var
)
5730 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5731 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5733 /* For references that are being privatized for Fortran,
5734 allocate new backing storage for the new pointer
5735 variable. This allows us to avoid changing all the
5736 code that expects a pointer to something that expects
5737 a direct variable. */
5741 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5742 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5744 x
= build_receiver_ref (var
, false, ctx
);
5745 if (ctx
->allocate_map
)
5746 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
5748 allocator
= *allocatep
;
5749 if (TREE_CODE (allocator
) == TREE_LIST
)
5750 allocator
= TREE_PURPOSE (allocator
);
5751 if (TREE_CODE (allocator
) != INTEGER_CST
)
5752 allocator
= build_outer_var_ref (allocator
, ctx
);
5753 allocator
= fold_convert (pointer_sized_int_node
,
5755 allocate_ptr
= unshare_expr (x
);
5757 if (allocator
== NULL_TREE
)
5758 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5760 else if (lower_private_allocate (var
, new_var
, allocator
,
5762 ilist
, ctx
, true, x
))
5764 else if (TREE_CONSTANT (x
))
5766 /* For reduction in SIMD loop, defer adding the
5767 initialization of the reference, because if we decide
5768 to use SIMD array for it, the initilization could cause
5769 expansion ICE. Ditto for other privatization clauses. */
5774 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5776 gimple_add_tmp_var (x
);
5777 TREE_ADDRESSABLE (x
) = 1;
5778 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5784 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5785 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5786 tree al
= size_int (TYPE_ALIGN (rtype
));
5787 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5792 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5793 gimplify_assign (new_var
, x
, ilist
);
5796 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5798 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5799 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5800 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5808 switch (OMP_CLAUSE_CODE (c
))
5810 case OMP_CLAUSE_SHARED
:
5811 /* Ignore shared directives in teams construct inside
5812 target construct. */
5813 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5814 && !is_host_teams_ctx (ctx
))
5816 /* Shared global vars are just accessed directly. */
5817 if (is_global_var (new_var
))
5819 /* For taskloop firstprivate/lastprivate, represented
5820 as firstprivate and shared clause on the task, new_var
5821 is the firstprivate var. */
5822 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5824 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5825 needs to be delayed until after fixup_child_record_type so
5826 that we get the correct type during the dereference. */
5827 by_ref
= use_pointer_for_field (var
, ctx
);
5828 x
= build_receiver_ref (var
, by_ref
, ctx
);
5829 SET_DECL_VALUE_EXPR (new_var
, x
);
5830 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5832 /* ??? If VAR is not passed by reference, and the variable
5833 hasn't been initialized yet, then we'll get a warning for
5834 the store into the omp_data_s structure. Ideally, we'd be
5835 able to notice this and not store anything at all, but
5836 we're generating code too early. Suppress the warning. */
5838 suppress_warning (var
, OPT_Wuninitialized
);
5841 case OMP_CLAUSE__CONDTEMP_
:
5842 if (is_parallel_ctx (ctx
))
5844 x
= build_receiver_ref (var
, false, ctx
);
5845 SET_DECL_VALUE_EXPR (new_var
, x
);
5846 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5848 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5850 x
= build_zero_cst (TREE_TYPE (var
));
5855 case OMP_CLAUSE_LASTPRIVATE
:
5856 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5860 case OMP_CLAUSE_PRIVATE
:
5861 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5862 x
= build_outer_var_ref (var
, ctx
);
5863 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5865 if (is_task_ctx (ctx
))
5866 x
= build_receiver_ref (var
, false, ctx
);
5868 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5876 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
5877 ilist
, ctx
, false, NULL_TREE
);
5878 nx
= unshare_expr (new_var
);
5880 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5881 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5884 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5886 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5889 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5890 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5891 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5892 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5893 || (gimple_omp_for_index (ctx
->stmt
, 0)
5895 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5896 || omp_privatize_by_reference (var
))
5897 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5900 if (omp_privatize_by_reference (var
))
5902 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5903 tree new_vard
= TREE_OPERAND (new_var
, 0);
5904 gcc_assert (DECL_P (new_vard
));
5905 SET_DECL_VALUE_EXPR (new_vard
,
5906 build_fold_addr_expr (lvar
));
5907 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5912 tree iv
= unshare_expr (ivar
);
5914 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
5917 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
5921 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
5923 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
5924 unshare_expr (ivar
), x
);
5928 gimplify_and_add (x
, &llist
[0]);
5929 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5930 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5935 gcc_assert (TREE_CODE (v
) == MEM_REF
);
5936 v
= TREE_OPERAND (v
, 0);
5937 gcc_assert (DECL_P (v
));
5939 v
= *ctx
->lastprivate_conditional_map
->get (v
);
5940 tree t
= create_tmp_var (TREE_TYPE (v
));
5941 tree z
= build_zero_cst (TREE_TYPE (v
));
5943 = build_outer_var_ref (var
, ctx
,
5944 OMP_CLAUSE_LASTPRIVATE
);
5945 gimple_seq_add_stmt (dlist
,
5946 gimple_build_assign (t
, z
));
5947 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
5948 tree civar
= DECL_VALUE_EXPR (v
);
5949 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
5950 civar
= unshare_expr (civar
);
5951 TREE_OPERAND (civar
, 1) = sctx
.idx
;
5952 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
5953 unshare_expr (civar
));
5954 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
5955 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
5956 orig_v
, unshare_expr (ivar
)));
5957 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
5959 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
5961 gimple_seq tseq
= NULL
;
5962 gimplify_and_add (x
, &tseq
);
5964 lower_omp (&tseq
, ctx
->outer
);
5965 gimple_seq_add_seq (&llist
[1], tseq
);
5967 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5968 && ctx
->for_simd_scan_phase
)
5970 x
= unshare_expr (ivar
);
5972 = build_outer_var_ref (var
, ctx
,
5973 OMP_CLAUSE_LASTPRIVATE
);
5974 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5976 gimplify_and_add (x
, &llist
[0]);
5980 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5982 gimplify_and_add (y
, &llist
[1]);
5986 if (omp_privatize_by_reference (var
))
5988 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5989 tree new_vard
= TREE_OPERAND (new_var
, 0);
5990 gcc_assert (DECL_P (new_vard
));
5991 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5992 x
= TYPE_SIZE_UNIT (type
);
5993 if (TREE_CONSTANT (x
))
5995 x
= create_tmp_var_raw (type
, get_name (var
));
5996 gimple_add_tmp_var (x
);
5997 TREE_ADDRESSABLE (x
) = 1;
5998 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5999 x
= fold_convert_loc (clause_loc
,
6000 TREE_TYPE (new_vard
), x
);
6001 gimplify_assign (new_vard
, x
, ilist
);
6006 gimplify_and_add (nx
, ilist
);
6007 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6009 && ctx
->for_simd_scan_phase
)
6011 tree orig_v
= build_outer_var_ref (var
, ctx
,
6012 OMP_CLAUSE_LASTPRIVATE
);
6013 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
6015 gimplify_and_add (x
, ilist
);
6020 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
6022 gimplify_and_add (x
, dlist
);
6025 if (!is_gimple_val (allocator
))
6027 tree avar
= create_tmp_var (TREE_TYPE (allocator
));
6028 gimplify_assign (avar
, allocator
, dlist
);
6031 if (!is_gimple_val (allocate_ptr
))
6033 tree apvar
= create_tmp_var (TREE_TYPE (allocate_ptr
));
6034 gimplify_assign (apvar
, allocate_ptr
, dlist
);
6035 allocate_ptr
= apvar
;
6037 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
6039 = gimple_build_call (f
, 2, allocate_ptr
, allocator
);
6040 gimple_seq_add_stmt (dlist
, g
);
6044 case OMP_CLAUSE_LINEAR
:
6045 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
6046 goto do_firstprivate
;
6047 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
6050 x
= build_outer_var_ref (var
, ctx
);
6053 case OMP_CLAUSE_FIRSTPRIVATE
:
6054 if (is_task_ctx (ctx
))
6056 if ((omp_privatize_by_reference (var
)
6057 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
6058 || is_variable_sized (var
))
6060 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
6062 || use_pointer_for_field (var
, NULL
))
6064 x
= build_receiver_ref (var
, false, ctx
);
6065 if (ctx
->allocate_map
)
6066 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
6068 allocator
= *allocatep
;
6069 if (TREE_CODE (allocator
) == TREE_LIST
)
6070 allocator
= TREE_PURPOSE (allocator
);
6071 if (TREE_CODE (allocator
) != INTEGER_CST
)
6072 allocator
= build_outer_var_ref (allocator
, ctx
);
6073 allocator
= fold_convert (pointer_sized_int_node
,
6075 allocate_ptr
= unshare_expr (x
);
6076 x
= build_simple_mem_ref (x
);
6077 TREE_THIS_NOTRAP (x
) = 1;
6079 SET_DECL_VALUE_EXPR (new_var
, x
);
6080 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
6084 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
6085 && omp_privatize_by_reference (var
))
6087 x
= build_outer_var_ref (var
, ctx
);
6088 gcc_assert (TREE_CODE (x
) == MEM_REF
6089 && integer_zerop (TREE_OPERAND (x
, 1)));
6090 x
= TREE_OPERAND (x
, 0);
6091 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6092 (c
, unshare_expr (new_var
), x
);
6093 gimplify_and_add (x
, ilist
);
6097 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
6098 ilist
, ctx
, false, NULL_TREE
);
6099 x
= build_outer_var_ref (var
, ctx
);
6102 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6103 && gimple_omp_for_combined_into_p (ctx
->stmt
))
6105 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6106 tree stept
= TREE_TYPE (t
);
6107 tree ct
= omp_find_clause (clauses
,
6108 OMP_CLAUSE__LOOPTEMP_
);
6110 tree l
= OMP_CLAUSE_DECL (ct
);
6111 tree n1
= fd
->loop
.n1
;
6112 tree step
= fd
->loop
.step
;
6113 tree itype
= TREE_TYPE (l
);
6114 if (POINTER_TYPE_P (itype
))
6115 itype
= signed_type_for (itype
);
6116 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
6117 if (TYPE_UNSIGNED (itype
)
6118 && fd
->loop
.cond_code
== GT_EXPR
)
6119 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
6120 fold_build1 (NEGATE_EXPR
, itype
, l
),
6121 fold_build1 (NEGATE_EXPR
,
6124 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
6125 t
= fold_build2 (MULT_EXPR
, stept
,
6126 fold_convert (stept
, l
), t
);
6128 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
6130 if (omp_privatize_by_reference (var
))
6132 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6133 tree new_vard
= TREE_OPERAND (new_var
, 0);
6134 gcc_assert (DECL_P (new_vard
));
6135 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6136 nx
= TYPE_SIZE_UNIT (type
);
6137 if (TREE_CONSTANT (nx
))
6139 nx
= create_tmp_var_raw (type
,
6141 gimple_add_tmp_var (nx
);
6142 TREE_ADDRESSABLE (nx
) = 1;
6143 nx
= build_fold_addr_expr_loc (clause_loc
,
6145 nx
= fold_convert_loc (clause_loc
,
6146 TREE_TYPE (new_vard
),
6148 gimplify_assign (new_vard
, nx
, ilist
);
6152 x
= lang_hooks
.decls
.omp_clause_linear_ctor
6154 gimplify_and_add (x
, ilist
);
6158 if (POINTER_TYPE_P (TREE_TYPE (x
)))
6159 x
= fold_build2 (POINTER_PLUS_EXPR
,
6160 TREE_TYPE (x
), x
, t
);
6162 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
6165 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
6166 || TREE_ADDRESSABLE (new_var
)
6167 || omp_privatize_by_reference (var
))
6168 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6171 if (omp_privatize_by_reference (var
))
6173 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6174 tree new_vard
= TREE_OPERAND (new_var
, 0);
6175 gcc_assert (DECL_P (new_vard
));
6176 SET_DECL_VALUE_EXPR (new_vard
,
6177 build_fold_addr_expr (lvar
));
6178 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6180 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
6182 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
6183 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
6184 gimplify_and_add (x
, ilist
);
6185 gimple_stmt_iterator gsi
6186 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
6188 = gimple_build_assign (unshare_expr (lvar
), iv
);
6189 gsi_insert_before_without_update (&gsi
, g
,
6191 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6192 enum tree_code code
= PLUS_EXPR
;
6193 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
6194 code
= POINTER_PLUS_EXPR
;
6195 g
= gimple_build_assign (iv
, code
, iv
, t
);
6196 gsi_insert_before_without_update (&gsi
, g
,
6200 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6201 (c
, unshare_expr (ivar
), x
);
6202 gimplify_and_add (x
, &llist
[0]);
6203 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6205 gimplify_and_add (x
, &llist
[1]);
6208 if (omp_privatize_by_reference (var
))
6210 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6211 tree new_vard
= TREE_OPERAND (new_var
, 0);
6212 gcc_assert (DECL_P (new_vard
));
6213 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6214 nx
= TYPE_SIZE_UNIT (type
);
6215 if (TREE_CONSTANT (nx
))
6217 nx
= create_tmp_var_raw (type
, get_name (var
));
6218 gimple_add_tmp_var (nx
);
6219 TREE_ADDRESSABLE (nx
) = 1;
6220 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
6221 nx
= fold_convert_loc (clause_loc
,
6222 TREE_TYPE (new_vard
), nx
);
6223 gimplify_assign (new_vard
, nx
, ilist
);
6227 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6228 (c
, unshare_expr (new_var
), x
);
6229 gimplify_and_add (x
, ilist
);
6232 case OMP_CLAUSE__LOOPTEMP_
:
6233 case OMP_CLAUSE__REDUCTEMP_
:
6234 gcc_assert (is_taskreg_ctx (ctx
));
6235 x
= build_outer_var_ref (var
, ctx
);
6236 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
6237 gimplify_and_add (x
, ilist
);
6240 case OMP_CLAUSE_COPYIN
:
6241 by_ref
= use_pointer_for_field (var
, NULL
);
6242 x
= build_receiver_ref (var
, by_ref
, ctx
);
6243 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
6244 append_to_statement_list (x
, ©in_seq
);
6245 copyin_by_ref
|= by_ref
;
6248 case OMP_CLAUSE_REDUCTION
:
6249 case OMP_CLAUSE_IN_REDUCTION
:
6250 /* OpenACC reductions are initialized using the
6251 GOACC_REDUCTION internal function. */
6252 if (is_gimple_omp_oacc (ctx
->stmt
))
6254 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6256 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6258 tree ptype
= TREE_TYPE (placeholder
);
6261 x
= error_mark_node
;
6262 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
6263 && !task_reduction_needs_orig_p
)
6265 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
6267 tree pptype
= build_pointer_type (ptype
);
6268 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
6269 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
6270 size_int (task_reduction_cnt_full
6271 + task_reduction_cntorig
- 1),
6272 NULL_TREE
, NULL_TREE
);
6276 = *ctx
->task_reduction_map
->get (c
);
6277 x
= task_reduction_read (ilist
, tskred_temp
,
6278 pptype
, 7 + 3 * idx
);
6280 x
= fold_convert (pptype
, x
);
6281 x
= build_simple_mem_ref (x
);
6286 lower_private_allocate (var
, new_var
, allocator
,
6287 allocate_ptr
, ilist
, ctx
, false,
6289 x
= build_outer_var_ref (var
, ctx
);
6291 if (omp_privatize_by_reference (var
)
6292 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
6293 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6295 SET_DECL_VALUE_EXPR (placeholder
, x
);
6296 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6297 tree new_vard
= new_var
;
6298 if (omp_privatize_by_reference (var
))
6300 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6301 new_vard
= TREE_OPERAND (new_var
, 0);
6302 gcc_assert (DECL_P (new_vard
));
6304 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6306 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6307 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6310 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6314 if (new_vard
== new_var
)
6316 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
6317 SET_DECL_VALUE_EXPR (new_var
, ivar
);
6321 SET_DECL_VALUE_EXPR (new_vard
,
6322 build_fold_addr_expr (ivar
));
6323 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6325 x
= lang_hooks
.decls
.omp_clause_default_ctor
6326 (c
, unshare_expr (ivar
),
6327 build_outer_var_ref (var
, ctx
));
6328 if (rvarp
&& ctx
->for_simd_scan_phase
)
6331 gimplify_and_add (x
, &llist
[0]);
6332 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6334 gimplify_and_add (x
, &llist
[1]);
6341 gimplify_and_add (x
, &llist
[0]);
6343 tree ivar2
= unshare_expr (lvar
);
6344 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6345 x
= lang_hooks
.decls
.omp_clause_default_ctor
6346 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
6347 gimplify_and_add (x
, &llist
[0]);
6351 x
= lang_hooks
.decls
.omp_clause_default_ctor
6352 (c
, unshare_expr (rvar2
),
6353 build_outer_var_ref (var
, ctx
));
6354 gimplify_and_add (x
, &llist
[0]);
6357 /* For types that need construction, add another
6358 private var which will be default constructed
6359 and optionally initialized with
6360 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6361 loop we want to assign this value instead of
6362 constructing and destructing it in each
6364 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
6365 gimple_add_tmp_var (nv
);
6366 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
6370 x
= lang_hooks
.decls
.omp_clause_default_ctor
6371 (c
, nv
, build_outer_var_ref (var
, ctx
));
6372 gimplify_and_add (x
, ilist
);
6374 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6376 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6377 x
= DECL_VALUE_EXPR (new_vard
);
6379 if (new_vard
!= new_var
)
6380 vexpr
= build_fold_addr_expr (nv
);
6381 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6382 lower_omp (&tseq
, ctx
);
6383 SET_DECL_VALUE_EXPR (new_vard
, x
);
6384 gimple_seq_add_seq (ilist
, tseq
);
6385 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6388 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6390 gimplify_and_add (x
, dlist
);
6393 tree ref
= build_outer_var_ref (var
, ctx
);
6394 x
= unshare_expr (ivar
);
6395 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
6397 gimplify_and_add (x
, &llist
[0]);
6399 ref
= build_outer_var_ref (var
, ctx
);
6400 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
6402 gimplify_and_add (x
, &llist
[3]);
6404 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6405 if (new_vard
== new_var
)
6406 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6408 SET_DECL_VALUE_EXPR (new_vard
,
6409 build_fold_addr_expr (lvar
));
6411 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6413 gimplify_and_add (x
, &llist
[1]);
6415 tree ivar2
= unshare_expr (lvar
);
6416 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6417 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
6419 gimplify_and_add (x
, &llist
[1]);
6423 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
6425 gimplify_and_add (x
, &llist
[1]);
6430 gimplify_and_add (x
, &llist
[0]);
6431 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6433 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6434 lower_omp (&tseq
, ctx
);
6435 gimple_seq_add_seq (&llist
[0], tseq
);
6437 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6438 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6439 lower_omp (&tseq
, ctx
);
6440 gimple_seq_add_seq (&llist
[1], tseq
);
6441 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6442 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6443 if (new_vard
== new_var
)
6444 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6446 SET_DECL_VALUE_EXPR (new_vard
,
6447 build_fold_addr_expr (lvar
));
6448 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6450 gimplify_and_add (x
, &llist
[1]);
6453 /* If this is a reference to constant size reduction var
6454 with placeholder, we haven't emitted the initializer
6455 for it because it is undesirable if SIMD arrays are used.
6456 But if they aren't used, we need to emit the deferred
6457 initialization now. */
6458 else if (omp_privatize_by_reference (var
) && is_simd
)
6459 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6461 tree lab2
= NULL_TREE
;
6465 if (!is_parallel_ctx (ctx
))
6467 tree condv
= create_tmp_var (boolean_type_node
);
6468 tree m
= build_simple_mem_ref (cond
);
6469 g
= gimple_build_assign (condv
, m
);
6470 gimple_seq_add_stmt (ilist
, g
);
6472 = create_artificial_label (UNKNOWN_LOCATION
);
6473 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6474 g
= gimple_build_cond (NE_EXPR
, condv
,
6477 gimple_seq_add_stmt (ilist
, g
);
6478 gimple_seq_add_stmt (ilist
,
6479 gimple_build_label (lab1
));
6481 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6483 gimple_seq_add_stmt (ilist
, g
);
6485 x
= lang_hooks
.decls
.omp_clause_default_ctor
6486 (c
, unshare_expr (new_var
),
6488 : build_outer_var_ref (var
, ctx
));
6490 gimplify_and_add (x
, ilist
);
6492 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6493 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6495 if (ctx
->for_simd_scan_phase
)
6498 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
6500 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
6501 gimple_add_tmp_var (nv
);
6502 ctx
->cb
.decl_map
->put (new_vard
, nv
);
6503 x
= lang_hooks
.decls
.omp_clause_default_ctor
6504 (c
, nv
, build_outer_var_ref (var
, ctx
));
6506 gimplify_and_add (x
, ilist
);
6507 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6509 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6511 if (new_vard
!= new_var
)
6512 vexpr
= build_fold_addr_expr (nv
);
6513 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6514 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6515 lower_omp (&tseq
, ctx
);
6516 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
6517 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
6518 gimple_seq_add_seq (ilist
, tseq
);
6520 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6521 if (is_simd
&& ctx
->scan_exclusive
)
6524 = create_tmp_var_raw (TREE_TYPE (new_var
));
6525 gimple_add_tmp_var (nv2
);
6526 ctx
->cb
.decl_map
->put (nv
, nv2
);
6527 x
= lang_hooks
.decls
.omp_clause_default_ctor
6528 (c
, nv2
, build_outer_var_ref (var
, ctx
));
6529 gimplify_and_add (x
, ilist
);
6530 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6532 gimplify_and_add (x
, dlist
);
6534 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6536 gimplify_and_add (x
, dlist
);
6539 && ctx
->scan_exclusive
6540 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
6542 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
6543 gimple_add_tmp_var (nv2
);
6544 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
6545 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6547 gimplify_and_add (x
, dlist
);
6549 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6553 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6555 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6556 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
6557 && is_omp_target (ctx
->stmt
))
6559 tree d
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
6560 tree oldv
= NULL_TREE
;
6562 if (DECL_HAS_VALUE_EXPR_P (d
))
6563 oldv
= DECL_VALUE_EXPR (d
);
6564 SET_DECL_VALUE_EXPR (d
, new_vard
);
6565 DECL_HAS_VALUE_EXPR_P (d
) = 1;
6566 lower_omp (&tseq
, ctx
);
6568 SET_DECL_VALUE_EXPR (d
, oldv
);
6571 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
6572 DECL_HAS_VALUE_EXPR_P (d
) = 0;
6576 lower_omp (&tseq
, ctx
);
6577 gimple_seq_add_seq (ilist
, tseq
);
6579 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6582 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6583 lower_omp (&tseq
, ctx
);
6584 gimple_seq_add_seq (dlist
, tseq
);
6585 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6587 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6591 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6598 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
6599 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
6600 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6605 tree lab2
= NULL_TREE
;
6606 /* GOMP_taskgroup_reduction_register memsets the whole
6607 array to zero. If the initializer is zero, we don't
6608 need to initialize it again, just mark it as ever
6609 used unconditionally, i.e. cond = true. */
6610 if (initializer_zerop (x
))
6612 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6614 gimple_seq_add_stmt (ilist
, g
);
6619 if (!cond) { cond = true; new_var = x; } */
6620 if (!is_parallel_ctx (ctx
))
6622 tree condv
= create_tmp_var (boolean_type_node
);
6623 tree m
= build_simple_mem_ref (cond
);
6624 g
= gimple_build_assign (condv
, m
);
6625 gimple_seq_add_stmt (ilist
, g
);
6627 = create_artificial_label (UNKNOWN_LOCATION
);
6628 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6629 g
= gimple_build_cond (NE_EXPR
, condv
,
6632 gimple_seq_add_stmt (ilist
, g
);
6633 gimple_seq_add_stmt (ilist
,
6634 gimple_build_label (lab1
));
6636 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6638 gimple_seq_add_stmt (ilist
, g
);
6639 gimplify_assign (new_var
, x
, ilist
);
6641 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6645 /* reduction(-:var) sums up the partial results, so it
6646 acts identically to reduction(+:var). */
6647 if (code
== MINUS_EXPR
)
6651 = (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
6652 tree new_vard
= new_var
;
6653 if (is_simd
&& omp_privatize_by_reference (var
))
6655 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6656 new_vard
= TREE_OPERAND (new_var
, 0);
6657 gcc_assert (DECL_P (new_vard
));
6659 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6661 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6662 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6665 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6669 if (new_vard
!= new_var
)
6671 SET_DECL_VALUE_EXPR (new_vard
,
6672 build_fold_addr_expr (lvar
));
6673 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6676 tree ref
= build_outer_var_ref (var
, ctx
);
6680 if (ctx
->for_simd_scan_phase
)
6682 gimplify_assign (ivar
, ref
, &llist
[0]);
6683 ref
= build_outer_var_ref (var
, ctx
);
6684 gimplify_assign (ref
, rvar
, &llist
[3]);
6688 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
6693 simt_lane
= create_tmp_var (unsigned_type_node
);
6694 x
= build_call_expr_internal_loc
6695 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
6696 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
6697 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
6698 gimplify_assign (ivar
, x
, &llist
[2]);
6704 tree zero
= build_zero_cst (TREE_TYPE (ivar
));
6705 ivar2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6706 boolean_type_node
, ivar
,
6708 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6709 boolean_type_node
, ref
,
6712 x
= build2 (code
, TREE_TYPE (ref
), ref2
, ivar2
);
6714 x
= fold_convert (TREE_TYPE (ref
), x
);
6715 ref
= build_outer_var_ref (var
, ctx
);
6716 gimplify_assign (ref
, x
, &llist
[1]);
6721 lower_private_allocate (var
, new_var
, allocator
,
6722 allocate_ptr
, ilist
, ctx
,
6724 if (omp_privatize_by_reference (var
) && is_simd
)
6725 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6726 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6727 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6729 gimplify_assign (new_var
, x
, ilist
);
6732 tree ref
= build_outer_var_ref (var
, ctx
);
6733 tree new_var2
= new_var
;
6737 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
6739 = fold_build2_loc (clause_loc
, NE_EXPR
,
6740 boolean_type_node
, new_var
,
6742 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6743 boolean_type_node
, ref
,
6746 x
= build2 (code
, TREE_TYPE (ref2
), ref2
, new_var2
);
6748 x
= fold_convert (TREE_TYPE (new_var
), x
);
6749 ref
= build_outer_var_ref (var
, ctx
);
6750 gimplify_assign (ref
, x
, dlist
);
6765 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
6766 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
6769 if (known_eq (sctx
.max_vf
, 1U))
6771 sctx
.is_simt
= false;
6772 if (ctx
->lastprivate_conditional_map
)
6774 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
6776 /* Signal to lower_omp_1 that it should use parent context. */
6777 ctx
->combined_into_simd_safelen1
= true;
6778 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6779 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6780 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6782 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6783 omp_context
*outer
= ctx
->outer
;
6784 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
6785 outer
= outer
->outer
;
6786 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
6787 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
6788 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
6794 /* When not vectorized, treat lastprivate(conditional:) like
6795 normal lastprivate, as there will be just one simd lane
6796 writing the privatized variable. */
6797 delete ctx
->lastprivate_conditional_map
;
6798 ctx
->lastprivate_conditional_map
= NULL
;
6803 if (nonconst_simd_if
)
6805 if (sctx
.lane
== NULL_TREE
)
6807 sctx
.idx
= create_tmp_var (unsigned_type_node
);
6808 sctx
.lane
= create_tmp_var (unsigned_type_node
);
6810 /* FIXME: For now. */
6811 sctx
.is_simt
= false;
6814 if (sctx
.lane
|| sctx
.is_simt
)
6816 uid
= create_tmp_var (ptr_type_node
, "simduid");
6817 /* Don't want uninit warnings on simduid, it is always uninitialized,
6818 but we use it not for the value, but for the DECL_UID only. */
6819 suppress_warning (uid
, OPT_Wuninitialized
);
6820 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6821 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6822 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6823 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6825 /* Emit calls denoting privatized variables and initializing a pointer to
6826 structure that holds private variables as fields after ompdevlow pass. */
6829 sctx
.simt_eargs
[0] = uid
;
6831 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6832 gimple_call_set_lhs (g
, uid
);
6833 gimple_seq_add_stmt (ilist
, g
);
6834 sctx
.simt_eargs
.release ();
6836 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6837 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6838 gimple_call_set_lhs (g
, simtrec
);
6839 gimple_seq_add_stmt (ilist
, g
);
6843 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6844 2 + (nonconst_simd_if
!= NULL
),
6845 uid
, integer_zero_node
,
6847 gimple_call_set_lhs (g
, sctx
.lane
);
6848 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
6849 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6850 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6851 build_int_cst (unsigned_type_node
, 0));
6852 gimple_seq_add_stmt (ilist
, g
);
6855 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6857 gimple_call_set_lhs (g
, sctx
.lastlane
);
6858 gimple_seq_add_stmt (dlist
, g
);
6859 gimple_seq_add_seq (dlist
, llist
[3]);
6861 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6864 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6865 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6866 gimple_call_set_lhs (g
, simt_vf
);
6867 gimple_seq_add_stmt (dlist
, g
);
6869 tree t
= build_int_cst (unsigned_type_node
, 1);
6870 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6871 gimple_seq_add_stmt (dlist
, g
);
6873 t
= build_int_cst (unsigned_type_node
, 0);
6874 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6875 gimple_seq_add_stmt (dlist
, g
);
6877 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6878 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6879 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6880 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6881 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6883 gimple_seq_add_seq (dlist
, llist
[2]);
6885 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6886 gimple_seq_add_stmt (dlist
, g
);
6888 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6889 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6890 gimple_seq_add_stmt (dlist
, g
);
6892 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6894 for (int i
= 0; i
< 2; i
++)
6897 tree vf
= create_tmp_var (unsigned_type_node
);
6898 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
6899 gimple_call_set_lhs (g
, vf
);
6900 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
6901 gimple_seq_add_stmt (seq
, g
);
6902 tree t
= build_int_cst (unsigned_type_node
, 0);
6903 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6904 gimple_seq_add_stmt (seq
, g
);
6905 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6906 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6907 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6908 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
6909 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
6910 gimple_seq_add_seq (seq
, llist
[i
]);
6911 t
= build_int_cst (unsigned_type_node
, 1);
6912 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
6913 gimple_seq_add_stmt (seq
, g
);
6914 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
6915 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
6916 gimple_seq_add_stmt (seq
, g
);
6917 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
6922 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
6924 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
6925 gimple_seq_add_stmt (dlist
, g
);
6928 /* The copyin sequence is not to be executed by the main thread, since
6929 that would result in self-copies. Perhaps not visible to scalars,
6930 but it certainly is to C++ operator=. */
6933 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
6935 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
6936 build_int_cst (TREE_TYPE (x
), 0));
6937 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
6938 gimplify_and_add (x
, ilist
);
6941 /* If any copyin variable is passed by reference, we must ensure the
6942 master thread doesn't modify it before it is copied over in all
6943 threads. Similarly for variables in both firstprivate and
6944 lastprivate clauses we need to ensure the lastprivate copying
6945 happens after firstprivate copying in all threads. And similarly
6946 for UDRs if initializer expression refers to omp_orig. */
6947 if (copyin_by_ref
|| lastprivate_firstprivate
6948 || (reduction_omp_orig_ref
6949 && !ctx
->scan_inclusive
6950 && !ctx
->scan_exclusive
))
6952 /* Don't add any barrier for #pragma omp simd or
6953 #pragma omp distribute. */
6954 if (!is_task_ctx (ctx
)
6955 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
6956 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
6957 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
6960 /* If max_vf is non-zero, then we can use only a vectorization factor
6961 up to the max_vf we chose. So stick it into the safelen clause. */
6962 if (maybe_ne (sctx
.max_vf
, 0U))
6964 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
6965 OMP_CLAUSE_SAFELEN
);
6966 poly_uint64 safe_len
;
6968 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
6969 && maybe_gt (safe_len
, sctx
.max_vf
)))
6971 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
6972 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
6974 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6975 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6980 /* Create temporary variables for lastprivate(conditional:) implementation
6981 in context CTX with CLAUSES. */
/* lower_lastprivate_conditional_clauses: create the temporaries needed to
   implement the OpenMP lastprivate(conditional:) modifier in context CTX,
   scanning and extending the clause chain *CLAUSES, and populating
   ctx->lastprivate_conditional_map (privatized decl -> condition temporary).
   NOTE(review): this chunk is an extraction artifact -- each original source
   line is split across several physical lines and some original lines are
   missing entirely (the embedded numbering jumps, e.g. 6985, 6995-6997,
   7001, 7008, 7013, 7015, 7020-7021 are absent).  The code below is kept
   byte-identical to the extracted text; it is NOT compilable as-is and the
   elided lines must be restored from the upstream GCC sources.  */
6984 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
6986 tree iter_type
= NULL_TREE
;
6987 tree cond_ptr
= NULL_TREE
;
6988 tree iter_var
= NULL_TREE
;
/* True for a simd worksharing loop; selects the _condtemp_-based setup
   in the first branch below.  */
6989 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6990 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
6991 tree next
= *clauses
;
/* Walk the clause chain, handling each lastprivate clause that carries
   the 'conditional' modifier.  */
6992 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6993 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6994 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
/* Locate the matching _condtemp_ clause for this conditional
   lastprivate.  (NOTE(review): the guard originally between lines 6994
   and 6998 is elided.)  */
6998 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
/* First conditional clause seen: derive the iterator type from the
   _condtemp_ decl and build an iterator temporary, chained onto the
   context's block vars.  */
7000 if (iter_type
== NULL_TREE
)
7002 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
7003 iter_var
= create_tmp_var_raw (iter_type
);
7004 DECL_CONTEXT (iter_var
) = current_function_decl
;
7005 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
7006 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
7007 ctx
->block_vars
= iter_var
;
/* Build an iterator-carrying _condtemp_ clause and splice it in at the
   head of *CLAUSES.  (NOTE(review): the declaration receiving this
   build_omp_clause result -- presumably 'tree c3' on the elided line
   7008 -- is missing from this extraction.)  */
7009 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
7010 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
7011 OMP_CLAUSE_DECL (c3
) = iter_var
;
7012 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
7014 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
7016 next
= OMP_CLAUSE_CHAIN (cc
);
/* Record the mapping from the privatized decl to its per-variable
   condition temporary.  */
7017 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7018 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
7019 ctx
->lastprivate_conditional_map
->put (o
, v
);
/* No iterator type was established above: derive it from the construct
   itself -- a worksharing loop (via omp_extract_for_data) or sections --
   and create the condition buffer / iterator temporaries here.
   (NOTE(review): lines 7023, 7025, 7028, 7030, 7034-7036 and others in
   this region are elided; the branch structure is incomplete.)  */
7022 if (iter_type
== NULL
)
7024 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
7026 struct omp_for_data fd
;
7027 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
7029 iter_type
= unsigned_type_for (fd
.iter_type
);
7031 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
7032 iter_type
= unsigned_type_node
;
7033 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
7037 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
7038 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
/* Otherwise create a fresh pointer-to-iter_type condition buffer and a
   fresh _condtemp_ clause referring to it.  */
7042 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
7043 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
7044 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
7045 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
7046 ctx
->block_vars
= cond_ptr
;
7047 c2
= build_omp_clause (UNKNOWN_LOCATION
,
7048 OMP_CLAUSE__CONDTEMP_
);
7049 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
7050 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
7053 iter_var
= create_tmp_var_raw (iter_type
);
7054 DECL_CONTEXT (iter_var
) = current_function_decl
;
7055 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
7056 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
7057 ctx
->block_vars
= iter_var
;
/* Chain an iterator _condtemp_ clause (c3) directly after c2.
   (NOTE(review): the 'tree c3' declaration on the elided line 7058 is
   missing.)  */
7059 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
7060 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
7061 OMP_CLAUSE_DECL (c3
) = iter_var
;
7062 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
7063 OMP_CLAUSE_CHAIN (c2
) = c3
;
7064 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
/* For each conditional lastprivate clause, create its per-variable
   temporary of the iterator type and record it in the map.  */
7066 tree v
= create_tmp_var_raw (iter_type
);
7067 DECL_CONTEXT (v
) = current_function_decl
;
7068 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
7069 DECL_CHAIN (v
) = ctx
->block_vars
;
7070 ctx
->block_vars
= v
;
7071 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7072 ctx
->lastprivate_conditional_map
->put (o
, v
);
7077 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7078 both parallel and workshare constructs. PREDICATE may be NULL if it's
7079 always true. BODY_P is the sequence to insert early initialization
7080 if needed, STMT_LIST is where the non-conditional lastprivate handling
7081 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
/* lower_lastprivate_clauses: emit the copy-out code implementing
   lastprivate clauses and copy-out linear clauses.  PREDICATE (may be
   NULL) guards the stores so only the thread/lane that executed the last
   iteration performs them; BODY_P receives early initialization,
   STMT_LIST the normal copy-out, CSTMT_LIST the part that must run inside
   a critical section.
   NOTE(review): extraction artifact -- original lines are split across
   physical lines and many are elided (the signature's final parameter
   line 7087, plus e.g. 7100, 7107-7112, 7116-7119, 7126, 7128-7132,
   7144-7147, 7151-7153, 7159, 7164-7166 are missing).  The code below is
   byte-identical to the extracted text and NOT compilable as-is.  */
7085 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
7086 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
7089 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
7090 bool par_clauses
= false;
7091 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
7092 unsigned HOST_WIDE_INT conditional_off
= 0;
7093 gimple_seq post_stmt_list
= NULL
;
7095 /* Early exit if there are no lastprivate or linear clauses. */
7096 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
7097 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
7098 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
7099 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
7101 if (clauses
== NULL
)
7103 /* If this was a workshare clause, see if it had been combined
7104 with its parallel. In that case, look for the clauses on the
7105 parallel statement itself. */
7106 if (is_parallel_ctx (ctx
))
7110 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7113 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7114 OMP_CLAUSE_LASTPRIVATE
);
7115 if (clauses
== NULL
)
/* Detect SIMT-capable simd loops via the _simt_ / _simduid_ helper
   clauses added earlier.  */
7120 bool maybe_simt
= false;
7121 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7122 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7124 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
7125 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
7127 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
/* Build the guarding condition from PREDICATE (decomposing a comparison
   into its arms, otherwise comparing against false).  For SIMT, the
   per-lane condition is combined with IFN_GOMP_SIMT_VOTE_ANY --
   presumably so all lanes agree on taking the copy-out path; confirm
   against the internal-fn documentation.  */
7133 tree label_true
, arm1
, arm2
;
7134 enum tree_code pred_code
= TREE_CODE (predicate
);
7136 label
= create_artificial_label (UNKNOWN_LOCATION
);
7137 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
7138 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
7140 arm1
= TREE_OPERAND (predicate
, 0);
7141 arm2
= TREE_OPERAND (predicate
, 1);
7142 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7143 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7148 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7149 arm2
= boolean_false_node
;
7150 pred_code
= NE_EXPR
;
7154 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
7155 c
= fold_convert (integer_type_node
, c
);
7156 simtcond
= create_tmp_var (integer_type_node
);
7157 gimplify_assign (simtcond
, c
, stmt_list
);
7158 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
7160 c
= create_tmp_var (integer_type_node
);
7161 gimple_call_set_lhs (g
, c
);
7162 gimple_seq_add_stmt (stmt_list
, g
);
7163 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
7167 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
7168 gimple_seq_add_stmt (stmt_list
, stmt
);
7169 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
/* Main loop over the clause chain.  */
7172 tree cond_ptr
= NULL_TREE
;
7173 for (c
= clauses
; c
;)
7176 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7177 gimple_seq
*this_stmt_list
= stmt_list
;
7178 tree lab2
= NULL_TREE
;
/* lastprivate(conditional:): zero the condition temporary early (in
   BODY_P), then -- inside CSTMT_LIST, the critical-section sequence --
   store into the shared buffer slot only when the candidate iteration
   value V is strictly greater (GT_EXPR) than the value already there.  */
7180 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7181 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7182 && ctx
->lastprivate_conditional_map
7183 && !ctx
->combined_into_simd_safelen1
)
7185 gcc_assert (body_p
);
7188 if (cond_ptr
== NULL_TREE
)
7190 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
7191 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
7193 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
7194 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7195 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
7196 gimplify_assign (v
, build_zero_cst (type
), body_p
);
7197 this_stmt_list
= cstmt_list
;
/* The buffer slot is either a MEM_REF off a pointer (byte offset
   accumulated in conditional_off) or an ARRAY_REF element.  */
7199 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
7201 mem
= build2 (MEM_REF
, type
, cond_ptr
,
7202 build_int_cst (TREE_TYPE (cond_ptr
),
7204 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
7207 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
7208 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
7209 tree mem2
= copy_node (mem
);
7210 gimple_seq seq
= NULL
;
7211 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
7212 gimple_seq_add_seq (this_stmt_list
, seq
);
7213 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
7214 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
7215 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
7216 gimple_seq_add_stmt (this_stmt_list
, g
);
7217 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
7218 gimplify_assign (mem2
, v
, this_stmt_list
);
/* Conditional lastprivate in the combined-into-simd-safelen1 case is
   deferred to POST_STMT_LIST.  (NOTE(review): the leading condition of
   this else-if, original lines 7219-7220, is elided.)  */
7221 && ctx
->combined_into_simd_safelen1
7222 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7223 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7224 && ctx
->lastprivate_conditional_map
)
7225 this_stmt_list
= &post_stmt_list
;
7227 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7228 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7229 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
7231 var
= OMP_CLAUSE_DECL (c
);
/* Taskloop firstprivate+lastprivate: the decl lives in the enclosing
   task context.  */
7232 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7233 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
7234 && is_taskloop_ctx (ctx
))
7236 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
7237 new_var
= lookup_decl (var
, ctx
->outer
);
7241 new_var
= lookup_decl (var
, ctx
);
7242 /* Avoid uninitialized warnings for lastprivate and
7243 for linear iterators. */
7245 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7246 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
7247 suppress_warning (new_var
, OPT_Wuninitialized
);
/* Privatized "omp simd array" variables: read the element written by
   the last lane, obtained via IFN_GOMP_SIMD_LAST_LANE.  */
7250 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
7252 tree val
= DECL_VALUE_EXPR (new_var
);
7253 if (TREE_CODE (val
) == ARRAY_REF
7254 && VAR_P (TREE_OPERAND (val
, 0))
7255 && lookup_attribute ("omp simd array",
7256 DECL_ATTRIBUTES (TREE_OPERAND (val
,
7259 if (lastlane
== NULL
)
7261 lastlane
= create_tmp_var (unsigned_type_node
);
7263 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
7265 TREE_OPERAND (val
, 1));
7266 gimple_call_set_lhs (g
, lastlane
);
7267 gimple_seq_add_stmt (this_stmt_list
, g
);
7269 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
7270 TREE_OPERAND (val
, 0), lastlane
,
7271 NULL_TREE
, NULL_TREE
);
7272 TREE_THIS_NOTRAP (new_var
) = 1;
/* SIMT: fetch the value from the last active lane with
   IFN_GOMP_SIMT_LAST_LANE + IFN_GOMP_SIMT_XCHG_IDX.  */
7275 else if (maybe_simt
)
7277 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
7278 ? DECL_VALUE_EXPR (new_var
)
7280 if (simtlast
== NULL
)
7282 simtlast
= create_tmp_var (unsigned_type_node
);
7283 gcall
*g
= gimple_build_call_internal
7284 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
7285 gimple_call_set_lhs (g
, simtlast
);
7286 gimple_seq_add_stmt (this_stmt_list
, g
);
7288 x
= build_call_expr_internal_loc
7289 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
7290 TREE_TYPE (val
), 2, val
, simtlast
);
7291 new_var
= unshare_expr (new_var
);
7292 gimplify_assign (new_var
, x
, this_stmt_list
);
7293 new_var
= unshare_expr (new_var
);
/* Clause-attached lowering sequences (UDR-style) are lowered and
   spliced in, then cleared so they are not emitted twice.  */
7296 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7297 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
7299 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
7300 gimple_seq_add_seq (this_stmt_list
,
7301 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
7302 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
7304 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7305 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
7307 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
7308 gimple_seq_add_seq (this_stmt_list
,
7309 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
7310 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
7314 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7315 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)
7316 && is_taskloop_ctx (ctx
))
7318 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
7320 if (is_global_var (ovar
))
/* Copy the privatized value out to the outer reference via the
   language hook for the clause's assignment operation.  */
7324 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
7325 if (omp_privatize_by_reference (var
))
7326 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7327 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
7328 gimplify_and_add (x
, this_stmt_list
);
7331 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
7335 c
= OMP_CLAUSE_CHAIN (c
);
7336 if (c
== NULL
&& !par_clauses
)
7338 /* If this was a workshare clause, see if it had been combined
7339 with its parallel. In that case, continue looking for the
7340 clauses also on the parallel statement itself. */
7341 if (is_parallel_ctx (ctx
))
7345 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7348 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7349 OMP_CLAUSE_LASTPRIVATE
);
/* Emit the skip label closing the guarded region, then the deferred
   (post) stores.  */
7355 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
7356 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
7359 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7360 (which might be a placeholder). INNER is true if this is an inner
7361 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7362 join markers. Generate the before-loop forking sequence in
7363 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7364 general form of these sequences is
7366 GOACC_REDUCTION_SETUP
7368 GOACC_REDUCTION_INIT
7370 GOACC_REDUCTION_FINI
7372 GOACC_REDUCTION_TEARDOWN. */
/* lower_oacc_reductions: lower the OpenACC 'reduction' clauses in CLAUSES
   for compute axis LEVEL by emitting one IFN_GOACC_REDUCTION internal call
   per phase (SETUP / INIT / FINI / TEARDOWN) per clause, then stitching
   those calls around the FORK and JOIN markers into FORK_SEQ / JOIN_SEQ.
   NOTE(review): extraction artifact -- original lines are split across
   physical lines and many are elided (the final parameter line 7378 with
   the context argument, the MINUS_EXPR replacement at 7404, the switch
   header and other cases around 7424-7447, the v1/v2/v3 declarations at
   7554/7559/7564/7569, etc.).  The code below is byte-identical to the
   extracted text and NOT compilable as-is.  */
7375 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
7376 gcall
*fork
, gcall
*private_marker
, gcall
*join
,
7377 gimple_seq
*fork_seq
, gimple_seq
*join_seq
,
7380 gimple_seq before_fork
= NULL
;
7381 gimple_seq after_fork
= NULL
;
7382 gimple_seq before_join
= NULL
;
7383 gimple_seq after_join
= NULL
;
7384 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
7385 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
7386 unsigned offset
= 0;
/* Process each reduction clause in turn.  */
7388 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7389 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
7391 /* No 'reduction' clauses on OpenACC 'kernels'. */
7392 gcc_checking_assert (!is_oacc_kernels (ctx
));
7393 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7394 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
7396 tree orig
= OMP_CLAUSE_DECL (c
);
7397 tree var
= maybe_lookup_decl (orig
, ctx
);
7398 tree ref_to_res
= NULL_TREE
;
7399 tree incoming
, outgoing
, v1
, v2
, v3
;
7400 bool is_private
= false;
/* Canonicalize the reduction operator: logical &&/|| are lowered with
   their bitwise counterparts; '-' is handled in the elided line 7404
   (presumably rewritten to '+', matching the comment in the sibling
   lower_reduction_clauses -- confirm against upstream).  OP encodes the
   final tree code as an unsigned constant for the internal fn.  */
7402 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
7403 if (rcode
== MINUS_EXPR
)
7405 else if (rcode
== TRUTH_ANDIF_EXPR
)
7406 rcode
= BIT_AND_EXPR
;
7407 else if (rcode
== TRUTH_ORIF_EXPR
)
7408 rcode
= BIT_IOR_EXPR
;
7409 tree op
= build_int_cst (unsigned_type_node
, rcode
);
7414 incoming
= outgoing
= var
;
7418 /* See if an outer construct also reduces this variable. */
7419 omp_context
*outer
= ctx
;
7421 while (omp_context
*probe
= outer
->outer
)
7423 enum gimple_code type
= gimple_code (probe
->stmt
);
/* Walk outward through enclosing contexts, collecting the clause list
   to scan (loop clauses or offloaded-target clauses).  (NOTE(review):
   the enclosing switch header and several cases are elided.)  */
7428 case GIMPLE_OMP_FOR
:
7429 cls
= gimple_omp_for_clauses (probe
->stmt
);
7432 case GIMPLE_OMP_TARGET
:
7433 /* No 'reduction' clauses inside OpenACC 'kernels'
7435 gcc_checking_assert (!is_oacc_kernels (probe
));
7437 if (!is_gimple_omp_offloaded (probe
->stmt
))
7440 cls
= gimple_omp_target_clauses (probe
->stmt
);
7448 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
7449 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
7450 && orig
== OMP_CLAUSE_DECL (cls
))
7452 incoming
= outgoing
= lookup_decl (orig
, probe
);
7453 goto has_outer_reduction
;
7455 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
7456 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
7457 && orig
== OMP_CLAUSE_DECL (cls
))
7465 /* This is the outermost construct with this reduction,
7466 see if there's a mapping for it. */
7467 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
7468 && maybe_lookup_field (orig
, outer
) && !is_private
)
7470 ref_to_res
= build_receiver_ref (orig
, false, outer
);
7471 if (omp_privatize_by_reference (orig
))
7472 ref_to_res
= build_simple_mem_ref (ref_to_res
);
7474 tree type
= TREE_TYPE (var
);
7475 if (POINTER_TYPE_P (type
))
7476 type
= TREE_TYPE (type
);
/* Seed the incoming value with the operator's identity element.  */
7479 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
7483 /* Try to look at enclosing contexts for reduction var,
7484 use original if no mapping found. */
7486 omp_context
*c
= ctx
->outer
;
7489 t
= maybe_lookup_decl (orig
, c
);
7492 incoming
= outgoing
= (t
? t
: orig
);
7495 has_outer_reduction
:;
7499 ref_to_res
= integer_zero_node
;
/* By-reference reductions: materialize local copies v1/v2/v3 of the
   pointer, then operate through dereferences of each.  */
7501 if (omp_privatize_by_reference (orig
))
7503 tree type
= TREE_TYPE (var
);
7504 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
7508 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
7509 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
7512 v1
= create_tmp_var (type
, id
);
7513 v2
= create_tmp_var (type
, id
);
7514 v3
= create_tmp_var (type
, id
);
7516 gimplify_assign (v1
, var
, fork_seq
);
7517 gimplify_assign (v2
, var
, fork_seq
);
7518 gimplify_assign (v3
, var
, fork_seq
);
7520 var
= build_simple_mem_ref (var
);
7521 v1
= build_simple_mem_ref (v1
);
7522 v2
= build_simple_mem_ref (v2
);
7523 v3
= build_simple_mem_ref (v3
);
7524 outgoing
= build_simple_mem_ref (outgoing
);
7526 if (!TREE_CONSTANT (incoming
))
7527 incoming
= build_simple_mem_ref (incoming
);
7532 /* Determine position in reduction buffer, which may be used
7533 by target. The parser has ensured that this is not a
7534 variable-sized type. */
7535 fixed_size_mode mode
7536 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
7537 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
7538 offset
= (offset
+ align
- 1) & ~(align
- 1);
7539 tree off
= build_int_cst (sizetype
, offset
);
7540 offset
+= GET_MODE_SIZE (mode
);
/* Encode which GOACC_REDUCTION phase each of the four calls performs.  */
7544 init_code
= build_int_cst (integer_type_node
,
7545 IFN_GOACC_REDUCTION_INIT
);
7546 fini_code
= build_int_cst (integer_type_node
,
7547 IFN_GOACC_REDUCTION_FINI
);
7548 setup_code
= build_int_cst (integer_type_node
,
7549 IFN_GOACC_REDUCTION_SETUP
);
7550 teardown_code
= build_int_cst (integer_type_node
,
7551 IFN_GOACC_REDUCTION_TEARDOWN
);
/* Build the four IFN_GOACC_REDUCTION calls (setup/init/fini/teardown).
   (NOTE(review): the lines declaring the receiving variables --
   setup_call, init_call, fini_call, teardown_call on the elided lines
   7554/7559/7564/7569 -- are missing from this extraction.)  */
7555 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7556 TREE_TYPE (var
), 6, setup_code
,
7557 unshare_expr (ref_to_res
),
7558 incoming
, level
, op
, off
);
7560 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7561 TREE_TYPE (var
), 6, init_code
,
7562 unshare_expr (ref_to_res
),
7563 v1
, level
, op
, off
);
7565 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7566 TREE_TYPE (var
), 6, fini_code
,
7567 unshare_expr (ref_to_res
),
7568 v2
, level
, op
, off
);
7570 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7571 TREE_TYPE (var
), 6, teardown_code
,
7572 ref_to_res
, v3
, level
, op
, off
);
/* Wire the four phase calls around the fork/join points.  */
7574 gimplify_assign (v1
, setup_call
, &before_fork
);
7575 gimplify_assign (v2
, init_call
, &after_fork
);
7576 gimplify_assign (v3
, fini_call
, &before_join
);
7577 gimplify_assign (outgoing
, teardown_call
, &after_join
);
7580 /* Now stitch things together. */
7581 gimple_seq_add_seq (fork_seq
, before_fork
);
7583 gimple_seq_add_stmt (fork_seq
, private_marker
);
7585 gimple_seq_add_stmt (fork_seq
, fork
);
7586 gimple_seq_add_seq (fork_seq
, after_fork
);
7588 gimple_seq_add_seq (join_seq
, before_join
);
7590 gimple_seq_add_stmt (join_seq
, join
);
7591 gimple_seq_add_seq (join_seq
, after_join
);
7594 /* Generate code to implement the REDUCTION clauses, append it
7595 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7596 that should be emitted also inside of the critical section,
7597 in that case clear *CLIST afterwards, otherwise leave it as is
7598 and let the caller emit it itself. */
7601 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
7602 gimple_seq
*clist
, omp_context
*ctx
)
7604 gimple_seq sub_seq
= NULL
;
7609 /* OpenACC loop reductions are handled elsewhere. */
7610 if (is_gimple_omp_oacc (ctx
->stmt
))
7613 /* SIMD reductions are handled in lower_rec_input_clauses. */
7614 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7615 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7618 /* inscan reductions are handled elsewhere. */
7619 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
7622 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7623 update in that case, otherwise use a lock. */
7624 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
7625 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7626 && !OMP_CLAUSE_REDUCTION_TASK (c
))
7628 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
7629 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7631 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7641 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7643 tree var
, ref
, new_var
, orig_var
;
7644 enum tree_code code
;
7645 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7647 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7648 || OMP_CLAUSE_REDUCTION_TASK (c
))
7651 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
7652 orig_var
= var
= OMP_CLAUSE_DECL (c
);
7653 if (TREE_CODE (var
) == MEM_REF
)
7655 var
= TREE_OPERAND (var
, 0);
7656 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7657 var
= TREE_OPERAND (var
, 0);
7658 if (TREE_CODE (var
) == ADDR_EXPR
)
7659 var
= TREE_OPERAND (var
, 0);
7662 /* If this is a pointer or referenced based array
7663 section, the var could be private in the outer
7664 context e.g. on orphaned loop construct. Pretend this
7665 is private variable's outer reference. */
7666 ccode
= OMP_CLAUSE_PRIVATE
;
7667 if (TREE_CODE (var
) == INDIRECT_REF
)
7668 var
= TREE_OPERAND (var
, 0);
7671 if (is_variable_sized (var
))
7673 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7674 var
= DECL_VALUE_EXPR (var
);
7675 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
7676 var
= TREE_OPERAND (var
, 0);
7677 gcc_assert (DECL_P (var
));
7680 new_var
= lookup_decl (var
, ctx
);
7681 if (var
== OMP_CLAUSE_DECL (c
)
7682 && omp_privatize_by_reference (var
))
7683 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7684 ref
= build_outer_var_ref (var
, ctx
, ccode
);
7685 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
7687 /* reduction(-:var) sums up the partial results, so it acts
7688 identically to reduction(+:var). */
7689 if (code
== MINUS_EXPR
)
7692 bool is_truth_op
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
7695 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
7697 addr
= save_expr (addr
);
7698 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
7699 tree new_var2
= new_var
;
7703 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7704 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7705 boolean_type_node
, new_var
, zero
);
7706 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7709 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (new_var2
), ref2
,
7712 x
= fold_convert (TREE_TYPE (new_var
), x
);
7713 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
7714 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
7715 gimplify_and_add (x
, stmt_seqp
);
7718 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7720 tree d
= OMP_CLAUSE_DECL (c
);
7721 tree type
= TREE_TYPE (d
);
7722 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7723 tree i
= create_tmp_var (TREE_TYPE (v
));
7724 tree ptype
= build_pointer_type (TREE_TYPE (type
));
7725 tree bias
= TREE_OPERAND (d
, 1);
7726 d
= TREE_OPERAND (d
, 0);
7727 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
7729 tree b
= TREE_OPERAND (d
, 1);
7730 b
= maybe_lookup_decl (b
, ctx
);
7733 b
= TREE_OPERAND (d
, 1);
7734 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
7736 if (integer_zerop (bias
))
7740 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
7741 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
7742 TREE_TYPE (b
), b
, bias
);
7744 d
= TREE_OPERAND (d
, 0);
7746 /* For ref build_outer_var_ref already performs this, so
7747 only new_var needs a dereference. */
7748 if (TREE_CODE (d
) == INDIRECT_REF
)
7750 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7751 gcc_assert (omp_privatize_by_reference (var
)
7752 && var
== orig_var
);
7754 else if (TREE_CODE (d
) == ADDR_EXPR
)
7756 if (orig_var
== var
)
7758 new_var
= build_fold_addr_expr (new_var
);
7759 ref
= build_fold_addr_expr (ref
);
7764 gcc_assert (orig_var
== var
);
7765 if (omp_privatize_by_reference (var
))
7766 ref
= build_fold_addr_expr (ref
);
7770 tree t
= maybe_lookup_decl (v
, ctx
);
7774 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7775 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
7777 if (!integer_zerop (bias
))
7779 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
7780 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7781 TREE_TYPE (new_var
), new_var
,
7782 unshare_expr (bias
));
7783 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7784 TREE_TYPE (ref
), ref
, bias
);
7786 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
7787 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
7788 tree m
= create_tmp_var (ptype
);
7789 gimplify_assign (m
, new_var
, stmt_seqp
);
7791 m
= create_tmp_var (ptype
);
7792 gimplify_assign (m
, ref
, stmt_seqp
);
7794 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
7795 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7796 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7797 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
7798 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7799 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
7800 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7802 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7803 tree decl_placeholder
7804 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7805 SET_DECL_VALUE_EXPR (placeholder
, out
);
7806 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7807 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7808 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7809 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7810 gimple_seq_add_seq (&sub_seq
,
7811 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7812 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7813 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7814 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7822 tree zero
= build_zero_cst (TREE_TYPE (out
));
7823 out2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7824 boolean_type_node
, out
, zero
);
7825 priv2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7826 boolean_type_node
, priv
, zero
);
7828 x
= build2 (code
, TREE_TYPE (out2
), out2
, priv2
);
7830 x
= fold_convert (TREE_TYPE (out
), x
);
7831 out
= unshare_expr (out
);
7832 gimplify_assign (out
, x
, &sub_seq
);
7834 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7835 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7836 gimple_seq_add_stmt (&sub_seq
, g
);
7837 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7838 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7839 gimple_seq_add_stmt (&sub_seq
, g
);
7840 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7841 build_int_cst (TREE_TYPE (i
), 1));
7842 gimple_seq_add_stmt (&sub_seq
, g
);
7843 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
7844 gimple_seq_add_stmt (&sub_seq
, g
);
7845 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
7847 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7849 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7851 if (omp_privatize_by_reference (var
)
7852 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7854 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7855 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7856 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7857 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7858 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7859 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7860 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7864 tree new_var2
= new_var
;
7868 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7869 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7870 boolean_type_node
, new_var
, zero
);
7871 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7874 x
= build2 (code
, TREE_TYPE (ref
), ref2
, new_var2
);
7876 x
= fold_convert (TREE_TYPE (new_var
), x
);
7877 ref
= build_outer_var_ref (var
, ctx
);
7878 gimplify_assign (ref
, x
, &sub_seq
);
7882 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7884 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7886 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7890 gimple_seq_add_seq (stmt_seqp
, *clist
);
7894 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
7896 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7900 /* Generate code to implement the COPYPRIVATE clauses. */
/* Generate code to implement COPYPRIVATE clauses.  For each such clause,
   marshal the variable's address (or value) into the sender record
   (code emitted on SLIST) and copy the broadcast value back out of the
   receiver record (code emitted on RLIST) via the langhook assignment.
   NOTE(review): this chunk is a garbled extraction; some original lines
   (braces, a few statements) were dropped, so the code text is kept
   byte-for-byte as found.  */
7903 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
/* Walk the whole clause chain; non-COPYPRIVATE clauses are skipped
   inside the loop.  */
7908 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7910 tree var
, new_var
, ref
, x
;
7912 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7914 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
7917 var
= OMP_CLAUSE_DECL (c
);
7918 by_ref
= use_pointer_for_field (var
, NULL
);
/* Sender side: store the variable (or its address) into the
   communication record.  */
7920 ref
= build_sender_ref (var
, ctx
);
7921 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
7924 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
7925 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
7927 gimplify_assign (ref
, x
, slist
);
/* Receiver side: read the value back out of the record.  */
7929 ref
= build_receiver_ref (var
, false, ctx
);
7932 ref
= fold_convert_loc (clause_loc
,
7933 build_pointer_type (TREE_TYPE (new_var
)),
7935 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
/* For by-reference privatized variables, dereference one more level
   on both sides before assigning.  */
7937 if (omp_privatize_by_reference (var
))
7939 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
7940 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
7941 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
/* Let the language hook build the actual copy assignment (handles
   e.g. C++ copy-assignment semantics).  */
7943 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
7944 gimplify_and_add (x
, rlist
);
7949 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7950 and REDUCTION from the sender (aka parent) side. */
7953 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
7957 int ignored_looptemp
= 0;
7958 bool is_taskloop
= false;
7960 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7961 by GOMP_taskloop. */
7962 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
7964 ignored_looptemp
= 2;
7968 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7970 tree val
, ref
, x
, var
;
7971 bool by_ref
, do_in
= false, do_out
= false;
7972 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7974 switch (OMP_CLAUSE_CODE (c
))
7976 case OMP_CLAUSE_PRIVATE
:
7977 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7980 case OMP_CLAUSE_FIRSTPRIVATE
:
7981 case OMP_CLAUSE_COPYIN
:
7982 case OMP_CLAUSE_LASTPRIVATE
:
7983 case OMP_CLAUSE_IN_REDUCTION
:
7984 case OMP_CLAUSE__REDUCTEMP_
:
7986 case OMP_CLAUSE_REDUCTION
:
7987 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
7990 case OMP_CLAUSE_SHARED
:
7991 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7994 case OMP_CLAUSE__LOOPTEMP_
:
7995 if (ignored_looptemp
)
8005 val
= OMP_CLAUSE_DECL (c
);
8006 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
8007 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
8008 && TREE_CODE (val
) == MEM_REF
)
8010 val
= TREE_OPERAND (val
, 0);
8011 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
8012 val
= TREE_OPERAND (val
, 0);
8013 if (TREE_CODE (val
) == INDIRECT_REF
8014 || TREE_CODE (val
) == ADDR_EXPR
)
8015 val
= TREE_OPERAND (val
, 0);
8016 if (is_variable_sized (val
))
8020 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8021 outer taskloop region. */
8022 omp_context
*ctx_for_o
= ctx
;
8024 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8025 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
8026 ctx_for_o
= ctx
->outer
;
8028 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
8030 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
8031 && is_global_var (var
)
8032 && (val
== OMP_CLAUSE_DECL (c
)
8033 || !is_task_ctx (ctx
)
8034 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
8035 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
8036 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
8037 != POINTER_TYPE
)))))
8040 t
= omp_member_access_dummy_var (var
);
8043 var
= DECL_VALUE_EXPR (var
);
8044 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
8046 var
= unshare_and_remap (var
, t
, o
);
8048 var
= unshare_expr (var
);
8051 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
8053 /* Handle taskloop firstprivate/lastprivate, where the
8054 lastprivate on GIMPLE_OMP_TASK is represented as
8055 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8056 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
8057 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
8058 if (use_pointer_for_field (val
, ctx
))
8059 var
= build_fold_addr_expr (var
);
8060 gimplify_assign (x
, var
, ilist
);
8061 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
8065 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
8066 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
8067 || val
== OMP_CLAUSE_DECL (c
))
8068 && is_variable_sized (val
))
8070 by_ref
= use_pointer_for_field (val
, NULL
);
8072 switch (OMP_CLAUSE_CODE (c
))
8074 case OMP_CLAUSE_FIRSTPRIVATE
:
8075 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
8077 && is_task_ctx (ctx
))
8078 suppress_warning (var
);
8082 case OMP_CLAUSE_PRIVATE
:
8083 case OMP_CLAUSE_COPYIN
:
8084 case OMP_CLAUSE__LOOPTEMP_
:
8085 case OMP_CLAUSE__REDUCTEMP_
:
8089 case OMP_CLAUSE_LASTPRIVATE
:
8090 if (by_ref
|| omp_privatize_by_reference (val
))
8092 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
8099 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
8104 case OMP_CLAUSE_REDUCTION
:
8105 case OMP_CLAUSE_IN_REDUCTION
:
8107 if (val
== OMP_CLAUSE_DECL (c
))
8109 if (is_task_ctx (ctx
))
8110 by_ref
= use_pointer_for_field (val
, ctx
);
8112 do_out
= !(by_ref
|| omp_privatize_by_reference (val
));
8115 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
8124 ref
= build_sender_ref (val
, ctx
);
8125 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
8126 gimplify_assign (ref
, x
, ilist
);
8127 if (is_task_ctx (ctx
))
8128 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
8133 ref
= build_sender_ref (val
, ctx
);
8134 gimplify_assign (var
, ref
, olist
);
8139 /* Generate code to implement SHARED from the sender (aka parent)
8140 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8141 list things that got automatically shared. */
8144 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
8146 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
8148 if (ctx
->record_type
== NULL
)
8151 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
8152 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
8154 ovar
= DECL_ABSTRACT_ORIGIN (f
);
8155 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
8158 nvar
= maybe_lookup_decl (ovar
, ctx
);
8160 || !DECL_HAS_VALUE_EXPR_P (nvar
)
8161 || (ctx
->allocate_map
8162 && ctx
->allocate_map
->get (ovar
)))
8165 /* If CTX is a nested parallel directive. Find the immediately
8166 enclosing parallel or workshare construct that contains a
8167 mapping for OVAR. */
8168 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
8170 t
= omp_member_access_dummy_var (var
);
8173 var
= DECL_VALUE_EXPR (var
);
8174 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
8176 var
= unshare_and_remap (var
, t
, o
);
8178 var
= unshare_expr (var
);
8181 if (use_pointer_for_field (ovar
, ctx
))
8183 x
= build_sender_ref (ovar
, ctx
);
8184 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
8185 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
8187 gcc_assert (is_parallel_ctx (ctx
)
8188 && DECL_ARTIFICIAL (ovar
));
8189 /* _condtemp_ clause. */
8190 var
= build_constructor (TREE_TYPE (x
), NULL
);
8193 var
= build_fold_addr_expr (var
);
8194 gimplify_assign (x
, var
, ilist
);
8198 x
= build_sender_ref (ovar
, ctx
);
8199 gimplify_assign (x
, var
, ilist
);
8201 if (!TREE_READONLY (var
)
8202 /* We don't need to receive a new reference to a result
8203 or parm decl. In fact we may not store to it as we will
8204 invalidate any pending RSO and generate wrong gimple
8206 && !((TREE_CODE (var
) == RESULT_DECL
8207 || TREE_CODE (var
) == PARM_DECL
)
8208 && DECL_BY_REFERENCE (var
)))
8210 x
= build_sender_ref (ovar
, ctx
);
8211 gimplify_assign (var
, x
, olist
);
8217 /* Emit an OpenACC head marker call, encapsulating the partitioning and
8218 other information that must be processed by the target compiler.
8219 Return the maximum number of dimensions the associated loop might
8220 be partitioned over. */
8223 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
8224 gimple_seq
*seq
, omp_context
*ctx
)
8226 unsigned levels
= 0;
8228 tree gang_static
= NULL_TREE
;
8229 auto_vec
<tree
, 5> args
;
8231 args
.quick_push (build_int_cst
8232 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
8233 args
.quick_push (ddvar
);
8234 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8236 switch (OMP_CLAUSE_CODE (c
))
8238 case OMP_CLAUSE_GANG
:
8239 tag
|= OLF_DIM_GANG
;
8240 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
8241 /* static:* is represented by -1, and we can ignore it, as
8242 scheduling is always static. */
8243 if (gang_static
&& integer_minus_onep (gang_static
))
8244 gang_static
= NULL_TREE
;
8248 case OMP_CLAUSE_WORKER
:
8249 tag
|= OLF_DIM_WORKER
;
8253 case OMP_CLAUSE_VECTOR
:
8254 tag
|= OLF_DIM_VECTOR
;
8258 case OMP_CLAUSE_SEQ
:
8262 case OMP_CLAUSE_AUTO
:
8266 case OMP_CLAUSE_INDEPENDENT
:
8267 tag
|= OLF_INDEPENDENT
;
8270 case OMP_CLAUSE_TILE
:
8274 case OMP_CLAUSE_REDUCTION
:
8275 tag
|= OLF_REDUCTION
;
8285 if (DECL_P (gang_static
))
8286 gang_static
= build_outer_var_ref (gang_static
, ctx
);
8287 tag
|= OLF_GANG_STATIC
;
8290 omp_context
*tgt
= enclosing_target_ctx (ctx
);
8291 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8293 else if (is_oacc_kernels (tgt
))
8294 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8296 else if (is_oacc_kernels_decomposed_part (tgt
))
8301 /* In a parallel region, loops are implicitly INDEPENDENT. */
8302 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8303 tag
|= OLF_INDEPENDENT
;
8305 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8306 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8307 if (tgt
&& is_oacc_kernels_decomposed_part (tgt
))
8309 gcc_assert (tag
& (OLF_SEQ
| OLF_INDEPENDENT
));
8310 gcc_assert (!(tag
& OLF_AUTO
));
8314 /* Tiling could use all 3 levels. */
8318 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8319 Ensure at least one level, or 2 for possible auto
8321 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
8322 << OLF_DIM_BASE
) | OLF_SEQ
));
8324 if (levels
< 1u + maybe_auto
)
8325 levels
= 1u + maybe_auto
;
8328 args
.quick_push (build_int_cst (integer_type_node
, levels
));
8329 args
.quick_push (build_int_cst (integer_type_node
, tag
));
8331 args
.quick_push (gang_static
);
8333 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
8334 gimple_set_location (call
, loc
);
8335 gimple_set_lhs (call
, ddvar
);
8336 gimple_seq_add_stmt (seq
, call
);
8341 /* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
8342 partitioning level of the enclosed region. */
/* Emit one IFN_UNIQUE head or tail marker call into SEQ.  HEAD selects
   between OACC_HEAD_MARK and OACC_TAIL_MARK; TOFOLLOW, when non-NULL,
   is passed as an extra argument.  DDVAR threads the data dependence
   through the marker: it is both an argument and the call's lhs.
   NOTE(review): garbled extraction; code text kept byte-for-byte.  */
8345 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
8346 tree tofollow
, gimple_seq
*seq
)
8348 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
8349 : IFN_UNIQUE_OACC_TAIL_MARK
);
8350 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
/* The marker kind and DDVAR are always passed; TOFOLLOW only when
   present.  */
8351 int nargs
= 2 + (tofollow
!= NULL_TREE
);
8352 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
8353 marker
, ddvar
, tofollow
);
8354 gimple_set_location (call
, loc
);
8355 gimple_set_lhs (call
, ddvar
);
8356 gimple_seq_add_stmt (seq
, call
);
8359 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8360 the loop clauses, from which we extract reductions. Initialize
8364 lower_oacc_head_tail (location_t loc
, tree clauses
, gcall
*private_marker
,
8365 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
8368 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
8369 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
8371 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
8375 gimple_set_location (private_marker
, loc
);
8376 gimple_call_set_lhs (private_marker
, ddvar
);
8377 gimple_call_set_arg (private_marker
, 1, ddvar
);
8380 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
8381 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
8384 for (unsigned done
= 1; count
; count
--, done
++)
8386 gimple_seq fork_seq
= NULL
;
8387 gimple_seq join_seq
= NULL
;
8389 tree place
= build_int_cst (integer_type_node
, -1);
8390 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8391 fork_kind
, ddvar
, place
);
8392 gimple_set_location (fork
, loc
);
8393 gimple_set_lhs (fork
, ddvar
);
8395 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8396 join_kind
, ddvar
, place
);
8397 gimple_set_location (join
, loc
);
8398 gimple_set_lhs (join
, ddvar
);
8400 /* Mark the beginning of this level sequence. */
8402 lower_oacc_loop_marker (loc
, ddvar
, true,
8403 build_int_cst (integer_type_node
, count
),
8405 lower_oacc_loop_marker (loc
, ddvar
, false,
8406 build_int_cst (integer_type_node
, done
),
8409 lower_oacc_reductions (loc
, clauses
, place
, inner
,
8410 fork
, (count
== 1) ? private_marker
: NULL
,
8411 join
, &fork_seq
, &join_seq
, ctx
);
8413 /* Append this level to head. */
8414 gimple_seq_add_seq (head
, fork_seq
);
8415 /* Prepend it to tail. */
8416 gimple_seq_add_seq (&join_seq
, *tail
);
8422 /* Mark the end of the sequence. */
8423 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
8424 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
8427 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8428 catch handler and return it. This prevents programs from violating the
8429 structured block semantics with throws. */
/* If exceptions are enabled, wrap BODY in a GIMPLE_TRY with an
   EH_MUST_NOT_THROW handler so that throwing out of an OMP structured
   block traps instead of violating structured-block semantics.
   Returns the wrapping sequence (presumably BODY unchanged when
   !flag_exceptions -- that early return was dropped by the extractor).
   NOTE(review): garbled extraction; code text kept byte-for-byte.  */
8432 maybe_catch_exception (gimple_seq body
)
8437 if (!flag_exceptions
)
/* Prefer the language's EH cleanup action as the handler callee;
   otherwise fall back to __builtin_trap.  */
8440 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
8441 decl
= lang_hooks
.eh_protect_cleanup_actions ();
8443 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
8445 g
= gimple_build_eh_must_not_throw (decl
);
8446 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
8449 return gimple_seq_alloc_with_stmt (g
);
8453 /* Routines to lower OMP directives into OMP-GIMPLE. */
8455 /* If ctx is a worksharing context inside of a cancellable parallel
8456 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8457 and conditional branch to parallel's cancel_label to handle
8458 cancellation in the implicit barrier. */
/* If CTX is a worksharing region inside a cancellable parallel and its
   OMP_RETURN is not nowait, give the GIMPLE_OMP_RETURN an lhs and
   branch to the parallel's cancel_label when the implicit barrier
   reports cancellation.
   NOTE(review): garbled extraction; code text kept byte-for-byte.  */
8461 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
8464 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
/* A nowait construct has no implicit barrier, hence nothing to do.  */
8465 if (gimple_omp_return_nowait_p (omp_return
))
/* Walk outer contexts looking for a cancellable parallel region.  */
8467 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8468 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8469 && outer
->cancellable
)
8471 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
8472 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
/* Receive the barrier's cancellation flag through the OMP_RETURN
   lhs, typed like GOMP_cancel's return value.  */
8473 tree lhs
= create_tmp_var (c_bool_type
);
8474 gimple_omp_return_set_lhs (omp_return
, lhs
);
8475 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
/* if (lhs != false) goto cancel_label; else fall through.  */
8476 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
8477 fold_convert (c_bool_type
,
8478 boolean_false_node
),
8479 outer
->cancel_label
, fallthru_label
);
8480 gimple_seq_add_stmt (body
, g
);
8481 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
/* Presumably stop the walk at any construct other than taskgroup or
   scope -- the loop body after this condition was dropped by the
   extractor; confirm against the full source.  */
8483 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
8484 && gimple_code (outer
->stmt
) != GIMPLE_OMP_SCOPE
)
8488 /* Find the first task_reduction or reduction clause or return NULL
8489 if there are none. */
/* Return the first clause of kind CCODE in CLAUSES that is relevant
   for construct CODE, or NULL if there are none.
   NOTE(review): garbled extraction -- the early-return statements were
   dropped by the extractor; code text kept byte-for-byte.  */
8492 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
8493 enum omp_clause_code ccode
)
8497 clauses
= omp_find_clause (clauses
, ccode
);
8498 if (clauses
== NULL_TREE
)
/* Plain REDUCTION clauses only qualify for taskloop or when the
   'task' reduction modifier is present.  */
8500 if (ccode
!= OMP_CLAUSE_REDUCTION
8501 || code
== OMP_TASKLOOP
8502 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
8504 clauses
= OMP_CLAUSE_CHAIN (clauses
);
8508 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
8509 gimple_seq
*, gimple_seq
*);
8511 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8512 CTX is the enclosing OMP context for the current statement. */
8515 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8517 tree block
, control
;
8518 gimple_stmt_iterator tgsi
;
8519 gomp_sections
*stmt
;
8521 gbind
*new_stmt
, *bind
;
8522 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
8524 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
8526 push_gimplify_context ();
8532 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
8533 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
8534 tree rtmp
= NULL_TREE
;
8537 tree type
= build_pointer_type (pointer_sized_int_node
);
8538 tree temp
= create_tmp_var (type
);
8539 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8540 OMP_CLAUSE_DECL (c
) = temp
;
8541 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
8542 gimple_omp_sections_set_clauses (stmt
, c
);
8543 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
8544 gimple_omp_sections_clauses (stmt
),
8545 &ilist
, &tred_dlist
);
8547 rtmp
= make_ssa_name (type
);
8548 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
8551 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
8552 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
8554 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
8555 &ilist
, &dlist
, ctx
, NULL
);
8557 control
= create_tmp_var (unsigned_type_node
, ".section");
8558 gimple_omp_sections_set_control (stmt
, control
);
8560 new_body
= gimple_omp_body (stmt
);
8561 gimple_omp_set_body (stmt
, NULL
);
8562 tgsi
= gsi_start (new_body
);
8563 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
8568 sec_start
= gsi_stmt (tgsi
);
8569 sctx
= maybe_lookup_ctx (sec_start
);
8572 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
8573 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
8574 GSI_CONTINUE_LINKING
);
8575 gimple_omp_set_body (sec_start
, NULL
);
8577 if (gsi_one_before_end_p (tgsi
))
8579 gimple_seq l
= NULL
;
8580 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
8581 &ilist
, &l
, &clist
, ctx
);
8582 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
8583 gimple_omp_section_set_last (sec_start
);
8586 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
8587 GSI_CONTINUE_LINKING
);
8590 block
= make_node (BLOCK
);
8591 bind
= gimple_build_bind (NULL
, new_body
, block
);
8594 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
8598 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8599 gcall
*g
= gimple_build_call (fndecl
, 0);
8600 gimple_seq_add_stmt (&olist
, g
);
8601 gimple_seq_add_seq (&olist
, clist
);
8602 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8603 g
= gimple_build_call (fndecl
, 0);
8604 gimple_seq_add_stmt (&olist
, g
);
8607 block
= make_node (BLOCK
);
8608 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
8609 gsi_replace (gsi_p
, new_stmt
, true);
8611 pop_gimplify_context (new_stmt
);
8612 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
8613 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8614 if (BLOCK_VARS (block
))
8615 TREE_USED (block
) = 1;
8618 gimple_seq_add_seq (&new_body
, ilist
);
8619 gimple_seq_add_stmt (&new_body
, stmt
);
8620 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
8621 gimple_seq_add_stmt (&new_body
, bind
);
8623 t
= gimple_build_omp_continue (control
, control
);
8624 gimple_seq_add_stmt (&new_body
, t
);
8626 gimple_seq_add_seq (&new_body
, olist
);
8627 if (ctx
->cancellable
)
8628 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
8629 gimple_seq_add_seq (&new_body
, dlist
);
8631 new_body
= maybe_catch_exception (new_body
);
8633 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
8634 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8635 t
= gimple_build_omp_return (nowait
);
8636 gimple_seq_add_stmt (&new_body
, t
);
8637 gimple_seq_add_seq (&new_body
, tred_dlist
);
8638 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
8641 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
8643 gimple_bind_set_body (new_stmt
, new_body
);
8647 /* A subroutine of lower_omp_single. Expand the simple form of
8648 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8650 if (GOMP_single_start ())
8652 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8654 FIXME. It may be better to delay expanding the logic of this until
8655 pass_expand_omp. The expanded logic may make the job more difficult
8656 to a synchronization analysis pass. */
/* Expand the simple (non-copyprivate) form of GIMPLE_OMP_SINGLE into
   PRE_P: call GOMP_single_start, execute the body only on the thread
   for which it returns true, others jump past it to FLABEL.
   NOTE(review): garbled extraction; code text kept byte-for-byte.  */
8659 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
8661 location_t loc
= gimple_location (single_stmt
);
8662 tree tlabel
= create_artificial_label (loc
);
8663 tree flabel
= create_artificial_label (loc
);
8664 gimple
*call
, *cond
;
/* lhs = GOMP_single_start ();  */
8667 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
8668 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
8669 call
= gimple_build_call (decl
, 0);
8670 gimple_call_set_lhs (call
, lhs
);
8671 gimple_seq_add_stmt (pre_p
, call
);
/* Branch on the result; the comparison's other operand and the branch
   labels were dropped by the extractor (presumably boolean_true_node
   converted to lhs's type, with tlabel/flabel as targets).  */
8673 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
8674 fold_convert_loc (loc
, TREE_TYPE (lhs
),
8677 gimple_seq_add_stmt (pre_p
, cond
);
8678 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
8679 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8680 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
8684 /* A subroutine of lower_omp_single. Expand the simple form of
8685 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8687 #pragma omp single copyprivate (a, b, c)
8689 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8692 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8698 GOMP_single_copy_end (©out);
8709 FIXME. It may be better to delay expanding the logic of this until
8710 pass_expand_omp. The expanded logic may make the job more difficult
8711 to a synchronization analysis pass. */
8714 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
8717 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
8718 gimple_seq copyin_seq
;
8719 location_t loc
= gimple_location (single_stmt
);
8721 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
8723 ptr_type
= build_pointer_type (ctx
->record_type
);
8724 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
8726 l0
= create_artificial_label (loc
);
8727 l1
= create_artificial_label (loc
);
8728 l2
= create_artificial_label (loc
);
8730 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
8731 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
8732 t
= fold_convert_loc (loc
, ptr_type
, t
);
8733 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
8735 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
8736 build_int_cst (ptr_type
, 0));
8737 t
= build3 (COND_EXPR
, void_type_node
, t
,
8738 build_and_jump (&l0
), build_and_jump (&l1
));
8739 gimplify_and_add (t
, pre_p
);
8741 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
8743 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8746 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
8749 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
8750 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
8751 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
8752 gimplify_and_add (t
, pre_p
);
8754 t
= build_and_jump (&l2
);
8755 gimplify_and_add (t
, pre_p
);
8757 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
8759 gimple_seq_add_seq (pre_p
, copyin_seq
);
8761 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
8765 /* Expand code for an OpenMP single directive. */
8768 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8771 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
8773 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8775 push_gimplify_context ();
8777 block
= make_node (BLOCK
);
8778 bind
= gimple_build_bind (NULL
, NULL
, block
);
8779 gsi_replace (gsi_p
, bind
, true);
8782 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
8783 &bind_body
, &dlist
, ctx
, NULL
);
8784 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
8786 gimple_seq_add_stmt (&bind_body
, single_stmt
);
8788 if (ctx
->record_type
)
8789 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
8791 lower_omp_single_simple (single_stmt
, &bind_body
);
8793 gimple_omp_set_body (single_stmt
, NULL
);
8795 gimple_seq_add_seq (&bind_body
, dlist
);
8797 bind_body
= maybe_catch_exception (bind_body
);
8799 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
8800 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8801 gimple
*g
= gimple_build_omp_return (nowait
);
8802 gimple_seq_add_stmt (&bind_body_tail
, g
);
8803 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
8804 if (ctx
->record_type
)
8806 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8807 tree clobber
= build_clobber (ctx
->record_type
);
8808 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8809 clobber
), GSI_SAME_STMT
);
8811 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8812 gimple_bind_set_body (bind
, bind_body
);
8814 pop_gimplify_context (bind
);
8816 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8817 BLOCK_VARS (block
) = ctx
->block_vars
;
8818 if (BLOCK_VARS (block
))
8819 TREE_USED (block
) = 1;
8823 /* Lower code for an OMP scope directive. */
8826 lower_omp_scope (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8829 gimple
*scope_stmt
= gsi_stmt (*gsi_p
);
8831 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8832 gimple_seq tred_dlist
= NULL
;
8834 push_gimplify_context ();
8836 block
= make_node (BLOCK
);
8837 bind
= gimple_build_bind (NULL
, NULL
, block
);
8838 gsi_replace (gsi_p
, bind
, true);
8843 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt
),
8844 OMP_SCOPE
, OMP_CLAUSE_REDUCTION
);
8847 tree type
= build_pointer_type (pointer_sized_int_node
);
8848 tree temp
= create_tmp_var (type
);
8849 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8850 OMP_CLAUSE_DECL (c
) = temp
;
8851 OMP_CLAUSE_CHAIN (c
) = gimple_omp_scope_clauses (scope_stmt
);
8852 gimple_omp_scope_set_clauses (scope_stmt
, c
);
8853 lower_omp_task_reductions (ctx
, OMP_SCOPE
,
8854 gimple_omp_scope_clauses (scope_stmt
),
8855 &bind_body
, &tred_dlist
);
8857 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START
);
8858 gimple
*stmt
= gimple_build_call (fndecl
, 1, temp
);
8859 gimple_seq_add_stmt (&bind_body
, stmt
);
8862 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt
),
8863 &bind_body
, &dlist
, ctx
, NULL
);
8864 lower_omp (gimple_omp_body_ptr (scope_stmt
), ctx
);
8866 gimple_seq_add_stmt (&bind_body
, scope_stmt
);
8868 gimple_seq_add_seq (&bind_body
, gimple_omp_body (scope_stmt
));
8870 gimple_omp_set_body (scope_stmt
, NULL
);
8872 gimple_seq clist
= NULL
;
8873 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt
),
8874 &bind_body
, &clist
, ctx
);
8877 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8878 gcall
*g
= gimple_build_call (fndecl
, 0);
8879 gimple_seq_add_stmt (&bind_body
, g
);
8880 gimple_seq_add_seq (&bind_body
, clist
);
8881 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8882 g
= gimple_build_call (fndecl
, 0);
8883 gimple_seq_add_stmt (&bind_body
, g
);
8886 gimple_seq_add_seq (&bind_body
, dlist
);
8888 bind_body
= maybe_catch_exception (bind_body
);
8890 bool nowait
= omp_find_clause (gimple_omp_scope_clauses (scope_stmt
),
8891 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8892 gimple
*g
= gimple_build_omp_return (nowait
);
8893 gimple_seq_add_stmt (&bind_body_tail
, g
);
8894 gimple_seq_add_seq (&bind_body_tail
, tred_dlist
);
8895 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
8896 if (ctx
->record_type
)
8898 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8899 tree clobber
= build_clobber (ctx
->record_type
);
8900 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8901 clobber
), GSI_SAME_STMT
);
8903 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8905 gimple_bind_set_body (bind
, bind_body
);
8907 pop_gimplify_context (bind
);
8909 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8910 BLOCK_VARS (block
) = ctx
->block_vars
;
8911 if (BLOCK_VARS (block
))
8912 TREE_USED (block
) = 1;
8914 /* Expand code for an OpenMP master or masked directive. */
8917 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8919 tree block
, lab
= NULL
, x
, bfn_decl
;
8920 gimple
*stmt
= gsi_stmt (*gsi_p
);
8922 location_t loc
= gimple_location (stmt
);
8924 tree filter
= integer_zero_node
;
8926 push_gimplify_context ();
8928 if (gimple_code (stmt
) == GIMPLE_OMP_MASKED
)
8930 filter
= omp_find_clause (gimple_omp_masked_clauses (stmt
),
8933 filter
= fold_convert (integer_type_node
,
8934 OMP_CLAUSE_FILTER_EXPR (filter
));
8936 filter
= integer_zero_node
;
8938 block
= make_node (BLOCK
);
8939 bind
= gimple_build_bind (NULL
, NULL
, block
);
8940 gsi_replace (gsi_p
, bind
, true);
8941 gimple_bind_add_stmt (bind
, stmt
);
8943 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8944 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
8945 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, filter
);
8946 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
8948 gimplify_and_add (x
, &tseq
);
8949 gimple_bind_add_seq (bind
, tseq
);
8951 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8952 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8953 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8954 gimple_omp_set_body (stmt
, NULL
);
8956 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
8958 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8960 pop_gimplify_context (bind
);
8962 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8963 BLOCK_VARS (block
) = ctx
->block_vars
;
8966 /* Helper function for lower_omp_task_reductions. For a specific PASS
8967 find out the current clause it should be processed, or return false
8968 if all have been processed already. */
8971 omp_task_reduction_iterate (int pass
, enum tree_code code
,
8972 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
8973 tree
*type
, tree
*next
)
8975 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
8977 if (ccode
== OMP_CLAUSE_REDUCTION
8978 && code
!= OMP_TASKLOOP
8979 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
8981 *decl
= OMP_CLAUSE_DECL (*c
);
8982 *type
= TREE_TYPE (*decl
);
8983 if (TREE_CODE (*decl
) == MEM_REF
)
8990 if (omp_privatize_by_reference (*decl
))
8991 *type
= TREE_TYPE (*type
);
8992 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
8995 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
9004 /* Lower task_reduction and reduction clauses (the latter unless CODE is
9005 OMP_TASKGROUP only with task modifier). Register mapping of those in
9006 START sequence and reducing them and unregister them in the END sequence. */
9009 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
9010 gimple_seq
*start
, gimple_seq
*end
)
9012 enum omp_clause_code ccode
9013 = (code
== OMP_TASKGROUP
9014 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
9015 tree cancellable
= NULL_TREE
;
9016 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
9017 if (clauses
== NULL_TREE
)
9019 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9021 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
9022 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
9023 && outer
->cancellable
)
9025 cancellable
= error_mark_node
;
9028 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
9029 && gimple_code (outer
->stmt
) != GIMPLE_OMP_SCOPE
)
9032 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
9033 tree
*last
= &TYPE_FIELDS (record_type
);
9037 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
9039 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
9042 DECL_CHAIN (field
) = ifield
;
9043 last
= &DECL_CHAIN (ifield
);
9044 DECL_CONTEXT (field
) = record_type
;
9045 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
9046 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
9047 DECL_CONTEXT (ifield
) = record_type
;
9048 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
9049 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
9051 for (int pass
= 0; pass
< 2; pass
++)
9053 tree decl
, type
, next
;
9054 for (tree c
= clauses
;
9055 omp_task_reduction_iterate (pass
, code
, ccode
,
9056 &c
, &decl
, &type
, &next
); c
= next
)
9059 tree new_type
= type
;
9061 new_type
= remap_type (type
, &ctx
->outer
->cb
);
9063 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
9064 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
9066 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
9068 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
9069 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
9070 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
9073 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
9074 DECL_CONTEXT (field
) = record_type
;
9075 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
9076 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
9078 last
= &DECL_CHAIN (field
);
9080 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
9082 DECL_CONTEXT (bfield
) = record_type
;
9083 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
9084 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
9086 last
= &DECL_CHAIN (bfield
);
9090 layout_type (record_type
);
9092 /* Build up an array which registers with the runtime all the reductions
9093 and deregisters them at the end. Format documented in libgomp/task.c. */
9094 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
9095 tree avar
= create_tmp_var_raw (atype
);
9096 gimple_add_tmp_var (avar
);
9097 TREE_ADDRESSABLE (avar
) = 1;
9098 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
9099 NULL_TREE
, NULL_TREE
);
9100 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
9101 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9102 gimple_seq seq
= NULL
;
9103 tree sz
= fold_convert (pointer_sized_int_node
,
9104 TYPE_SIZE_UNIT (record_type
));
9106 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
9107 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
9108 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
9109 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
9110 ctx
->task_reductions
.create (1 + cnt
);
9111 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
9112 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
9114 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
9115 gimple_seq_add_seq (start
, seq
);
9116 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
9117 NULL_TREE
, NULL_TREE
);
9118 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
9119 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
9120 NULL_TREE
, NULL_TREE
);
9121 t
= build_int_cst (pointer_sized_int_node
,
9122 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
9123 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9124 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
9125 NULL_TREE
, NULL_TREE
);
9126 t
= build_int_cst (pointer_sized_int_node
, -1);
9127 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9128 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
9129 NULL_TREE
, NULL_TREE
);
9130 t
= build_int_cst (pointer_sized_int_node
, 0);
9131 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9133 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9134 and for each task reduction checks a bool right after the private variable
9135 within that thread's chunk; if the bool is clear, it hasn't been
9136 initialized and thus isn't going to be reduced nor destructed, otherwise
9137 reduce and destruct it. */
9138 tree idx
= create_tmp_var (size_type_node
);
9139 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
9140 tree num_thr_sz
= create_tmp_var (size_type_node
);
9141 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
9142 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
9143 tree lab3
= NULL_TREE
, lab7
= NULL_TREE
;
9145 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9147 /* For worksharing constructs or scope, only perform it in the master
9148 thread, with the exception of cancelled implicit barriers - then only
9149 handle the current thread. */
9150 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9151 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9152 tree thr_num
= create_tmp_var (integer_type_node
);
9153 g
= gimple_build_call (t
, 0);
9154 gimple_call_set_lhs (g
, thr_num
);
9155 gimple_seq_add_stmt (end
, g
);
9159 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9160 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9161 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9162 if (code
== OMP_FOR
)
9163 c
= gimple_omp_for_clauses (ctx
->stmt
);
9164 else if (code
== OMP_SECTIONS
)
9165 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9166 else /* if (code == OMP_SCOPE) */
9167 c
= gimple_omp_scope_clauses (ctx
->stmt
);
9168 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
9170 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
9172 gimple_seq_add_stmt (end
, g
);
9173 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9174 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
9175 gimple_seq_add_stmt (end
, g
);
9176 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
9177 build_one_cst (TREE_TYPE (idx
)));
9178 gimple_seq_add_stmt (end
, g
);
9179 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
9180 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9182 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
9183 gimple_seq_add_stmt (end
, g
);
9184 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
9186 if (code
!= OMP_PARALLEL
)
9188 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
9189 tree num_thr
= create_tmp_var (integer_type_node
);
9190 g
= gimple_build_call (t
, 0);
9191 gimple_call_set_lhs (g
, num_thr
);
9192 gimple_seq_add_stmt (end
, g
);
9193 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
9194 gimple_seq_add_stmt (end
, g
);
9196 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
9200 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
9201 OMP_CLAUSE__REDUCTEMP_
);
9202 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
9203 t
= fold_convert (size_type_node
, t
);
9204 gimplify_assign (num_thr_sz
, t
, end
);
9206 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
9207 NULL_TREE
, NULL_TREE
);
9208 tree data
= create_tmp_var (pointer_sized_int_node
);
9209 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
9210 if (code
== OMP_TASKLOOP
)
9212 lab7
= create_artificial_label (UNKNOWN_LOCATION
);
9213 g
= gimple_build_cond (NE_EXPR
, data
,
9214 build_zero_cst (pointer_sized_int_node
),
9216 gimple_seq_add_stmt (end
, g
);
9218 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
9220 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
9221 ptr
= create_tmp_var (build_pointer_type (record_type
));
9223 ptr
= create_tmp_var (ptr_type_node
);
9224 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
9226 tree field
= TYPE_FIELDS (record_type
);
9229 field
= DECL_CHAIN (DECL_CHAIN (field
));
9230 for (int pass
= 0; pass
< 2; pass
++)
9232 tree decl
, type
, next
;
9233 for (tree c
= clauses
;
9234 omp_task_reduction_iterate (pass
, code
, ccode
,
9235 &c
, &decl
, &type
, &next
); c
= next
)
9237 tree var
= decl
, ref
;
9238 if (TREE_CODE (decl
) == MEM_REF
)
9240 var
= TREE_OPERAND (var
, 0);
9241 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
9242 var
= TREE_OPERAND (var
, 0);
9244 if (TREE_CODE (var
) == ADDR_EXPR
)
9245 var
= TREE_OPERAND (var
, 0);
9246 else if (TREE_CODE (var
) == INDIRECT_REF
)
9247 var
= TREE_OPERAND (var
, 0);
9248 tree orig_var
= var
;
9249 if (is_variable_sized (var
))
9251 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
9252 var
= DECL_VALUE_EXPR (var
);
9253 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
9254 var
= TREE_OPERAND (var
, 0);
9255 gcc_assert (DECL_P (var
));
9257 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9258 if (orig_var
!= var
)
9259 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
9260 else if (TREE_CODE (v
) == ADDR_EXPR
)
9261 t
= build_fold_addr_expr (t
);
9262 else if (TREE_CODE (v
) == INDIRECT_REF
)
9263 t
= build_fold_indirect_ref (t
);
9264 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
9266 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
9267 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
9268 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
9270 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
9271 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
9272 fold_convert (size_type_node
,
9273 TREE_OPERAND (decl
, 1)));
9277 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9278 if (!omp_privatize_by_reference (decl
))
9279 t
= build_fold_addr_expr (t
);
9281 t
= fold_convert (pointer_sized_int_node
, t
);
9283 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9284 gimple_seq_add_seq (start
, seq
);
9285 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9286 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9287 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9288 t
= unshare_expr (byte_position (field
));
9289 t
= fold_convert (pointer_sized_int_node
, t
);
9290 ctx
->task_reduction_map
->put (c
, cnt
);
9291 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
9294 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9295 gimple_seq_add_seq (start
, seq
);
9296 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9297 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
9298 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9300 tree bfield
= DECL_CHAIN (field
);
9302 if (code
== OMP_PARALLEL
9304 || code
== OMP_SECTIONS
9305 || code
== OMP_SCOPE
)
9306 /* In parallel, worksharing or scope all threads unconditionally
9307 initialize all their task reduction private variables. */
9308 cond
= boolean_true_node
;
9309 else if (TREE_TYPE (ptr
) == ptr_type_node
)
9311 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9312 unshare_expr (byte_position (bfield
)));
9314 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
9315 gimple_seq_add_seq (end
, seq
);
9316 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
9317 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
9318 build_int_cst (pbool
, 0));
9321 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
9322 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
9323 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9324 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9325 tree condv
= create_tmp_var (boolean_type_node
);
9326 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
9327 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
9329 gimple_seq_add_stmt (end
, g
);
9330 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
9331 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
9333 /* If this reduction doesn't need destruction and parallel
9334 has been cancelled, there is nothing to do for this
9335 reduction, so jump around the merge operation. */
9336 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9337 g
= gimple_build_cond (NE_EXPR
, cancellable
,
9338 build_zero_cst (TREE_TYPE (cancellable
)),
9340 gimple_seq_add_stmt (end
, g
);
9341 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9345 if (TREE_TYPE (ptr
) == ptr_type_node
)
9347 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9348 unshare_expr (byte_position (field
)));
9350 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
9351 gimple_seq_add_seq (end
, seq
);
9352 tree pbool
= build_pointer_type (TREE_TYPE (field
));
9353 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
9354 build_int_cst (pbool
, 0));
9357 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
9358 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
9360 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
9361 if (TREE_CODE (decl
) != MEM_REF
9362 && omp_privatize_by_reference (decl
))
9363 ref
= build_simple_mem_ref (ref
);
9364 /* reduction(-:var) sums up the partial results, so it acts
9365 identically to reduction(+:var). */
9366 if (rcode
== MINUS_EXPR
)
9368 if (TREE_CODE (decl
) == MEM_REF
)
9370 tree type
= TREE_TYPE (new_var
);
9371 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
9372 tree i
= create_tmp_var (TREE_TYPE (v
));
9373 tree ptype
= build_pointer_type (TREE_TYPE (type
));
9376 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
9377 tree vv
= create_tmp_var (TREE_TYPE (v
));
9378 gimplify_assign (vv
, v
, start
);
9381 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9382 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9383 new_var
= build_fold_addr_expr (new_var
);
9384 new_var
= fold_convert (ptype
, new_var
);
9385 ref
= fold_convert (ptype
, ref
);
9386 tree m
= create_tmp_var (ptype
);
9387 gimplify_assign (m
, new_var
, end
);
9389 m
= create_tmp_var (ptype
);
9390 gimplify_assign (m
, ref
, end
);
9392 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
9393 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
9394 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
9395 gimple_seq_add_stmt (end
, gimple_build_label (body
));
9396 tree priv
= build_simple_mem_ref (new_var
);
9397 tree out
= build_simple_mem_ref (ref
);
9398 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9400 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9401 tree decl_placeholder
9402 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
9403 tree lab6
= NULL_TREE
;
9406 /* If this reduction needs destruction and parallel
9407 has been cancelled, jump around the merge operation
9408 to the destruction. */
9409 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9410 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9411 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9412 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9414 gimple_seq_add_stmt (end
, g
);
9415 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9417 SET_DECL_VALUE_EXPR (placeholder
, out
);
9418 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9419 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
9420 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
9421 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9422 gimple_seq_add_seq (end
,
9423 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9424 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9425 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9427 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9428 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
9431 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9432 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
9435 gimple_seq tseq
= NULL
;
9436 gimplify_stmt (&x
, &tseq
);
9437 gimple_seq_add_seq (end
, tseq
);
9442 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
9443 out
= unshare_expr (out
);
9444 gimplify_assign (out
, x
, end
);
9447 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
9448 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9449 gimple_seq_add_stmt (end
, g
);
9450 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
9451 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9452 gimple_seq_add_stmt (end
, g
);
9453 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
9454 build_int_cst (TREE_TYPE (i
), 1));
9455 gimple_seq_add_stmt (end
, g
);
9456 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
9457 gimple_seq_add_stmt (end
, g
);
9458 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
9460 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9462 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9463 tree oldv
= NULL_TREE
;
9464 tree lab6
= NULL_TREE
;
9467 /* If this reduction needs destruction and parallel
9468 has been cancelled, jump around the merge operation
9469 to the destruction. */
9470 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9471 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9472 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9473 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9475 gimple_seq_add_stmt (end
, g
);
9476 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9478 if (omp_privatize_by_reference (decl
)
9479 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
9481 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9482 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9483 tree refv
= create_tmp_var (TREE_TYPE (ref
));
9484 gimplify_assign (refv
, ref
, end
);
9485 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
9486 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9487 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9488 tree d
= maybe_lookup_decl (decl
, ctx
);
9490 if (DECL_HAS_VALUE_EXPR_P (d
))
9491 oldv
= DECL_VALUE_EXPR (d
);
9492 if (omp_privatize_by_reference (var
))
9494 tree v
= fold_convert (TREE_TYPE (d
),
9495 build_fold_addr_expr (new_var
));
9496 SET_DECL_VALUE_EXPR (d
, v
);
9499 SET_DECL_VALUE_EXPR (d
, new_var
);
9500 DECL_HAS_VALUE_EXPR_P (d
) = 1;
9501 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9503 SET_DECL_VALUE_EXPR (d
, oldv
);
9506 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
9507 DECL_HAS_VALUE_EXPR_P (d
) = 0;
9509 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9510 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9511 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9512 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9514 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9515 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
9518 gimple_seq tseq
= NULL
;
9519 gimplify_stmt (&x
, &tseq
);
9520 gimple_seq_add_seq (end
, tseq
);
9525 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
9526 ref
= unshare_expr (ref
);
9527 gimplify_assign (ref
, x
, end
);
9529 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
9531 field
= DECL_CHAIN (bfield
);
9535 if (code
== OMP_TASKGROUP
)
9537 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
9538 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9539 gimple_seq_add_stmt (start
, g
);
9544 if (code
== OMP_FOR
)
9545 c
= gimple_omp_for_clauses (ctx
->stmt
);
9546 else if (code
== OMP_SECTIONS
)
9547 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9548 else if (code
== OMP_SCOPE
)
9549 c
= gimple_omp_scope_clauses (ctx
->stmt
);
9551 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
9552 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
9553 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
9554 build_fold_addr_expr (avar
));
9555 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
9558 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
9559 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
9561 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
9562 gimple_seq_add_stmt (end
, g
);
9563 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
9564 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9566 enum built_in_function bfn
9567 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
9568 t
= builtin_decl_explicit (bfn
);
9569 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
9573 arg
= create_tmp_var (c_bool_type
);
9574 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
9578 arg
= build_int_cst (c_bool_type
, 0);
9579 g
= gimple_build_call (t
, 1, arg
);
9583 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
9584 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9586 gimple_seq_add_stmt (end
, g
);
9588 gimple_seq_add_stmt (end
, gimple_build_label (lab7
));
9589 t
= build_constructor (atype
, NULL
);
9590 TREE_THIS_VOLATILE (t
) = 1;
9591 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
9594 /* Expand code for an OpenMP taskgroup directive. */
9597 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9599 gimple
*stmt
= gsi_stmt (*gsi_p
);
9602 gimple_seq dseq
= NULL
;
9603 tree block
= make_node (BLOCK
);
9605 bind
= gimple_build_bind (NULL
, NULL
, block
);
9606 gsi_replace (gsi_p
, bind
, true);
9607 gimple_bind_add_stmt (bind
, stmt
);
9609 push_gimplify_context ();
9611 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
9613 gimple_bind_add_stmt (bind
, x
);
9615 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
9616 gimple_omp_taskgroup_clauses (stmt
),
9617 gimple_bind_body_ptr (bind
), &dseq
);
9619 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9620 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9621 gimple_omp_set_body (stmt
, NULL
);
9623 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9624 gimple_bind_add_seq (bind
, dseq
);
9626 pop_gimplify_context (bind
);
9628 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9629 BLOCK_VARS (block
) = ctx
->block_vars
;
9633 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9636 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
9639 struct omp_for_data fd
;
9640 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
9643 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
9644 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
9645 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
9649 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9650 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
9651 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
9652 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
9654 /* Merge depend clauses from multiple adjacent
9655 #pragma omp ordered depend(sink:...) constructs
9656 into one #pragma omp ordered depend(sink:...), so that
9657 we can optimize them together. */
9658 gimple_stmt_iterator gsi
= *gsi_p
;
9660 while (!gsi_end_p (gsi
))
9662 gimple
*stmt
= gsi_stmt (gsi
);
9663 if (is_gimple_debug (stmt
)
9664 || gimple_code (stmt
) == GIMPLE_NOP
)
9669 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
9671 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
9672 c
= gimple_omp_ordered_clauses (ord_stmt2
);
9674 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
9675 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
9678 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
9680 gsi_remove (&gsi
, true);
9684 /* Canonicalize sink dependence clauses into one folded clause if
9687 The basic algorithm is to create a sink vector whose first
9688 element is the GCD of all the first elements, and whose remaining
9689 elements are the minimum of the subsequent columns.
9691 We ignore dependence vectors whose first element is zero because
9692 such dependencies are known to be executed by the same thread.
9694 We take into account the direction of the loop, so a minimum
9695 becomes a maximum if the loop is iterating forwards. We also
9696 ignore sink clauses where the loop direction is unknown, or where
9697 the offsets are clearly invalid because they are not a multiple
9698 of the loop increment.
9702 #pragma omp for ordered(2)
9703 for (i=0; i < N; ++i)
9704 for (j=0; j < M; ++j)
9706 #pragma omp ordered \
9707 depend(sink:i-8,j-2) \
9708 depend(sink:i,j-1) \ // Completely ignored because i+0.
9709 depend(sink:i-4,j-3) \
9710 depend(sink:i-6,j-4)
9711 #pragma omp ordered depend(source)
9716 depend(sink:-gcd(8,4,6),-min(2,3,4))
9721 /* FIXME: Computing GCD's where the first element is zero is
9722 non-trivial in the presence of collapsed loops. Do this later. */
9723 if (fd
.collapse
> 1)
9726 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
9728 /* wide_int is not a POD so it must be default-constructed. */
9729 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
9730 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
9732 tree folded_dep
= NULL_TREE
;
9733 /* TRUE if the first dimension's offset is negative. */
9734 bool neg_offset_p
= false;
9736 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9738 while ((c
= *list_p
) != NULL
)
9740 bool remove
= false;
9742 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
9743 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
9744 goto next_ordered_clause
;
9747 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
9748 vec
&& TREE_CODE (vec
) == TREE_LIST
;
9749 vec
= TREE_CHAIN (vec
), ++i
)
9751 gcc_assert (i
< len
);
9753 /* omp_extract_for_data has canonicalized the condition. */
9754 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
9755 || fd
.loops
[i
].cond_code
== GT_EXPR
);
9756 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
9757 bool maybe_lexically_later
= true;
9759 /* While the committee makes up its mind, bail if we have any
9760 non-constant steps. */
9761 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
9762 goto lower_omp_ordered_ret
;
9764 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
9765 if (POINTER_TYPE_P (itype
))
9767 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
9768 TYPE_PRECISION (itype
),
9771 /* Ignore invalid offsets that are not multiples of the step. */
9772 if (!wi::multiple_of_p (wi::abs (offset
),
9773 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
9776 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
9777 "ignoring sink clause with offset that is not "
9778 "a multiple of the loop step");
9780 goto next_ordered_clause
;
9783 /* Calculate the first dimension. The first dimension of
9784 the folded dependency vector is the GCD of the first
9785 elements, while ignoring any first elements whose offset
9789 /* Ignore dependence vectors whose first dimension is 0. */
9793 goto next_ordered_clause
;
9797 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
9799 error_at (OMP_CLAUSE_LOCATION (c
),
9800 "first offset must be in opposite direction "
9801 "of loop iterations");
9802 goto lower_omp_ordered_ret
;
9806 neg_offset_p
= forward
;
9807 /* Initialize the first time around. */
9808 if (folded_dep
== NULL_TREE
)
9811 folded_deps
[0] = offset
;
9814 folded_deps
[0] = wi::gcd (folded_deps
[0],
9818 /* Calculate minimum for the remaining dimensions. */
9821 folded_deps
[len
+ i
- 1] = offset
;
9822 if (folded_dep
== c
)
9823 folded_deps
[i
] = offset
;
9824 else if (maybe_lexically_later
9825 && !wi::eq_p (folded_deps
[i
], offset
))
9827 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
9831 for (j
= 1; j
<= i
; j
++)
9832 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
9835 maybe_lexically_later
= false;
9839 gcc_assert (i
== len
);
9843 next_ordered_clause
:
9845 *list_p
= OMP_CLAUSE_CHAIN (c
);
9847 list_p
= &OMP_CLAUSE_CHAIN (c
);
9853 folded_deps
[0] = -folded_deps
[0];
9855 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
9856 if (POINTER_TYPE_P (itype
))
9859 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
9860 = wide_int_to_tree (itype
, folded_deps
[0]);
9861 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
9862 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
9865 lower_omp_ordered_ret
:
9867 /* Ordered without clauses is #pragma omp threads, while we want
9868 a nop instead if we remove all clauses. */
9869 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
9870 gsi_replace (gsi_p
, gimple_build_nop (), true);
9874 /* Expand code for an OpenMP ordered directive. */
9877 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9880 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
9881 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
9884 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9886 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9889 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
9890 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9891 OMP_CLAUSE_THREADS
);
9893 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9896 /* FIXME: This is needs to be moved to the expansion to verify various
9897 conditions only testable on cfg with dominators computed, and also
9898 all the depend clauses to be merged still might need to be available
9899 for the runtime checks. */
9901 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
9905 push_gimplify_context ();
9907 block
= make_node (BLOCK
);
9908 bind
= gimple_build_bind (NULL
, NULL
, block
);
9909 gsi_replace (gsi_p
, bind
, true);
9910 gimple_bind_add_stmt (bind
, stmt
);
9914 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
9915 build_int_cst (NULL_TREE
, threads
));
9916 cfun
->has_simduid_loops
= true;
9919 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
9921 gimple_bind_add_stmt (bind
, x
);
9923 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
9926 counter
= create_tmp_var (integer_type_node
);
9927 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
9928 gimple_call_set_lhs (g
, counter
);
9929 gimple_bind_add_stmt (bind
, g
);
9931 body
= create_artificial_label (UNKNOWN_LOCATION
);
9932 test
= create_artificial_label (UNKNOWN_LOCATION
);
9933 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
9935 tree simt_pred
= create_tmp_var (integer_type_node
);
9936 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
9937 gimple_call_set_lhs (g
, simt_pred
);
9938 gimple_bind_add_stmt (bind
, g
);
9940 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
9941 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
9942 gimple_bind_add_stmt (bind
, g
);
9944 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
9946 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9947 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9948 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9949 gimple_omp_set_body (stmt
, NULL
);
9953 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
9954 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
9955 gimple_bind_add_stmt (bind
, g
);
9957 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
9958 tree nonneg
= create_tmp_var (integer_type_node
);
9959 gimple_seq tseq
= NULL
;
9960 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
9961 gimple_bind_add_seq (bind
, tseq
);
9963 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
9964 gimple_call_set_lhs (g
, nonneg
);
9965 gimple_bind_add_stmt (bind
, g
);
9967 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
9968 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
9969 gimple_bind_add_stmt (bind
, g
);
9971 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
9974 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
9975 build_int_cst (NULL_TREE
, threads
));
9977 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
9979 gimple_bind_add_stmt (bind
, x
);
9981 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9983 pop_gimplify_context (bind
);
9985 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9986 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
9990 /* Expand code for an OpenMP scan directive and the structured block
9991 before the scan directive. */
/* NOTE(review): heavily mangled extraction -- statements are split
   across lines and many original lines are missing (embedded line
   numbers jump, e.g. 10103 -> 10114).  The comments added below are
   hedged reconstructions from the visible calls; verify against the
   upstream omp-low.cc before trusting any of them.  */
9994 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9996 gimple
*stmt
= gsi_stmt (*gsi_p
);
9998 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
9999 tree lane
= NULL_TREE
;
10000 gimple_seq before
= NULL
;
10001 omp_context
*octx
= ctx
->outer
;
10003 if (octx
->scan_exclusive
&& !has_clauses
)
10005 gimple_stmt_iterator gsi2
= *gsi_p
;
10007 gimple
*stmt2
= gsi_stmt (gsi2
);
10008 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10009 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10010 the one with exclusive clause(s), comes first. */
10012 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
10013 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
10015 gsi_remove (gsi_p
, false);
10016 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
10017 ctx
= maybe_lookup_ctx (stmt2
);
10019 lower_omp_scan (gsi_p
, ctx
);
/* Classify the enclosing worksharing context: input vs. scan phase,
   simd vs. plain for, and combined for-simd.  */
10024 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
10025 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10026 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
10027 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10028 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
10029 && !gimple_omp_for_combined_p (octx
->stmt
));
10030 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
10031 if (is_for_simd
&& octx
->for_simd_scan_phase
)
/* For simd, materialize the current lane number via
   IFN_GOMP_SIMD_LANE keyed on the loop's _simduid_.  */
10034 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
10035 OMP_CLAUSE__SIMDUID_
))
10037 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
10038 lane
= create_tmp_var (unsigned_type_node
);
10039 tree t
= build_int_cst (integer_type_node
,
10041 : octx
->scan_inclusive
? 2 : 3);
10043 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
10044 gimple_call_set_lhs (g
, lane
);
10045 gimple_seq_add_stmt (&before
, g
);
/* For each inscan reduction clause on the enclosing loop, emit the
   per-phase bookkeeping (init, merge, copy in/out of the per-lane
   "omp simd array" slots).  */
10048 if (is_simd
|| is_for
)
10050 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
10051 c
; c
= OMP_CLAUSE_CHAIN (c
))
10052 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10053 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10055 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10056 tree var
= OMP_CLAUSE_DECL (c
);
10057 tree new_var
= lookup_decl (var
, octx
);
10058 tree val
= new_var
;
10059 tree var2
= NULL_TREE
;
10060 tree var3
= NULL_TREE
;
10061 tree var4
= NULL_TREE
;
10062 tree lane0
= NULL_TREE
;
10063 tree new_vard
= new_var
;
10064 if (omp_privatize_by_reference (var
))
10066 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10069 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
10071 val
= DECL_VALUE_EXPR (new_vard
)
;
10072 if (new_vard
!= new_var
)
10074 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
10075 val
= TREE_OPERAND (val
, 0);
10077 if (TREE_CODE (val
) == ARRAY_REF
10078 && VAR_P (TREE_OPERAND (val
, 0)))
10080 tree v
= TREE_OPERAND (val
, 0);
10081 if (lookup_attribute ("omp simd array",
10082 DECL_ATTRIBUTES (v
)))
10084 val
= unshare_expr (val
);
10085 lane0
= TREE_OPERAND (val
, 1);
10086 TREE_OPERAND (val
, 1) = lane
;
10087 var2
= lookup_decl (v
, octx
);
10088 if (octx
->scan_exclusive
)
10089 var4
= lookup_decl (var2
, octx
);
10091 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10092 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
10095 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10096 var2
, lane
, NULL_TREE
, NULL_TREE
);
10097 TREE_THIS_NOTRAP (var2
) = 1;
10098 if (octx
->scan_exclusive
)
10100 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10101 var4
, lane
, NULL_TREE
,
10103 TREE_THIS_NOTRAP (var4
) = 1;
10114 var2
= build_outer_var_ref (var
, octx
);
10115 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10117 var3
= maybe_lookup_decl (new_vard
, octx
);
10118 if (var3
== new_vard
|| var3
== NULL_TREE
)
10120 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
10122 var4
= maybe_lookup_decl (var3
, octx
);
10123 if (var4
== var3
|| var4
== NULL_TREE
)
10125 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
10136 && octx
->scan_exclusive
10138 && var4
== NULL_TREE
)
10139 var4
= create_tmp_var (TREE_TYPE (val
));
/* UDR (user-defined reduction) initialization: run the clause's
   GIMPLE_INIT sequence with placeholder/value-exprs temporarily
   redirected at the privatized copies.  */
10141 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10143 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
10148 /* If we've added a separate identity element
10149 variable, copy it over into val. */
10150 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10152 gimplify_and_add (x
, &before
);
10154 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
10156 /* Otherwise, assign to it the identity element. */
10157 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10159 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10160 tree ref
= build_outer_var_ref (var
, octx
);
10161 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10162 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10165 if (new_vard
!= new_var
)
10166 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10167 SET_DECL_VALUE_EXPR (new_vard
, val
);
10169 SET_DECL_VALUE_EXPR (placeholder
, ref
);
10170 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10171 lower_omp (&tseq
, octx
);
10173 SET_DECL_VALUE_EXPR (new_vard
, x
);
10174 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10175 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10176 gimple_seq_add_seq (&before
, tseq
);
10178 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
/* Scan-phase merge: for exclusive scan first save the running value,
   then apply the clause's GIMPLE_MERGE sequence.  */
10184 if (octx
->scan_exclusive
)
10186 tree v4
= unshare_expr (var4
);
10187 tree v2
= unshare_expr (var2
);
10188 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
10189 gimplify_and_add (x
, &before
);
10191 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10192 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10193 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10195 if (x
&& new_vard
!= new_var
)
10196 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
10198 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10199 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10200 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10201 lower_omp (&tseq
, octx
);
10202 gimple_seq_add_seq (&before
, tseq
);
10203 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10205 SET_DECL_VALUE_EXPR (new_vard
, x
);
10206 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10207 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10208 if (octx
->scan_inclusive
)
10210 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10212 gimplify_and_add (x
, &before
);
10214 else if (lane0
== NULL_TREE
)
10216 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10218 gimplify_and_add (x
, &before
);
10226 /* input phase. Set val to initializer before
10228 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
10229 gimplify_assign (val
, x
, &before
);
/* Plain (non-UDR) reductions: build VAR2 CODE VAL directly; MINUS is
   presumably canonicalized (the substituted code is among the missing
   lines -- TODO confirm).  */
10234 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
10235 if (code
== MINUS_EXPR
)
10238 tree x
= build2 (code
, TREE_TYPE (var2
),
10239 unshare_expr (var2
), unshare_expr (val
));
10240 if (octx
->scan_inclusive
)
10242 gimplify_assign (unshare_expr (var2
), x
, &before
);
10243 gimplify_assign (val
, var2
, &before
);
10247 gimplify_assign (unshare_expr (var4
),
10248 unshare_expr (var2
), &before
);
10249 gimplify_assign (var2
, x
, &before
);
10250 if (lane0
== NULL_TREE
)
10251 gimplify_assign (val
, var4
, &before
);
10255 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
10257 tree vexpr
= unshare_expr (var4
);
10258 TREE_OPERAND (vexpr
, 1) = lane0
;
10259 if (new_vard
!= new_var
)
10260 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
10261 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
/* Splice the generated BEFORE sequence and the scan body back into the
   statement stream; for standalone simd the scan statement itself is
   replaced by a nop.  */
10265 if (is_simd
&& !is_for_simd
)
10267 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
10268 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
10269 gsi_replace (gsi_p
, gimple_build_nop (), true);
10272 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
10275 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (stmt
));
10276 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
10281 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10282 substitution of a couple of function calls. But in the NAMED case,
10283 requires that languages coordinate a symbol name. It is therefore
10284 best put here in common code. */
/* Map from the IDENTIFIER_NODE of a named '#pragma omp critical' to the
   global mutex variable created for it, GC-rooted via GTY.  Populated
   lazily by lower_omp_critical below.  */
10286 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
/* Lower a GIMPLE_OMP_CRITICAL: wrap its body in a bind bracketed by
   GOMP_critical_[name_]start / _end calls.  NOTE(review): the text of
   this function appears mangled by extraction (statements split across
   lines, some original lines missing); comments reflect only the
   visible calls -- confirm against upstream omp-low.cc.  */
10289 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10292 tree name
, lock
, unlock
;
10293 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
10295 location_t loc
= gimple_location (stmt
);
10298 name
= gimple_omp_critical_name (stmt
);
/* Named critical: lazily create (and memoize) a common, artificial
   global ".gomp_critical_user_<name>" mutex symbol for this name.  */
10303 if (!critical_name_mutexes
)
10304 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
10306 tree
*n
= critical_name_mutexes
->get (name
);
10311 decl
= create_tmp_var_raw (ptr_type_node
);
10313 new_str
= ACONCAT ((".gomp_critical_user_",
10314 IDENTIFIER_POINTER (name
), NULL
));
10315 DECL_NAME (decl
) = get_identifier (new_str
);
10316 TREE_PUBLIC (decl
) = 1;
10317 TREE_STATIC (decl
) = 1;
10318 DECL_COMMON (decl
) = 1;
10319 DECL_ARTIFICIAL (decl
) = 1;
10320 DECL_IGNORED_P (decl
) = 1;
10322 varpool_node::finalize_decl (decl
);
10324 critical_name_mutexes
->put (name
, decl
);
10329 /* If '#pragma omp critical' is inside offloaded region or
10330 inside function marked as offloadable, the symbol must be
10331 marked as offloadable too. */
10333 if (cgraph_node::get (current_function_decl
)->offloadable
)
10334 varpool_node::get_create (decl
)->offloadable
= 1;
10336 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
10337 if (is_gimple_omp_offloaded (octx
->stmt
))
10339 varpool_node::get_create (decl
)->offloadable
= 1;
10343 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
10344 lock
= build_call_expr_loc (loc
, lock
, 1,
10345 build_fold_addr_expr_loc (loc
, decl
));
10347 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
10348 unlock
= build_call_expr_loc (loc
, unlock
, 1,
10349 build_fold_addr_expr_loc (loc
, decl
));
/* Unnamed critical: use the global GOMP_critical_start/end pair with no
   mutex argument.  */
10353 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
10354 lock
= build_call_expr_loc (loc
, lock
, 0);
10356 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
10357 unlock
= build_call_expr_loc (loc
, unlock
, 0);
/* Build the replacement bind: lock, lowered (EH-wrapped) body, unlock,
   then an OMP return.  */
10360 push_gimplify_context ();
10362 block
= make_node (BLOCK
);
10363 bind
= gimple_build_bind (NULL
, NULL
, block
);
10364 gsi_replace (gsi_p
, bind
, true);
10365 gimple_bind_add_stmt (bind
, stmt
);
10367 tbody
= gimple_bind_body (bind
);
10368 gimplify_and_add (lock
, &tbody
);
10369 gimple_bind_set_body (bind
, tbody
);
10371 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10372 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
10373 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
10374 gimple_omp_set_body (stmt
, NULL
);
10376 tbody
= gimple_bind_body (bind
);
10377 gimplify_and_add (unlock
, &tbody
);
10378 gimple_bind_set_body (bind
, tbody
);
10380 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
10382 pop_gimplify_context (bind
);
10383 gimple_bind_append_vars (bind
, ctx
->block_vars
);
10384 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
10387 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10388 for a lastprivate clause. Given a loop control predicate of (V
10389 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10390 is appended to *DLIST, iterator initialization is appended to
10391 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10392 to be emitted in a critical section. */
/* NOTE(review): mangled extraction -- statements split across lines and
   some original lines missing; added comments are based only on the
   visible calls.  */
10395 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
10396 gimple_seq
*dlist
, gimple_seq
*clist
,
10397 struct omp_context
*ctx
)
10399 tree clauses
, cond
, vinit
;
10400 enum tree_code cond_code
;
/* Negate the loop's condition code: the lastprivate copy-out runs only
   when the loop has finished, i.e. when (V cond N2) is false.  */
10403 cond_code
= fd
->loop
.cond_code
;
10404 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
10406 /* When possible, use a strict equality expression. This can let VRP
10407 type optimizations deduce the value and remove a copy. */
10408 if (tree_fits_shwi_p (fd
->loop
.step
))
10410 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
10411 if (step
== 1 || step
== -1)
10412 cond_code
= EQ_EXPR
;
/* For combined collapsed loops whose bound is not a constant, the real
   N2 lives in a _looptemp_ clause of the enclosing taskreg (parallel /
   task / taskloop) construct; dig it out.  */
10415 tree n2
= fd
->loop
.n2
;
10416 if (fd
->collapse
> 1
10417 && TREE_CODE (n2
) != INTEGER_CST
10418 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
10420 struct omp_context
*taskreg_ctx
= NULL
;
10421 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
10423 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
10424 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
10425 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
10427 if (gimple_omp_for_combined_into_p (gfor
))
10429 gcc_assert (ctx
->outer
->outer
10430 && is_parallel_ctx (ctx
->outer
->outer
));
10431 taskreg_ctx
= ctx
->outer
->outer
;
10435 struct omp_for_data outer_fd
;
10436 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
10437 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
10440 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
10441 taskreg_ctx
= ctx
->outer
->outer
;
10443 else if (is_taskreg_ctx (ctx
->outer
))
10444 taskreg_ctx
= ctx
->outer
;
10448 tree taskreg_clauses
10449 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
10450 tree innerc
= omp_find_clause (taskreg_clauses
,
10451 OMP_CLAUSE__LOOPTEMP_
);
10452 gcc_assert (innerc
);
10453 int count
= fd
->collapse
;
10455 && fd
->last_nonrect
== fd
->first_nonrect
+ 1)
10456 if (tree v
= gimple_omp_for_index (fd
->for_stmt
, fd
->last_nonrect
))
10457 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
/* Walk COUNT further _looptemp_ clauses to reach the one that carries
   the overall upper bound for the collapsed nest.  */
10459 for (i
= 0; i
< count
; i
++)
10461 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10462 OMP_CLAUSE__LOOPTEMP_
);
10463 gcc_assert (innerc
);
10465 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10466 OMP_CLAUSE__LOOPTEMP_
);
10468 n2
= fold_convert (TREE_TYPE (n2
),
10469 lookup_decl (OMP_CLAUSE_DECL (innerc
),
10473 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
10475 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
10477 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
10478 if (!gimple_seq_empty_p (stmts
))
10480 gimple_seq_add_seq (&stmts
, *dlist
);
10483 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10484 vinit
= fd
->loop
.n1
;
10485 if (cond_code
== EQ_EXPR
10486 && tree_fits_shwi_p (fd
->loop
.n2
)
10487 && ! integer_zerop (fd
->loop
.n2
))
10488 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
10490 vinit
= unshare_expr (vinit
);
10492 /* Initialize the iterator variable, so that threads that don't execute
10493 any iterations don't execute the lastprivate clauses by accident. */
10494 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
10498 /* OpenACC privatization.
10500 Or, in other words, *sharing* at the respective OpenACC level of
10503 From a correctness perspective, a non-addressable variable can't be accessed
10504 outside the current thread, so it can go in a (faster than shared memory)
10505 register -- though that register may need to be broadcast in some
10506 circumstances. A variable can only meaningfully be "shared" across workers
10507 or vector lanes if its address is taken, e.g. by a call to an atomic
10510 From an optimisation perspective, the answer might be fuzzier: maybe
10511 sometimes, using shared memory directly would be faster than
/* Emit the common "variable <decl> ..." prefix of an OpenACC
   privatization diagnostic via dump_printf_loc, followed by either the
   originating clause's name or "declared in block ".  NOTE(review):
   some lines of this function are missing from this extraction.  */
10515 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags
,
10516 const location_t loc
, const tree c
,
10519 const dump_user_location_t d_u_loc
10520 = dump_user_location_t::from_location_t (loc
);
10521 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10523 # pragma GCC diagnostic push
10524 # pragma GCC diagnostic ignored "-Wformat"
10526 dump_printf_loc (l_dump_flags
, d_u_loc
,
10527 "variable %<%T%> ", decl
);
10529 # pragma GCC diagnostic pop
10532 dump_printf (l_dump_flags
,
10534 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
10536 dump_printf (l_dump_flags
,
10537 "declared in block ");
/* Decide whether DECL (from clause C, or from a block when C is NULL)
   is a candidate for adjusting its OpenACC privatization level,
   emitting optimization-dump notes explaining each rejection.  Visible
   rejection reasons: not a VAR_P, block-local but TREE_STATIC, block-
   local but DECL_EXTERNAL, or not TREE_ADDRESSABLE.  NOTE(review):
   several lines (including the return) are missing from this
   extraction; the result presumably accumulates in RES -- confirm
   upstream.  */
10541 oacc_privatization_candidate_p (const location_t loc
, const tree c
,
10544 dump_flags_t l_dump_flags
= get_openacc_privatization_dump_flags ();
10546 /* There is some differentiation depending on block vs. clause. */
10551 if (res
&& !VAR_P (decl
))
10555 if (dump_enabled_p ())
10557 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10558 dump_printf (l_dump_flags
,
10559 "potentially has improper OpenACC privatization level: %qs\n",
10560 get_tree_code_name (TREE_CODE (decl
)));
10564 if (res
&& block
&& TREE_STATIC (decl
))
10568 if (dump_enabled_p ())
10570 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10571 dump_printf (l_dump_flags
,
10572 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10577 if (res
&& block
&& DECL_EXTERNAL (decl
))
10581 if (dump_enabled_p ())
10583 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10584 dump_printf (l_dump_flags
,
10585 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10590 if (res
&& !TREE_ADDRESSABLE (decl
))
10594 if (dump_enabled_p ())
10596 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10597 dump_printf (l_dump_flags
,
10598 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10599 "not addressable");
10605 if (dump_enabled_p ())
10607 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10608 dump_printf (l_dump_flags
,
10609 "is candidate for adjusting OpenACC privatization level\n");
10613 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10615 print_generic_decl (dump_file
, decl
, dump_flags
);
10616 fprintf (dump_file
, "\n");
10622 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
/* For each OMP_CLAUSE_PRIVATE clause, record its decl in
   CTX->oacc_privatization_candidates when it passes
   oacc_privatization_candidate_p; the checking assert guards against
   duplicate pushes.  */
10626 oacc_privatization_scan_clause_chain (omp_context
*ctx
, tree clauses
)
10628 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10629 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
)
10631 tree decl
= OMP_CLAUSE_DECL (c
);
10633 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c
), c
, decl
))
10636 gcc_checking_assert (!ctx
->oacc_privatization_candidates
.contains (decl
));
10637 ctx
->oacc_privatization_candidates
.safe_push (decl
);
10641 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
/* Block-variable counterpart of oacc_privatization_scan_clause_chain:
   walk a DECL_CHAIN and record qualifying decls (clause argument to the
   candidate check is NULL here, marking the "declared in block" case).  */
10645 oacc_privatization_scan_decl_chain (omp_context
*ctx
, tree decls
)
10647 for (tree decl
= decls
; decl
; decl
= DECL_CHAIN (decl
))
10649 if (!oacc_privatization_candidate_p (gimple_location (ctx
->stmt
), NULL
, decl
))
10652 gcc_checking_assert (!ctx
->oacc_privatization_candidates
.contains (decl
));
10653 ctx
->oacc_privatization_candidates
.safe_push (decl
);
10657 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
/* Returns integer_zero_node (stopping the walk) after storing the
   iterator of the first GIMPLE_OMP_SCAN found into WI->info; descends
   into combined simd GIMPLE_OMP_FOR bodies by clearing *handled_ops_p.
   NOTE(review): the default/other cases of the switch are missing from
   this extraction.  */
10660 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
10661 struct walk_stmt_info
*wi
)
10663 gimple
*stmt
= gsi_stmt (*gsi_p
);
10665 *handled_ops_p
= true;
10666 switch (gimple_code (stmt
))
10670 case GIMPLE_OMP_FOR
:
10671 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
10672 && gimple_omp_for_combined_into_p (stmt
))
10673 *handled_ops_p
= false;
10676 case GIMPLE_OMP_SCAN
:
10677 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
10678 return integer_zero_node
;
10685 /* Helper function for lower_omp_for, add transformations for a worksharing
10686 loop with scan directives inside of it.
10687 For worksharing loop not combined with simd, transform:
10688 #pragma omp for reduction(inscan,+:r) private(i)
10689 for (i = 0; i < n; i = i + 1)
10694 #pragma omp scan inclusive(r)
10700 into two worksharing loops + code to merge results:
10702 num_threads = omp_get_num_threads ();
10703 thread_num = omp_get_thread_num ();
10704 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10709 // For UDRs this is UDR init, or if ctors are needed, copy from
10710 // var3 that has been constructed to contain the neutral element.
10714 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10715 // a shared array with num_threads elements and rprivb to a local array
10716 // number of elements equal to the number of (contiguous) iterations the
10717 // current thread will perform. controlb and controlp variables are
10718 // temporaries to handle deallocation of rprivb at the end of second
10720 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10721 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10722 for (i = 0; i < n; i = i + 1)
10725 // For UDRs this is UDR init or copy from var3.
10727 // This is the input phase from user code.
10731 // For UDRs this is UDR merge.
10733 // Rather than handing it over to the user, save to local thread's
10735 rprivb[ivar] = var2;
10736 // For exclusive scan, the above two statements are swapped.
10740 // And remember the final value from this thread's into the shared
10742 rpriva[(sizetype) thread_num] = var2;
10743 // If more than one thread, compute using Work-Efficient prefix sum
10744 // the inclusive parallel scan of the rpriva array.
10745 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10750 num_threadsu = (unsigned int) num_threads;
10751 thread_numup1 = (unsigned int) thread_num + 1;
10754 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10758 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10763 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10764 mul = REALPART_EXPR <cplx>;
10765 ovf = IMAGPART_EXPR <cplx>;
10766 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10769 andvm1 = andv + 4294967295;
10771 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10773 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10774 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10775 rpriva[l] = rpriva[l - k] + rpriva[l];
10777 if (down == 0) goto <D.2121>; else goto <D.2122>;
10785 if (k != 0) goto <D.2108>; else goto <D.2103>;
10787 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10789 // For UDRs this is UDR init or copy from var3.
10793 var2 = rpriva[thread_num - 1];
10796 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10797 reduction(inscan,+:r) private(i)
10798 for (i = 0; i < n; i = i + 1)
10801 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10802 r = var2 + rprivb[ivar];
10805 // This is the scan phase from user code.
10807 // Plus a bump of the iterator.
10813 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
10814 struct omp_for_data
*fd
, omp_context
*ctx
)
10816 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
10817 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
10819 gimple_seq body
= gimple_omp_body (stmt
);
10820 gimple_stmt_iterator input1_gsi
= gsi_none ();
10821 struct walk_stmt_info wi
;
10822 memset (&wi
, 0, sizeof (wi
));
10823 wi
.val_only
= true;
10824 wi
.info
= (void *) &input1_gsi
;
10825 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
10826 gcc_assert (!gsi_end_p (input1_gsi
));
10828 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
10829 gimple_stmt_iterator gsi
= input1_gsi
;
10831 gimple_stmt_iterator scan1_gsi
= gsi
;
10832 gimple
*scan_stmt1
= gsi_stmt (gsi
);
10833 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
10835 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
10836 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
10837 gimple_omp_set_body (input_stmt1
, NULL
);
10838 gimple_omp_set_body (scan_stmt1
, NULL
);
10839 gimple_omp_set_body (stmt
, NULL
);
10841 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
10842 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
10843 gimple_omp_set_body (stmt
, body
);
10844 gimple_omp_set_body (input_stmt1
, input_body
);
10846 gimple_stmt_iterator input2_gsi
= gsi_none ();
10847 memset (&wi
, 0, sizeof (wi
));
10848 wi
.val_only
= true;
10849 wi
.info
= (void *) &input2_gsi
;
10850 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
10851 gcc_assert (!gsi_end_p (input2_gsi
));
10853 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
10856 gimple_stmt_iterator scan2_gsi
= gsi
;
10857 gimple
*scan_stmt2
= gsi_stmt (gsi
);
10858 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
10859 gimple_omp_set_body (scan_stmt2
, scan_body
);
10861 gimple_stmt_iterator input3_gsi
= gsi_none ();
10862 gimple_stmt_iterator scan3_gsi
= gsi_none ();
10863 gimple_stmt_iterator input4_gsi
= gsi_none ();
10864 gimple_stmt_iterator scan4_gsi
= gsi_none ();
10865 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
10866 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
10867 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
10870 memset (&wi
, 0, sizeof (wi
));
10871 wi
.val_only
= true;
10872 wi
.info
= (void *) &input3_gsi
;
10873 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
10874 gcc_assert (!gsi_end_p (input3_gsi
));
10876 input_stmt3
= gsi_stmt (input3_gsi
);
10880 scan_stmt3
= gsi_stmt (gsi
);
10881 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
10883 memset (&wi
, 0, sizeof (wi
));
10884 wi
.val_only
= true;
10885 wi
.info
= (void *) &input4_gsi
;
10886 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
10887 gcc_assert (!gsi_end_p (input4_gsi
));
10889 input_stmt4
= gsi_stmt (input4_gsi
);
10893 scan_stmt4
= gsi_stmt (gsi
);
10894 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
10896 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
10897 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
10900 tree num_threads
= create_tmp_var (integer_type_node
);
10901 tree thread_num
= create_tmp_var (integer_type_node
);
10902 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
10903 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
10904 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
10905 gimple_call_set_lhs (g
, num_threads
);
10906 gimple_seq_add_stmt (body_p
, g
);
10907 g
= gimple_build_call (threadnum_decl
, 0);
10908 gimple_call_set_lhs (g
, thread_num
);
10909 gimple_seq_add_stmt (body_p
, g
);
10911 tree ivar
= create_tmp_var (sizetype
);
10912 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
10913 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
10914 tree k
= create_tmp_var (unsigned_type_node
);
10915 tree l
= create_tmp_var (unsigned_type_node
);
10917 gimple_seq clist
= NULL
, mdlist
= NULL
;
10918 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
10919 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
10920 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
10921 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
10922 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10923 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10924 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10926 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10927 tree var
= OMP_CLAUSE_DECL (c
);
10928 tree new_var
= lookup_decl (var
, ctx
);
10929 tree var3
= NULL_TREE
;
10930 tree new_vard
= new_var
;
10931 if (omp_privatize_by_reference (var
))
10932 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10933 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10935 var3
= maybe_lookup_decl (new_vard
, ctx
);
10936 if (var3
== new_vard
)
10940 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
10941 tree rpriva
= create_tmp_var (ptype
);
10942 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
10943 OMP_CLAUSE_DECL (nc
) = rpriva
;
10945 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10947 tree rprivb
= create_tmp_var (ptype
);
10948 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
10949 OMP_CLAUSE_DECL (nc
) = rprivb
;
10950 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
10952 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10954 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
10955 if (new_vard
!= new_var
)
10956 TREE_ADDRESSABLE (var2
) = 1;
10957 gimple_add_tmp_var (var2
);
10959 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
10960 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10961 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10962 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10963 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10965 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
10966 thread_num
, integer_minus_one_node
);
10967 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
10968 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10969 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10970 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10971 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10973 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
10974 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10975 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10976 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10977 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10979 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
10980 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
10981 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10982 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10983 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10984 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10986 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
10987 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10988 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
10989 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10991 tree var4
= is_for_simd
? new_var
: var2
;
10992 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
10995 var5
= lookup_decl (var
, input_simd_ctx
);
10996 var6
= lookup_decl (var
, scan_simd_ctx
);
10997 if (new_vard
!= new_var
)
10999 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
11000 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
11003 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11005 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
11008 x
= lang_hooks
.decls
.omp_clause_default_ctor
11009 (c
, var2
, build_outer_var_ref (var
, ctx
));
11011 gimplify_and_add (x
, &clist
);
11013 x
= build_outer_var_ref (var
, ctx
);
11014 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
11016 gimplify_and_add (x
, &thr01_list
);
11018 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
11019 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
11022 x
= unshare_expr (var4
);
11023 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11024 gimplify_and_add (x
, &thrn1_list
);
11025 x
= unshare_expr (var4
);
11026 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11027 gimplify_and_add (x
, &thr02_list
);
11029 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
11031 /* Otherwise, assign to it the identity element. */
11032 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11033 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11036 if (new_vard
!= new_var
)
11037 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11038 SET_DECL_VALUE_EXPR (new_vard
, val
);
11039 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11041 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
11042 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11043 lower_omp (&tseq
, ctx
);
11044 gimple_seq_add_seq (&thrn1_list
, tseq
);
11045 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11046 lower_omp (&tseq
, ctx
);
11047 gimple_seq_add_seq (&thr02_list
, tseq
);
11048 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11049 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11050 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
11052 SET_DECL_VALUE_EXPR (new_vard
, y
);
11055 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11056 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11060 x
= unshare_expr (var4
);
11061 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
11062 gimplify_and_add (x
, &thrn2_list
);
11066 x
= unshare_expr (rprivb_ref
);
11067 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
11068 gimplify_and_add (x
, &scan1_list
);
11072 if (ctx
->scan_exclusive
)
11074 x
= unshare_expr (rprivb_ref
);
11075 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11076 gimplify_and_add (x
, &scan1_list
);
11079 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11080 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11081 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11082 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11083 lower_omp (&tseq
, ctx
);
11084 gimple_seq_add_seq (&scan1_list
, tseq
);
11086 if (ctx
->scan_inclusive
)
11088 x
= unshare_expr (rprivb_ref
);
11089 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11090 gimplify_and_add (x
, &scan1_list
);
11094 x
= unshare_expr (rpriva_ref
);
11095 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
11096 unshare_expr (var4
));
11097 gimplify_and_add (x
, &mdlist
);
11099 x
= unshare_expr (is_for_simd
? var6
: new_var
);
11100 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
11101 gimplify_and_add (x
, &input2_list
);
11104 if (new_vard
!= new_var
)
11105 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11107 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11108 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11109 SET_DECL_VALUE_EXPR (new_vard
, val
);
11110 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11113 SET_DECL_VALUE_EXPR (placeholder
, var6
);
11114 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11117 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11118 lower_omp (&tseq
, ctx
);
11120 SET_DECL_VALUE_EXPR (new_vard
, y
);
11123 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11124 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11128 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
11129 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11130 lower_omp (&tseq
, ctx
);
11132 gimple_seq_add_seq (&input2_list
, tseq
);
11134 x
= build_outer_var_ref (var
, ctx
);
11135 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
11136 gimplify_and_add (x
, &last_list
);
11138 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
11139 gimplify_and_add (x
, &reduc_list
);
11140 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11141 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11143 if (new_vard
!= new_var
)
11144 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11145 SET_DECL_VALUE_EXPR (new_vard
, val
);
11146 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11147 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11148 lower_omp (&tseq
, ctx
);
11149 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
11150 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11151 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11153 SET_DECL_VALUE_EXPR (new_vard
, y
);
11156 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11157 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11159 gimple_seq_add_seq (&reduc_list
, tseq
);
11160 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
11161 gimplify_and_add (x
, &reduc_list
);
11163 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
11165 gimplify_and_add (x
, dlist
);
11169 x
= build_outer_var_ref (var
, ctx
);
11170 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
11172 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
11173 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
11175 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
11177 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
11179 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
11180 if (code
== MINUS_EXPR
)
11184 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
11187 if (ctx
->scan_exclusive
)
11188 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11190 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
11191 gimplify_assign (var2
, x
, &scan1_list
);
11192 if (ctx
->scan_inclusive
)
11193 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11197 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
11200 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
11201 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
11203 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
11206 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
11207 unshare_expr (rprival_ref
));
11208 gimplify_assign (rprival_ref
, x
, &reduc_list
);
11212 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11213 gimple_seq_add_stmt (&scan1_list
, g
);
11214 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11215 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11216 ? scan_stmt4
: scan_stmt2
), g
);
11218 tree controlb
= create_tmp_var (boolean_type_node
);
11219 tree controlp
= create_tmp_var (ptr_type_node
);
11220 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11221 OMP_CLAUSE_DECL (nc
) = controlb
;
11222 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11224 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11225 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11226 OMP_CLAUSE_DECL (nc
) = controlp
;
11227 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11229 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11230 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11231 OMP_CLAUSE_DECL (nc
) = controlb
;
11232 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11234 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11235 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11236 OMP_CLAUSE_DECL (nc
) = controlp
;
11237 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11239 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11241 *cp1
= gimple_omp_for_clauses (stmt
);
11242 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
11243 *cp2
= gimple_omp_for_clauses (new_stmt
);
11244 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
11248 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
11249 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
11251 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
11253 gsi_remove (&input3_gsi
, true);
11254 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
11256 gsi_remove (&scan3_gsi
, true);
11257 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
11259 gsi_remove (&input4_gsi
, true);
11260 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
11262 gsi_remove (&scan4_gsi
, true);
11266 gimple_omp_set_body (scan_stmt1
, scan1_list
);
11267 gimple_omp_set_body (input_stmt2
, input2_list
);
11270 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
11272 gsi_remove (&input1_gsi
, true);
11273 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
11275 gsi_remove (&scan1_gsi
, true);
11276 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
11278 gsi_remove (&input2_gsi
, true);
11279 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
11281 gsi_remove (&scan2_gsi
, true);
11283 gimple_seq_add_seq (body_p
, clist
);
11285 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11286 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11287 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11288 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11289 gimple_seq_add_stmt (body_p
, g
);
11290 g
= gimple_build_label (lab1
);
11291 gimple_seq_add_stmt (body_p
, g
);
11292 gimple_seq_add_seq (body_p
, thr01_list
);
11293 g
= gimple_build_goto (lab3
);
11294 gimple_seq_add_stmt (body_p
, g
);
11295 g
= gimple_build_label (lab2
);
11296 gimple_seq_add_stmt (body_p
, g
);
11297 gimple_seq_add_seq (body_p
, thrn1_list
);
11298 g
= gimple_build_label (lab3
);
11299 gimple_seq_add_stmt (body_p
, g
);
11301 g
= gimple_build_assign (ivar
, size_zero_node
);
11302 gimple_seq_add_stmt (body_p
, g
);
11304 gimple_seq_add_stmt (body_p
, stmt
);
11305 gimple_seq_add_seq (body_p
, body
);
11306 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
11309 g
= gimple_build_omp_return (true);
11310 gimple_seq_add_stmt (body_p
, g
);
11311 gimple_seq_add_seq (body_p
, mdlist
);
11313 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11314 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11315 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
11316 gimple_seq_add_stmt (body_p
, g
);
11317 g
= gimple_build_label (lab1
);
11318 gimple_seq_add_stmt (body_p
, g
);
11320 g
= omp_build_barrier (NULL
);
11321 gimple_seq_add_stmt (body_p
, g
);
11323 tree down
= create_tmp_var (unsigned_type_node
);
11324 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
11325 gimple_seq_add_stmt (body_p
, g
);
11327 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
11328 gimple_seq_add_stmt (body_p
, g
);
11330 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
11331 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
11332 gimple_seq_add_stmt (body_p
, g
);
11334 tree thread_numu
= create_tmp_var (unsigned_type_node
);
11335 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
11336 gimple_seq_add_stmt (body_p
, g
);
11338 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
11339 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
11340 build_int_cst (unsigned_type_node
, 1));
11341 gimple_seq_add_stmt (body_p
, g
);
11343 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11344 g
= gimple_build_label (lab3
);
11345 gimple_seq_add_stmt (body_p
, g
);
11347 tree twok
= create_tmp_var (unsigned_type_node
);
11348 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11349 gimple_seq_add_stmt (body_p
, g
);
11351 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
11352 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
11353 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
11354 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
11355 gimple_seq_add_stmt (body_p
, g
);
11356 g
= gimple_build_label (lab4
);
11357 gimple_seq_add_stmt (body_p
, g
);
11358 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
11359 gimple_seq_add_stmt (body_p
, g
);
11360 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11361 gimple_seq_add_stmt (body_p
, g
);
11363 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
11364 gimple_seq_add_stmt (body_p
, g
);
11365 g
= gimple_build_label (lab6
);
11366 gimple_seq_add_stmt (body_p
, g
);
11368 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11369 gimple_seq_add_stmt (body_p
, g
);
11371 g
= gimple_build_label (lab5
);
11372 gimple_seq_add_stmt (body_p
, g
);
11374 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11375 gimple_seq_add_stmt (body_p
, g
);
11377 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
11378 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
11379 gimple_call_set_lhs (g
, cplx
);
11380 gimple_seq_add_stmt (body_p
, g
);
11381 tree mul
= create_tmp_var (unsigned_type_node
);
11382 g
= gimple_build_assign (mul
, REALPART_EXPR
,
11383 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
11384 gimple_seq_add_stmt (body_p
, g
);
11385 tree ovf
= create_tmp_var (unsigned_type_node
);
11386 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
11387 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
11388 gimple_seq_add_stmt (body_p
, g
);
11390 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
11391 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
11392 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
11394 gimple_seq_add_stmt (body_p
, g
);
11395 g
= gimple_build_label (lab7
);
11396 gimple_seq_add_stmt (body_p
, g
);
11398 tree andv
= create_tmp_var (unsigned_type_node
);
11399 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
11400 gimple_seq_add_stmt (body_p
, g
);
11401 tree andvm1
= create_tmp_var (unsigned_type_node
);
11402 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
11403 build_minus_one_cst (unsigned_type_node
));
11404 gimple_seq_add_stmt (body_p
, g
);
11406 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
11407 gimple_seq_add_stmt (body_p
, g
);
11409 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
11410 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
11411 gimple_seq_add_stmt (body_p
, g
);
11412 g
= gimple_build_label (lab9
);
11413 gimple_seq_add_stmt (body_p
, g
);
11414 gimple_seq_add_seq (body_p
, reduc_list
);
11415 g
= gimple_build_label (lab8
);
11416 gimple_seq_add_stmt (body_p
, g
);
11418 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
11419 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
11420 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
11421 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
11423 gimple_seq_add_stmt (body_p
, g
);
11424 g
= gimple_build_label (lab10
);
11425 gimple_seq_add_stmt (body_p
, g
);
11426 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
11427 gimple_seq_add_stmt (body_p
, g
);
11428 g
= gimple_build_goto (lab12
);
11429 gimple_seq_add_stmt (body_p
, g
);
11430 g
= gimple_build_label (lab11
);
11431 gimple_seq_add_stmt (body_p
, g
);
11432 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11433 gimple_seq_add_stmt (body_p
, g
);
11434 g
= gimple_build_label (lab12
);
11435 gimple_seq_add_stmt (body_p
, g
);
11437 g
= omp_build_barrier (NULL
);
11438 gimple_seq_add_stmt (body_p
, g
);
11440 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
11442 gimple_seq_add_stmt (body_p
, g
);
11444 g
= gimple_build_label (lab2
);
11445 gimple_seq_add_stmt (body_p
, g
);
11447 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11448 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11449 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11450 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11451 gimple_seq_add_stmt (body_p
, g
);
11452 g
= gimple_build_label (lab1
);
11453 gimple_seq_add_stmt (body_p
, g
);
11454 gimple_seq_add_seq (body_p
, thr02_list
);
11455 g
= gimple_build_goto (lab3
);
11456 gimple_seq_add_stmt (body_p
, g
);
11457 g
= gimple_build_label (lab2
);
11458 gimple_seq_add_stmt (body_p
, g
);
11459 gimple_seq_add_seq (body_p
, thrn2_list
);
11460 g
= gimple_build_label (lab3
);
11461 gimple_seq_add_stmt (body_p
, g
);
11463 g
= gimple_build_assign (ivar
, size_zero_node
);
11464 gimple_seq_add_stmt (body_p
, g
);
11465 gimple_seq_add_stmt (body_p
, new_stmt
);
11466 gimple_seq_add_seq (body_p
, new_body
);
11468 gimple_seq new_dlist
= NULL
;
11469 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11470 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11471 tree num_threadsm1
= create_tmp_var (integer_type_node
);
11472 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
11473 integer_minus_one_node
);
11474 gimple_seq_add_stmt (&new_dlist
, g
);
11475 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
11476 gimple_seq_add_stmt (&new_dlist
, g
);
11477 g
= gimple_build_label (lab1
);
11478 gimple_seq_add_stmt (&new_dlist
, g
);
11479 gimple_seq_add_seq (&new_dlist
, last_list
);
11480 g
= gimple_build_label (lab2
);
11481 gimple_seq_add_stmt (&new_dlist
, g
);
11482 gimple_seq_add_seq (&new_dlist
, *dlist
);
11483 *dlist
= new_dlist
;
11486 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11487 the addresses of variables to be made private at the surrounding
11488 parallelism level. Such functions appear in the gimple code stream in two
11489 forms, e.g. for a partitioned loop:
11491 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11492 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11493 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11494 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11496 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11497 not as part of a HEAD_MARK sequence:
11499 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11501 For such stand-alone appearances, the 3rd argument is always 0, denoting
11502 gang partitioning. */
11505 lower_oacc_private_marker (omp_context
*ctx
)
11507 if (ctx
->oacc_privatization_candidates
.length () == 0)
11510 auto_vec
<tree
, 5> args
;
11512 args
.quick_push (build_int_cst (integer_type_node
, IFN_UNIQUE_OACC_PRIVATE
));
11513 args
.quick_push (integer_zero_node
);
11514 args
.quick_push (integer_minus_one_node
);
11518 FOR_EACH_VEC_ELT (ctx
->oacc_privatization_candidates
, i
, decl
)
11520 for (omp_context
*thisctx
= ctx
; thisctx
; thisctx
= thisctx
->outer
)
11522 tree inner_decl
= maybe_lookup_decl (decl
, thisctx
);
11529 gcc_checking_assert (decl
);
11531 tree addr
= build_fold_addr_expr (decl
);
11532 args
.safe_push (addr
);
11535 return gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
11538 /* Lower code for an OMP loop directive. */
11541 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11543 tree
*rhs_p
, block
;
11544 struct omp_for_data fd
, *fdp
= NULL
;
11545 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
11547 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
11548 gimple_seq cnt_list
= NULL
, clist
= NULL
;
11549 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
11552 push_gimplify_context ();
11554 if (is_gimple_omp_oacc (ctx
->stmt
))
11555 oacc_privatization_scan_clause_chain (ctx
, gimple_omp_for_clauses (stmt
));
11557 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
11559 block
= make_node (BLOCK
);
11560 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
11561 /* Replace at gsi right away, so that 'stmt' is no member
11562 of a sequence anymore as we're going to add to a different
11564 gsi_replace (gsi_p
, new_stmt
, true);
11566 /* Move declaration of temporaries in the loop body before we make
11568 omp_for_body
= gimple_omp_body (stmt
);
11569 if (!gimple_seq_empty_p (omp_for_body
)
11570 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
11573 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
11574 tree vars
= gimple_bind_vars (inner_bind
);
11575 if (is_gimple_omp_oacc (ctx
->stmt
))
11576 oacc_privatization_scan_decl_chain (ctx
, vars
);
11577 gimple_bind_append_vars (new_stmt
, vars
);
11578 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11579 keep them on the inner_bind and it's block. */
11580 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
11581 if (gimple_bind_block (inner_bind
))
11582 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
11585 if (gimple_omp_for_combined_into_p (stmt
))
11587 omp_extract_for_data (stmt
, &fd
, NULL
);
11590 /* We need two temporaries with fd.loop.v type (istart/iend)
11591 and then (fd.collapse - 1) temporaries with the same
11592 type for count2 ... countN-1 vars if not constant. */
11594 tree type
= fd
.iter_type
;
11595 if (fd
.collapse
> 1
11596 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11597 count
+= fd
.collapse
- 1;
11599 tree type2
= NULL_TREE
;
11601 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
11602 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
11603 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
11605 tree clauses
= *pc
;
11606 if (fd
.collapse
> 1
11608 && fd
.last_nonrect
== fd
.first_nonrect
+ 1
11609 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11610 if (tree v
= gimple_omp_for_index (stmt
, fd
.last_nonrect
))
11611 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
11613 v
= gimple_omp_for_index (stmt
, fd
.first_nonrect
);
11614 type2
= TREE_TYPE (v
);
11620 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
11621 OMP_CLAUSE__LOOPTEMP_
);
11622 if (ctx
->simt_stmt
)
11623 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
11624 OMP_CLAUSE__LOOPTEMP_
);
11625 for (i
= 0; i
< count
+ count2
; i
++)
11630 gcc_assert (outerc
);
11631 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
11632 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
11633 OMP_CLAUSE__LOOPTEMP_
);
11637 /* If there are 2 adjacent SIMD stmts, one with _simt_
11638 clause, another without, make sure they have the same
11639 decls in _looptemp_ clauses, because the outer stmt
11640 they are combined into will look up just one inner_stmt. */
11641 if (ctx
->simt_stmt
)
11642 temp
= OMP_CLAUSE_DECL (simtc
);
11644 temp
= create_tmp_var (i
>= count
? type2
: type
);
11645 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
11647 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
11648 OMP_CLAUSE_DECL (*pc
) = temp
;
11649 pc
= &OMP_CLAUSE_CHAIN (*pc
);
11650 if (ctx
->simt_stmt
)
11651 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
11652 OMP_CLAUSE__LOOPTEMP_
);
11657 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11661 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
11662 OMP_CLAUSE_REDUCTION
);
11663 tree rtmp
= NULL_TREE
;
11666 tree type
= build_pointer_type (pointer_sized_int_node
);
11667 tree temp
= create_tmp_var (type
);
11668 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
11669 OMP_CLAUSE_DECL (c
) = temp
;
11670 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
11671 gimple_omp_for_set_clauses (stmt
, c
);
11672 lower_omp_task_reductions (ctx
, OMP_FOR
,
11673 gimple_omp_for_clauses (stmt
),
11674 &tred_ilist
, &tred_dlist
);
11676 rtmp
= make_ssa_name (type
);
11677 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
11680 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
11683 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
11685 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
11686 gimple_omp_for_pre_body (stmt
));
11688 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
11690 gcall
*private_marker
= NULL
;
11691 if (is_gimple_omp_oacc (ctx
->stmt
)
11692 && !gimple_seq_empty_p (omp_for_body
))
11693 private_marker
= lower_oacc_private_marker (ctx
);
11695 /* Lower the header expressions. At this point, we can assume that
11696 the header is of the form:
11698 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11700 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11701 using the .omp_data_s mapping, if needed. */
11702 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
11704 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
11705 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11707 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11708 TREE_VEC_ELT (*rhs_p
, 1)
11709 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11710 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11711 TREE_VEC_ELT (*rhs_p
, 2)
11712 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11714 else if (!is_gimple_min_invariant (*rhs_p
))
11715 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11716 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11717 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11719 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
11720 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11722 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11723 TREE_VEC_ELT (*rhs_p
, 1)
11724 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11725 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11726 TREE_VEC_ELT (*rhs_p
, 2)
11727 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11729 else if (!is_gimple_min_invariant (*rhs_p
))
11730 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11731 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11732 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11734 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
11735 if (!is_gimple_min_invariant (*rhs_p
))
11736 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11739 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
11741 gimple_seq_add_seq (&body
, cnt_list
);
11743 /* Once lowered, extract the bounds and clauses. */
11744 omp_extract_for_data (stmt
, &fd
, NULL
);
11746 if (is_gimple_omp_oacc (ctx
->stmt
)
11747 && !ctx_in_oacc_kernels_region (ctx
))
11748 lower_oacc_head_tail (gimple_location (stmt
),
11749 gimple_omp_for_clauses (stmt
), private_marker
,
11750 &oacc_head
, &oacc_tail
, ctx
);
11752 /* Add OpenACC partitioning and reduction markers just before the loop. */
11754 gimple_seq_add_seq (&body
, oacc_head
);
11756 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
11758 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11759 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11760 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11761 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11763 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
11764 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
11765 OMP_CLAUSE_LINEAR_STEP (c
)
11766 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
11770 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
11771 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11772 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
11775 gimple_seq_add_stmt (&body
, stmt
);
11776 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
11779 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
11782 /* After the loop, add exit clauses. */
11783 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
11787 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
11788 gcall
*g
= gimple_build_call (fndecl
, 0);
11789 gimple_seq_add_stmt (&body
, g
);
11790 gimple_seq_add_seq (&body
, clist
);
11791 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
11792 g
= gimple_build_call (fndecl
, 0);
11793 gimple_seq_add_stmt (&body
, g
);
11796 if (ctx
->cancellable
)
11797 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
11799 gimple_seq_add_seq (&body
, dlist
);
11803 gimple_seq_add_seq (&tred_ilist
, body
);
11807 body
= maybe_catch_exception (body
);
11809 /* Region exit marker goes at the end of the loop body. */
11810 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
11811 gimple_seq_add_stmt (&body
, g
);
11813 gimple_seq_add_seq (&body
, tred_dlist
);
11815 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
11818 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
11820 /* Add OpenACC joining and reduction markers just after the loop. */
11822 gimple_seq_add_seq (&body
, oacc_tail
);
11824 pop_gimplify_context (new_stmt
);
11826 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
11827 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
11828 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
11829 if (BLOCK_VARS (block
))
11830 TREE_USED (block
) = 1;
11832 gimple_bind_set_body (new_stmt
, body
);
11833 gimple_omp_set_body (stmt
, NULL
);
11834 gimple_omp_for_set_pre_body (stmt
, NULL
);
11837 /* Callback for walk_stmts. Check if the current statement only contains
11838 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11841 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
11842 bool *handled_ops_p
,
11843 struct walk_stmt_info
*wi
)
11845 int *info
= (int *) wi
->info
;
11846 gimple
*stmt
= gsi_stmt (*gsi_p
);
11848 *handled_ops_p
= true;
11849 switch (gimple_code (stmt
))
11855 case GIMPLE_OMP_FOR
:
11856 case GIMPLE_OMP_SECTIONS
:
11857 *info
= *info
== 0 ? 1 : -1;
11866 struct omp_taskcopy_context
11868 /* This field must be at the beginning, as we do "inheritance": Some
11869 callback functions for tree-inline.c (e.g., omp_copy_decl)
11870 receive a copy_body_data pointer that is up-casted to an
11871 omp_context pointer. */
11877 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
11879 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
11881 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
11882 return create_tmp_var (TREE_TYPE (var
));
11888 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
11890 tree name
, new_fields
= NULL
, type
, f
;
11892 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
11893 name
= DECL_NAME (TYPE_NAME (orig_type
));
11894 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
11895 TYPE_DECL
, name
, type
);
11896 TYPE_NAME (type
) = name
;
11898 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
11900 tree new_f
= copy_node (f
);
11901 DECL_CONTEXT (new_f
) = type
;
11902 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
11903 TREE_CHAIN (new_f
) = new_fields
;
11904 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
11905 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
11906 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
11908 new_fields
= new_f
;
11909 tcctx
->cb
.decl_map
->put (f
, new_f
);
11911 TYPE_FIELDS (type
) = nreverse (new_fields
);
11912 layout_type (type
);
11916 /* Create task copyfn. */
11919 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
11921 struct function
*child_cfun
;
11922 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
11923 tree record_type
, srecord_type
, bind
, list
;
11924 bool record_needs_remap
= false, srecord_needs_remap
= false;
11926 struct omp_taskcopy_context tcctx
;
11927 location_t loc
= gimple_location (task_stmt
);
11928 size_t looptempno
= 0;
11930 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
11931 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
11932 gcc_assert (child_cfun
->cfg
== NULL
);
11933 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
11935 /* Reset DECL_CONTEXT on function arguments. */
11936 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
11937 DECL_CONTEXT (t
) = child_fn
;
11939 /* Populate the function. */
11940 push_gimplify_context ();
11941 push_cfun (child_cfun
);
11943 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
11944 TREE_SIDE_EFFECTS (bind
) = 1;
11946 DECL_SAVED_TREE (child_fn
) = bind
;
11947 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
11949 /* Remap src and dst argument types if needed. */
11950 record_type
= ctx
->record_type
;
11951 srecord_type
= ctx
->srecord_type
;
11952 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
11953 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
11955 record_needs_remap
= true;
11958 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
11959 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
11961 srecord_needs_remap
= true;
11965 if (record_needs_remap
|| srecord_needs_remap
)
11967 memset (&tcctx
, '\0', sizeof (tcctx
));
11968 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
11969 tcctx
.cb
.dst_fn
= child_fn
;
11970 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
11971 gcc_checking_assert (tcctx
.cb
.src_node
);
11972 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
11973 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
11974 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
11975 tcctx
.cb
.eh_lp_nr
= 0;
11976 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
11977 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
11980 if (record_needs_remap
)
11981 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
11982 if (srecord_needs_remap
)
11983 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
11986 tcctx
.cb
.decl_map
= NULL
;
11988 arg
= DECL_ARGUMENTS (child_fn
);
11989 TREE_TYPE (arg
) = build_pointer_type (record_type
);
11990 sarg
= DECL_CHAIN (arg
);
11991 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
11993 /* First pass: initialize temporaries used in record_type and srecord_type
11994 sizes and field offsets. */
11995 if (tcctx
.cb
.decl_map
)
11996 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11997 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12001 decl
= OMP_CLAUSE_DECL (c
);
12002 p
= tcctx
.cb
.decl_map
->get (decl
);
12005 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12006 sf
= (tree
) n
->value
;
12007 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12008 src
= build_simple_mem_ref_loc (loc
, sarg
);
12009 src
= omp_build_component_ref (src
, sf
);
12010 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
12011 append_to_statement_list (t
, &list
);
12014 /* Second pass: copy shared var pointers and copy construct non-VLA
12015 firstprivate vars. */
12016 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12017 switch (OMP_CLAUSE_CODE (c
))
12019 splay_tree_key key
;
12020 case OMP_CLAUSE_SHARED
:
12021 decl
= OMP_CLAUSE_DECL (c
);
12022 key
= (splay_tree_key
) decl
;
12023 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
12024 key
= (splay_tree_key
) &DECL_UID (decl
);
12025 n
= splay_tree_lookup (ctx
->field_map
, key
);
12028 f
= (tree
) n
->value
;
12029 if (tcctx
.cb
.decl_map
)
12030 f
= *tcctx
.cb
.decl_map
->get (f
);
12031 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
12032 sf
= (tree
) n
->value
;
12033 if (tcctx
.cb
.decl_map
)
12034 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12035 src
= build_simple_mem_ref_loc (loc
, sarg
);
12036 src
= omp_build_component_ref (src
, sf
);
12037 dst
= build_simple_mem_ref_loc (loc
, arg
);
12038 dst
= omp_build_component_ref (dst
, f
);
12039 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12040 append_to_statement_list (t
, &list
);
12042 case OMP_CLAUSE_REDUCTION
:
12043 case OMP_CLAUSE_IN_REDUCTION
:
12044 decl
= OMP_CLAUSE_DECL (c
);
12045 if (TREE_CODE (decl
) == MEM_REF
)
12047 decl
= TREE_OPERAND (decl
, 0);
12048 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
12049 decl
= TREE_OPERAND (decl
, 0);
12050 if (TREE_CODE (decl
) == INDIRECT_REF
12051 || TREE_CODE (decl
) == ADDR_EXPR
)
12052 decl
= TREE_OPERAND (decl
, 0);
12054 key
= (splay_tree_key
) decl
;
12055 n
= splay_tree_lookup (ctx
->field_map
, key
);
12058 f
= (tree
) n
->value
;
12059 if (tcctx
.cb
.decl_map
)
12060 f
= *tcctx
.cb
.decl_map
->get (f
);
12061 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
12062 sf
= (tree
) n
->value
;
12063 if (tcctx
.cb
.decl_map
)
12064 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12065 src
= build_simple_mem_ref_loc (loc
, sarg
);
12066 src
= omp_build_component_ref (src
, sf
);
12067 if (decl
!= OMP_CLAUSE_DECL (c
)
12068 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
12069 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
12070 src
= build_simple_mem_ref_loc (loc
, src
);
12071 dst
= build_simple_mem_ref_loc (loc
, arg
);
12072 dst
= omp_build_component_ref (dst
, f
);
12073 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12074 append_to_statement_list (t
, &list
);
12076 case OMP_CLAUSE__LOOPTEMP_
:
12077 /* Fields for first two _looptemp_ clauses are initialized by
12078 GOMP_taskloop*, the rest are handled like firstprivate. */
12079 if (looptempno
< 2)
12085 case OMP_CLAUSE__REDUCTEMP_
:
12086 case OMP_CLAUSE_FIRSTPRIVATE
:
12087 decl
= OMP_CLAUSE_DECL (c
);
12088 if (is_variable_sized (decl
))
12090 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12093 f
= (tree
) n
->value
;
12094 if (tcctx
.cb
.decl_map
)
12095 f
= *tcctx
.cb
.decl_map
->get (f
);
12096 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12099 sf
= (tree
) n
->value
;
12100 if (tcctx
.cb
.decl_map
)
12101 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12102 src
= build_simple_mem_ref_loc (loc
, sarg
);
12103 src
= omp_build_component_ref (src
, sf
);
12104 if (use_pointer_for_field (decl
, NULL
)
12105 || omp_privatize_by_reference (decl
))
12106 src
= build_simple_mem_ref_loc (loc
, src
);
12110 dst
= build_simple_mem_ref_loc (loc
, arg
);
12111 dst
= omp_build_component_ref (dst
, f
);
12112 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
12113 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12116 if (ctx
->allocate_map
)
12117 if (tree
*allocatorp
= ctx
->allocate_map
->get (decl
))
12119 tree allocator
= *allocatorp
;
12120 HOST_WIDE_INT ialign
= 0;
12121 if (TREE_CODE (allocator
) == TREE_LIST
)
12123 ialign
= tree_to_uhwi (TREE_VALUE (allocator
));
12124 allocator
= TREE_PURPOSE (allocator
);
12126 if (TREE_CODE (allocator
) != INTEGER_CST
)
12128 n
= splay_tree_lookup (ctx
->sfield_map
,
12129 (splay_tree_key
) allocator
);
12130 allocator
= (tree
) n
->value
;
12131 if (tcctx
.cb
.decl_map
)
12132 allocator
= *tcctx
.cb
.decl_map
->get (allocator
);
12133 tree a
= build_simple_mem_ref_loc (loc
, sarg
);
12134 allocator
= omp_build_component_ref (a
, allocator
);
12136 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
12137 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
12138 tree align
= build_int_cst (size_type_node
,
12140 DECL_ALIGN_UNIT (decl
)));
12141 tree sz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst
)));
12142 tree ptr
= build_call_expr_loc (loc
, a
, 3, align
, sz
,
12144 ptr
= fold_convert (TREE_TYPE (dst
), ptr
);
12145 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, ptr
);
12146 append_to_statement_list (t
, &list
);
12147 dst
= build_simple_mem_ref_loc (loc
, dst
);
12149 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
12151 append_to_statement_list (t
, &list
);
12153 case OMP_CLAUSE_PRIVATE
:
12154 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
12156 decl
= OMP_CLAUSE_DECL (c
);
12157 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12158 f
= (tree
) n
->value
;
12159 if (tcctx
.cb
.decl_map
)
12160 f
= *tcctx
.cb
.decl_map
->get (f
);
12161 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12164 sf
= (tree
) n
->value
;
12165 if (tcctx
.cb
.decl_map
)
12166 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12167 src
= build_simple_mem_ref_loc (loc
, sarg
);
12168 src
= omp_build_component_ref (src
, sf
);
12169 if (use_pointer_for_field (decl
, NULL
))
12170 src
= build_simple_mem_ref_loc (loc
, src
);
12174 dst
= build_simple_mem_ref_loc (loc
, arg
);
12175 dst
= omp_build_component_ref (dst
, f
);
12176 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12177 append_to_statement_list (t
, &list
);
12183 /* Last pass: handle VLA firstprivates. */
12184 if (tcctx
.cb
.decl_map
)
12185 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12186 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12190 decl
= OMP_CLAUSE_DECL (c
);
12191 if (!is_variable_sized (decl
))
12193 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12196 f
= (tree
) n
->value
;
12197 f
= *tcctx
.cb
.decl_map
->get (f
);
12198 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
12199 ind
= DECL_VALUE_EXPR (decl
);
12200 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
12201 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
12202 n
= splay_tree_lookup (ctx
->sfield_map
,
12203 (splay_tree_key
) TREE_OPERAND (ind
, 0));
12204 sf
= (tree
) n
->value
;
12205 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12206 src
= build_simple_mem_ref_loc (loc
, sarg
);
12207 src
= omp_build_component_ref (src
, sf
);
12208 src
= build_simple_mem_ref_loc (loc
, src
);
12209 dst
= build_simple_mem_ref_loc (loc
, arg
);
12210 dst
= omp_build_component_ref (dst
, f
);
12211 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
12212 append_to_statement_list (t
, &list
);
12213 n
= splay_tree_lookup (ctx
->field_map
,
12214 (splay_tree_key
) TREE_OPERAND (ind
, 0));
12215 df
= (tree
) n
->value
;
12216 df
= *tcctx
.cb
.decl_map
->get (df
);
12217 ptr
= build_simple_mem_ref_loc (loc
, arg
);
12218 ptr
= omp_build_component_ref (ptr
, df
);
12219 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
12220 build_fold_addr_expr_loc (loc
, dst
));
12221 append_to_statement_list (t
, &list
);
12224 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
12225 append_to_statement_list (t
, &list
);
12227 if (tcctx
.cb
.decl_map
)
12228 delete tcctx
.cb
.decl_map
;
12229 pop_gimplify_context (NULL
);
12230 BIND_EXPR_BODY (bind
) = list
;
12235 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
12239 size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
12241 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
12242 gcc_assert (clauses
);
12243 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12244 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
12245 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12247 case OMP_CLAUSE_DEPEND_LAST
:
12248 /* Lowering already done at gimplification. */
12250 case OMP_CLAUSE_DEPEND_IN
:
12253 case OMP_CLAUSE_DEPEND_OUT
:
12254 case OMP_CLAUSE_DEPEND_INOUT
:
12257 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12260 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12263 case OMP_CLAUSE_DEPEND_SOURCE
:
12264 case OMP_CLAUSE_DEPEND_SINK
:
12267 gcc_unreachable ();
12269 if (cnt
[1] || cnt
[3])
12271 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
12272 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
12273 tree array
= create_tmp_var (type
);
12274 TREE_ADDRESSABLE (array
) = 1;
12275 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
12279 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
12280 gimple_seq_add_stmt (iseq
, g
);
12281 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
12284 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
12285 gimple_seq_add_stmt (iseq
, g
);
12286 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
12288 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
12289 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
12290 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
12291 gimple_seq_add_stmt (iseq
, g
);
12293 for (i
= 0; i
< 4; i
++)
12297 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12298 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
12302 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12304 case OMP_CLAUSE_DEPEND_IN
:
12308 case OMP_CLAUSE_DEPEND_OUT
:
12309 case OMP_CLAUSE_DEPEND_INOUT
:
12313 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12317 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12322 gcc_unreachable ();
12324 tree t
= OMP_CLAUSE_DECL (c
);
12325 t
= fold_convert (ptr_type_node
, t
);
12326 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
12327 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12328 NULL_TREE
, NULL_TREE
);
12329 g
= gimple_build_assign (r
, t
);
12330 gimple_seq_add_stmt (iseq
, g
);
12333 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
12334 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
12335 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
12336 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
12338 tree clobber
= build_clobber (type
);
12339 g
= gimple_build_assign (array
, clobber
);
12340 gimple_seq_add_stmt (oseq
, g
);
12343 /* Lower the OpenMP parallel or task directive in the current statement
12344 in GSI_P. CTX holds context information for the directive. */
12347 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12351 gimple
*stmt
= gsi_stmt (*gsi_p
);
12352 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
12353 gimple_seq par_body
;
12354 location_t loc
= gimple_location (stmt
);
12356 clauses
= gimple_omp_taskreg_clauses (stmt
);
12357 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12358 && gimple_omp_task_taskwait_p (stmt
))
12366 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
12367 par_body
= gimple_bind_body (par_bind
);
12369 child_fn
= ctx
->cb
.dst_fn
;
12370 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
12371 && !gimple_omp_parallel_combined_p (stmt
))
12373 struct walk_stmt_info wi
;
12376 memset (&wi
, 0, sizeof (wi
));
12378 wi
.val_only
= true;
12379 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
12381 gimple_omp_parallel_set_combined_p (stmt
, true);
12383 gimple_seq dep_ilist
= NULL
;
12384 gimple_seq dep_olist
= NULL
;
12385 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12386 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
12388 push_gimplify_context ();
12389 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12390 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
12391 &dep_ilist
, &dep_olist
);
12394 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12395 && gimple_omp_task_taskwait_p (stmt
))
12399 gsi_replace (gsi_p
, dep_bind
, true);
12400 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12401 gimple_bind_add_stmt (dep_bind
, stmt
);
12402 gimple_bind_add_seq (dep_bind
, dep_olist
);
12403 pop_gimplify_context (dep_bind
);
12408 if (ctx
->srecord_type
)
12409 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
12411 gimple_seq tskred_ilist
= NULL
;
12412 gimple_seq tskred_olist
= NULL
;
12413 if ((is_task_ctx (ctx
)
12414 && gimple_omp_task_taskloop_p (ctx
->stmt
)
12415 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
12416 OMP_CLAUSE_REDUCTION
))
12417 || (is_parallel_ctx (ctx
)
12418 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
12419 OMP_CLAUSE__REDUCTEMP_
)))
12421 if (dep_bind
== NULL
)
12423 push_gimplify_context ();
12424 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12426 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
12428 gimple_omp_taskreg_clauses (ctx
->stmt
),
12429 &tskred_ilist
, &tskred_olist
);
12432 push_gimplify_context ();
12434 gimple_seq par_olist
= NULL
;
12435 gimple_seq par_ilist
= NULL
;
12436 gimple_seq par_rlist
= NULL
;
12437 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
12438 lower_omp (&par_body
, ctx
);
12439 if (gimple_code (stmt
) != GIMPLE_OMP_TASK
)
12440 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
12442 /* Declare all the variables created by mapping and the variables
12443 declared in the scope of the parallel body. */
12444 record_vars_into (ctx
->block_vars
, child_fn
);
12445 maybe_remove_omp_member_access_dummy_vars (par_bind
);
12446 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
12448 if (ctx
->record_type
)
12451 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
12452 : ctx
->record_type
, ".omp_data_o");
12453 DECL_NAMELESS (ctx
->sender_decl
) = 1;
12454 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
12455 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
12458 gimple_seq olist
= NULL
;
12459 gimple_seq ilist
= NULL
;
12460 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
12461 lower_send_shared_vars (&ilist
, &olist
, ctx
);
12463 if (ctx
->record_type
)
12465 tree clobber
= build_clobber (TREE_TYPE (ctx
->sender_decl
));
12466 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
12470 /* Once all the expansions are done, sequence all the different
12471 fragments inside gimple_omp_body. */
12473 gimple_seq new_body
= NULL
;
12475 if (ctx
->record_type
)
12477 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
12478 /* fixup_child_record_type might have changed receiver_decl's type. */
12479 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
12480 gimple_seq_add_stmt (&new_body
,
12481 gimple_build_assign (ctx
->receiver_decl
, t
));
12484 gimple_seq_add_seq (&new_body
, par_ilist
);
12485 gimple_seq_add_seq (&new_body
, par_body
);
12486 gimple_seq_add_seq (&new_body
, par_rlist
);
12487 if (ctx
->cancellable
)
12488 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
12489 gimple_seq_add_seq (&new_body
, par_olist
);
12490 new_body
= maybe_catch_exception (new_body
);
12491 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
12492 gimple_seq_add_stmt (&new_body
,
12493 gimple_build_omp_continue (integer_zero_node
,
12494 integer_zero_node
));
12495 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12496 gimple_omp_set_body (stmt
, new_body
);
12498 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
12499 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12501 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
12502 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12503 gimple_bind_add_seq (bind
, ilist
);
12504 gimple_bind_add_stmt (bind
, stmt
);
12505 gimple_bind_add_seq (bind
, olist
);
12507 pop_gimplify_context (NULL
);
12511 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12512 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
12513 gimple_bind_add_stmt (dep_bind
, bind
);
12514 gimple_bind_add_seq (dep_bind
, tskred_olist
);
12515 gimple_bind_add_seq (dep_bind
, dep_olist
);
12516 pop_gimplify_context (dep_bind
);
/* Lower the GIMPLE_OMP_TARGET in the current statement
   in GSI_P.  CTX holds context information for the directive.  */
12524 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12527 tree child_fn
, t
, c
;
12528 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
12529 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
12530 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
12531 location_t loc
= gimple_location (stmt
);
12532 bool offloaded
, data_region
;
12533 unsigned int map_cnt
= 0;
12534 tree in_reduction_clauses
= NULL_TREE
;
12536 offloaded
= is_gimple_omp_offloaded (stmt
);
12537 switch (gimple_omp_target_kind (stmt
))
12539 case GF_OMP_TARGET_KIND_REGION
:
12541 q
= &in_reduction_clauses
;
12542 for (p
= gimple_omp_target_clauses_ptr (stmt
); *p
; )
12543 if (OMP_CLAUSE_CODE (*p
) == OMP_CLAUSE_IN_REDUCTION
)
12546 q
= &OMP_CLAUSE_CHAIN (*q
);
12547 *p
= OMP_CLAUSE_CHAIN (*p
);
12550 p
= &OMP_CLAUSE_CHAIN (*p
);
12552 *p
= in_reduction_clauses
;
12554 case GF_OMP_TARGET_KIND_UPDATE
:
12555 case GF_OMP_TARGET_KIND_ENTER_DATA
:
12556 case GF_OMP_TARGET_KIND_EXIT_DATA
:
12557 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
12558 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
12559 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
12560 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
12561 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
12562 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
12563 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
12564 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
12565 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
12566 data_region
= false;
12568 case GF_OMP_TARGET_KIND_DATA
:
12569 case GF_OMP_TARGET_KIND_OACC_DATA
:
12570 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
12571 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
12572 data_region
= true;
12575 gcc_unreachable ();
12578 clauses
= gimple_omp_target_clauses (stmt
);
12580 gimple_seq dep_ilist
= NULL
;
12581 gimple_seq dep_olist
= NULL
;
12582 bool has_depend
= omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
) != NULL_TREE
;
12583 if (has_depend
|| in_reduction_clauses
)
12585 push_gimplify_context ();
12586 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12588 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
12589 &dep_ilist
, &dep_olist
);
12590 if (in_reduction_clauses
)
12591 lower_rec_input_clauses (in_reduction_clauses
, &dep_ilist
, &dep_olist
,
12599 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
12600 tgt_body
= gimple_bind_body (tgt_bind
);
12602 else if (data_region
)
12603 tgt_body
= gimple_omp_body (stmt
);
12604 child_fn
= ctx
->cb
.dst_fn
;
12606 push_gimplify_context ();
12609 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12610 switch (OMP_CLAUSE_CODE (c
))
12616 case OMP_CLAUSE_MAP
:
12618 /* First check what we're prepared to handle in the following. */
12619 switch (OMP_CLAUSE_MAP_KIND (c
))
12621 case GOMP_MAP_ALLOC
:
12623 case GOMP_MAP_FROM
:
12624 case GOMP_MAP_TOFROM
:
12625 case GOMP_MAP_POINTER
:
12626 case GOMP_MAP_TO_PSET
:
12627 case GOMP_MAP_DELETE
:
12628 case GOMP_MAP_RELEASE
:
12629 case GOMP_MAP_ALWAYS_TO
:
12630 case GOMP_MAP_ALWAYS_FROM
:
12631 case GOMP_MAP_ALWAYS_TOFROM
:
12632 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
12633 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
12634 case GOMP_MAP_STRUCT
:
12635 case GOMP_MAP_ALWAYS_POINTER
:
12636 case GOMP_MAP_ATTACH
:
12637 case GOMP_MAP_DETACH
:
12638 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
12639 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
12641 case GOMP_MAP_IF_PRESENT
:
12642 case GOMP_MAP_FORCE_ALLOC
:
12643 case GOMP_MAP_FORCE_TO
:
12644 case GOMP_MAP_FORCE_FROM
:
12645 case GOMP_MAP_FORCE_TOFROM
:
12646 case GOMP_MAP_FORCE_PRESENT
:
12647 case GOMP_MAP_FORCE_DEVICEPTR
:
12648 case GOMP_MAP_DEVICE_RESIDENT
:
12649 case GOMP_MAP_LINK
:
12650 case GOMP_MAP_FORCE_DETACH
:
12651 gcc_assert (is_gimple_omp_oacc (stmt
));
12654 gcc_unreachable ();
12658 case OMP_CLAUSE_TO
:
12659 case OMP_CLAUSE_FROM
:
12661 var
= OMP_CLAUSE_DECL (c
);
12664 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
12665 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12666 && (OMP_CLAUSE_MAP_KIND (c
)
12667 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
12672 if (DECL_SIZE (var
)
12673 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12675 tree var2
= DECL_VALUE_EXPR (var
);
12676 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12677 var2
= TREE_OPERAND (var2
, 0);
12678 gcc_assert (DECL_P (var2
));
12683 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12684 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12685 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12687 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12689 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
12690 && varpool_node::get_create (var
)->offloadable
)
12693 tree type
= build_pointer_type (TREE_TYPE (var
));
12694 tree new_var
= lookup_decl (var
, ctx
);
12695 x
= create_tmp_var_raw (type
, get_name (new_var
));
12696 gimple_add_tmp_var (x
);
12697 x
= build_simple_mem_ref (x
);
12698 SET_DECL_VALUE_EXPR (new_var
, x
);
12699 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12704 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12705 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12706 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
12707 && is_omp_target (stmt
))
12709 gcc_assert (maybe_lookup_field (c
, ctx
));
12714 if (!maybe_lookup_field (var
, ctx
))
12717 /* Don't remap compute constructs' reduction variables, because the
12718 intermediate result must be local to each gang. */
12719 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12720 && is_gimple_omp_oacc (ctx
->stmt
)
12721 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
12723 x
= build_receiver_ref (var
, true, ctx
);
12724 tree new_var
= lookup_decl (var
, ctx
);
12726 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12727 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
12728 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12729 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12730 x
= build_simple_mem_ref (x
);
12731 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12733 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
12734 if (omp_privatize_by_reference (new_var
)
12735 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
12736 || DECL_BY_REFERENCE (var
)))
12738 /* Create a local object to hold the instance
12740 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
12741 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
12742 tree inst
= create_tmp_var (type
, id
);
12743 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
12744 x
= build_fold_addr_expr (inst
);
12746 gimplify_assign (new_var
, x
, &fplist
);
12748 else if (DECL_P (new_var
))
12750 SET_DECL_VALUE_EXPR (new_var
, x
);
12751 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12754 gcc_unreachable ();
12759 case OMP_CLAUSE_FIRSTPRIVATE
:
12760 gcc_checking_assert (offloaded
);
12761 if (is_gimple_omp_oacc (ctx
->stmt
))
12763 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12764 gcc_checking_assert (!is_oacc_kernels (ctx
));
12765 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12766 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12768 goto oacc_firstprivate
;
12771 var
= OMP_CLAUSE_DECL (c
);
12772 if (!omp_privatize_by_reference (var
)
12773 && !is_gimple_reg_type (TREE_TYPE (var
)))
12775 tree new_var
= lookup_decl (var
, ctx
);
12776 if (is_variable_sized (var
))
12778 tree pvar
= DECL_VALUE_EXPR (var
);
12779 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12780 pvar
= TREE_OPERAND (pvar
, 0);
12781 gcc_assert (DECL_P (pvar
));
12782 tree new_pvar
= lookup_decl (pvar
, ctx
);
12783 x
= build_fold_indirect_ref (new_pvar
);
12784 TREE_THIS_NOTRAP (x
) = 1;
12787 x
= build_receiver_ref (var
, true, ctx
);
12788 SET_DECL_VALUE_EXPR (new_var
, x
);
12789 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12793 case OMP_CLAUSE_PRIVATE
:
12794 gcc_checking_assert (offloaded
);
12795 if (is_gimple_omp_oacc (ctx
->stmt
))
12797 /* No 'private' clauses on OpenACC 'kernels'. */
12798 gcc_checking_assert (!is_oacc_kernels (ctx
));
12799 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12800 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12804 var
= OMP_CLAUSE_DECL (c
);
12805 if (is_variable_sized (var
))
12807 tree new_var
= lookup_decl (var
, ctx
);
12808 tree pvar
= DECL_VALUE_EXPR (var
);
12809 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12810 pvar
= TREE_OPERAND (pvar
, 0);
12811 gcc_assert (DECL_P (pvar
));
12812 tree new_pvar
= lookup_decl (pvar
, ctx
);
12813 x
= build_fold_indirect_ref (new_pvar
);
12814 TREE_THIS_NOTRAP (x
) = 1;
12815 SET_DECL_VALUE_EXPR (new_var
, x
);
12816 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12820 case OMP_CLAUSE_USE_DEVICE_PTR
:
12821 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12822 case OMP_CLAUSE_IS_DEVICE_PTR
:
12823 var
= OMP_CLAUSE_DECL (c
);
12825 if (is_variable_sized (var
))
12827 tree new_var
= lookup_decl (var
, ctx
);
12828 tree pvar
= DECL_VALUE_EXPR (var
);
12829 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12830 pvar
= TREE_OPERAND (pvar
, 0);
12831 gcc_assert (DECL_P (pvar
));
12832 tree new_pvar
= lookup_decl (pvar
, ctx
);
12833 x
= build_fold_indirect_ref (new_pvar
);
12834 TREE_THIS_NOTRAP (x
) = 1;
12835 SET_DECL_VALUE_EXPR (new_var
, x
);
12836 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12838 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12839 && !omp_privatize_by_reference (var
)
12840 && !omp_is_allocatable_or_ptr (var
)
12841 && !lang_hooks
.decls
.omp_array_data (var
, true))
12842 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12844 tree new_var
= lookup_decl (var
, ctx
);
12845 tree type
= build_pointer_type (TREE_TYPE (var
));
12846 x
= create_tmp_var_raw (type
, get_name (new_var
));
12847 gimple_add_tmp_var (x
);
12848 x
= build_simple_mem_ref (x
);
12849 SET_DECL_VALUE_EXPR (new_var
, x
);
12850 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12854 tree new_var
= lookup_decl (var
, ctx
);
12855 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
12856 gimple_add_tmp_var (x
);
12857 SET_DECL_VALUE_EXPR (new_var
, x
);
12858 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12865 target_nesting_level
++;
12866 lower_omp (&tgt_body
, ctx
);
12867 target_nesting_level
--;
12869 else if (data_region
)
12870 lower_omp (&tgt_body
, ctx
);
12874 /* Declare all the variables created by mapping and the variables
12875 declared in the scope of the target body. */
12876 record_vars_into (ctx
->block_vars
, child_fn
);
12877 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
12878 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
12883 if (ctx
->record_type
)
12886 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
12887 DECL_NAMELESS (ctx
->sender_decl
) = 1;
12888 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
12889 t
= make_tree_vec (3);
12890 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
12891 TREE_VEC_ELT (t
, 1)
12892 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
12893 ".omp_data_sizes");
12894 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
12895 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
12896 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
12897 tree tkind_type
= short_unsigned_type_node
;
12898 int talign_shift
= 8;
12899 TREE_VEC_ELT (t
, 2)
12900 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
12901 ".omp_data_kinds");
12902 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
12903 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
12904 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
12905 gimple_omp_target_set_data_arg (stmt
, t
);
12907 vec
<constructor_elt
, va_gc
> *vsize
;
12908 vec
<constructor_elt
, va_gc
> *vkind
;
12909 vec_alloc (vsize
, map_cnt
);
12910 vec_alloc (vkind
, map_cnt
);
12911 unsigned int map_idx
= 0;
12913 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12914 switch (OMP_CLAUSE_CODE (c
))
12916 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
12917 unsigned int talign
;
12922 case OMP_CLAUSE_MAP
:
12923 case OMP_CLAUSE_TO
:
12924 case OMP_CLAUSE_FROM
:
12925 oacc_firstprivate_map
:
12927 ovar
= OMP_CLAUSE_DECL (c
);
12928 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12929 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12930 || (OMP_CLAUSE_MAP_KIND (c
)
12931 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
12933 if (!DECL_P (ovar
))
12935 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12936 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
12938 nc
= OMP_CLAUSE_CHAIN (c
);
12939 gcc_checking_assert (OMP_CLAUSE_DECL (nc
)
12940 == get_base_address (ovar
));
12941 ovar
= OMP_CLAUSE_DECL (nc
);
12945 tree x
= build_sender_ref (ovar
, ctx
);
12947 if (in_reduction_clauses
12948 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12949 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
12951 v
= unshare_expr (v
);
12953 while (handled_component_p (*p
)
12954 || TREE_CODE (*p
) == INDIRECT_REF
12955 || TREE_CODE (*p
) == ADDR_EXPR
12956 || TREE_CODE (*p
) == MEM_REF
12957 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
12958 p
= &TREE_OPERAND (*p
, 0);
12960 if (is_variable_sized (d
))
12962 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
12963 d
= DECL_VALUE_EXPR (d
);
12964 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
12965 d
= TREE_OPERAND (d
, 0);
12966 gcc_assert (DECL_P (d
));
12969 = (splay_tree_key
) &DECL_CONTEXT (d
);
12970 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
12975 *p
= build_fold_indirect_ref (nd
);
12977 v
= build_fold_addr_expr_with_type (v
, ptr_type_node
);
12978 gimplify_assign (x
, v
, &ilist
);
12984 if (DECL_SIZE (ovar
)
12985 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
12987 tree ovar2
= DECL_VALUE_EXPR (ovar
);
12988 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
12989 ovar2
= TREE_OPERAND (ovar2
, 0);
12990 gcc_assert (DECL_P (ovar2
));
12993 if (!maybe_lookup_field (ovar
, ctx
)
12994 && !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12995 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12996 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)))
13000 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
13001 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
13002 talign
= DECL_ALIGN_UNIT (ovar
);
13007 if (in_reduction_clauses
13008 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13009 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13012 if (is_variable_sized (d
))
13014 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13015 d
= DECL_VALUE_EXPR (d
);
13016 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13017 d
= TREE_OPERAND (d
, 0);
13018 gcc_assert (DECL_P (d
));
13021 = (splay_tree_key
) &DECL_CONTEXT (d
);
13022 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13027 var
= build_fold_indirect_ref (nd
);
13030 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13033 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13034 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13035 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
13036 && is_omp_target (stmt
))
13038 x
= build_sender_ref (c
, ctx
);
13039 gimplify_assign (x
, build_fold_addr_expr (var
), &ilist
);
13043 x
= build_sender_ref (ovar
, ctx
);
13045 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13046 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
13047 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
13048 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
13050 gcc_assert (offloaded
);
13052 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
13053 mark_addressable (avar
);
13054 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
13055 talign
= DECL_ALIGN_UNIT (avar
);
13056 avar
= build_fold_addr_expr (avar
);
13057 gimplify_assign (x
, avar
, &ilist
);
13059 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13061 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
13062 if (!omp_privatize_by_reference (var
))
13064 if (is_gimple_reg (var
)
13065 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13066 suppress_warning (var
);
13067 var
= build_fold_addr_expr (var
);
13070 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13071 gimplify_assign (x
, var
, &ilist
);
13073 else if (is_gimple_reg (var
))
13075 gcc_assert (offloaded
);
13076 tree avar
= create_tmp_var (TREE_TYPE (var
));
13077 mark_addressable (avar
);
13078 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
13079 if (GOMP_MAP_COPY_TO_P (map_kind
)
13080 || map_kind
== GOMP_MAP_POINTER
13081 || map_kind
== GOMP_MAP_TO_PSET
13082 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13084 /* If we need to initialize a temporary
13085 with VAR because it is not addressable, and
13086 the variable hasn't been initialized yet, then
13087 we'll get a warning for the store to avar.
13088 Don't warn in that case, the mapping might
13090 suppress_warning (var
, OPT_Wuninitialized
);
13091 gimplify_assign (avar
, var
, &ilist
);
13093 avar
= build_fold_addr_expr (avar
);
13094 gimplify_assign (x
, avar
, &ilist
);
13095 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
13096 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13097 && !TYPE_READONLY (TREE_TYPE (var
)))
13099 x
= unshare_expr (x
);
13100 x
= build_simple_mem_ref (x
);
13101 gimplify_assign (var
, x
, &olist
);
13106 /* While MAP is handled explicitly by the FE,
13107 for 'target update', only the identifier is passed. */
13108 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
13109 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
13110 && (omp_is_allocatable_or_ptr (var
)
13111 && omp_check_optional_argument (var
, false)))
13112 var
= build_fold_indirect_ref (var
);
13113 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
13114 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
13115 || (!omp_is_allocatable_or_ptr (var
)
13116 && !omp_check_optional_argument (var
, false)))
13117 var
= build_fold_addr_expr (var
);
13118 gimplify_assign (x
, var
, &ilist
);
13122 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13124 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13125 s
= TREE_TYPE (ovar
);
13126 if (TREE_CODE (s
) == REFERENCE_TYPE
13127 || omp_check_optional_argument (ovar
, false))
13129 s
= TYPE_SIZE_UNIT (s
);
13132 s
= OMP_CLAUSE_SIZE (c
);
13133 if (s
== NULL_TREE
)
13134 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13135 s
= fold_convert (size_type_node
, s
);
13136 purpose
= size_int (map_idx
++);
13137 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13138 if (TREE_CODE (s
) != INTEGER_CST
)
13139 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13141 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
13142 switch (OMP_CLAUSE_CODE (c
))
13144 case OMP_CLAUSE_MAP
:
13145 tkind
= OMP_CLAUSE_MAP_KIND (c
);
13146 tkind_zero
= tkind
;
13147 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
13150 case GOMP_MAP_ALLOC
:
13151 case GOMP_MAP_IF_PRESENT
:
13153 case GOMP_MAP_FROM
:
13154 case GOMP_MAP_TOFROM
:
13155 case GOMP_MAP_ALWAYS_TO
:
13156 case GOMP_MAP_ALWAYS_FROM
:
13157 case GOMP_MAP_ALWAYS_TOFROM
:
13158 case GOMP_MAP_RELEASE
:
13159 case GOMP_MAP_FORCE_TO
:
13160 case GOMP_MAP_FORCE_FROM
:
13161 case GOMP_MAP_FORCE_TOFROM
:
13162 case GOMP_MAP_FORCE_PRESENT
:
13163 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
13165 case GOMP_MAP_DELETE
:
13166 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
13170 if (tkind_zero
!= tkind
)
13172 if (integer_zerop (s
))
13173 tkind
= tkind_zero
;
13174 else if (integer_nonzerop (s
))
13175 tkind_zero
= tkind
;
13177 if (tkind_zero
== tkind
13178 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c
)
13179 && (((tkind
& GOMP_MAP_FLAG_SPECIAL_BITS
)
13180 & ~GOMP_MAP_IMPLICIT
)
13183 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13184 bits are not interfered with by other special bit encodings,
13185 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13187 tkind
|= GOMP_MAP_IMPLICIT
;
13188 tkind_zero
= tkind
;
13191 case OMP_CLAUSE_FIRSTPRIVATE
:
13192 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13193 tkind
= GOMP_MAP_TO
;
13194 tkind_zero
= tkind
;
13196 case OMP_CLAUSE_TO
:
13197 tkind
= GOMP_MAP_TO
;
13198 tkind_zero
= tkind
;
13200 case OMP_CLAUSE_FROM
:
13201 tkind
= GOMP_MAP_FROM
;
13202 tkind_zero
= tkind
;
13205 gcc_unreachable ();
13207 gcc_checking_assert (tkind
13208 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13209 gcc_checking_assert (tkind_zero
13210 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13211 talign
= ceil_log2 (talign
);
13212 tkind
|= talign
<< talign_shift
;
13213 tkind_zero
|= talign
<< talign_shift
;
13214 gcc_checking_assert (tkind
13215 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13216 gcc_checking_assert (tkind_zero
13217 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13218 if (tkind
== tkind_zero
)
13219 x
= build_int_cstu (tkind_type
, tkind
);
13222 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
13223 x
= build3 (COND_EXPR
, tkind_type
,
13224 fold_build2 (EQ_EXPR
, boolean_type_node
,
13225 unshare_expr (s
), size_zero_node
),
13226 build_int_cstu (tkind_type
, tkind_zero
),
13227 build_int_cstu (tkind_type
, tkind
));
13229 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
13234 case OMP_CLAUSE_FIRSTPRIVATE
:
13235 if (is_gimple_omp_oacc (ctx
->stmt
))
13236 goto oacc_firstprivate_map
;
13237 ovar
= OMP_CLAUSE_DECL (c
);
13238 if (omp_privatize_by_reference (ovar
))
13239 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13241 talign
= DECL_ALIGN_UNIT (ovar
);
13242 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13243 x
= build_sender_ref (ovar
, ctx
);
13244 tkind
= GOMP_MAP_FIRSTPRIVATE
;
13245 type
= TREE_TYPE (ovar
);
13246 if (omp_privatize_by_reference (ovar
))
13247 type
= TREE_TYPE (type
);
13248 if ((INTEGRAL_TYPE_P (type
)
13249 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13250 || TREE_CODE (type
) == POINTER_TYPE
)
13252 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13254 if (omp_privatize_by_reference (var
))
13255 t
= build_simple_mem_ref (var
);
13256 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13257 suppress_warning (var
);
13258 if (TREE_CODE (type
) != POINTER_TYPE
)
13259 t
= fold_convert (pointer_sized_int_node
, t
);
13260 t
= fold_convert (TREE_TYPE (x
), t
);
13261 gimplify_assign (x
, t
, &ilist
);
13263 else if (omp_privatize_by_reference (var
))
13264 gimplify_assign (x
, var
, &ilist
);
13265 else if (is_gimple_reg (var
))
13267 tree avar
= create_tmp_var (TREE_TYPE (var
));
13268 mark_addressable (avar
);
13269 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13270 suppress_warning (var
);
13271 gimplify_assign (avar
, var
, &ilist
);
13272 avar
= build_fold_addr_expr (avar
);
13273 gimplify_assign (x
, avar
, &ilist
);
13277 var
= build_fold_addr_expr (var
);
13278 gimplify_assign (x
, var
, &ilist
);
13280 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
13282 else if (omp_privatize_by_reference (ovar
))
13283 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13285 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13286 s
= fold_convert (size_type_node
, s
);
13287 purpose
= size_int (map_idx
++);
13288 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13289 if (TREE_CODE (s
) != INTEGER_CST
)
13290 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13292 gcc_checking_assert (tkind
13293 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13294 talign
= ceil_log2 (talign
);
13295 tkind
|= talign
<< talign_shift
;
13296 gcc_checking_assert (tkind
13297 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13298 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13299 build_int_cstu (tkind_type
, tkind
));
13302 case OMP_CLAUSE_USE_DEVICE_PTR
:
13303 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13304 case OMP_CLAUSE_IS_DEVICE_PTR
:
13305 ovar
= OMP_CLAUSE_DECL (c
);
13306 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13308 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13310 tkind
= (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13311 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
13312 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
13314 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
13316 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
13317 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
13321 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13322 x
= build_sender_ref (ovar
, ctx
);
13325 if (is_gimple_omp_oacc (ctx
->stmt
))
13327 gcc_assert (tkind
== GOMP_MAP_USE_DEVICE_PTR
);
13329 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
))
13330 tkind
= GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT
;
13333 type
= TREE_TYPE (ovar
);
13334 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13335 var
= lang_hooks
.decls
.omp_array_data (ovar
, false);
13336 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13337 && !omp_privatize_by_reference (ovar
)
13338 && !omp_is_allocatable_or_ptr (ovar
))
13339 || TREE_CODE (type
) == ARRAY_TYPE
)
13340 var
= build_fold_addr_expr (var
);
13343 if (omp_privatize_by_reference (ovar
)
13344 || omp_check_optional_argument (ovar
, false)
13345 || omp_is_allocatable_or_ptr (ovar
))
13347 type
= TREE_TYPE (type
);
13348 if (POINTER_TYPE_P (type
)
13349 && TREE_CODE (type
) != ARRAY_TYPE
13350 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13351 && !omp_is_allocatable_or_ptr (ovar
))
13352 || (omp_privatize_by_reference (ovar
)
13353 && omp_is_allocatable_or_ptr (ovar
))))
13354 var
= build_simple_mem_ref (var
);
13355 var
= fold_convert (TREE_TYPE (x
), var
);
13359 present
= omp_check_optional_argument (ovar
, true);
13362 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13363 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13364 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13365 tree new_x
= unshare_expr (x
);
13366 gimplify_expr (&present
, &ilist
, NULL
, is_gimple_val
,
13368 gcond
*cond
= gimple_build_cond_from_tree (present
,
13371 gimple_seq_add_stmt (&ilist
, cond
);
13372 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_label
));
13373 gimplify_assign (new_x
, null_pointer_node
, &ilist
);
13374 gimple_seq_add_stmt (&ilist
, gimple_build_goto (opt_arg_label
));
13375 gimple_seq_add_stmt (&ilist
,
13376 gimple_build_label (notnull_label
));
13377 gimplify_assign (x
, var
, &ilist
);
13378 gimple_seq_add_stmt (&ilist
,
13379 gimple_build_label (opt_arg_label
));
13382 gimplify_assign (x
, var
, &ilist
);
13384 purpose
= size_int (map_idx
++);
13385 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13386 gcc_checking_assert (tkind
13387 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13388 gcc_checking_assert (tkind
13389 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13390 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13391 build_int_cstu (tkind_type
, tkind
));
13395 gcc_assert (map_idx
== map_cnt
);
13397 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
13398 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
13399 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
13400 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
13401 for (int i
= 1; i
<= 2; i
++)
13402 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
13404 gimple_seq initlist
= NULL
;
13405 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
13406 TREE_VEC_ELT (t
, i
)),
13407 &initlist
, true, NULL_TREE
);
13408 gimple_seq_add_seq (&ilist
, initlist
);
13410 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
13411 gimple_seq_add_stmt (&olist
,
13412 gimple_build_assign (TREE_VEC_ELT (t
, i
),
13415 else if (omp_maybe_offloaded_ctx (ctx
->outer
))
13417 tree id
= get_identifier ("omp declare target");
13418 tree decl
= TREE_VEC_ELT (t
, i
);
13419 DECL_ATTRIBUTES (decl
)
13420 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
13421 varpool_node
*node
= varpool_node::get (decl
);
13424 node
->offloadable
= 1;
13425 if (ENABLE_OFFLOADING
)
13427 g
->have_offload
= true;
13428 vec_safe_push (offload_vars
, t
);
13433 tree clobber
= build_clobber (ctx
->record_type
);
13434 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
13438 /* Once all the expansions are done, sequence all the different
13439 fragments inside gimple_omp_body. */
13444 && ctx
->record_type
)
13446 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
13447 /* fixup_child_record_type might have changed receiver_decl's type. */
13448 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
13449 gimple_seq_add_stmt (&new_body
,
13450 gimple_build_assign (ctx
->receiver_decl
, t
));
13452 gimple_seq_add_seq (&new_body
, fplist
);
13454 if (offloaded
|| data_region
)
13456 tree prev
= NULL_TREE
;
13457 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13458 switch (OMP_CLAUSE_CODE (c
))
13463 case OMP_CLAUSE_FIRSTPRIVATE
:
13464 if (is_gimple_omp_oacc (ctx
->stmt
))
13466 var
= OMP_CLAUSE_DECL (c
);
13467 if (omp_privatize_by_reference (var
)
13468 || is_gimple_reg_type (TREE_TYPE (var
)))
13470 tree new_var
= lookup_decl (var
, ctx
);
13472 type
= TREE_TYPE (var
);
13473 if (omp_privatize_by_reference (var
))
13474 type
= TREE_TYPE (type
);
13475 if ((INTEGRAL_TYPE_P (type
)
13476 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13477 || TREE_CODE (type
) == POINTER_TYPE
)
13479 x
= build_receiver_ref (var
, false, ctx
);
13480 if (TREE_CODE (type
) != POINTER_TYPE
)
13481 x
= fold_convert (pointer_sized_int_node
, x
);
13482 x
= fold_convert (type
, x
);
13483 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13485 if (omp_privatize_by_reference (var
))
13487 tree v
= create_tmp_var_raw (type
, get_name (var
));
13488 gimple_add_tmp_var (v
);
13489 TREE_ADDRESSABLE (v
) = 1;
13490 gimple_seq_add_stmt (&new_body
,
13491 gimple_build_assign (v
, x
));
13492 x
= build_fold_addr_expr (v
);
13494 gimple_seq_add_stmt (&new_body
,
13495 gimple_build_assign (new_var
, x
));
13499 bool by_ref
= !omp_privatize_by_reference (var
);
13500 x
= build_receiver_ref (var
, by_ref
, ctx
);
13501 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13503 gimple_seq_add_stmt (&new_body
,
13504 gimple_build_assign (new_var
, x
));
13507 else if (is_variable_sized (var
))
13509 tree pvar
= DECL_VALUE_EXPR (var
);
13510 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13511 pvar
= TREE_OPERAND (pvar
, 0);
13512 gcc_assert (DECL_P (pvar
));
13513 tree new_var
= lookup_decl (pvar
, ctx
);
13514 x
= build_receiver_ref (var
, false, ctx
);
13515 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13516 gimple_seq_add_stmt (&new_body
,
13517 gimple_build_assign (new_var
, x
));
13520 case OMP_CLAUSE_PRIVATE
:
13521 if (is_gimple_omp_oacc (ctx
->stmt
))
13523 var
= OMP_CLAUSE_DECL (c
);
13524 if (omp_privatize_by_reference (var
))
13526 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13527 tree new_var
= lookup_decl (var
, ctx
);
13528 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
13529 if (TREE_CONSTANT (x
))
13531 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
13533 gimple_add_tmp_var (x
);
13534 TREE_ADDRESSABLE (x
) = 1;
13535 x
= build_fold_addr_expr_loc (clause_loc
, x
);
13540 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13541 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13542 gimple_seq_add_stmt (&new_body
,
13543 gimple_build_assign (new_var
, x
));
13546 case OMP_CLAUSE_USE_DEVICE_PTR
:
13547 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13548 case OMP_CLAUSE_IS_DEVICE_PTR
:
13550 gimple_seq assign_body
;
13551 bool is_array_data
;
13552 bool do_optional_check
;
13553 assign_body
= NULL
;
13554 do_optional_check
= false;
13555 var
= OMP_CLAUSE_DECL (c
);
13556 is_array_data
= lang_hooks
.decls
.omp_array_data (var
, true) != NULL
;
13558 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
13559 x
= build_sender_ref (is_array_data
13560 ? (splay_tree_key
) &DECL_NAME (var
)
13561 : (splay_tree_key
) &DECL_UID (var
), ctx
);
13563 x
= build_receiver_ref (var
, false, ctx
);
13567 bool is_ref
= omp_privatize_by_reference (var
);
13568 do_optional_check
= true;
13569 /* First, we copy the descriptor data from the host; then
13570 we update its data to point to the target address. */
13571 new_var
= lookup_decl (var
, ctx
);
13572 new_var
= DECL_VALUE_EXPR (new_var
);
13577 var
= build_fold_indirect_ref (var
);
13578 gimplify_expr (&var
, &assign_body
, NULL
, is_gimple_val
,
13580 v
= create_tmp_var_raw (TREE_TYPE (var
), get_name (var
));
13581 gimple_add_tmp_var (v
);
13582 TREE_ADDRESSABLE (v
) = 1;
13583 gimple_seq_add_stmt (&assign_body
,
13584 gimple_build_assign (v
, var
));
13585 tree rhs
= build_fold_addr_expr (v
);
13586 gimple_seq_add_stmt (&assign_body
,
13587 gimple_build_assign (new_var
, rhs
));
13590 gimple_seq_add_stmt (&assign_body
,
13591 gimple_build_assign (new_var
, var
));
13593 tree v2
= lang_hooks
.decls
.omp_array_data (unshare_expr (v
), false);
13595 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13596 gimple_seq_add_stmt (&assign_body
,
13597 gimple_build_assign (v2
, x
));
13599 else if (is_variable_sized (var
))
13601 tree pvar
= DECL_VALUE_EXPR (var
);
13602 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13603 pvar
= TREE_OPERAND (pvar
, 0);
13604 gcc_assert (DECL_P (pvar
));
13605 new_var
= lookup_decl (pvar
, ctx
);
13606 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13607 gimple_seq_add_stmt (&assign_body
,
13608 gimple_build_assign (new_var
, x
));
13610 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13611 && !omp_privatize_by_reference (var
)
13612 && !omp_is_allocatable_or_ptr (var
))
13613 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
13615 new_var
= lookup_decl (var
, ctx
);
13616 new_var
= DECL_VALUE_EXPR (new_var
);
13617 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
13618 new_var
= TREE_OPERAND (new_var
, 0);
13619 gcc_assert (DECL_P (new_var
));
13620 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13621 gimple_seq_add_stmt (&assign_body
,
13622 gimple_build_assign (new_var
, x
));
13626 tree type
= TREE_TYPE (var
);
13627 new_var
= lookup_decl (var
, ctx
);
13628 if (omp_privatize_by_reference (var
))
13630 type
= TREE_TYPE (type
);
13631 if (POINTER_TYPE_P (type
)
13632 && TREE_CODE (type
) != ARRAY_TYPE
13633 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13634 || (omp_privatize_by_reference (var
)
13635 && omp_is_allocatable_or_ptr (var
))))
13637 tree v
= create_tmp_var_raw (type
, get_name (var
));
13638 gimple_add_tmp_var (v
);
13639 TREE_ADDRESSABLE (v
) = 1;
13640 x
= fold_convert (type
, x
);
13641 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
,
13643 gimple_seq_add_stmt (&assign_body
,
13644 gimple_build_assign (v
, x
));
13645 x
= build_fold_addr_expr (v
);
13646 do_optional_check
= true;
13649 new_var
= DECL_VALUE_EXPR (new_var
);
13650 x
= fold_convert (TREE_TYPE (new_var
), x
);
13651 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13652 gimple_seq_add_stmt (&assign_body
,
13653 gimple_build_assign (new_var
, x
));
13656 present
= (do_optional_check
13657 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c
), true)
13661 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13662 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13663 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13664 glabel
*null_glabel
= gimple_build_label (null_label
);
13665 glabel
*notnull_glabel
= gimple_build_label (notnull_label
);
13666 ggoto
*opt_arg_ggoto
= gimple_build_goto (opt_arg_label
);
13667 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13669 gimplify_expr (&present
, &new_body
, NULL
, is_gimple_val
,
13671 gcond
*cond
= gimple_build_cond_from_tree (present
,
13674 gimple_seq_add_stmt (&new_body
, cond
);
13675 gimple_seq_add_stmt (&new_body
, null_glabel
);
13676 gimplify_assign (new_var
, null_pointer_node
, &new_body
);
13677 gimple_seq_add_stmt (&new_body
, opt_arg_ggoto
);
13678 gimple_seq_add_stmt (&new_body
, notnull_glabel
);
13679 gimple_seq_add_seq (&new_body
, assign_body
);
13680 gimple_seq_add_stmt (&new_body
,
13681 gimple_build_label (opt_arg_label
));
13684 gimple_seq_add_seq (&new_body
, assign_body
);
13687 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13688 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13689 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13690 or references to VLAs. */
13691 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13692 switch (OMP_CLAUSE_CODE (c
))
13697 case OMP_CLAUSE_MAP
:
13698 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13699 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
13701 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13702 poly_int64 offset
= 0;
13704 var
= OMP_CLAUSE_DECL (c
);
13706 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
13707 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
13709 && varpool_node::get_create (var
)->offloadable
)
13711 if (TREE_CODE (var
) == INDIRECT_REF
13712 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
13713 var
= TREE_OPERAND (var
, 0);
13714 if (TREE_CODE (var
) == COMPONENT_REF
)
13716 var
= get_addr_base_and_unit_offset (var
, &offset
);
13717 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
13719 else if (DECL_SIZE (var
)
13720 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
13722 tree var2
= DECL_VALUE_EXPR (var
);
13723 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
13724 var2
= TREE_OPERAND (var2
, 0);
13725 gcc_assert (DECL_P (var2
));
13728 tree new_var
= lookup_decl (var
, ctx
), x
;
13729 tree type
= TREE_TYPE (new_var
);
13731 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
13732 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
13735 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
13737 new_var
= build2 (MEM_REF
, type
,
13738 build_fold_addr_expr (new_var
),
13739 build_int_cst (build_pointer_type (type
),
13742 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
13744 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
13745 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
13746 new_var
= build2 (MEM_REF
, type
,
13747 build_fold_addr_expr (new_var
),
13748 build_int_cst (build_pointer_type (type
),
13752 is_ref
= omp_privatize_by_reference (var
);
13753 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
13755 bool ref_to_array
= false;
13758 type
= TREE_TYPE (type
);
13759 if (TREE_CODE (type
) == ARRAY_TYPE
)
13761 type
= build_pointer_type (type
);
13762 ref_to_array
= true;
13765 else if (TREE_CODE (type
) == ARRAY_TYPE
)
13767 tree decl2
= DECL_VALUE_EXPR (new_var
);
13768 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
13769 decl2
= TREE_OPERAND (decl2
, 0);
13770 gcc_assert (DECL_P (decl2
));
13772 type
= TREE_TYPE (new_var
);
13774 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
13775 x
= fold_convert_loc (clause_loc
, type
, x
);
13776 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
13778 tree bias
= OMP_CLAUSE_SIZE (c
);
13780 bias
= lookup_decl (bias
, ctx
);
13781 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
13782 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
13784 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
13785 TREE_TYPE (x
), x
, bias
);
13788 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13789 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13790 if (is_ref
&& !ref_to_array
)
13792 tree t
= create_tmp_var_raw (type
, get_name (var
));
13793 gimple_add_tmp_var (t
);
13794 TREE_ADDRESSABLE (t
) = 1;
13795 gimple_seq_add_stmt (&new_body
,
13796 gimple_build_assign (t
, x
));
13797 x
= build_fold_addr_expr_loc (clause_loc
, t
);
13799 gimple_seq_add_stmt (&new_body
,
13800 gimple_build_assign (new_var
, x
));
13803 else if (OMP_CLAUSE_CHAIN (c
)
13804 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
13806 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
13807 == GOMP_MAP_FIRSTPRIVATE_POINTER
13808 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
13809 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
13812 case OMP_CLAUSE_PRIVATE
:
13813 var
= OMP_CLAUSE_DECL (c
);
13814 if (is_variable_sized (var
))
13816 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13817 tree new_var
= lookup_decl (var
, ctx
);
13818 tree pvar
= DECL_VALUE_EXPR (var
);
13819 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13820 pvar
= TREE_OPERAND (pvar
, 0);
13821 gcc_assert (DECL_P (pvar
));
13822 tree new_pvar
= lookup_decl (pvar
, ctx
);
13823 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
13824 tree al
= size_int (DECL_ALIGN (var
));
13825 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
13826 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
13827 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
13828 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13829 gimple_seq_add_stmt (&new_body
,
13830 gimple_build_assign (new_pvar
, x
));
13832 else if (omp_privatize_by_reference (var
)
13833 && !is_gimple_omp_oacc (ctx
->stmt
))
13835 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13836 tree new_var
= lookup_decl (var
, ctx
);
13837 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
13838 if (TREE_CONSTANT (x
))
13843 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
13844 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
13845 tree al
= size_int (TYPE_ALIGN (rtype
));
13846 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
13849 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13850 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13851 gimple_seq_add_stmt (&new_body
,
13852 gimple_build_assign (new_var
, x
));
13857 gimple_seq fork_seq
= NULL
;
13858 gimple_seq join_seq
= NULL
;
13860 if (offloaded
&& is_gimple_omp_oacc (ctx
->stmt
))
13862 /* If there are reductions on the offloaded region itself, treat
13863 them as a dummy GANG loop. */
13864 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
13866 gcall
*private_marker
= lower_oacc_private_marker (ctx
);
13868 if (private_marker
)
13869 gimple_call_set_arg (private_marker
, 2, level
);
13871 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
13872 false, NULL
, private_marker
, NULL
, &fork_seq
,
13876 gimple_seq_add_seq (&new_body
, fork_seq
);
13877 gimple_seq_add_seq (&new_body
, tgt_body
);
13878 gimple_seq_add_seq (&new_body
, join_seq
);
13882 new_body
= maybe_catch_exception (new_body
);
13883 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
13885 gimple_omp_set_body (stmt
, new_body
);
13888 bind
= gimple_build_bind (NULL
, NULL
,
13889 tgt_bind
? gimple_bind_block (tgt_bind
)
13891 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
13892 gimple_bind_add_seq (bind
, ilist
);
13893 gimple_bind_add_stmt (bind
, stmt
);
13894 gimple_bind_add_seq (bind
, olist
);
13896 pop_gimplify_context (NULL
);
13900 gimple_bind_add_seq (dep_bind
, dep_ilist
);
13901 gimple_bind_add_stmt (dep_bind
, bind
);
13902 gimple_bind_add_seq (dep_bind
, dep_olist
);
13903 pop_gimplify_context (dep_bind
);
13907 /* Expand code for an OpenMP teams directive. */
13910 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
13912 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
13913 push_gimplify_context ();
13915 tree block
= make_node (BLOCK
);
13916 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
13917 gsi_replace (gsi_p
, bind
, true);
13918 gimple_seq bind_body
= NULL
;
13919 gimple_seq dlist
= NULL
;
13920 gimple_seq olist
= NULL
;
13922 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
13923 OMP_CLAUSE_NUM_TEAMS
);
13924 tree num_teams_lower
= NULL_TREE
;
13925 if (num_teams
== NULL_TREE
)
13926 num_teams
= build_int_cst (unsigned_type_node
, 0);
13929 num_teams_lower
= OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams
);
13930 if (num_teams_lower
)
13932 num_teams_lower
= fold_convert (unsigned_type_node
, num_teams_lower
);
13933 gimplify_expr (&num_teams_lower
, &bind_body
, NULL
, is_gimple_val
,
13936 num_teams
= OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams
);
13937 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
13938 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
13940 if (num_teams_lower
== NULL_TREE
)
13941 num_teams_lower
= num_teams
;
13942 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
13943 OMP_CLAUSE_THREAD_LIMIT
);
13944 if (thread_limit
== NULL_TREE
)
13945 thread_limit
= build_int_cst (unsigned_type_node
, 0);
13948 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
13949 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
13950 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
13953 location_t loc
= gimple_location (teams_stmt
);
13954 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4
);
13955 tree rettype
= TREE_TYPE (TREE_TYPE (decl
));
13956 tree first
= create_tmp_var (rettype
);
13957 gimple_seq_add_stmt (&bind_body
,
13958 gimple_build_assign (first
, build_one_cst (rettype
)));
13959 tree llabel
= create_artificial_label (loc
);
13960 gimple_seq_add_stmt (&bind_body
, gimple_build_label (llabel
));
13962 = gimple_build_call (decl
, 4, num_teams_lower
, num_teams
, thread_limit
,
13964 gimple_set_location (call
, loc
);
13965 tree temp
= create_tmp_var (rettype
);
13966 gimple_call_set_lhs (call
, temp
);
13967 gimple_seq_add_stmt (&bind_body
, call
);
13969 tree tlabel
= create_artificial_label (loc
);
13970 tree flabel
= create_artificial_label (loc
);
13971 gimple
*cond
= gimple_build_cond (NE_EXPR
, temp
, build_zero_cst (rettype
),
13973 gimple_seq_add_stmt (&bind_body
, cond
);
13974 gimple_seq_add_stmt (&bind_body
, gimple_build_label (tlabel
));
13975 gimple_seq_add_stmt (&bind_body
,
13976 gimple_build_assign (first
, build_zero_cst (rettype
)));
13978 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
13979 &bind_body
, &dlist
, ctx
, NULL
);
13980 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
13981 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
,
13983 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
13985 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
13986 gimple_omp_set_body (teams_stmt
, NULL
);
13987 gimple_seq_add_seq (&bind_body
, olist
);
13988 gimple_seq_add_seq (&bind_body
, dlist
);
13989 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
13990 gimple_seq_add_stmt (&bind_body
, gimple_build_goto (llabel
));
13991 gimple_seq_add_stmt (&bind_body
, gimple_build_label (flabel
));
13992 gimple_bind_set_body (bind
, bind_body
);
13994 pop_gimplify_context (bind
);
13996 gimple_bind_append_vars (bind
, ctx
->block_vars
);
13997 BLOCK_VARS (block
) = ctx
->block_vars
;
13998 if (BLOCK_VARS (block
))
13999 TREE_USED (block
) = 1;
14002 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14003 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14004 of OMP context, but with task_shared_vars set. */
14007 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
14012 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14013 if ((VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
14015 && DECL_HAS_VALUE_EXPR_P (t
))
14018 if (task_shared_vars
14020 && bitmap_bit_p (task_shared_vars
, DECL_UID (t
)))
14023 /* If a global variable has been privatized, TREE_CONSTANT on
14024 ADDR_EXPR might be wrong. */
14025 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
14026 recompute_tree_invariant_for_addr_expr (t
);
14028 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
14032 /* Data to be communicated between lower_omp_regimplify_operands and
14033 lower_omp_regimplify_operands_p. */
14035 struct lower_omp_regimplify_operands_data
14041 /* Helper function for lower_omp_regimplify_operands. Find
14042 omp_member_access_dummy_var vars and adjust temporarily their
14043 DECL_VALUE_EXPRs if needed. */
14046 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
14049 tree t
= omp_member_access_dummy_var (*tp
);
14052 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
14053 lower_omp_regimplify_operands_data
*ldata
14054 = (lower_omp_regimplify_operands_data
*) wi
->info
;
14055 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
14058 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
14059 ldata
->decls
->safe_push (*tp
);
14060 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
14061 SET_DECL_VALUE_EXPR (*tp
, v
);
14064 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
14068 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14069 of omp_member_access_dummy_var vars during regimplification. */
14072 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
14073 gimple_stmt_iterator
*gsi_p
)
14075 auto_vec
<tree
, 10> decls
;
14078 struct walk_stmt_info wi
;
14079 memset (&wi
, '\0', sizeof (wi
));
14080 struct lower_omp_regimplify_operands_data data
;
14082 data
.decls
= &decls
;
14084 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
14086 gimple_regimplify_operands (stmt
, gsi_p
);
14087 while (!decls
.is_empty ())
14089 tree t
= decls
.pop ();
14090 tree v
= decls
.pop ();
14091 SET_DECL_VALUE_EXPR (t
, v
);
14096 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
14098 gimple
*stmt
= gsi_stmt (*gsi_p
);
14099 struct walk_stmt_info wi
;
14102 if (gimple_has_location (stmt
))
14103 input_location
= gimple_location (stmt
);
14105 if (task_shared_vars
)
14106 memset (&wi
, '\0', sizeof (wi
));
14108 /* If we have issued syntax errors, avoid doing any heavy lifting.
14109 Just replace the OMP directives with a NOP to avoid
14110 confusing RTL expansion. */
14111 if (seen_error () && is_gimple_omp (stmt
))
14113 gsi_replace (gsi_p
, gimple_build_nop (), true);
14117 switch (gimple_code (stmt
))
14121 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
14122 if ((ctx
|| task_shared_vars
)
14123 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
14124 lower_omp_regimplify_p
,
14125 ctx
? NULL
: &wi
, NULL
)
14126 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
14127 lower_omp_regimplify_p
,
14128 ctx
? NULL
: &wi
, NULL
)))
14129 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
14133 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
14135 case GIMPLE_EH_FILTER
:
14136 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
14139 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
14140 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
14142 case GIMPLE_TRANSACTION
:
14143 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
14147 if (ctx
&& is_gimple_omp_oacc (ctx
->stmt
))
14149 tree vars
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
14150 oacc_privatization_scan_decl_chain (ctx
, vars
);
14152 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
14153 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
14155 case GIMPLE_OMP_PARALLEL
:
14156 case GIMPLE_OMP_TASK
:
14157 ctx
= maybe_lookup_ctx (stmt
);
14159 if (ctx
->cancellable
)
14160 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14161 lower_omp_taskreg (gsi_p
, ctx
);
14163 case GIMPLE_OMP_FOR
:
14164 ctx
= maybe_lookup_ctx (stmt
);
14166 if (ctx
->cancellable
)
14167 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14168 lower_omp_for (gsi_p
, ctx
);
14170 case GIMPLE_OMP_SECTIONS
:
14171 ctx
= maybe_lookup_ctx (stmt
);
14173 if (ctx
->cancellable
)
14174 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14175 lower_omp_sections (gsi_p
, ctx
);
14177 case GIMPLE_OMP_SCOPE
:
14178 ctx
= maybe_lookup_ctx (stmt
);
14180 lower_omp_scope (gsi_p
, ctx
);
14182 case GIMPLE_OMP_SINGLE
:
14183 ctx
= maybe_lookup_ctx (stmt
);
14185 lower_omp_single (gsi_p
, ctx
);
14187 case GIMPLE_OMP_MASTER
:
14188 case GIMPLE_OMP_MASKED
:
14189 ctx
= maybe_lookup_ctx (stmt
);
14191 lower_omp_master (gsi_p
, ctx
);
14193 case GIMPLE_OMP_TASKGROUP
:
14194 ctx
= maybe_lookup_ctx (stmt
);
14196 lower_omp_taskgroup (gsi_p
, ctx
);
14198 case GIMPLE_OMP_ORDERED
:
14199 ctx
= maybe_lookup_ctx (stmt
);
14201 lower_omp_ordered (gsi_p
, ctx
);
14203 case GIMPLE_OMP_SCAN
:
14204 ctx
= maybe_lookup_ctx (stmt
);
14206 lower_omp_scan (gsi_p
, ctx
);
14208 case GIMPLE_OMP_CRITICAL
:
14209 ctx
= maybe_lookup_ctx (stmt
);
14211 lower_omp_critical (gsi_p
, ctx
);
14213 case GIMPLE_OMP_ATOMIC_LOAD
:
14214 if ((ctx
|| task_shared_vars
)
14215 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14216 as_a
<gomp_atomic_load
*> (stmt
)),
14217 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
14218 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14220 case GIMPLE_OMP_TARGET
:
14221 ctx
= maybe_lookup_ctx (stmt
);
14223 lower_omp_target (gsi_p
, ctx
);
14225 case GIMPLE_OMP_TEAMS
:
14226 ctx
= maybe_lookup_ctx (stmt
);
14228 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
14229 lower_omp_taskreg (gsi_p
, ctx
);
14231 lower_omp_teams (gsi_p
, ctx
);
14235 call_stmt
= as_a
<gcall
*> (stmt
);
14236 fndecl
= gimple_call_fndecl (call_stmt
);
14238 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
14239 switch (DECL_FUNCTION_CODE (fndecl
))
14241 case BUILT_IN_GOMP_BARRIER
:
14245 case BUILT_IN_GOMP_CANCEL
:
14246 case BUILT_IN_GOMP_CANCELLATION_POINT
:
14249 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
14250 cctx
= cctx
->outer
;
14251 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
14252 if (!cctx
->cancellable
)
14254 if (DECL_FUNCTION_CODE (fndecl
)
14255 == BUILT_IN_GOMP_CANCELLATION_POINT
)
14257 stmt
= gimple_build_nop ();
14258 gsi_replace (gsi_p
, stmt
, false);
14262 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
14264 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
14265 gimple_call_set_fndecl (call_stmt
, fndecl
);
14266 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
14269 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
14270 gimple_call_set_lhs (call_stmt
, lhs
);
14271 tree fallthru_label
;
14272 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
14274 g
= gimple_build_label (fallthru_label
);
14275 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14276 g
= gimple_build_cond (NE_EXPR
, lhs
,
14277 fold_convert (TREE_TYPE (lhs
),
14278 boolean_false_node
),
14279 cctx
->cancel_label
, fallthru_label
);
14280 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14287 case GIMPLE_ASSIGN
:
14288 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
14290 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
14291 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
14292 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
14293 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCOPE
14294 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
14295 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
14296 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
14297 && (gimple_omp_target_kind (up
->stmt
)
14298 == GF_OMP_TARGET_KIND_DATA
)))
14300 else if (!up
->lastprivate_conditional_map
)
14302 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
14303 if (TREE_CODE (lhs
) == MEM_REF
14304 && DECL_P (TREE_OPERAND (lhs
, 0))
14305 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
14306 0))) == REFERENCE_TYPE
)
14307 lhs
= TREE_OPERAND (lhs
, 0);
14309 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
14312 if (up
->combined_into_simd_safelen1
)
14315 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
14318 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
14319 clauses
= gimple_omp_for_clauses (up
->stmt
);
14321 clauses
= gimple_omp_sections_clauses (up
->stmt
);
14322 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
14323 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
14324 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
14325 OMP_CLAUSE__CONDTEMP_
);
14326 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
14327 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
14328 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14335 if ((ctx
|| task_shared_vars
)
14336 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
14339 /* Just remove clobbers, this should happen only if we have
14340 "privatized" local addressable variables in SIMD regions,
14341 the clobber isn't needed in that case and gimplifying address
14342 of the ARRAY_REF into a pointer and creating MEM_REF based
14343 clobber would create worse code than we get with the clobber
14345 if (gimple_clobber_p (stmt
))
14347 gsi_replace (gsi_p
, gimple_build_nop (), true);
14350 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14357 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
14359 location_t saved_location
= input_location
;
14360 gimple_stmt_iterator gsi
;
14361 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14362 lower_omp_1 (&gsi
, ctx
);
14363 /* During gimplification, we haven't folded statments inside offloading
14364 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
14365 if (target_nesting_level
|| taskreg_nesting_level
)
14366 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14368 input_location
= saved_location
;
14371 /* Main entry point. */
14373 static unsigned int
14374 execute_lower_omp (void)
14380 /* This pass always runs, to provide PROP_gimple_lomp.
14381 But often, there is nothing to do. */
14382 if (flag_openacc
== 0 && flag_openmp
== 0
14383 && flag_openmp_simd
== 0)
14386 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
14387 delete_omp_context
);
14389 body
= gimple_body (current_function_decl
);
14391 scan_omp (&body
, NULL
);
14392 gcc_assert (taskreg_nesting_level
== 0);
14393 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
14394 finish_taskreg_scan (ctx
);
14395 taskreg_contexts
.release ();
14397 if (all_contexts
->root
)
14399 if (task_shared_vars
)
14400 push_gimplify_context ();
14401 lower_omp (&body
, NULL
);
14402 if (task_shared_vars
)
14403 pop_gimplify_context (NULL
);
14408 splay_tree_delete (all_contexts
);
14409 all_contexts
= NULL
;
14411 BITMAP_FREE (task_shared_vars
);
14412 BITMAP_FREE (global_nonaddressable_vars
);
14414 /* If current function is a method, remove artificial dummy VAR_DECL created
14415 for non-static data member privatization, they aren't needed for
14416 debuginfo nor anything else, have been already replaced everywhere in the
14417 IL and cause problems with LTO. */
14418 if (DECL_ARGUMENTS (current_function_decl
)
14419 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
14420 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
14422 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
14428 const pass_data pass_data_lower_omp
=
14430 GIMPLE_PASS
, /* type */
14431 "omplower", /* name */
14432 OPTGROUP_OMP
, /* optinfo_flags */
14433 TV_NONE
, /* tv_id */
14434 PROP_gimple_any
, /* properties_required */
14435 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
14436 0, /* properties_destroyed */
14437 0, /* todo_flags_start */
14438 0, /* todo_flags_finish */
14441 class pass_lower_omp
: public gimple_opt_pass
14444 pass_lower_omp (gcc::context
*ctxt
)
14445 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
14448 /* opt_pass methods: */
14449 virtual unsigned int execute (function
*) { return execute_lower_omp (); }
14451 }; // class pass_lower_omp
14453 } // anon namespace
14456 make_pass_lower_omp (gcc::context
*ctxt
)
14458 return new pass_lower_omp (ctxt
);
14461 /* The following is a utility to diagnose structured block violations.
14462 It is not part of the "omplower" pass, as that's invoked too late. It
14463 should be invoked by the respective front ends after gimplification. */
14465 static splay_tree all_labels
;
14467 /* Check for mismatched contexts and generate an error if needed. Return
14468 true if an error is detected. */
14471 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
14472 gimple
*branch_ctx
, gimple
*label_ctx
)
14474 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
14475 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
14477 if (label_ctx
== branch_ctx
)
14480 const char* kind
= NULL
;
14484 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
14485 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
14487 gcc_checking_assert (kind
== NULL
);
14493 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
14497 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14498 so we could traverse it and issue a correct "exit" or "enter" error
14499 message upon a structured block violation.
14501 We built the context by building a list with tree_cons'ing, but there is
14502 no easy counterpart in gimple tuples. It seems like far too much work
14503 for issuing exit/enter error messages. If someone really misses the
14504 distinct error message... patches welcome. */
14507 /* Try to avoid confusing the user by producing and error message
14508 with correct "exit" or "enter" verbiage. We prefer "exit"
14509 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14510 if (branch_ctx
== NULL
)
14516 if (TREE_VALUE (label_ctx
) == branch_ctx
)
14521 label_ctx
= TREE_CHAIN (label_ctx
);
14526 error ("invalid exit from %s structured block", kind
);
14528 error ("invalid entry to %s structured block", kind
);
14531 /* If it's obvious we have an invalid entry, be specific about the error. */
14532 if (branch_ctx
== NULL
)
14533 error ("invalid entry to %s structured block", kind
);
14536 /* Otherwise, be vague and lazy, but efficient. */
14537 error ("invalid branch to/from %s structured block", kind
);
14540 gsi_replace (gsi_p
, gimple_build_nop (), false);
14544 /* Pass 1: Create a minimal tree of structured blocks, and record
14545 where each label is found. */
14548 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
14549 struct walk_stmt_info
*wi
)
14551 gimple
*context
= (gimple
*) wi
->info
;
14552 gimple
*inner_context
;
14553 gimple
*stmt
= gsi_stmt (*gsi_p
);
14555 *handled_ops_p
= true;
14557 switch (gimple_code (stmt
))
14561 case GIMPLE_OMP_PARALLEL
:
14562 case GIMPLE_OMP_TASK
:
14563 case GIMPLE_OMP_SCOPE
:
14564 case GIMPLE_OMP_SECTIONS
:
14565 case GIMPLE_OMP_SINGLE
:
14566 case GIMPLE_OMP_SECTION
:
14567 case GIMPLE_OMP_MASTER
:
14568 case GIMPLE_OMP_MASKED
:
14569 case GIMPLE_OMP_ORDERED
:
14570 case GIMPLE_OMP_SCAN
:
14571 case GIMPLE_OMP_CRITICAL
:
14572 case GIMPLE_OMP_TARGET
:
14573 case GIMPLE_OMP_TEAMS
:
14574 case GIMPLE_OMP_TASKGROUP
:
14575 /* The minimal context here is just the current OMP construct. */
14576 inner_context
= stmt
;
14577 wi
->info
= inner_context
;
14578 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
14579 wi
->info
= context
;
14582 case GIMPLE_OMP_FOR
:
14583 inner_context
= stmt
;
14584 wi
->info
= inner_context
;
14585 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14587 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
14588 diagnose_sb_1
, NULL
, wi
);
14589 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
14590 wi
->info
= context
;
14594 splay_tree_insert (all_labels
,
14595 (splay_tree_key
) gimple_label_label (
14596 as_a
<glabel
*> (stmt
)),
14597 (splay_tree_value
) context
);
14607 /* Pass 2: Check each branch and see if its context differs from that of
14608 the destination label's context. */
14611 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
14612 struct walk_stmt_info
*wi
)
14614 gimple
*context
= (gimple
*) wi
->info
;
14616 gimple
*stmt
= gsi_stmt (*gsi_p
);
14618 *handled_ops_p
= true;
14620 switch (gimple_code (stmt
))
14624 case GIMPLE_OMP_PARALLEL
:
14625 case GIMPLE_OMP_TASK
:
14626 case GIMPLE_OMP_SCOPE
:
14627 case GIMPLE_OMP_SECTIONS
:
14628 case GIMPLE_OMP_SINGLE
:
14629 case GIMPLE_OMP_SECTION
:
14630 case GIMPLE_OMP_MASTER
:
14631 case GIMPLE_OMP_MASKED
:
14632 case GIMPLE_OMP_ORDERED
:
14633 case GIMPLE_OMP_SCAN
:
14634 case GIMPLE_OMP_CRITICAL
:
14635 case GIMPLE_OMP_TARGET
:
14636 case GIMPLE_OMP_TEAMS
:
14637 case GIMPLE_OMP_TASKGROUP
:
14639 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
14640 wi
->info
= context
;
14643 case GIMPLE_OMP_FOR
:
14645 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14647 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
14648 diagnose_sb_2
, NULL
, wi
);
14649 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
14650 wi
->info
= context
;
14655 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
14656 tree lab
= gimple_cond_true_label (cond_stmt
);
14659 n
= splay_tree_lookup (all_labels
,
14660 (splay_tree_key
) lab
);
14661 diagnose_sb_0 (gsi_p
, context
,
14662 n
? (gimple
*) n
->value
: NULL
);
14664 lab
= gimple_cond_false_label (cond_stmt
);
14667 n
= splay_tree_lookup (all_labels
,
14668 (splay_tree_key
) lab
);
14669 diagnose_sb_0 (gsi_p
, context
,
14670 n
? (gimple
*) n
->value
: NULL
);
14677 tree lab
= gimple_goto_dest (stmt
);
14678 if (TREE_CODE (lab
) != LABEL_DECL
)
14681 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
14682 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
14686 case GIMPLE_SWITCH
:
14688 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
14690 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
14692 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
14693 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
14694 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
14700 case GIMPLE_RETURN
:
14701 diagnose_sb_0 (gsi_p
, context
, NULL
);
14711 static unsigned int
14712 diagnose_omp_structured_block_errors (void)
14714 struct walk_stmt_info wi
;
14715 gimple_seq body
= gimple_body (current_function_decl
);
14717 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
14719 memset (&wi
, 0, sizeof (wi
));
14720 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
14722 memset (&wi
, 0, sizeof (wi
));
14723 wi
.want_locations
= true;
14724 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
14726 gimple_set_body (current_function_decl
, body
);
14728 splay_tree_delete (all_labels
);
14736 const pass_data pass_data_diagnose_omp_blocks
=
14738 GIMPLE_PASS
, /* type */
14739 "*diagnose_omp_blocks", /* name */
14740 OPTGROUP_OMP
, /* optinfo_flags */
14741 TV_NONE
, /* tv_id */
14742 PROP_gimple_any
, /* properties_required */
14743 0, /* properties_provided */
14744 0, /* properties_destroyed */
14745 0, /* todo_flags_start */
14746 0, /* todo_flags_finish */
14749 class pass_diagnose_omp_blocks
: public gimple_opt_pass
14752 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
14753 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
14756 /* opt_pass methods: */
14757 virtual bool gate (function
*)
14759 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
14761 virtual unsigned int execute (function
*)
14763 return diagnose_omp_structured_block_errors ();
14766 }; // class pass_diagnose_omp_blocks
14768 } // anon namespace
14771 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
14773 return new pass_diagnose_omp_blocks (ctxt
);
14777 #include "gt-omp-low.h"