1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2021 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* A hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
130 /* And a hash map from the allocate variables to their corresponding
132 hash_map
<tree
, tree
> *allocate_map
;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses
;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses
;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
151 /* True if this parallel directive is nested within another. */
154 /* True if this construct can be cancelled. */
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
159 bool combined_into_simd_safelen1
;
161 /* True if there is nested scan context with inclusive clause. */
164 /* True if there is nested scan context with exclusive clause. */
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase
;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent
;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p
;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec
<tree
> oacc_privatization_candidates
;
188 static splay_tree all_contexts
;
189 static int taskreg_nesting_level
;
190 static int target_nesting_level
;
191 static bitmap task_shared_vars
;
192 static bitmap global_nonaddressable_vars
;
193 static vec
<omp_context
*> taskreg_contexts
;
195 static void scan_omp (gimple_seq
*, omp_context
*);
196 static tree
scan_omp_1_op (tree
*, int *, void *);
198 #define WALK_SUBSTMTS \
202 case GIMPLE_EH_FILTER: \
203 case GIMPLE_TRANSACTION: \
204 /* The sub-statements for these should be walked. */ \
205 *handled_ops_p = false; \
208 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
209 (This doesn't include OpenACC 'kernels' decomposed parts.) */
212 is_oacc_parallel_or_serial (omp_context
*ctx
)
214 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
215 return ((outer_type
== GIMPLE_OMP_TARGET
)
216 && ((gimple_omp_target_kind (ctx
->stmt
)
217 == GF_OMP_TARGET_KIND_OACC_PARALLEL
)
218 || (gimple_omp_target_kind (ctx
->stmt
)
219 == GF_OMP_TARGET_KIND_OACC_SERIAL
)));
222 /* Return whether CTX represents an OpenACC 'kernels' construct.
223 (This doesn't include OpenACC 'kernels' decomposed parts.) */
226 is_oacc_kernels (omp_context
*ctx
)
228 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
229 return ((outer_type
== GIMPLE_OMP_TARGET
)
230 && (gimple_omp_target_kind (ctx
->stmt
)
231 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
234 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
237 is_oacc_kernels_decomposed_part (omp_context
*ctx
)
239 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
240 return ((outer_type
== GIMPLE_OMP_TARGET
)
241 && ((gimple_omp_target_kind (ctx
->stmt
)
242 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
)
243 || (gimple_omp_target_kind (ctx
->stmt
)
244 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
)
245 || (gimple_omp_target_kind (ctx
->stmt
)
246 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
)));
249 /* Return true if STMT corresponds to an OpenMP target region. */
251 is_omp_target (gimple
*stmt
)
253 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
)
255 int kind
= gimple_omp_target_kind (stmt
);
256 return (kind
== GF_OMP_TARGET_KIND_REGION
257 || kind
== GF_OMP_TARGET_KIND_DATA
258 || kind
== GF_OMP_TARGET_KIND_ENTER_DATA
259 || kind
== GF_OMP_TARGET_KIND_EXIT_DATA
);
264 /* If DECL is the artificial dummy VAR_DECL created for non-static
265 data member privatization, return the underlying "this" parameter,
266 otherwise return NULL. */
/* NOTE(review): this extract is missing several original lines (the
   initial VAR_P guard, the early returns, and most switch cases are
   absent) -- do not modify without consulting the full omp-low.c.  */
269 omp_member_access_dummy_var (tree decl
)
272 || !DECL_ARTIFICIAL (decl
)
273 || !DECL_IGNORED_P (decl
)
274 || !DECL_HAS_VALUE_EXPR_P (decl
)
275 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
/* Peel the COMPONENT_REF chain of the value expr down to its base.  */
278 tree v
= DECL_VALUE_EXPR (decl
);
279 if (TREE_CODE (v
) != COMPONENT_REF
)
283 switch (TREE_CODE (v
))
289 case POINTER_PLUS_EXPR
:
290 v
= TREE_OPERAND (v
, 0);
/* The base qualifies only if it is this function's artificial
   pointer parameter (presumably the "this" parameter).  */
293 if (DECL_CONTEXT (v
) == current_function_decl
294 && DECL_ARTIFICIAL (v
)
295 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
303 /* Helper for unshare_and_remap, called through walk_tree. */
/* NOTE(review): lines are missing from this extract (the comparison
   against pair[0] and the *walk_subtrees assignments are absent);
   incomplete -- consult the full omp-low.c before editing.  */
306 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
/* DATA is a two-element array: pair[0] is the tree to replace,
   pair[1] the replacement.  */
308 tree
*pair
= (tree
*) data
;
311 *tp
= unshare_expr (pair
[1]);
314 else if (IS_TYPE_OR_DECL_P (*tp
))
319 /* Return unshare_expr (X) with all occurrences of FROM
323 unshare_and_remap (tree x
, tree from
, tree to
)
325 tree pair
[2] = { from
, to
};
326 x
= unshare_expr (x
);
327 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
331 /* Convenience function for calling scan_omp_1_op on tree operands. */
334 scan_omp_op (tree
*tp
, omp_context
*ctx
)
336 struct walk_stmt_info wi
;
338 memset (&wi
, 0, sizeof (wi
));
340 wi
.want_locations
= true;
342 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
345 static void lower_omp (gimple_seq
*, omp_context
*);
346 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
347 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
349 /* Return true if CTX is for an omp parallel. */
352 is_parallel_ctx (omp_context
*ctx
)
354 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
358 /* Return true if CTX is for an omp task. */
361 is_task_ctx (omp_context
*ctx
)
363 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
367 /* Return true if CTX is for an omp taskloop. */
370 is_taskloop_ctx (omp_context
*ctx
)
372 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
373 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
377 /* Return true if CTX is for a host omp teams. */
380 is_host_teams_ctx (omp_context
*ctx
)
382 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
383 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
386 /* Return true if CTX is for an omp parallel or omp task or host omp teams
387 (the last one is strictly not a task region in OpenMP speak, but we
388 need to treat it similarly). */
391 is_taskreg_ctx (omp_context
*ctx
)
393 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
396 /* Return true if EXPR is variable sized. */
399 is_variable_sized (const_tree expr
)
401 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
404 /* Lookup variables. The "maybe" form
405 allows for the variable form to not have been entered, otherwise we
406 assert that the variable must have been entered. */
409 lookup_decl (tree var
, omp_context
*ctx
)
411 tree
*n
= ctx
->cb
.decl_map
->get (var
);
416 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
418 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
419 return n
? *n
: NULL_TREE
;
423 lookup_field (tree var
, omp_context
*ctx
)
426 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
427 return (tree
) n
->value
;
431 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
434 n
= splay_tree_lookup (ctx
->sfield_map
435 ? ctx
->sfield_map
: ctx
->field_map
, key
);
436 return (tree
) n
->value
;
440 lookup_sfield (tree var
, omp_context
*ctx
)
442 return lookup_sfield ((splay_tree_key
) var
, ctx
);
446 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
449 n
= splay_tree_lookup (ctx
->field_map
, key
);
450 return n
? (tree
) n
->value
: NULL_TREE
;
454 maybe_lookup_field (tree var
, omp_context
*ctx
)
456 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
459 /* Return true if DECL should be copied by pointer. SHARED_CTX is
460 the parallel context if DECL is to be shared. */
463 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
465 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
466 || TYPE_ATOMIC (TREE_TYPE (decl
)))
469 /* We can only use copy-in/copy-out semantics for shared variables
470 when we know the value is not accessible from an outer scope. */
473 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
475 /* ??? Trivially accessible from anywhere. But why would we even
476 be passing an address in this case? Should we simply assert
477 this to be false, or should we have a cleanup pass that removes
478 these from the list of mappings? */
479 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
482 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
483 without analyzing the expression whether or not its location
484 is accessible to anyone else. In the case of nested parallel
485 regions it certainly may be. */
486 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
489 /* Do not use copy-in/copy-out for variables that have their
491 if (is_global_var (decl
))
493 /* For file scope vars, track whether we've seen them as
494 non-addressable initially and in that case, keep the same
495 answer for the duration of the pass, even when they are made
496 addressable later on e.g. through reduction expansion. Global
497 variables which weren't addressable before the pass will not
498 have their privatized copies address taken. See PR91216. */
499 if (!TREE_ADDRESSABLE (decl
))
501 if (!global_nonaddressable_vars
)
502 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
503 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
505 else if (!global_nonaddressable_vars
506 || !bitmap_bit_p (global_nonaddressable_vars
,
510 else if (TREE_ADDRESSABLE (decl
))
513 /* lower_send_shared_vars only uses copy-in, but not copy-out
515 if (TREE_READONLY (decl
)
516 || ((TREE_CODE (decl
) == RESULT_DECL
517 || TREE_CODE (decl
) == PARM_DECL
)
518 && DECL_BY_REFERENCE (decl
)))
521 /* Disallow copy-in/out in nested parallel if
522 decl is shared in outer parallel, otherwise
523 each thread could store the shared variable
524 in its own copy-in location, making the
525 variable no longer really shared. */
526 if (shared_ctx
->is_nested
)
530 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
531 if ((is_taskreg_ctx (up
)
532 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
533 && is_gimple_omp_offloaded (up
->stmt
)))
534 && maybe_lookup_decl (decl
, up
))
541 if (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
)
543 for (c
= gimple_omp_target_clauses (up
->stmt
);
544 c
; c
= OMP_CLAUSE_CHAIN (c
))
545 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
546 && OMP_CLAUSE_DECL (c
) == decl
)
550 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
551 c
; c
= OMP_CLAUSE_CHAIN (c
))
552 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
553 && OMP_CLAUSE_DECL (c
) == decl
)
557 goto maybe_mark_addressable_and_ret
;
561 /* For tasks avoid using copy-in/out. As tasks can be
562 deferred or executed in different thread, when GOMP_task
563 returns, the task hasn't necessarily terminated. */
564 if (is_task_ctx (shared_ctx
))
567 maybe_mark_addressable_and_ret
:
568 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
569 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
571 /* Taking address of OUTER in lower_send_shared_vars
572 might need regimplification of everything that uses the
574 if (!task_shared_vars
)
575 task_shared_vars
= BITMAP_ALLOC (NULL
);
576 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
577 TREE_ADDRESSABLE (outer
) = 1;
586 /* Construct a new automatic decl similar to VAR. */
589 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
591 tree copy
= copy_var_decl (var
, name
, type
);
593 DECL_CONTEXT (copy
) = current_function_decl
;
594 DECL_CHAIN (copy
) = ctx
->block_vars
;
595 /* If VAR is listed in task_shared_vars, it means it wasn't
596 originally addressable and is just because task needs to take
597 it's address. But we don't need to take address of privatizations
599 if (TREE_ADDRESSABLE (var
)
600 && ((task_shared_vars
601 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
602 || (global_nonaddressable_vars
603 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
604 TREE_ADDRESSABLE (copy
) = 0;
605 ctx
->block_vars
= copy
;
611 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
613 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
616 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
619 omp_build_component_ref (tree obj
, tree field
)
621 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
622 if (TREE_THIS_VOLATILE (field
))
623 TREE_THIS_VOLATILE (ret
) |= 1;
624 if (TREE_READONLY (field
))
625 TREE_READONLY (ret
) |= 1;
629 /* Build tree nodes to access the field for VAR on the receiver side. */
632 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
634 tree x
, field
= lookup_field (var
, ctx
);
636 /* If the receiver record type was remapped in the child function,
637 remap the field into the new record type. */
638 x
= maybe_lookup_field (field
, ctx
);
642 x
= build_simple_mem_ref (ctx
->receiver_decl
);
643 TREE_THIS_NOTRAP (x
) = 1;
644 x
= omp_build_component_ref (x
, field
);
647 x
= build_simple_mem_ref (x
);
648 TREE_THIS_NOTRAP (x
) = 1;
654 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
655 of a parallel, this is a component reference; for workshare constructs
656 this is some variable. */
659 build_outer_var_ref (tree var
, omp_context
*ctx
,
660 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
663 omp_context
*outer
= ctx
->outer
;
664 while (outer
&& gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
665 outer
= outer
->outer
;
667 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
669 else if (is_variable_sized (var
))
671 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
672 x
= build_outer_var_ref (x
, ctx
, code
);
673 x
= build_simple_mem_ref (x
);
675 else if (is_taskreg_ctx (ctx
))
677 bool by_ref
= use_pointer_for_field (var
, NULL
);
678 x
= build_receiver_ref (var
, by_ref
, ctx
);
680 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
681 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
683 || (code
== OMP_CLAUSE_PRIVATE
684 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
685 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
686 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
688 /* #pragma omp simd isn't a worksharing construct, and can reference
689 even private vars in its linear etc. clauses.
690 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
691 to private vars in all worksharing constructs. */
693 if (outer
&& is_taskreg_ctx (outer
))
694 x
= lookup_decl (var
, outer
);
696 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
700 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
704 = splay_tree_lookup (outer
->field_map
,
705 (splay_tree_key
) &DECL_UID (var
));
708 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
711 x
= lookup_decl (var
, outer
);
715 tree field
= (tree
) n
->value
;
716 /* If the receiver record type was remapped in the child function,
717 remap the field into the new record type. */
718 x
= maybe_lookup_field (field
, outer
);
722 x
= build_simple_mem_ref (outer
->receiver_decl
);
723 x
= omp_build_component_ref (x
, field
);
724 if (use_pointer_for_field (var
, outer
))
725 x
= build_simple_mem_ref (x
);
729 x
= lookup_decl (var
, outer
);
730 else if (omp_is_reference (var
))
731 /* This can happen with orphaned constructs. If var is reference, it is
732 possible it is shared and as such valid. */
734 else if (omp_member_access_dummy_var (var
))
741 tree t
= omp_member_access_dummy_var (var
);
744 x
= DECL_VALUE_EXPR (var
);
745 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
747 x
= unshare_and_remap (x
, t
, o
);
749 x
= unshare_expr (x
);
753 if (omp_is_reference (var
))
754 x
= build_simple_mem_ref (x
);
759 /* Build tree nodes to access the field for VAR on the sender side. */
762 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
764 tree field
= lookup_sfield (key
, ctx
);
765 return omp_build_component_ref (ctx
->sender_decl
, field
);
769 build_sender_ref (tree var
, omp_context
*ctx
)
771 return build_sender_ref ((splay_tree_key
) var
, ctx
);
774 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
775 BASE_POINTERS_RESTRICT, declare the field with restrict. */
778 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
780 tree field
, type
, sfield
= NULL_TREE
;
781 splay_tree_key key
= (splay_tree_key
) var
;
783 if ((mask
& 16) != 0)
785 key
= (splay_tree_key
) &DECL_NAME (var
);
786 gcc_checking_assert (key
!= (splay_tree_key
) var
);
790 key
= (splay_tree_key
) &DECL_UID (var
);
791 gcc_checking_assert (key
!= (splay_tree_key
) var
);
793 gcc_assert ((mask
& 1) == 0
794 || !splay_tree_lookup (ctx
->field_map
, key
));
795 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
796 || !splay_tree_lookup (ctx
->sfield_map
, key
));
797 gcc_assert ((mask
& 3) == 3
798 || !is_gimple_omp_oacc (ctx
->stmt
));
800 type
= TREE_TYPE (var
);
801 if ((mask
& 16) != 0)
802 type
= lang_hooks
.decls
.omp_array_data (var
, true);
804 /* Prevent redeclaring the var in the split-off function with a restrict
805 pointer type. Note that we only clear type itself, restrict qualifiers in
806 the pointed-to type will be ignored by points-to analysis. */
807 if (POINTER_TYPE_P (type
)
808 && TYPE_RESTRICT (type
))
809 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
813 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
814 type
= build_pointer_type (build_pointer_type (type
));
817 type
= build_pointer_type (type
);
818 else if ((mask
& (32 | 3)) == 1 && omp_is_reference (var
))
819 type
= TREE_TYPE (type
);
821 field
= build_decl (DECL_SOURCE_LOCATION (var
),
822 FIELD_DECL
, DECL_NAME (var
), type
);
824 /* Remember what variable this field was created for. This does have a
825 side effect of making dwarf2out ignore this member, so for helpful
826 debugging we clear it later in delete_omp_context. */
827 DECL_ABSTRACT_ORIGIN (field
) = var
;
828 if ((mask
& 16) == 0 && type
== TREE_TYPE (var
))
830 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
831 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
832 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
835 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
839 insert_field_into_struct (ctx
->record_type
, field
);
840 if (ctx
->srecord_type
)
842 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
843 FIELD_DECL
, DECL_NAME (var
), type
);
844 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
845 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
846 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
847 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
848 insert_field_into_struct (ctx
->srecord_type
, sfield
);
853 if (ctx
->srecord_type
== NULL_TREE
)
857 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
858 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
859 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
861 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
862 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
863 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
864 insert_field_into_struct (ctx
->srecord_type
, sfield
);
865 splay_tree_insert (ctx
->sfield_map
,
866 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
867 (splay_tree_value
) sfield
);
871 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
872 : ctx
->srecord_type
, field
);
876 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
877 if ((mask
& 2) && ctx
->sfield_map
)
878 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
882 install_var_local (tree var
, omp_context
*ctx
)
884 tree new_var
= omp_copy_decl_1 (var
, ctx
);
885 insert_decl_map (&ctx
->cb
, var
, new_var
);
889 /* Adjust the replacement for DECL in CTX for the new context. This means
890 copying the DECL_VALUE_EXPR, and fixing up the type. */
/* NOTE(review): this extract is missing original lines (declarations of
   new_decl/size and several braces are absent) -- incomplete; consult
   the full omp-low.c before editing.  */
893 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
897 new_decl
= lookup_decl (decl
, ctx
);
899 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
/* Copy and remap DECL's value expr into the new context.  */
901 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
902 && DECL_HAS_VALUE_EXPR_P (decl
))
904 tree ve
= DECL_VALUE_EXPR (decl
);
905 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
906 SET_DECL_VALUE_EXPR (new_decl
, ve
);
907 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
/* For variable-sized decls, remap DECL_SIZE and DECL_SIZE_UNIT,
   falling back to the type's size on remap failure.  */
910 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
912 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
913 if (size
== error_mark_node
)
914 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
915 DECL_SIZE (new_decl
) = size
;
917 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
918 if (size
== error_mark_node
)
919 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
920 DECL_SIZE_UNIT (new_decl
) = size
;
924 /* The callback for remap_decl. Search all containing contexts for a
925 mapping of the variable; this avoids having to duplicate the splay
926 tree ahead of time. We know a mapping doesn't already exist in the
927 given context. Create new mappings to implement default semantics. */
930 omp_copy_decl (tree var
, copy_body_data
*cb
)
932 omp_context
*ctx
= (omp_context
*) cb
;
935 if (TREE_CODE (var
) == LABEL_DECL
)
937 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
939 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
940 DECL_CONTEXT (new_var
) = current_function_decl
;
941 insert_decl_map (&ctx
->cb
, var
, new_var
);
945 while (!is_taskreg_ctx (ctx
))
950 new_var
= maybe_lookup_decl (var
, ctx
);
955 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
958 return error_mark_node
;
961 /* Create a new context, with OUTER_CTX being the surrounding context. */
964 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
966 omp_context
*ctx
= XCNEW (omp_context
);
968 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
969 (splay_tree_value
) ctx
);
974 ctx
->outer
= outer_ctx
;
975 ctx
->cb
= outer_ctx
->cb
;
976 ctx
->cb
.block
= NULL
;
977 ctx
->depth
= outer_ctx
->depth
+ 1;
981 ctx
->cb
.src_fn
= current_function_decl
;
982 ctx
->cb
.dst_fn
= current_function_decl
;
983 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
984 gcc_checking_assert (ctx
->cb
.src_node
);
985 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
986 ctx
->cb
.src_cfun
= cfun
;
987 ctx
->cb
.copy_decl
= omp_copy_decl
;
988 ctx
->cb
.eh_lp_nr
= 0;
989 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
990 ctx
->cb
.adjust_array_error_bounds
= true;
991 ctx
->cb
.dont_remap_vla_if_no_change
= true;
995 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
1000 static gimple_seq
maybe_catch_exception (gimple_seq
);
1002 /* Finalize task copyfn. */
1005 finalize_task_copyfn (gomp_task
*task_stmt
)
1007 struct function
*child_cfun
;
1009 gimple_seq seq
= NULL
, new_seq
;
1012 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
1013 if (child_fn
== NULL_TREE
)
1016 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
1017 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
1019 push_cfun (child_cfun
);
1020 bind
= gimplify_body (child_fn
, false);
1021 gimple_seq_add_stmt (&seq
, bind
);
1022 new_seq
= maybe_catch_exception (seq
);
1025 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
1027 gimple_seq_add_stmt (&seq
, bind
);
1029 gimple_set_body (child_fn
, seq
);
1032 /* Inform the callgraph about the new function. */
1033 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
1034 node
->parallelized_function
= 1;
1035 cgraph_node::add_new_function (child_fn
, false);
1038 /* Destroy a omp_context data structures. Called through the splay tree
1039 value delete callback. */
/* NOTE(review): this extract is missing original lines (braces, the
   field_map guard and local declarations are absent) -- incomplete;
   consult the full omp-low.c before editing.  */
1042 delete_omp_context (splay_tree_value value
)
1044 omp_context
*ctx
= (omp_context
*) value
;
/* Free the decl remapping table and the field splay trees.  */
1046 delete ctx
->cb
.decl_map
;
1049 splay_tree_delete (ctx
->field_map
);
1050 if (ctx
->sfield_map
)
1051 splay_tree_delete (ctx
->sfield_map
);
1053 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1054 it produces corrupt debug information. */
1055 if (ctx
->record_type
)
1058 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
1059 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1061 if (ctx
->srecord_type
)
1064 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
1065 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
/* Task contexts own a copy function that must be finalized.  */
1068 if (is_task_ctx (ctx
))
1069 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
1071 if (ctx
->task_reduction_map
)
1073 ctx
->task_reductions
.release ();
1074 delete ctx
->task_reduction_map
;
1077 delete ctx
->lastprivate_conditional_map
;
1078 delete ctx
->allocate_map
;
1083 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1087 fixup_child_record_type (omp_context
*ctx
)
1089 tree f
, type
= ctx
->record_type
;
1091 if (!ctx
->receiver_decl
)
1093 /* ??? It isn't sufficient to just call remap_type here, because
1094 variably_modified_type_p doesn't work the way we expect for
1095 record types. Testing each field for whether it needs remapping
1096 and creating a new record by hand works, however. */
1097 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1098 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1102 tree name
, new_fields
= NULL
;
1104 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1105 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1106 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1107 TYPE_DECL
, name
, type
);
1108 TYPE_NAME (type
) = name
;
1110 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1112 tree new_f
= copy_node (f
);
1113 DECL_CONTEXT (new_f
) = type
;
1114 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1115 DECL_CHAIN (new_f
) = new_fields
;
1116 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1117 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1119 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1123 /* Arrange to be able to look up the receiver field
1124 given the sender field. */
1125 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1126 (splay_tree_value
) new_f
);
1128 TYPE_FIELDS (type
) = nreverse (new_fields
);
1132 /* In a target region we never modify any of the pointers in *.omp_data_i,
1133 so attempt to help the optimizers. */
1134 if (is_gimple_omp_offloaded (ctx
->stmt
))
1135 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1137 TREE_TYPE (ctx
->receiver_decl
)
1138 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1141 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1142 specified by CLAUSES. */
1145 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1148 bool scan_array_reductions
= false;
1150 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1151 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
1152 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
1153 /* omp_default_mem_alloc is 1 */
1154 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))))
1156 if (ctx
->allocate_map
== NULL
)
1157 ctx
->allocate_map
= new hash_map
<tree
, tree
>;
1158 ctx
->allocate_map
->put (OMP_CLAUSE_DECL (c
),
1159 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
1160 ? OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
1161 : integer_zero_node
);
1164 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1168 switch (OMP_CLAUSE_CODE (c
))
1170 case OMP_CLAUSE_PRIVATE
:
1171 decl
= OMP_CLAUSE_DECL (c
);
1172 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1174 else if (!is_variable_sized (decl
))
1175 install_var_local (decl
, ctx
);
1178 case OMP_CLAUSE_SHARED
:
1179 decl
= OMP_CLAUSE_DECL (c
);
1180 if (ctx
->allocate_map
&& ctx
->allocate_map
->get (decl
))
1181 ctx
->allocate_map
->remove (decl
);
1182 /* Ignore shared directives in teams construct inside of
1183 target construct. */
1184 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1185 && !is_host_teams_ctx (ctx
))
1187 /* Global variables don't need to be copied,
1188 the receiver side will use them directly. */
1189 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1190 if (is_global_var (odecl
))
1192 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1195 gcc_assert (is_taskreg_ctx (ctx
));
1196 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1197 || !is_variable_sized (decl
));
1198 /* Global variables don't need to be copied,
1199 the receiver side will use them directly. */
1200 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1202 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1204 use_pointer_for_field (decl
, ctx
);
1207 by_ref
= use_pointer_for_field (decl
, NULL
);
1208 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1209 || TREE_ADDRESSABLE (decl
)
1211 || omp_is_reference (decl
))
1213 by_ref
= use_pointer_for_field (decl
, ctx
);
1214 install_var_field (decl
, by_ref
, 3, ctx
);
1215 install_var_local (decl
, ctx
);
1218 /* We don't need to copy const scalar vars back. */
1219 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1222 case OMP_CLAUSE_REDUCTION
:
1223 /* Collect 'reduction' clauses on OpenACC compute construct. */
1224 if (is_gimple_omp_oacc (ctx
->stmt
)
1225 && is_gimple_omp_offloaded (ctx
->stmt
))
1227 /* No 'reduction' clauses on OpenACC 'kernels'. */
1228 gcc_checking_assert (!is_oacc_kernels (ctx
));
1229 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1230 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
1232 ctx
->local_reduction_clauses
1233 = tree_cons (NULL
, c
, ctx
->local_reduction_clauses
);
1237 case OMP_CLAUSE_IN_REDUCTION
:
1238 decl
= OMP_CLAUSE_DECL (c
);
1239 if (ctx
->allocate_map
1240 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1241 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
1242 || OMP_CLAUSE_REDUCTION_TASK (c
)))
1243 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1244 || is_task_ctx (ctx
)))
1247 if (ctx
->allocate_map
->get (decl
))
1248 ctx
->allocate_map
->remove (decl
);
1250 if (TREE_CODE (decl
) == MEM_REF
)
1252 tree t
= TREE_OPERAND (decl
, 0);
1253 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1254 t
= TREE_OPERAND (t
, 0);
1255 if (TREE_CODE (t
) == INDIRECT_REF
1256 || TREE_CODE (t
) == ADDR_EXPR
)
1257 t
= TREE_OPERAND (t
, 0);
1258 if (is_omp_target (ctx
->stmt
))
1260 if (is_variable_sized (t
))
1262 gcc_assert (DECL_HAS_VALUE_EXPR_P (t
));
1263 t
= DECL_VALUE_EXPR (t
);
1264 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
1265 t
= TREE_OPERAND (t
, 0);
1266 gcc_assert (DECL_P (t
));
1270 scan_omp_op (&at
, ctx
->outer
);
1271 tree nt
= omp_copy_decl_1 (at
, ctx
);
1272 splay_tree_insert (ctx
->field_map
,
1273 (splay_tree_key
) &DECL_CONTEXT (t
),
1274 (splay_tree_value
) nt
);
1276 splay_tree_insert (ctx
->field_map
,
1277 (splay_tree_key
) &DECL_CONTEXT (at
),
1278 (splay_tree_value
) nt
);
1281 install_var_local (t
, ctx
);
1282 if (is_taskreg_ctx (ctx
)
1283 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1284 || (is_task_ctx (ctx
)
1285 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1286 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1287 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1288 == POINTER_TYPE
)))))
1289 && !is_variable_sized (t
)
1290 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1291 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1292 && !is_task_ctx (ctx
))))
1294 by_ref
= use_pointer_for_field (t
, NULL
);
1295 if (is_task_ctx (ctx
)
1296 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1297 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1299 install_var_field (t
, false, 1, ctx
);
1300 install_var_field (t
, by_ref
, 2, ctx
);
1303 install_var_field (t
, by_ref
, 3, ctx
);
1307 if (is_omp_target (ctx
->stmt
))
1311 scan_omp_op (&at
, ctx
->outer
);
1312 tree nt
= omp_copy_decl_1 (at
, ctx
);
1313 splay_tree_insert (ctx
->field_map
,
1314 (splay_tree_key
) &DECL_CONTEXT (decl
),
1315 (splay_tree_value
) nt
);
1317 splay_tree_insert (ctx
->field_map
,
1318 (splay_tree_key
) &DECL_CONTEXT (at
),
1319 (splay_tree_value
) nt
);
1322 if (is_task_ctx (ctx
)
1323 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1324 && OMP_CLAUSE_REDUCTION_TASK (c
)
1325 && is_parallel_ctx (ctx
)))
1327 /* Global variables don't need to be copied,
1328 the receiver side will use them directly. */
1329 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1331 by_ref
= use_pointer_for_field (decl
, ctx
);
1332 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1333 install_var_field (decl
, by_ref
, 3, ctx
);
1335 install_var_local (decl
, ctx
);
1338 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1339 && OMP_CLAUSE_REDUCTION_TASK (c
))
1341 install_var_local (decl
, ctx
);
1346 case OMP_CLAUSE_LASTPRIVATE
:
1347 /* Let the corresponding firstprivate clause create
1349 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1353 case OMP_CLAUSE_FIRSTPRIVATE
:
1354 case OMP_CLAUSE_LINEAR
:
1355 decl
= OMP_CLAUSE_DECL (c
);
1357 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1358 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1359 && is_gimple_omp_offloaded (ctx
->stmt
))
1361 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1362 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1363 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1364 install_var_field (decl
, true, 3, ctx
);
1366 install_var_field (decl
, false, 3, ctx
);
1368 if (is_variable_sized (decl
))
1370 if (is_task_ctx (ctx
))
1372 if (ctx
->allocate_map
1373 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1376 if (ctx
->allocate_map
->get (decl
))
1377 ctx
->allocate_map
->remove (decl
);
1379 install_var_field (decl
, false, 1, ctx
);
1383 else if (is_taskreg_ctx (ctx
))
1386 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1387 by_ref
= use_pointer_for_field (decl
, NULL
);
1389 if (is_task_ctx (ctx
)
1390 && (global
|| by_ref
|| omp_is_reference (decl
)))
1392 if (ctx
->allocate_map
1393 && ctx
->allocate_map
->get (decl
))
1394 install_var_field (decl
, by_ref
, 32 | 1, ctx
);
1396 install_var_field (decl
, false, 1, ctx
);
1398 install_var_field (decl
, by_ref
, 2, ctx
);
1401 install_var_field (decl
, by_ref
, 3, ctx
);
1403 install_var_local (decl
, ctx
);
1406 case OMP_CLAUSE_USE_DEVICE_PTR
:
1407 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1408 decl
= OMP_CLAUSE_DECL (c
);
1410 /* Fortran array descriptors. */
1411 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1412 install_var_field (decl
, false, 19, ctx
);
1413 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1414 && !omp_is_reference (decl
)
1415 && !omp_is_allocatable_or_ptr (decl
))
1416 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1417 install_var_field (decl
, true, 11, ctx
);
1419 install_var_field (decl
, false, 11, ctx
);
1420 if (DECL_SIZE (decl
)
1421 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1423 tree decl2
= DECL_VALUE_EXPR (decl
);
1424 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1425 decl2
= TREE_OPERAND (decl2
, 0);
1426 gcc_assert (DECL_P (decl2
));
1427 install_var_local (decl2
, ctx
);
1429 install_var_local (decl
, ctx
);
1432 case OMP_CLAUSE_IS_DEVICE_PTR
:
1433 decl
= OMP_CLAUSE_DECL (c
);
1436 case OMP_CLAUSE__LOOPTEMP_
:
1437 case OMP_CLAUSE__REDUCTEMP_
:
1438 gcc_assert (is_taskreg_ctx (ctx
));
1439 decl
= OMP_CLAUSE_DECL (c
);
1440 install_var_field (decl
, false, 3, ctx
);
1441 install_var_local (decl
, ctx
);
1444 case OMP_CLAUSE_COPYPRIVATE
:
1445 case OMP_CLAUSE_COPYIN
:
1446 decl
= OMP_CLAUSE_DECL (c
);
1447 by_ref
= use_pointer_for_field (decl
, NULL
);
1448 install_var_field (decl
, by_ref
, 3, ctx
);
1451 case OMP_CLAUSE_FINAL
:
1453 case OMP_CLAUSE_NUM_THREADS
:
1454 case OMP_CLAUSE_NUM_TEAMS
:
1455 case OMP_CLAUSE_THREAD_LIMIT
:
1456 case OMP_CLAUSE_DEVICE
:
1457 case OMP_CLAUSE_SCHEDULE
:
1458 case OMP_CLAUSE_DIST_SCHEDULE
:
1459 case OMP_CLAUSE_DEPEND
:
1460 case OMP_CLAUSE_PRIORITY
:
1461 case OMP_CLAUSE_GRAINSIZE
:
1462 case OMP_CLAUSE_NUM_TASKS
:
1463 case OMP_CLAUSE_NUM_GANGS
:
1464 case OMP_CLAUSE_NUM_WORKERS
:
1465 case OMP_CLAUSE_VECTOR_LENGTH
:
1466 case OMP_CLAUSE_DETACH
:
1468 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1472 case OMP_CLAUSE_FROM
:
1473 case OMP_CLAUSE_MAP
:
1475 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1476 decl
= OMP_CLAUSE_DECL (c
);
1477 /* Global variables with "omp declare target" attribute
1478 don't need to be copied, the receiver side will use them
1479 directly. However, global variables with "omp declare target link"
1480 attribute need to be copied. Or when ALWAYS modifier is used. */
1481 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1483 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1484 && (OMP_CLAUSE_MAP_KIND (c
)
1485 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
1486 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
1487 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
)
1488 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1489 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1490 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1491 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1492 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
1493 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1494 && varpool_node::get_create (decl
)->offloadable
1495 && !lookup_attribute ("omp declare target link",
1496 DECL_ATTRIBUTES (decl
)))
1498 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1499 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1501 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1502 not offloaded; there is nothing to map for those. */
1503 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1504 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1505 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1508 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1510 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1511 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1512 && is_omp_target (ctx
->stmt
))
1514 /* If this is an offloaded region, an attach operation should
1515 only exist when the pointer variable is mapped in a prior
1517 if (is_gimple_omp_offloaded (ctx
->stmt
))
1519 (maybe_lookup_decl (decl
, ctx
)
1520 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1521 && lookup_attribute ("omp declare target",
1522 DECL_ATTRIBUTES (decl
))));
1524 /* By itself, attach/detach is generated as part of pointer
1525 variable mapping and should not create new variables in the
1526 offloaded region, however sender refs for it must be created
1527 for its address to be passed to the runtime. */
1529 = build_decl (OMP_CLAUSE_LOCATION (c
),
1530 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1531 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1532 insert_field_into_struct (ctx
->record_type
, field
);
1533 /* To not clash with a map of the pointer variable itself,
1534 attach/detach maps have their field looked up by the *clause*
1535 tree expression, not the decl. */
1536 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1537 (splay_tree_key
) c
));
1538 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) c
,
1539 (splay_tree_value
) field
);
1542 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1543 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1544 || (OMP_CLAUSE_MAP_KIND (c
)
1545 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1547 if (TREE_CODE (decl
) == COMPONENT_REF
1548 || (TREE_CODE (decl
) == INDIRECT_REF
1549 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1550 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1551 == REFERENCE_TYPE
)))
1553 if (DECL_SIZE (decl
)
1554 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1556 tree decl2
= DECL_VALUE_EXPR (decl
);
1557 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1558 decl2
= TREE_OPERAND (decl2
, 0);
1559 gcc_assert (DECL_P (decl2
));
1560 install_var_local (decl2
, ctx
);
1562 install_var_local (decl
, ctx
);
1567 if (DECL_SIZE (decl
)
1568 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1570 tree decl2
= DECL_VALUE_EXPR (decl
);
1571 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1572 decl2
= TREE_OPERAND (decl2
, 0);
1573 gcc_assert (DECL_P (decl2
));
1574 install_var_field (decl2
, true, 3, ctx
);
1575 install_var_local (decl2
, ctx
);
1576 install_var_local (decl
, ctx
);
1580 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1581 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1582 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1583 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1584 install_var_field (decl
, true, 7, ctx
);
1586 install_var_field (decl
, true, 3, ctx
);
1587 if (is_gimple_omp_offloaded (ctx
->stmt
)
1588 && !(is_gimple_omp_oacc (ctx
->stmt
)
1589 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
1590 install_var_local (decl
, ctx
);
1595 tree base
= get_base_address (decl
);
1596 tree nc
= OMP_CLAUSE_CHAIN (c
);
1599 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1600 && OMP_CLAUSE_DECL (nc
) == base
1601 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1602 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1604 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1605 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1611 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1612 decl
= OMP_CLAUSE_DECL (c
);
1614 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1615 (splay_tree_key
) decl
));
1617 = build_decl (OMP_CLAUSE_LOCATION (c
),
1618 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1619 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1620 insert_field_into_struct (ctx
->record_type
, field
);
1621 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1622 (splay_tree_value
) field
);
1627 case OMP_CLAUSE_ORDER
:
1628 ctx
->order_concurrent
= true;
1631 case OMP_CLAUSE_BIND
:
1635 case OMP_CLAUSE_NOWAIT
:
1636 case OMP_CLAUSE_ORDERED
:
1637 case OMP_CLAUSE_COLLAPSE
:
1638 case OMP_CLAUSE_UNTIED
:
1639 case OMP_CLAUSE_MERGEABLE
:
1640 case OMP_CLAUSE_PROC_BIND
:
1641 case OMP_CLAUSE_SAFELEN
:
1642 case OMP_CLAUSE_SIMDLEN
:
1643 case OMP_CLAUSE_THREADS
:
1644 case OMP_CLAUSE_SIMD
:
1645 case OMP_CLAUSE_NOGROUP
:
1646 case OMP_CLAUSE_DEFAULTMAP
:
1647 case OMP_CLAUSE_ASYNC
:
1648 case OMP_CLAUSE_WAIT
:
1649 case OMP_CLAUSE_GANG
:
1650 case OMP_CLAUSE_WORKER
:
1651 case OMP_CLAUSE_VECTOR
:
1652 case OMP_CLAUSE_INDEPENDENT
:
1653 case OMP_CLAUSE_AUTO
:
1654 case OMP_CLAUSE_SEQ
:
1655 case OMP_CLAUSE_TILE
:
1656 case OMP_CLAUSE__SIMT_
:
1657 case OMP_CLAUSE_DEFAULT
:
1658 case OMP_CLAUSE_NONTEMPORAL
:
1659 case OMP_CLAUSE_IF_PRESENT
:
1660 case OMP_CLAUSE_FINALIZE
:
1661 case OMP_CLAUSE_TASK_REDUCTION
:
1662 case OMP_CLAUSE_ALLOCATE
:
1665 case OMP_CLAUSE_ALIGNED
:
1666 decl
= OMP_CLAUSE_DECL (c
);
1667 if (is_global_var (decl
)
1668 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1669 install_var_local (decl
, ctx
);
1672 case OMP_CLAUSE__CONDTEMP_
:
1673 decl
= OMP_CLAUSE_DECL (c
);
1674 if (is_parallel_ctx (ctx
))
1676 install_var_field (decl
, false, 3, ctx
);
1677 install_var_local (decl
, ctx
);
1679 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1680 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1681 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1682 install_var_local (decl
, ctx
);
1685 case OMP_CLAUSE__CACHE_
:
1691 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1693 switch (OMP_CLAUSE_CODE (c
))
1695 case OMP_CLAUSE_LASTPRIVATE
:
1696 /* Let the corresponding firstprivate clause create
1698 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1699 scan_array_reductions
= true;
1700 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1704 case OMP_CLAUSE_FIRSTPRIVATE
:
1705 case OMP_CLAUSE_PRIVATE
:
1706 case OMP_CLAUSE_LINEAR
:
1707 case OMP_CLAUSE_IS_DEVICE_PTR
:
1708 decl
= OMP_CLAUSE_DECL (c
);
1709 if (is_variable_sized (decl
))
1711 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1712 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1713 && is_gimple_omp_offloaded (ctx
->stmt
))
1715 tree decl2
= DECL_VALUE_EXPR (decl
);
1716 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1717 decl2
= TREE_OPERAND (decl2
, 0);
1718 gcc_assert (DECL_P (decl2
));
1719 install_var_local (decl2
, ctx
);
1720 fixup_remapped_decl (decl2
, ctx
, false);
1722 install_var_local (decl
, ctx
);
1724 fixup_remapped_decl (decl
, ctx
,
1725 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1726 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1727 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1728 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1729 scan_array_reductions
= true;
1732 case OMP_CLAUSE_REDUCTION
:
1733 case OMP_CLAUSE_IN_REDUCTION
:
1734 decl
= OMP_CLAUSE_DECL (c
);
1735 if (TREE_CODE (decl
) != MEM_REF
&& !is_omp_target (ctx
->stmt
))
1737 if (is_variable_sized (decl
))
1738 install_var_local (decl
, ctx
);
1739 fixup_remapped_decl (decl
, ctx
, false);
1741 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1742 scan_array_reductions
= true;
1745 case OMP_CLAUSE_TASK_REDUCTION
:
1746 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1747 scan_array_reductions
= true;
1750 case OMP_CLAUSE_SHARED
:
1751 /* Ignore shared directives in teams construct inside of
1752 target construct. */
1753 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1754 && !is_host_teams_ctx (ctx
))
1756 decl
= OMP_CLAUSE_DECL (c
);
1757 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1759 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1761 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1764 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1765 install_var_field (decl
, by_ref
, 11, ctx
);
1768 fixup_remapped_decl (decl
, ctx
, false);
1771 case OMP_CLAUSE_MAP
:
1772 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1774 decl
= OMP_CLAUSE_DECL (c
);
1776 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1777 && (OMP_CLAUSE_MAP_KIND (c
)
1778 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1779 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1780 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1781 && varpool_node::get_create (decl
)->offloadable
)
1783 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1784 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1785 && is_omp_target (ctx
->stmt
)
1786 && !is_gimple_omp_offloaded (ctx
->stmt
))
1790 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1791 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1792 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1793 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1795 tree new_decl
= lookup_decl (decl
, ctx
);
1796 TREE_TYPE (new_decl
)
1797 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1799 else if (DECL_SIZE (decl
)
1800 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1802 tree decl2
= DECL_VALUE_EXPR (decl
);
1803 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1804 decl2
= TREE_OPERAND (decl2
, 0);
1805 gcc_assert (DECL_P (decl2
));
1806 fixup_remapped_decl (decl2
, ctx
, false);
1807 fixup_remapped_decl (decl
, ctx
, true);
1810 fixup_remapped_decl (decl
, ctx
, false);
1814 case OMP_CLAUSE_COPYPRIVATE
:
1815 case OMP_CLAUSE_COPYIN
:
1816 case OMP_CLAUSE_DEFAULT
:
1818 case OMP_CLAUSE_NUM_THREADS
:
1819 case OMP_CLAUSE_NUM_TEAMS
:
1820 case OMP_CLAUSE_THREAD_LIMIT
:
1821 case OMP_CLAUSE_DEVICE
:
1822 case OMP_CLAUSE_SCHEDULE
:
1823 case OMP_CLAUSE_DIST_SCHEDULE
:
1824 case OMP_CLAUSE_NOWAIT
:
1825 case OMP_CLAUSE_ORDERED
:
1826 case OMP_CLAUSE_COLLAPSE
:
1827 case OMP_CLAUSE_UNTIED
:
1828 case OMP_CLAUSE_FINAL
:
1829 case OMP_CLAUSE_MERGEABLE
:
1830 case OMP_CLAUSE_PROC_BIND
:
1831 case OMP_CLAUSE_SAFELEN
:
1832 case OMP_CLAUSE_SIMDLEN
:
1833 case OMP_CLAUSE_ALIGNED
:
1834 case OMP_CLAUSE_DEPEND
:
1835 case OMP_CLAUSE_DETACH
:
1836 case OMP_CLAUSE_ALLOCATE
:
1837 case OMP_CLAUSE__LOOPTEMP_
:
1838 case OMP_CLAUSE__REDUCTEMP_
:
1840 case OMP_CLAUSE_FROM
:
1841 case OMP_CLAUSE_PRIORITY
:
1842 case OMP_CLAUSE_GRAINSIZE
:
1843 case OMP_CLAUSE_NUM_TASKS
:
1844 case OMP_CLAUSE_THREADS
:
1845 case OMP_CLAUSE_SIMD
:
1846 case OMP_CLAUSE_NOGROUP
:
1847 case OMP_CLAUSE_DEFAULTMAP
:
1848 case OMP_CLAUSE_ORDER
:
1849 case OMP_CLAUSE_BIND
:
1850 case OMP_CLAUSE_USE_DEVICE_PTR
:
1851 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1852 case OMP_CLAUSE_NONTEMPORAL
:
1853 case OMP_CLAUSE_ASYNC
:
1854 case OMP_CLAUSE_WAIT
:
1855 case OMP_CLAUSE_NUM_GANGS
:
1856 case OMP_CLAUSE_NUM_WORKERS
:
1857 case OMP_CLAUSE_VECTOR_LENGTH
:
1858 case OMP_CLAUSE_GANG
:
1859 case OMP_CLAUSE_WORKER
:
1860 case OMP_CLAUSE_VECTOR
:
1861 case OMP_CLAUSE_INDEPENDENT
:
1862 case OMP_CLAUSE_AUTO
:
1863 case OMP_CLAUSE_SEQ
:
1864 case OMP_CLAUSE_TILE
:
1865 case OMP_CLAUSE__SIMT_
:
1866 case OMP_CLAUSE_IF_PRESENT
:
1867 case OMP_CLAUSE_FINALIZE
:
1868 case OMP_CLAUSE__CONDTEMP_
:
1871 case OMP_CLAUSE__CACHE_
:
1877 gcc_checking_assert (!scan_array_reductions
1878 || !is_gimple_omp_oacc (ctx
->stmt
));
1879 if (scan_array_reductions
)
1881 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1882 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1883 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1884 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1885 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1887 omp_context
*rctx
= ctx
;
1888 if (is_omp_target (ctx
->stmt
))
1890 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), rctx
);
1891 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), rctx
);
1893 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1894 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1895 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1896 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1897 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1898 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1902 /* Create a new name for omp child function. Returns an identifier. */
1905 create_omp_child_function_name (bool task_copy
)
1907 return clone_function_name_numbered (current_function_decl
,
1908 task_copy
? "_omp_cpyfn" : "_omp_fn");
1911 /* Return true if CTX may belong to offloaded code: either if current function
1912 is offloaded, or any enclosing context corresponds to a target region. */
1915 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1917 if (cgraph_node::get (current_function_decl
)->offloadable
)
1919 for (; ctx
; ctx
= ctx
->outer
)
1920 if (is_gimple_omp_offloaded (ctx
->stmt
))
1925 /* Build a decl for the omp child function. It'll not contain a body
1926 yet, just the bare decl. */
1929 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1931 tree decl
, type
, name
, t
;
1933 name
= create_omp_child_function_name (task_copy
);
1935 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1936 ptr_type_node
, NULL_TREE
);
1938 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1940 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1942 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1945 ctx
->cb
.dst_fn
= decl
;
1947 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1949 TREE_STATIC (decl
) = 1;
1950 TREE_USED (decl
) = 1;
1951 DECL_ARTIFICIAL (decl
) = 1;
1952 DECL_IGNORED_P (decl
) = 0;
1953 TREE_PUBLIC (decl
) = 0;
1954 DECL_UNINLINABLE (decl
) = 1;
1955 DECL_EXTERNAL (decl
) = 0;
1956 DECL_CONTEXT (decl
) = NULL_TREE
;
1957 DECL_INITIAL (decl
) = make_node (BLOCK
);
1958 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1959 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1960 /* Remove omp declare simd attribute from the new attributes. */
1961 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1963 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1966 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1967 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1968 *p
= TREE_CHAIN (*p
);
1971 tree chain
= TREE_CHAIN (*p
);
1972 *p
= copy_node (*p
);
1973 p
= &TREE_CHAIN (*p
);
1977 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1978 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1979 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1980 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1981 DECL_FUNCTION_VERSIONED (decl
)
1982 = DECL_FUNCTION_VERSIONED (current_function_decl
);
1984 if (omp_maybe_offloaded_ctx (ctx
))
1986 cgraph_node::get_create (decl
)->offloadable
= 1;
1987 if (ENABLE_OFFLOADING
)
1988 g
->have_offload
= true;
1991 if (cgraph_node::get_create (decl
)->offloadable
)
1993 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1994 ? "omp target entrypoint"
1995 : "omp declare target");
1996 if (lookup_attribute ("omp declare target",
1997 DECL_ATTRIBUTES (current_function_decl
)))
1999 if (is_gimple_omp_offloaded (ctx
->stmt
))
2000 DECL_ATTRIBUTES (decl
)
2001 = remove_attribute ("omp declare target",
2002 copy_list (DECL_ATTRIBUTES (decl
)));
2007 DECL_ATTRIBUTES (decl
)
2008 = tree_cons (get_identifier (target_attr
),
2009 NULL_TREE
, DECL_ATTRIBUTES (decl
));
2012 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2013 RESULT_DECL
, NULL_TREE
, void_type_node
);
2014 DECL_ARTIFICIAL (t
) = 1;
2015 DECL_IGNORED_P (t
) = 1;
2016 DECL_CONTEXT (t
) = decl
;
2017 DECL_RESULT (decl
) = t
;
2019 tree data_name
= get_identifier (".omp_data_i");
2020 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
2022 DECL_ARTIFICIAL (t
) = 1;
2023 DECL_NAMELESS (t
) = 1;
2024 DECL_ARG_TYPE (t
) = ptr_type_node
;
2025 DECL_CONTEXT (t
) = current_function_decl
;
2027 TREE_READONLY (t
) = 1;
2028 DECL_ARGUMENTS (decl
) = t
;
2030 ctx
->receiver_decl
= t
;
2033 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2034 PARM_DECL
, get_identifier (".omp_data_o"),
2036 DECL_ARTIFICIAL (t
) = 1;
2037 DECL_NAMELESS (t
) = 1;
2038 DECL_ARG_TYPE (t
) = ptr_type_node
;
2039 DECL_CONTEXT (t
) = current_function_decl
;
2041 TREE_ADDRESSABLE (t
) = 1;
2042 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
2043 DECL_ARGUMENTS (decl
) = t
;
2046 /* Allocate memory for the function structure. The call to
2047 allocate_struct_function clobbers CFUN, so we need to restore
2049 push_struct_function (decl
);
2050 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
2051 init_tree_ssa (cfun
);
2055 /* Callback for walk_gimple_seq. Check if combined parallel
2056 contains gimple_omp_for_combined_into_p OMP_FOR. */
2059 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
2060 bool *handled_ops_p
,
2061 struct walk_stmt_info
*wi
)
2063 gimple
*stmt
= gsi_stmt (*gsi_p
);
2065 *handled_ops_p
= true;
2066 switch (gimple_code (stmt
))
2070 case GIMPLE_OMP_FOR
:
2071 if (gimple_omp_for_combined_into_p (stmt
)
2072 && gimple_omp_for_kind (stmt
)
2073 == *(const enum gf_mask
*) (wi
->info
))
2076 return integer_zero_node
;
2085 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2088 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
2089 omp_context
*outer_ctx
)
2091 struct walk_stmt_info wi
;
2093 memset (&wi
, 0, sizeof (wi
));
2095 wi
.info
= (void *) &msk
;
2096 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
2097 if (wi
.info
!= (void *) &msk
)
2099 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
2100 struct omp_for_data fd
;
2101 omp_extract_for_data (for_stmt
, &fd
, NULL
);
2102 /* We need two temporaries with fd.loop.v type (istart/iend)
2103 and then (fd.collapse - 1) temporaries with the same
2104 type for count2 ... countN-1 vars if not constant. */
2105 size_t count
= 2, i
;
2106 tree type
= fd
.iter_type
;
2108 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
2110 count
+= fd
.collapse
- 1;
2111 /* If there are lastprivate clauses on the inner
2112 GIMPLE_OMP_FOR, add one more temporaries for the total number
2113 of iterations (product of count1 ... countN-1). */
2114 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
2115 OMP_CLAUSE_LASTPRIVATE
)
2116 || (msk
== GF_OMP_FOR_KIND_FOR
2117 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2118 OMP_CLAUSE_LASTPRIVATE
)))
2120 tree temp
= create_tmp_var (type
);
2121 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2122 OMP_CLAUSE__LOOPTEMP_
);
2123 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2124 OMP_CLAUSE_DECL (c
) = temp
;
2125 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2126 gimple_omp_taskreg_set_clauses (stmt
, c
);
2129 && fd
.last_nonrect
== fd
.first_nonrect
+ 1)
2130 if (tree v
= gimple_omp_for_index (for_stmt
, fd
.last_nonrect
))
2131 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
2133 v
= gimple_omp_for_index (for_stmt
, fd
.first_nonrect
);
2134 tree type2
= TREE_TYPE (v
);
2136 for (i
= 0; i
< 3; i
++)
2138 tree temp
= create_tmp_var (type2
);
2139 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2140 OMP_CLAUSE__LOOPTEMP_
);
2141 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2142 OMP_CLAUSE_DECL (c
) = temp
;
2143 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2144 gimple_omp_taskreg_set_clauses (stmt
, c
);
2148 for (i
= 0; i
< count
; i
++)
2150 tree temp
= create_tmp_var (type
);
2151 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
2152 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2153 OMP_CLAUSE_DECL (c
) = temp
;
2154 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2155 gimple_omp_taskreg_set_clauses (stmt
, c
);
2158 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
2159 && omp_find_clause (gimple_omp_task_clauses (stmt
),
2160 OMP_CLAUSE_REDUCTION
))
2162 tree type
= build_pointer_type (pointer_sized_int_node
);
2163 tree temp
= create_tmp_var (type
);
2164 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2165 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2166 OMP_CLAUSE_DECL (c
) = temp
;
2167 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
2168 gimple_omp_task_set_clauses (stmt
, c
);
2172 /* Scan an OpenMP parallel directive. */
/* NOTE(review): this region is a fragmented extraction of the original
   source -- several lines (return type, braces, some conditions) are
   missing, so the text below is not compilable as shown.  Comments
   describe only what the visible code demonstrates.  */
2175 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2179 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
2181 /* Ignore parallel directives with empty bodies, unless there
2182 are copyin clauses. */
2184 && empty_body_p (gimple_omp_body (stmt
))
2185 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2186 OMP_CLAUSE_COPYIN
) == NULL
)
/* Empty body and no copyin: the whole construct is replaced by a nop.  */
2188 gsi_replace (gsi
, gimple_build_nop (), false);
2192 if (gimple_omp_parallel_combined_p (stmt
))
2193 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
/* Walk REDUCTION clauses; for a task reduction, prepend a synthesized
   _REDUCTEMP_ clause holding a fresh temporary mapped to itself in the
   outer context's copy-body table.  */
2194 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2195 OMP_CLAUSE_REDUCTION
);
2196 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
2197 if (OMP_CLAUSE_REDUCTION_TASK (c
))
2199 tree type
= build_pointer_type (pointer_sized_int_node
);
2200 tree temp
= create_tmp_var (type
);
2201 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2203 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2204 OMP_CLAUSE_DECL (c
) = temp
;
2205 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
2206 gimple_omp_parallel_set_clauses (stmt
, c
);
2209 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
/* Create the context and the .omp_data_s record type that carries
   shared data into the child function.  */
2212 ctx
= new_omp_context (stmt
, outer_ctx
);
2213 taskreg_contexts
.safe_push (ctx
);
2214 if (taskreg_nesting_level
> 1)
2215 ctx
->is_nested
= true;
2216 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2217 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2218 name
= create_tmp_var_name (".omp_data_s");
2219 name
= build_decl (gimple_location (stmt
),
2220 TYPE_DECL
, name
, ctx
->record_type
);
2221 DECL_ARTIFICIAL (name
) = 1;
2222 DECL_NAMELESS (name
) = 1;
2223 TYPE_NAME (ctx
->record_type
) = name
;
2224 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2225 create_omp_child_function (ctx
, false);
2226 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2228 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
2229 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* No fields were added: drop the record type and receiver decl.  */
2231 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2232 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2235 /* Scan an OpenMP task directive. */
/* NOTE(review): fragmented extraction -- some original lines (braces,
   conditions, declarations) are missing; not compilable as shown.  */
2238 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2242 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
2244 /* Ignore task directives with empty bodies, unless they have depend
2247 && gimple_omp_body (stmt
)
2248 && empty_body_p (gimple_omp_body (stmt
))
2249 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
2251 gsi_replace (gsi
, gimple_build_nop (), false);
2255 if (gimple_omp_task_taskloop_p (stmt
))
2256 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
2258 ctx
= new_omp_context (stmt
, outer_ctx
);
/* A taskwait-form task only needs its clauses scanned.  */
2260 if (gimple_omp_task_taskwait_p (stmt
))
2262 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
/* Build the .omp_data_s record and the child function, as for
   parallel above.  */
2266 taskreg_contexts
.safe_push (ctx
);
2267 if (taskreg_nesting_level
> 1)
2268 ctx
->is_nested
= true;
2269 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2270 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2271 name
= create_tmp_var_name (".omp_data_s");
2272 name
= build_decl (gimple_location (stmt
),
2273 TYPE_DECL
, name
, ctx
->record_type
);
2274 DECL_ARTIFICIAL (name
) = 1;
2275 DECL_NAMELESS (name
) = 1;
2276 TYPE_NAME (ctx
->record_type
) = name
;
2277 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2278 create_omp_child_function (ctx
, false);
2279 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2281 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
/* If scanning produced a sender-side record (srecord_type), name it
   .omp_data_a and build a second child function for it.  */
2283 if (ctx
->srecord_type
)
2285 name
= create_tmp_var_name (".omp_data_a");
2286 name
= build_decl (gimple_location (stmt
),
2287 TYPE_DECL
, name
, ctx
->srecord_type
);
2288 DECL_ARTIFICIAL (name
) = 1;
2289 DECL_NAMELESS (name
) = 1;
2290 TYPE_NAME (ctx
->srecord_type
) = name
;
2291 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2292 create_omp_child_function (ctx
, true);
2295 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* Empty record: drop it and record trivial argument size (0) and
   alignment (1) on the task statement.  */
2297 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2299 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2300 t
= build_int_cst (long_integer_type_node
, 0);
2301 gimple_omp_task_set_arg_size (stmt
, t
);
2302 t
= build_int_cst (long_integer_type_node
, 1);
2303 gimple_omp_task_set_arg_align (stmt
, t
);
2307 /* Helper function for finish_taskreg_scan, called through walk_tree.
2308 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2309 tree, replace it in the expression. */
/* NOTE(review): fragmented extraction -- the tail of this walk_tree
   callback (replacement of *tp and the return value) is missing from
   the visible text; not compilable as shown.  */
2312 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2316 omp_context
*ctx
= (omp_context
*) data
;
2317 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
/* Prefer the decl's DEBUG/VALUE expression when it has one.  */
2320 if (DECL_HAS_VALUE_EXPR_P (t
))
2321 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2326 else if (IS_TYPE_OR_DECL_P (*tp
))
2331 /* If any decls have been made addressable during scan_omp,
2332 adjust their fields if needed, and layout record types
2333 of parallel/task constructs. */
/* NOTE(review): fragmented extraction -- many structural lines (braces,
   loop/condition headers) are missing; not compilable as shown.  */
2336 finish_taskreg_scan (omp_context
*ctx
)
2338 if (ctx
->record_type
== NULL_TREE
)
2341 /* If any task_shared_vars were needed, verify all
2342 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2343 statements if use_pointer_for_field hasn't changed
2344 because of that. If it did, update field types now. */
2345 if (task_shared_vars
)
2349 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2350 c
; c
= OMP_CLAUSE_CHAIN (c
))
2351 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2352 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2354 tree decl
= OMP_CLAUSE_DECL (c
);
2356 /* Global variables don't need to be copied,
2357 the receiver side will use them directly. */
2358 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2360 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2361 || !use_pointer_for_field (decl
, ctx
))
/* Field must now be a pointer to the decl's type; clear volatility
   and user alignment, then widen the record's alignment to match.  */
2363 tree field
= lookup_field (decl
, ctx
);
2364 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2365 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2367 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2368 TREE_THIS_VOLATILE (field
) = 0;
2369 DECL_USER_ALIGN (field
) = 0;
2370 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2371 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2372 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2373 if (ctx
->srecord_type
)
/* Mirror the same adjustment on the sender-side field.  */
2375 tree sfield
= lookup_sfield (decl
, ctx
);
2376 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2377 TREE_THIS_VOLATILE (sfield
) = 0;
2378 DECL_USER_ALIGN (sfield
) = 0;
2379 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2380 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2381 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
/* Parallel: ensure the _REDUCTEMP_ field (if any) is first, then lay
   out the record and fix up the child's view of it.  */
2386 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2388 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2389 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2392 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2393 expects to find it at the start of data. */
2394 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2395 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2399 *p
= DECL_CHAIN (*p
);
2403 p
= &DECL_CHAIN (*p
);
2404 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2405 TYPE_FIELDS (ctx
->record_type
) = f
;
2407 layout_type (ctx
->record_type
);
2408 fixup_child_record_type (ctx
);
2410 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2412 layout_type (ctx
->record_type
);
2413 fixup_child_record_type (ctx
);
/* Task path: reorder fields so runtime-filled slots come first and
   variable-length (VLA) fields come last.  */
2417 location_t loc
= gimple_location (ctx
->stmt
);
2418 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2420 = omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
2422 /* Move VLA fields to the end. */
2423 p
= &TYPE_FIELDS (ctx
->record_type
);
2425 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2426 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2429 *p
= TREE_CHAIN (*p
);
2430 TREE_CHAIN (*q
) = NULL_TREE
;
2431 q
= &TREE_CHAIN (*q
);
2434 p
= &DECL_CHAIN (*p
);
2436 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2438 /* Move fields corresponding to first and second _looptemp_
2439 clause first. There are filled by GOMP_taskloop
2440 and thus need to be in specific positions. */
2441 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2442 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2443 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2444 OMP_CLAUSE__LOOPTEMP_
);
2445 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2446 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2447 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2448 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
/* Unlink f1/f2/f3 wherever they are, then re-chain them at the head
   of the field list in order f1, f2[, f3].  */
2449 p
= &TYPE_FIELDS (ctx
->record_type
);
2451 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2452 *p
= DECL_CHAIN (*p
);
2454 p
= &DECL_CHAIN (*p
);
2455 DECL_CHAIN (f1
) = f2
;
2458 DECL_CHAIN (f2
) = f3
;
2459 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2462 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2463 TYPE_FIELDS (ctx
->record_type
) = f1
;
2464 if (ctx
->srecord_type
)
/* Same reordering on the sender-side record.  */
2466 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2467 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2469 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2470 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2472 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2473 *p
= DECL_CHAIN (*p
);
2475 p
= &DECL_CHAIN (*p
);
2476 DECL_CHAIN (f1
) = f2
;
2477 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2480 DECL_CHAIN (f2
) = f3
;
2481 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2484 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2485 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2492 /* Look for a firstprivate clause with the detach event handle. */
2493 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2494 c
; c
= OMP_CLAUSE_CHAIN (c
))
2496 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
2498 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c
), ctx
)
2499 == OMP_CLAUSE_DECL (detach_clause
))
2504 field
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2506 /* Move field corresponding to the detach clause first.
2507 This is filled by GOMP_task and needs to be in a
2508 specific position. */
2509 p
= &TYPE_FIELDS (ctx
->record_type
);
2512 *p
= DECL_CHAIN (*p
);
2514 p
= &DECL_CHAIN (*p
);
2515 DECL_CHAIN (field
) = TYPE_FIELDS (ctx
->record_type
);
2516 TYPE_FIELDS (ctx
->record_type
) = field
;
2517 if (ctx
->srecord_type
)
2519 field
= lookup_sfield (OMP_CLAUSE_DECL (c
), ctx
);
2520 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2523 *p
= DECL_CHAIN (*p
);
2525 p
= &DECL_CHAIN (*p
);
2526 DECL_CHAIN (field
) = TYPE_FIELDS (ctx
->srecord_type
);
2527 TYPE_FIELDS (ctx
->srecord_type
) = field
;
/* Finally lay out the (reordered) records and record the task's
   argument size/alignment; a non-constant size (VLAs) is remapped to
   outer-context decls via finish_taskreg_remap.  */
2530 layout_type (ctx
->record_type
);
2531 fixup_child_record_type (ctx
);
2532 if (ctx
->srecord_type
)
2533 layout_type (ctx
->srecord_type
);
2534 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2535 TYPE_SIZE_UNIT (ctx
->record_type
));
2536 if (TREE_CODE (t
) != INTEGER_CST
)
2538 t
= unshare_expr (t
);
2539 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2541 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2542 t
= build_int_cst (long_integer_type_node
,
2543 TYPE_ALIGN_UNIT (ctx
->record_type
));
2544 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2548 /* Find the enclosing offload context. */
/* NOTE(review): fragmented extraction -- the function's body braces and
   return statements are missing from the visible text.  Walks outward
   through ctx->outer looking for a GIMPLE_OMP_TARGET statement.  */
2550 static omp_context
*
2551 enclosing_target_ctx (omp_context
*ctx
)
2553 for (; ctx
; ctx
= ctx
->outer
)
2554 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2560 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2562 (This doesn't include OpenACC 'kernels' decomposed parts.) */
/* NOTE(review): fragmented extraction -- return statements and braces
   are missing.  Walks outward looking for a target statement whose kind
   is GF_OMP_TARGET_KIND_OACC_KERNELS.  */
2565 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2567 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2569 gimple
*stmt
= ctx
->stmt
;
2570 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2571 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2578 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2579 (This doesn't include OpenACC 'kernels' decomposed parts.)
2580 Until kernels handling moves to use the same loop indirection
2581 scheme as parallel, we need to do this checking early. */
/* NOTE(review): fragmented extraction -- breaks, braces and some
   conditions are missing; not compilable as shown.  Recurses outward
   (via ctx->outer) accumulating a gang/worker/vector mask and
   diagnosing conflicting or duplicated parallelism.  */
2584 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2586 bool checking
= true;
2587 unsigned outer_mask
= 0;
2588 unsigned this_mask
= 0;
2589 bool has_seq
= false, has_auto
= false;
2592 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2596 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2598 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
/* Collect this loop's gang/worker/vector/seq/auto clauses.  */
2601 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2603 switch (OMP_CLAUSE_CODE (c
))
2605 case OMP_CLAUSE_GANG
:
2606 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2608 case OMP_CLAUSE_WORKER
:
2609 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2611 case OMP_CLAUSE_VECTOR
:
2612 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2614 case OMP_CLAUSE_SEQ
:
2617 case OMP_CLAUSE_AUTO
:
/* Diagnose seq/auto conflicts and reuse of an outer loop's axis.  */
2627 if (has_seq
&& (this_mask
|| has_auto
))
2628 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2629 " OpenACC loop specifiers");
2630 else if (has_auto
&& this_mask
)
2631 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2632 " OpenACC loop specifiers");
2634 if (this_mask
& outer_mask
)
2635 error_at (gimple_location (stmt
), "inner loop uses same"
2636 " OpenACC parallelism as containing loop");
2639 return outer_mask
| this_mask
;
2642 /* Scan a GIMPLE_OMP_FOR. */
/* NOTE(review): fragmented extraction -- braces, breaks and several
   conditions are missing; not compilable as shown.  Returns the newly
   created omp_context for the loop.  */
2644 static omp_context
*
2645 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2649 tree clauses
= gimple_omp_for_clauses (stmt
);
2651 ctx
= new_omp_context (stmt
, outer_ctx
);
2653 if (is_gimple_omp_oacc (stmt
))
2655 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
/* Outside a kernels region, gang/worker/vector clause arguments are
   rejected with a diagnostic pointing at the enclosing construct.  */
2657 if (!(tgt
&& is_oacc_kernels (tgt
)))
2658 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2661 switch (OMP_CLAUSE_CODE (c
))
2663 case OMP_CLAUSE_GANG
:
2664 c_op0
= OMP_CLAUSE_GANG_EXPR (c
);
2667 case OMP_CLAUSE_WORKER
:
2668 c_op0
= OMP_CLAUSE_WORKER_EXPR (c
);
2671 case OMP_CLAUSE_VECTOR
:
2672 c_op0
= OMP_CLAUSE_VECTOR_EXPR (c
);
2681 /* By construction, this is impossible for OpenACC 'kernels'
2682 decomposed parts. */
2683 gcc_assert (!(tgt
&& is_oacc_kernels_decomposed_part (tgt
)));
2685 error_at (OMP_CLAUSE_LOCATION (c
),
2686 "argument not permitted on %qs clause",
2687 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
2689 inform (gimple_location (tgt
->stmt
),
2690 "enclosing parent compute construct");
2691 else if (oacc_get_fn_attrib (current_function_decl
))
2692 inform (DECL_SOURCE_LOCATION (current_function_decl
),
2693 "enclosing routine");
2699 if (tgt
&& is_oacc_kernels (tgt
))
2700 check_oacc_kernel_gwv (stmt
, ctx
);
2702 /* Collect all variables named in reductions on this loop. Ensure
2703 that, if this loop has a reduction on some variable v, and there is
2704 a reduction on v somewhere in an outer context, then there is a
2705 reduction on v on all intervening loops as well. */
2706 tree local_reduction_clauses
= NULL
;
2707 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2709 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
2710 local_reduction_clauses
2711 = tree_cons (NULL
, c
, local_reduction_clauses
);
/* Lazily compute the outer contexts' combined reduction list.  */
2713 if (ctx
->outer_reduction_clauses
== NULL
&& ctx
->outer
!= NULL
)
2714 ctx
->outer_reduction_clauses
2715 = chainon (unshare_expr (ctx
->outer
->local_reduction_clauses
),
2716 ctx
->outer
->outer_reduction_clauses
);
2717 tree outer_reduction_clauses
= ctx
->outer_reduction_clauses
;
2718 tree local_iter
= local_reduction_clauses
;
2719 for (; local_iter
; local_iter
= TREE_CHAIN (local_iter
))
2721 tree local_clause
= TREE_VALUE (local_iter
);
2722 tree local_var
= OMP_CLAUSE_DECL (local_clause
);
2723 tree_code local_op
= OMP_CLAUSE_REDUCTION_CODE (local_clause
);
2724 bool have_outer_reduction
= false;
2725 tree ctx_iter
= outer_reduction_clauses
;
2726 for (; ctx_iter
; ctx_iter
= TREE_CHAIN (ctx_iter
))
2728 tree outer_clause
= TREE_VALUE (ctx_iter
);
2729 tree outer_var
= OMP_CLAUSE_DECL (outer_clause
);
2730 tree_code outer_op
= OMP_CLAUSE_REDUCTION_CODE (outer_clause
);
/* Same variable reduced with a different operation outside: warn.  */
2731 if (outer_var
== local_var
&& outer_op
!= local_op
)
2733 warning_at (OMP_CLAUSE_LOCATION (local_clause
), 0,
2734 "conflicting reduction operations for %qE",
2736 inform (OMP_CLAUSE_LOCATION (outer_clause
),
2737 "location of the previous reduction for %qE",
2740 if (outer_var
== local_var
)
2742 have_outer_reduction
= true;
2746 if (have_outer_reduction
)
2748 /* There is a reduction on outer_var both on this loop and on
2749 some enclosing loop. Walk up the context tree until such a
2750 loop with a reduction on outer_var is found, and complain
2751 about all intervening loops that do not have such a
2753 struct omp_context
*curr_loop
= ctx
->outer
;
2755 while (curr_loop
!= NULL
)
2757 tree curr_iter
= curr_loop
->local_reduction_clauses
;
2758 for (; curr_iter
; curr_iter
= TREE_CHAIN (curr_iter
))
2760 tree curr_clause
= TREE_VALUE (curr_iter
);
2761 tree curr_var
= OMP_CLAUSE_DECL (curr_clause
);
2762 if (curr_var
== local_var
)
2769 warning_at (gimple_location (curr_loop
->stmt
), 0,
2770 "nested loop in reduction needs "
2771 "reduction clause for %qE",
2775 curr_loop
= curr_loop
->outer
;
2779 ctx
->local_reduction_clauses
= local_reduction_clauses
;
2780 ctx
->outer_reduction_clauses
2781 = chainon (unshare_expr (ctx
->local_reduction_clauses
),
2782 ctx
->outer_reduction_clauses
);
2784 if (tgt
&& is_oacc_kernels (tgt
))
2786 /* Strip out reductions, as they are not handled yet. */
2787 tree
*prev_ptr
= &clauses
;
2789 while (tree probe
= *prev_ptr
)
2791 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2793 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2794 *prev_ptr
= *next_ptr
;
2796 prev_ptr
= next_ptr
;
2799 gimple_omp_for_set_clauses (stmt
, clauses
);
/* Scan clauses, pre-body, each collapsed dimension's control
   operands, and finally the loop body.  */
2803 scan_sharing_clauses (clauses
, ctx
);
2805 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2806 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2808 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2809 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2810 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2811 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2813 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2817 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
/* NOTE(review): fragmented extraction -- the function header's return
   type and braces are missing; not compilable as shown.  Builds
     if (GOMP_USE_SIMT ()) { copy of loop with _SIMT_ clause }
     else { original loop }
   wrapped in a GIMPLE bind, then scans both versions.  */
2820 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2821 omp_context
*outer_ctx
)
2823 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2824 gsi_replace (gsi
, bind
, false);
2825 gimple_seq seq
= NULL
;
2826 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2827 tree cond
= create_tmp_var_raw (integer_type_node
);
2828 DECL_CONTEXT (cond
) = current_function_decl
;
2829 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2830 gimple_bind_set_vars (bind
, cond
);
2831 gimple_call_set_lhs (g
, cond
);
2832 gimple_seq_add_stmt (&seq
, g
);
2833 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2834 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2835 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2836 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2837 gimple_seq_add_stmt (&seq
, g
);
2838 g
= gimple_build_label (lab1
);
2839 gimple_seq_add_stmt (&seq
, g
);
/* SIMT arm: deep copy of the loop with an _SIMT_ clause prepended.  */
2840 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2841 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2842 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2843 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2844 gimple_omp_for_set_clauses (new_stmt
, clause
);
2845 gimple_seq_add_stmt (&seq
, new_stmt
);
2846 g
= gimple_build_goto (lab3
);
2847 gimple_seq_add_stmt (&seq
, g
);
2848 g
= gimple_build_label (lab2
);
2849 gimple_seq_add_stmt (&seq
, g
);
/* SIMD arm: the original loop.  */
2850 gimple_seq_add_stmt (&seq
, stmt
);
2851 g
= gimple_build_label (lab3
);
2852 gimple_seq_add_stmt (&seq
, g
);
2853 gimple_bind_set_body (bind
, seq
);
/* Scan both versions and link the SIMT copy from the SIMD context.  */
2855 scan_omp_for (new_stmt
, outer_ctx
);
2856 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2859 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2860 struct walk_stmt_info
*);
2861 static omp_context
*maybe_lookup_ctx (gimple
*);
2863 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2864 for scan phase loop. */
/* NOTE(review): fragmented extraction -- header line and braces are
   missing; not compilable as shown.  */
2867 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2868 omp_context
*outer_ctx
)
2870 /* The only change between inclusive and exclusive scan will be
2871 within the first simd loop, so just use inclusive in the
2872 worksharing loop. */
2873 outer_ctx
->scan_inclusive
= true;
2874 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2875 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
/* Wrap the original loop in an input-phase GIMPLE_OMP_SCAN and insert
   a scan-phase GIMPLE_OMP_SCAN right after it.  */
2877 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2878 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2879 gsi_replace (gsi
, input_stmt
, false);
2880 gimple_seq input_body
= NULL
;
2881 gimple_seq_add_stmt (&input_body
, stmt
);
2882 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
/* Locate the inner scan separator in the original loop body.  */
2884 gimple_stmt_iterator input1_gsi
= gsi_none ();
2885 struct walk_stmt_info wi
;
2886 memset (&wi
, 0, sizeof (wi
));
2888 wi
.info
= (void *) &input1_gsi
;
2889 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2890 gcc_assert (!gsi_end_p (input1_gsi
));
2892 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
2893 gsi_next (&input1_gsi
);
2894 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
2895 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
2896 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
/* For exclusive scan the two halves are in the opposite order.  */
2897 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2898 std::swap (input_stmt1
, scan_stmt1
);
/* Temporarily detach the input half's body while the whole loop is
   copied, so the copy machinery sees only what it needs.  */
2900 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
2901 gimple_omp_set_body (input_stmt1
, NULL
);
2903 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
2904 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
2906 gimple_omp_set_body (input_stmt1
, input_body1
);
2907 gimple_omp_set_body (scan_stmt1
, NULL
);
/* Find the separator again inside the copy.  */
2909 gimple_stmt_iterator input2_gsi
= gsi_none ();
2910 memset (&wi
, 0, sizeof (wi
));
2912 wi
.info
= (void *) &input2_gsi
;
2913 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
2915 gcc_assert (!gsi_end_p (input2_gsi
));
2917 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
2918 gsi_next (&input2_gsi
);
2919 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
2920 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
2921 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2922 std::swap (input_stmt2
, scan_stmt2
);
2924 gimple_omp_set_body (input_stmt2
, NULL
);
2926 gimple_omp_set_body (input_stmt
, input_body
);
2927 gimple_omp_set_body (scan_stmt
, scan_body
);
/* Scan both phases in fresh contexts and flag the copied loop as the
   scan-phase version.  */
2929 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
2930 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
2932 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
2933 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
2935 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
2938 /* Scan an OpenMP sections directive. */
/* NOTE(review): fragmented extraction -- header line and braces are
   missing.  Creates a context, scans clauses, then scans the body.  */
2941 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2945 ctx
= new_omp_context (stmt
, outer_ctx
);
2946 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2947 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2950 /* Scan an OpenMP single directive. */
/* NOTE(review): fragmented extraction -- header line, declarations and
   braces are missing; not compilable as shown.  Builds a .omp_copy_s
   record for copyprivate data, scans clauses and body, then drops the
   record if empty or lays it out otherwise.  */
2953 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2958 ctx
= new_omp_context (stmt
, outer_ctx
);
2959 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2960 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2961 name
= create_tmp_var_name (".omp_copy_s");
2962 name
= build_decl (gimple_location (stmt
),
2963 TYPE_DECL
, name
, ctx
->record_type
);
2964 TYPE_NAME (ctx
->record_type
) = name
;
2966 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2967 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2969 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2970 ctx
->record_type
= NULL
;
2972 layout_type (ctx
->record_type
);
2975 /* Scan a GIMPLE_OMP_TARGET. */
/* NOTE(review): fragmented extraction -- header line, braces and some
   conditions are missing; not compilable as shown.  */
2978 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2982 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2983 tree clauses
= gimple_omp_target_clauses (stmt
);
/* Build the .omp_data_t record describing the mapped data.  */
2985 ctx
= new_omp_context (stmt
, outer_ctx
);
2986 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2987 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2988 name
= create_tmp_var_name (".omp_data_t");
2989 name
= build_decl (gimple_location (stmt
),
2990 TYPE_DECL
, name
, ctx
->record_type
);
2991 DECL_ARTIFICIAL (name
) = 1;
2992 DECL_NAMELESS (name
) = 1;
2993 TYPE_NAME (ctx
->record_type
) = name
;
2994 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2998 create_omp_child_function (ctx
, false);
2999 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
3002 scan_sharing_clauses (clauses
, ctx
);
3003 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3005 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3006 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
/* Fields were pushed in reverse; restore declaration order, check the
   uniform alignment invariant, then lay the record out.  */
3009 TYPE_FIELDS (ctx
->record_type
)
3010 = nreverse (TYPE_FIELDS (ctx
->record_type
));
3013 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
3014 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
3016 field
= DECL_CHAIN (field
))
3017 gcc_assert (DECL_ALIGN (field
) == align
);
3019 layout_type (ctx
->record_type
);
3021 fixup_child_record_type (ctx
);
/* A target region that mixes a nested teams with directives outside it
   is diagnosed and its body discarded.  */
3024 if (ctx
->teams_nested_p
&& ctx
->nonteams_nested_p
)
3026 error_at (gimple_location (stmt
),
3027 "%<target%> construct with nested %<teams%> construct "
3028 "contains directives outside of the %<teams%> construct");
3029 gimple_omp_set_body (stmt
, gimple_build_bind (NULL
, NULL
, NULL
));
3033 /* Scan an OpenMP teams directive. */
/* NOTE(review): fragmented extraction -- braces and an early-return
   path are missing; not compilable as shown.  Non-host teams only need
   a clause and body scan; host teams additionally build a .omp_data_s
   record and child function like parallel/task.  */
3036 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
3038 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
3040 if (!gimple_omp_teams_host (stmt
))
3042 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
3043 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
/* Host teams path.  */
3046 taskreg_contexts
.safe_push (ctx
);
3047 gcc_assert (taskreg_nesting_level
== 1);
3048 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3049 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3050 tree name
= create_tmp_var_name (".omp_data_s");
3051 name
= build_decl (gimple_location (stmt
),
3052 TYPE_DECL
, name
, ctx
->record_type
);
3053 DECL_ARTIFICIAL (name
) = 1;
3054 DECL_NAMELESS (name
) = 1;
3055 TYPE_NAME (ctx
->record_type
) = name
;
3056 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
3057 create_omp_child_function (ctx
, false);
3058 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
3060 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
3061 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3063 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3064 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
3067 /* Check nesting restrictions. */
3069 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
3073 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3074 inside an OpenACC CTX. */
3075 if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3076 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
)
3077 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3079 else if (!(is_gimple_omp (stmt
)
3080 && is_gimple_omp_oacc (stmt
)))
3082 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3084 error_at (gimple_location (stmt
),
3085 "non-OpenACC construct inside of OpenACC routine");
3089 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
3090 if (is_gimple_omp (octx
->stmt
)
3091 && is_gimple_omp_oacc (octx
->stmt
))
3093 error_at (gimple_location (stmt
),
3094 "non-OpenACC construct inside of OpenACC region");
3101 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
3102 && gimple_omp_target_kind (ctx
->stmt
) == GF_OMP_TARGET_KIND_REGION
)
3104 if (gimple_code (stmt
) == GIMPLE_OMP_TEAMS
&& !ctx
->teams_nested_p
)
3105 ctx
->teams_nested_p
= true;
3107 ctx
->nonteams_nested_p
= true;
3109 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
3111 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
3113 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3114 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3118 if (ctx
->order_concurrent
3119 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
3120 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3121 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
3123 error_at (gimple_location (stmt
),
3124 "OpenMP constructs other than %<parallel%>, %<loop%>"
3125 " or %<simd%> may not be nested inside a region with"
3126 " the %<order(concurrent)%> clause");
3129 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
3131 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3132 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3134 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
3135 && (ctx
->outer
== NULL
3136 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
3137 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
3138 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
3139 != GF_OMP_FOR_KIND_FOR
)
3140 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
3142 error_at (gimple_location (stmt
),
3143 "%<ordered simd threads%> must be closely "
3144 "nested inside of %<%s simd%> region",
3145 lang_GNU_Fortran () ? "do" : "for");
3151 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3152 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
3153 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
3155 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
3156 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
3158 error_at (gimple_location (stmt
),
3159 "OpenMP constructs other than "
3160 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3161 "not be nested inside %<simd%> region");
3164 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
3166 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
3167 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
3168 && omp_find_clause (gimple_omp_for_clauses (stmt
),
3169 OMP_CLAUSE_BIND
) == NULL_TREE
))
3170 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
3172 error_at (gimple_location (stmt
),
3173 "only %<distribute%>, %<parallel%> or %<loop%> "
3174 "regions are allowed to be strictly nested inside "
3175 "%<teams%> region");
3179 else if (ctx
->order_concurrent
3180 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
3181 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
3182 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
3183 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
3186 error_at (gimple_location (stmt
),
3187 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3188 "%<simd%> may not be nested inside a %<loop%> region");
3190 error_at (gimple_location (stmt
),
3191 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3192 "%<simd%> may not be nested inside a region with "
3193 "the %<order(concurrent)%> clause");
3197 switch (gimple_code (stmt
))
3199 case GIMPLE_OMP_FOR
:
3200 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
3202 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
3204 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
3206 error_at (gimple_location (stmt
),
3207 "%<distribute%> region must be strictly nested "
3208 "inside %<teams%> construct");
3213 /* We split taskloop into task and nested taskloop in it. */
3214 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3216 /* For now, hope this will change and loop bind(parallel) will not
3217 be allowed in lots of contexts. */
3218 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
3219 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
3221 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
3226 switch (gimple_code (ctx
->stmt
))
3228 case GIMPLE_OMP_FOR
:
3229 ok
= (gimple_omp_for_kind (ctx
->stmt
)
3230 == GF_OMP_FOR_KIND_OACC_LOOP
);
3233 case GIMPLE_OMP_TARGET
:
3234 switch (gimple_omp_target_kind (ctx
->stmt
))
3236 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3237 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3238 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3239 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3240 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3251 else if (oacc_get_fn_attrib (current_function_decl
))
3255 error_at (gimple_location (stmt
),
3256 "OpenACC loop directive must be associated with"
3257 " an OpenACC compute region");
3263 if (is_gimple_call (stmt
)
3264 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3265 == BUILT_IN_GOMP_CANCEL
3266 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3267 == BUILT_IN_GOMP_CANCELLATION_POINT
))
3269 const char *bad
= NULL
;
3270 const char *kind
= NULL
;
3271 const char *construct
3272 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3273 == BUILT_IN_GOMP_CANCEL
)
3275 : "cancellation point";
3278 error_at (gimple_location (stmt
), "orphaned %qs construct",
3282 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
3283 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
3287 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
3289 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3290 == BUILT_IN_GOMP_CANCEL
3291 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3292 ctx
->cancellable
= true;
3296 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3297 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
3299 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3300 == BUILT_IN_GOMP_CANCEL
3301 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3303 ctx
->cancellable
= true;
3304 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3306 warning_at (gimple_location (stmt
), 0,
3307 "%<cancel for%> inside "
3308 "%<nowait%> for construct");
3309 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3310 OMP_CLAUSE_ORDERED
))
3311 warning_at (gimple_location (stmt
), 0,
3312 "%<cancel for%> inside "
3313 "%<ordered%> for construct");
3318 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
3319 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
3321 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3322 == BUILT_IN_GOMP_CANCEL
3323 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3325 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
3327 ctx
->cancellable
= true;
3328 if (omp_find_clause (gimple_omp_sections_clauses
3331 warning_at (gimple_location (stmt
), 0,
3332 "%<cancel sections%> inside "
3333 "%<nowait%> sections construct");
3337 gcc_assert (ctx
->outer
3338 && gimple_code (ctx
->outer
->stmt
)
3339 == GIMPLE_OMP_SECTIONS
);
3340 ctx
->outer
->cancellable
= true;
3341 if (omp_find_clause (gimple_omp_sections_clauses
3344 warning_at (gimple_location (stmt
), 0,
3345 "%<cancel sections%> inside "
3346 "%<nowait%> sections construct");
3352 if (!is_task_ctx (ctx
)
3353 && (!is_taskloop_ctx (ctx
)
3354 || ctx
->outer
== NULL
3355 || !is_task_ctx (ctx
->outer
)))
3359 for (omp_context
*octx
= ctx
->outer
;
3360 octx
; octx
= octx
->outer
)
3362 switch (gimple_code (octx
->stmt
))
3364 case GIMPLE_OMP_TASKGROUP
:
3366 case GIMPLE_OMP_TARGET
:
3367 if (gimple_omp_target_kind (octx
->stmt
)
3368 != GF_OMP_TARGET_KIND_REGION
)
3371 case GIMPLE_OMP_PARALLEL
:
3372 case GIMPLE_OMP_TEAMS
:
3373 error_at (gimple_location (stmt
),
3374 "%<%s taskgroup%> construct not closely "
3375 "nested inside of %<taskgroup%> region",
3378 case GIMPLE_OMP_TASK
:
3379 if (gimple_omp_task_taskloop_p (octx
->stmt
)
3381 && is_taskloop_ctx (octx
->outer
))
3384 = gimple_omp_for_clauses (octx
->outer
->stmt
);
3385 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
3394 ctx
->cancellable
= true;
3399 error_at (gimple_location (stmt
), "invalid arguments");
3404 error_at (gimple_location (stmt
),
3405 "%<%s %s%> construct not closely nested inside of %qs",
3406 construct
, kind
, bad
);
3411 case GIMPLE_OMP_SECTIONS
:
3412 case GIMPLE_OMP_SINGLE
:
3413 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3414 switch (gimple_code (ctx
->stmt
))
3416 case GIMPLE_OMP_FOR
:
3417 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3418 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3421 case GIMPLE_OMP_SECTIONS
:
3422 case GIMPLE_OMP_SINGLE
:
3423 case GIMPLE_OMP_ORDERED
:
3424 case GIMPLE_OMP_MASTER
:
3425 case GIMPLE_OMP_TASK
:
3426 case GIMPLE_OMP_CRITICAL
:
3427 if (is_gimple_call (stmt
))
3429 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3430 != BUILT_IN_GOMP_BARRIER
)
3432 error_at (gimple_location (stmt
),
3433 "barrier region may not be closely nested inside "
3434 "of work-sharing, %<loop%>, %<critical%>, "
3435 "%<ordered%>, %<master%>, explicit %<task%> or "
3436 "%<taskloop%> region");
3439 error_at (gimple_location (stmt
),
3440 "work-sharing region may not be closely nested inside "
3441 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3442 "%<master%>, explicit %<task%> or %<taskloop%> region");
3444 case GIMPLE_OMP_PARALLEL
:
3445 case GIMPLE_OMP_TEAMS
:
3447 case GIMPLE_OMP_TARGET
:
3448 if (gimple_omp_target_kind (ctx
->stmt
)
3449 == GF_OMP_TARGET_KIND_REGION
)
3456 case GIMPLE_OMP_MASTER
:
3457 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3458 switch (gimple_code (ctx
->stmt
))
3460 case GIMPLE_OMP_FOR
:
3461 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3462 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3465 case GIMPLE_OMP_SECTIONS
:
3466 case GIMPLE_OMP_SINGLE
:
3467 case GIMPLE_OMP_TASK
:
3468 error_at (gimple_location (stmt
),
3469 "%<master%> region may not be closely nested inside "
3470 "of work-sharing, %<loop%>, explicit %<task%> or "
3471 "%<taskloop%> region");
3473 case GIMPLE_OMP_PARALLEL
:
3474 case GIMPLE_OMP_TEAMS
:
3476 case GIMPLE_OMP_TARGET
:
3477 if (gimple_omp_target_kind (ctx
->stmt
)
3478 == GF_OMP_TARGET_KIND_REGION
)
3485 case GIMPLE_OMP_TASK
:
3486 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3487 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3488 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3489 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3491 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3492 error_at (OMP_CLAUSE_LOCATION (c
),
3493 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3494 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3498 case GIMPLE_OMP_ORDERED
:
3499 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3500 c
; c
= OMP_CLAUSE_CHAIN (c
))
3502 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
3504 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3505 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3508 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3509 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
3510 || kind
== OMP_CLAUSE_DEPEND_SINK
)
3513 /* Look for containing ordered(N) loop. */
3515 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3517 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3518 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3520 error_at (OMP_CLAUSE_LOCATION (c
),
3521 "%<ordered%> construct with %<depend%> clause "
3522 "must be closely nested inside an %<ordered%> "
3526 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
3528 error_at (OMP_CLAUSE_LOCATION (c
),
3529 "%<ordered%> construct with %<depend%> clause "
3530 "must be closely nested inside a loop with "
3531 "%<ordered%> clause with a parameter");
3537 error_at (OMP_CLAUSE_LOCATION (c
),
3538 "invalid depend kind in omp %<ordered%> %<depend%>");
3542 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3543 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3545 /* ordered simd must be closely nested inside of simd region,
3546 and simd region must not encounter constructs other than
3547 ordered simd, therefore ordered simd may be either orphaned,
3548 or ctx->stmt must be simd. The latter case is handled already
3552 error_at (gimple_location (stmt
),
3553 "%<ordered%> %<simd%> must be closely nested inside "
3558 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3559 switch (gimple_code (ctx
->stmt
))
3561 case GIMPLE_OMP_CRITICAL
:
3562 case GIMPLE_OMP_TASK
:
3563 case GIMPLE_OMP_ORDERED
:
3564 ordered_in_taskloop
:
3565 error_at (gimple_location (stmt
),
3566 "%<ordered%> region may not be closely nested inside "
3567 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3568 "%<taskloop%> region");
3570 case GIMPLE_OMP_FOR
:
3571 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3572 goto ordered_in_taskloop
;
3574 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3575 OMP_CLAUSE_ORDERED
);
3578 error_at (gimple_location (stmt
),
3579 "%<ordered%> region must be closely nested inside "
3580 "a loop region with an %<ordered%> clause");
3583 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
3584 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
3586 error_at (gimple_location (stmt
),
3587 "%<ordered%> region without %<depend%> clause may "
3588 "not be closely nested inside a loop region with "
3589 "an %<ordered%> clause with a parameter");
3593 case GIMPLE_OMP_TARGET
:
3594 if (gimple_omp_target_kind (ctx
->stmt
)
3595 != GF_OMP_TARGET_KIND_REGION
)
3598 case GIMPLE_OMP_PARALLEL
:
3599 case GIMPLE_OMP_TEAMS
:
3600 error_at (gimple_location (stmt
),
3601 "%<ordered%> region must be closely nested inside "
3602 "a loop region with an %<ordered%> clause");
3608 case GIMPLE_OMP_CRITICAL
:
3611 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3612 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3613 if (gomp_critical
*other_crit
3614 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3615 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3617 error_at (gimple_location (stmt
),
3618 "%<critical%> region may not be nested inside "
3619 "a %<critical%> region with the same name");
3624 case GIMPLE_OMP_TEAMS
:
3627 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3628 || (gimple_omp_target_kind (ctx
->stmt
)
3629 != GF_OMP_TARGET_KIND_REGION
))
3631 /* Teams construct can appear either strictly nested inside of
3632 target construct with no intervening stmts, or can be encountered
3633 only by initial task (so must not appear inside any OpenMP
3635 error_at (gimple_location (stmt
),
3636 "%<teams%> construct must be closely nested inside of "
3637 "%<target%> construct or not nested in any OpenMP "
3642 case GIMPLE_OMP_TARGET
:
3643 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3644 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3645 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3646 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3648 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3649 error_at (OMP_CLAUSE_LOCATION (c
),
3650 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3651 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3654 if (is_gimple_omp_offloaded (stmt
)
3655 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3657 error_at (gimple_location (stmt
),
3658 "OpenACC region inside of OpenACC routine, nested "
3659 "parallelism not supported yet");
3662 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3664 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3666 if (is_gimple_omp (stmt
)
3667 && is_gimple_omp_oacc (stmt
)
3668 && is_gimple_omp (ctx
->stmt
))
3670 error_at (gimple_location (stmt
),
3671 "OpenACC construct inside of non-OpenACC region");
3677 const char *stmt_name
, *ctx_stmt_name
;
3678 switch (gimple_omp_target_kind (stmt
))
3680 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3681 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3682 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3683 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3684 stmt_name
= "target enter data"; break;
3685 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3686 stmt_name
= "target exit data"; break;
3687 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3688 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3689 case GF_OMP_TARGET_KIND_OACC_SERIAL
: stmt_name
= "serial"; break;
3690 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3691 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3692 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
3693 stmt_name
= "enter data"; break;
3694 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
3695 stmt_name
= "exit data"; break;
3696 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3697 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3699 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3700 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3701 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3702 /* OpenACC 'kernels' decomposed parts. */
3703 stmt_name
= "kernels"; break;
3704 default: gcc_unreachable ();
3706 switch (gimple_omp_target_kind (ctx
->stmt
))
3708 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3709 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3710 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3711 ctx_stmt_name
= "parallel"; break;
3712 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3713 ctx_stmt_name
= "kernels"; break;
3714 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3715 ctx_stmt_name
= "serial"; break;
3716 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3717 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3718 ctx_stmt_name
= "host_data"; break;
3719 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3720 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3721 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3722 /* OpenACC 'kernels' decomposed parts. */
3723 ctx_stmt_name
= "kernels"; break;
3724 default: gcc_unreachable ();
3727 /* OpenACC/OpenMP mismatch? */
3728 if (is_gimple_omp_oacc (stmt
)
3729 != is_gimple_omp_oacc (ctx
->stmt
))
3731 error_at (gimple_location (stmt
),
3732 "%s %qs construct inside of %s %qs region",
3733 (is_gimple_omp_oacc (stmt
)
3734 ? "OpenACC" : "OpenMP"), stmt_name
,
3735 (is_gimple_omp_oacc (ctx
->stmt
)
3736 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3739 if (is_gimple_omp_offloaded (ctx
->stmt
))
3741 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3742 if (is_gimple_omp_oacc (ctx
->stmt
))
3744 error_at (gimple_location (stmt
),
3745 "%qs construct inside of %qs region",
3746 stmt_name
, ctx_stmt_name
);
3751 warning_at (gimple_location (stmt
), 0,
3752 "%qs construct inside of %qs region",
3753 stmt_name
, ctx_stmt_name
);
3765 /* Helper function scan_omp.
3767 Callback for walk_tree or operators in walk_gimple_stmt used to
3768 scan for OMP directives in TP. */
3771 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3773 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3774 omp_context
*ctx
= (omp_context
*) wi
->info
;
3777 switch (TREE_CODE (t
))
3785 tree repl
= remap_decl (t
, &ctx
->cb
);
3786 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3792 if (ctx
&& TYPE_P (t
))
3793 *tp
= remap_type (t
, &ctx
->cb
);
3794 else if (!DECL_P (t
))
3799 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3800 if (tem
!= TREE_TYPE (t
))
3802 if (TREE_CODE (t
) == INTEGER_CST
)
3803 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3805 TREE_TYPE (t
) = tem
;
3815 /* Return true if FNDECL is a setjmp or a longjmp. */
3818 setjmp_or_longjmp_p (const_tree fndecl
)
3820 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3821 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3824 tree declname
= DECL_NAME (fndecl
);
3826 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3827 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3828 || !TREE_PUBLIC (fndecl
))
3831 const char *name
= IDENTIFIER_POINTER (declname
);
3832 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3835 /* Return true if FNDECL is an omp_* runtime API call. */
3838 omp_runtime_api_call (const_tree fndecl
)
3840 tree declname
= DECL_NAME (fndecl
);
3842 || (DECL_CONTEXT (fndecl
) != NULL_TREE
3843 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
3844 || !TREE_PUBLIC (fndecl
))
3847 const char *name
= IDENTIFIER_POINTER (declname
);
3848 if (!startswith (name
, "omp_"))
3851 static const char *omp_runtime_apis
[] =
3853 /* This array has 3 sections. First omp_* calls that don't
3854 have any suffixes. */
3856 "target_associate_ptr",
3857 "target_disassociate_ptr",
3859 "target_is_present",
3861 "target_memcpy_rect",
3863 /* Now omp_* calls that are available as omp_* and omp_*_. */
3866 "destroy_nest_lock",
3869 "get_affinity_format",
3871 "get_default_device",
3873 "get_initial_device",
3875 "get_max_active_levels",
3876 "get_max_task_priority",
3884 "get_partition_num_places",
3896 "is_initial_device",
3898 "pause_resource_all",
3899 "set_affinity_format",
3907 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3908 "get_ancestor_thread_num",
3909 "get_partition_place_nums",
3910 "get_place_num_procs",
3911 "get_place_proc_ids",
3914 "set_default_device",
3916 "set_max_active_levels",
3923 for (unsigned i
= 0; i
< ARRAY_SIZE (omp_runtime_apis
); i
++)
3925 if (omp_runtime_apis
[i
] == NULL
)
3930 size_t len
= strlen (omp_runtime_apis
[i
]);
3931 if (strncmp (name
+ 4, omp_runtime_apis
[i
], len
) == 0
3932 && (name
[4 + len
] == '\0'
3934 && name
[4 + len
] == '_'
3935 && (name
[4 + len
+ 1] == '\0'
3937 && strcmp (name
+ 4 + len
+ 1, "8_") == 0)))))
3943 /* Helper function for scan_omp.
3945 Callback for walk_gimple_stmt used to scan for OMP directives in
3946 the current statement in GSI. */
3949 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3950 struct walk_stmt_info
*wi
)
3952 gimple
*stmt
= gsi_stmt (*gsi
);
3953 omp_context
*ctx
= (omp_context
*) wi
->info
;
3955 if (gimple_has_location (stmt
))
3956 input_location
= gimple_location (stmt
);
3958 /* Check the nesting restrictions. */
3959 bool remove
= false;
3960 if (is_gimple_omp (stmt
))
3961 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3962 else if (is_gimple_call (stmt
))
3964 tree fndecl
= gimple_call_fndecl (stmt
);
3968 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3969 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3970 && setjmp_or_longjmp_p (fndecl
)
3974 error_at (gimple_location (stmt
),
3975 "setjmp/longjmp inside %<simd%> construct");
3977 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3978 switch (DECL_FUNCTION_CODE (fndecl
))
3980 case BUILT_IN_GOMP_BARRIER
:
3981 case BUILT_IN_GOMP_CANCEL
:
3982 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3983 case BUILT_IN_GOMP_TASKYIELD
:
3984 case BUILT_IN_GOMP_TASKWAIT
:
3985 case BUILT_IN_GOMP_TASKGROUP_START
:
3986 case BUILT_IN_GOMP_TASKGROUP_END
:
3987 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3994 omp_context
*octx
= ctx
;
3995 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
&& ctx
->outer
)
3997 if (octx
->order_concurrent
&& omp_runtime_api_call (fndecl
))
4000 error_at (gimple_location (stmt
),
4001 "OpenMP runtime API call %qD in a region with "
4002 "%<order(concurrent)%> clause", fndecl
);
4009 stmt
= gimple_build_nop ();
4010 gsi_replace (gsi
, stmt
, false);
4013 *handled_ops_p
= true;
4015 switch (gimple_code (stmt
))
4017 case GIMPLE_OMP_PARALLEL
:
4018 taskreg_nesting_level
++;
4019 scan_omp_parallel (gsi
, ctx
);
4020 taskreg_nesting_level
--;
4023 case GIMPLE_OMP_TASK
:
4024 taskreg_nesting_level
++;
4025 scan_omp_task (gsi
, ctx
);
4026 taskreg_nesting_level
--;
4029 case GIMPLE_OMP_FOR
:
4030 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
4031 == GF_OMP_FOR_KIND_SIMD
)
4032 && gimple_omp_for_combined_into_p (stmt
)
4033 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
4035 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
4036 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
);
4037 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
4039 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
4043 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
4044 == GF_OMP_FOR_KIND_SIMD
)
4045 && omp_maybe_offloaded_ctx (ctx
)
4046 && omp_max_simt_vf ()
4047 && gimple_omp_for_collapse (stmt
) == 1)
4048 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
4050 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
4053 case GIMPLE_OMP_SECTIONS
:
4054 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
4057 case GIMPLE_OMP_SINGLE
:
4058 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
4061 case GIMPLE_OMP_SCAN
:
4062 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
4064 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
4065 ctx
->scan_inclusive
= true;
4066 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
4067 ctx
->scan_exclusive
= true;
4070 case GIMPLE_OMP_SECTION
:
4071 case GIMPLE_OMP_MASTER
:
4072 case GIMPLE_OMP_ORDERED
:
4073 case GIMPLE_OMP_CRITICAL
:
4074 ctx
= new_omp_context (stmt
, ctx
);
4075 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4078 case GIMPLE_OMP_TASKGROUP
:
4079 ctx
= new_omp_context (stmt
, ctx
);
4080 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
4081 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4084 case GIMPLE_OMP_TARGET
:
4085 if (is_gimple_omp_offloaded (stmt
))
4087 taskreg_nesting_level
++;
4088 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
4089 taskreg_nesting_level
--;
4092 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
4095 case GIMPLE_OMP_TEAMS
:
4096 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
4098 taskreg_nesting_level
++;
4099 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
4100 taskreg_nesting_level
--;
4103 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
4110 *handled_ops_p
= false;
4112 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
4114 var
= DECL_CHAIN (var
))
4115 insert_decl_map (&ctx
->cb
, var
, var
);
4119 *handled_ops_p
= false;
4127 /* Scan all the statements starting at the current statement. CTX
4128 contains context information about the OMP directives and
4129 clauses found during the scan. */
4132 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
4134 location_t saved_location
;
4135 struct walk_stmt_info wi
;
4137 memset (&wi
, 0, sizeof (wi
));
4139 wi
.want_locations
= true;
4141 saved_location
= input_location
;
4142 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
4143 input_location
= saved_location
;
4146 /* Re-gimplification and code generation routines. */
4148 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4149 of BIND if in a method. */
4152 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
4154 if (DECL_ARGUMENTS (current_function_decl
)
4155 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
4156 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
4159 tree vars
= gimple_bind_vars (bind
);
4160 for (tree
*pvar
= &vars
; *pvar
; )
4161 if (omp_member_access_dummy_var (*pvar
))
4162 *pvar
= DECL_CHAIN (*pvar
);
4164 pvar
= &DECL_CHAIN (*pvar
);
4165 gimple_bind_set_vars (bind
, vars
);
4169 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4170 block and its subblocks. */
4173 remove_member_access_dummy_vars (tree block
)
4175 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
4176 if (omp_member_access_dummy_var (*pvar
))
4177 *pvar
= DECL_CHAIN (*pvar
);
4179 pvar
= &DECL_CHAIN (*pvar
);
4181 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
4182 remove_member_access_dummy_vars (block
);
4185 /* If a context was created for STMT when it was scanned, return it. */
4187 static omp_context
*
4188 maybe_lookup_ctx (gimple
*stmt
)
4191 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
4192 return n
? (omp_context
*) n
->value
: NULL
;
4196 /* Find the mapping for DECL in CTX or the immediately enclosing
4197 context that has a mapping for DECL.
4199 If CTX is a nested parallel directive, we may have to use the decl
4200 mappings created in CTX's parent context. Suppose that we have the
4201 following parallel nesting (variable UIDs showed for clarity):
4204 #omp parallel shared(iD.1562) -> outer parallel
4205 iD.1562 = iD.1562 + 1;
4207 #omp parallel shared (iD.1562) -> inner parallel
4208 iD.1562 = iD.1562 - 1;
4210 Each parallel structure will create a distinct .omp_data_s structure
4211 for copying iD.1562 in/out of the directive:
4213 outer parallel .omp_data_s.1.i -> iD.1562
4214 inner parallel .omp_data_s.2.i -> iD.1562
4216 A shared variable mapping will produce a copy-out operation before
4217 the parallel directive and a copy-in operation after it. So, in
4218 this case we would have:
4221 .omp_data_o.1.i = iD.1562;
4222 #omp parallel shared(iD.1562) -> outer parallel
4223 .omp_data_i.1 = &.omp_data_o.1
4224 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4226 .omp_data_o.2.i = iD.1562; -> **
4227 #omp parallel shared(iD.1562) -> inner parallel
4228 .omp_data_i.2 = &.omp_data_o.2
4229 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4232 ** This is a problem. The symbol iD.1562 cannot be referenced
4233 inside the body of the outer parallel region. But since we are
4234 emitting this copy operation while expanding the inner parallel
4235 directive, we need to access the CTX structure of the outer
4236 parallel directive to get the correct mapping:
4238 .omp_data_o.2.i = .omp_data_i.1->i
4240 Since there may be other workshare or parallel directives enclosing
4241 the parallel directive, it may be necessary to walk up the context
4242 parent chain. This is not a problem in general because nested
4243 parallelism happens only rarely. */
4246 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4251 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4252 t
= maybe_lookup_decl (decl
, up
);
4254 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
4256 return t
? t
: decl
;
4260 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4261 in outer contexts. */
4264 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4269 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4270 t
= maybe_lookup_decl (decl
, up
);
4272 return t
? t
: decl
;
4276 /* Construct the initialization value for reduction operation OP. */
4279 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
4288 case TRUTH_ORIF_EXPR
:
4289 case TRUTH_XOR_EXPR
:
4291 return build_zero_cst (type
);
4294 case TRUTH_AND_EXPR
:
4295 case TRUTH_ANDIF_EXPR
:
4297 return fold_convert_loc (loc
, type
, integer_one_node
);
4300 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
4303 if (SCALAR_FLOAT_TYPE_P (type
))
4305 REAL_VALUE_TYPE max
, min
;
4306 if (HONOR_INFINITIES (type
))
4309 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
4312 real_maxval (&min
, 1, TYPE_MODE (type
));
4313 return build_real (type
, min
);
4315 else if (POINTER_TYPE_P (type
))
4318 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4319 return wide_int_to_tree (type
, min
);
4323 gcc_assert (INTEGRAL_TYPE_P (type
));
4324 return TYPE_MIN_VALUE (type
);
4328 if (SCALAR_FLOAT_TYPE_P (type
))
4330 REAL_VALUE_TYPE max
;
4331 if (HONOR_INFINITIES (type
))
4334 real_maxval (&max
, 0, TYPE_MODE (type
));
4335 return build_real (type
, max
);
4337 else if (POINTER_TYPE_P (type
))
4340 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4341 return wide_int_to_tree (type
, max
);
4345 gcc_assert (INTEGRAL_TYPE_P (type
));
4346 return TYPE_MAX_VALUE (type
);
4354 /* Construct the initialization value for reduction CLAUSE. */
4357 omp_reduction_init (tree clause
, tree type
)
4359 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
4360 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
4363 /* Return alignment to be assumed for var in CLAUSE, which should be
4364 OMP_CLAUSE_ALIGNED. */
4367 omp_clause_aligned_alignment (tree clause
)
4369 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
4370 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
4372 /* Otherwise return implementation defined alignment. */
4373 unsigned int al
= 1;
4374 opt_scalar_mode mode_iter
;
4375 auto_vector_modes modes
;
4376 targetm
.vectorize
.autovectorize_vector_modes (&modes
, true);
4377 static enum mode_class classes
[]
4378 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
4379 for (int i
= 0; i
< 4; i
+= 2)
4380 /* The for loop above dictates that we only walk through scalar classes. */
4381 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
4383 scalar_mode mode
= mode_iter
.require ();
4384 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
4385 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
4387 machine_mode alt_vmode
;
4388 for (unsigned int j
= 0; j
< modes
.length (); ++j
)
4389 if (related_vector_mode (modes
[j
], mode
).exists (&alt_vmode
)
4390 && known_ge (GET_MODE_SIZE (alt_vmode
), GET_MODE_SIZE (vmode
)))
4393 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
4394 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
4396 type
= build_vector_type_for_mode (type
, vmode
);
4397 if (TYPE_MODE (type
) != vmode
)
4399 if (TYPE_ALIGN_UNIT (type
) > al
)
4400 al
= TYPE_ALIGN_UNIT (type
);
4402 return build_int_cst (integer_type_node
, al
);
4406 /* This structure is part of the interface between lower_rec_simd_input_clauses
4407 and lower_rec_input_clauses. */
4409 class omplow_simd_context
{
4411 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4415 vec
<tree
, va_heap
> simt_eargs
;
4416 gimple_seq simt_dlist
;
4417 poly_uint64_pod max_vf
;
4421 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4425 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4426 omplow_simd_context
*sctx
, tree
&ivar
,
4427 tree
&lvar
, tree
*rvar
= NULL
,
4430 if (known_eq (sctx
->max_vf
, 0U))
4432 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
4433 if (maybe_gt (sctx
->max_vf
, 1U))
4435 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4436 OMP_CLAUSE_SAFELEN
);
4439 poly_uint64 safe_len
;
4440 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4441 || maybe_lt (safe_len
, 1U))
4444 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
4447 if (sctx
->is_simt
&& !known_eq (sctx
->max_vf
, 1U))
4449 for (tree c
= gimple_omp_for_clauses (ctx
->stmt
); c
;
4450 c
= OMP_CLAUSE_CHAIN (c
))
4452 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4455 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4457 /* UDR reductions are not supported yet for SIMT, disable
4463 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c
))
4464 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var
)))
4466 /* Doing boolean operations on non-integral types is
4467 for conformance only, it's not worth supporting this
4474 if (maybe_gt (sctx
->max_vf
, 1U))
4476 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4477 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4480 if (known_eq (sctx
->max_vf
, 1U))
4485 if (is_gimple_reg (new_var
))
4487 ivar
= lvar
= new_var
;
4490 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4491 ivar
= lvar
= create_tmp_var (type
);
4492 TREE_ADDRESSABLE (ivar
) = 1;
4493 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4494 NULL
, DECL_ATTRIBUTES (ivar
));
4495 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4496 tree clobber
= build_clobber (type
);
4497 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4498 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
4502 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4503 tree avar
= create_tmp_var_raw (atype
);
4504 if (TREE_ADDRESSABLE (new_var
))
4505 TREE_ADDRESSABLE (avar
) = 1;
4506 DECL_ATTRIBUTES (avar
)
4507 = tree_cons (get_identifier ("omp simd array"), NULL
,
4508 DECL_ATTRIBUTES (avar
));
4509 gimple_add_tmp_var (avar
);
4511 if (rvar
&& !ctx
->for_simd_scan_phase
)
4513 /* For inscan reductions, create another array temporary,
4514 which will hold the reduced value. */
4515 iavar
= create_tmp_var_raw (atype
);
4516 if (TREE_ADDRESSABLE (new_var
))
4517 TREE_ADDRESSABLE (iavar
) = 1;
4518 DECL_ATTRIBUTES (iavar
)
4519 = tree_cons (get_identifier ("omp simd array"), NULL
,
4520 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4521 DECL_ATTRIBUTES (iavar
)));
4522 gimple_add_tmp_var (iavar
);
4523 ctx
->cb
.decl_map
->put (avar
, iavar
);
4524 if (sctx
->lastlane
== NULL_TREE
)
4525 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
4526 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4527 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4528 TREE_THIS_NOTRAP (*rvar
) = 1;
4530 if (ctx
->scan_exclusive
)
4532 /* And for exclusive scan yet another one, which will
4533 hold the value during the scan phase. */
4534 tree savar
= create_tmp_var_raw (atype
);
4535 if (TREE_ADDRESSABLE (new_var
))
4536 TREE_ADDRESSABLE (savar
) = 1;
4537 DECL_ATTRIBUTES (savar
)
4538 = tree_cons (get_identifier ("omp simd array"), NULL
,
4539 tree_cons (get_identifier ("omp simd inscan "
4541 DECL_ATTRIBUTES (savar
)));
4542 gimple_add_tmp_var (savar
);
4543 ctx
->cb
.decl_map
->put (iavar
, savar
);
4544 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4545 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4546 TREE_THIS_NOTRAP (*rvar2
) = 1;
4549 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4550 NULL_TREE
, NULL_TREE
);
4551 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4552 NULL_TREE
, NULL_TREE
);
4553 TREE_THIS_NOTRAP (ivar
) = 1;
4554 TREE_THIS_NOTRAP (lvar
) = 1;
4556 if (DECL_P (new_var
))
4558 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4559 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4564 /* Helper function of lower_rec_input_clauses. For a reference
4565 in simd reduction, add an underlying variable it will reference. */
4568 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
4570 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4571 if (TREE_CONSTANT (z
))
4573 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4574 get_name (new_vard
));
4575 gimple_add_tmp_var (z
);
4576 TREE_ADDRESSABLE (z
) = 1;
4577 z
= build_fold_addr_expr_loc (loc
, z
);
4578 gimplify_assign (new_vard
, z
, ilist
);
4582 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4583 code to emit (type) (tskred_temp[idx]). */
4586 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
4589 unsigned HOST_WIDE_INT sz
4590 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
4591 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4592 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4594 tree v
= create_tmp_var (pointer_sized_int_node
);
4595 gimple
*g
= gimple_build_assign (v
, r
);
4596 gimple_seq_add_stmt (ilist
, g
);
4597 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4599 v
= create_tmp_var (type
);
4600 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4601 gimple_seq_add_stmt (ilist
, g
);
/* Lower early initialization of privatized variable NEW_VAR
   if it needs an allocator (has allocate clause).  On success sets
   ALLOCATOR to the omp_allocator_handle_t expression to use and
   ALLOCATE_PTR to the pointer returned by GOMP_alloc, and returns true;
   returns false when no allocate clause applies.  IS_REF is true when
   NEW_VAR is the pointer behind a privatized reference; SIZE, if
   non-NULL, overrides the allocation size (required when NEW_VAR is a
   type rather than a decl).  */

static bool
lower_private_allocate (tree var, tree new_var, tree &allocator,
			tree &allocate_ptr, gimple_seq *ilist,
			omp_context *ctx, bool is_ref, tree size)
{
  /* An allocator already chosen by an earlier call means nothing
     further to do here.  */
  if (allocator)
    return false;
  gcc_assert (allocate_ptr == NULL_TREE);
  /* Look VAR up in the allocate clause map of this context.  */
  if (ctx->allocate_map
      && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
    if (tree *allocatorp = ctx->allocate_map->get (var))
      allocator = *allocatorp;
  if (allocator == NULL_TREE)
    return false;
  if (!is_ref && omp_is_reference (var))
    {
      /* References are handled on the is_ref pass; don't allocate
	 for the underlying pointer here.  */
      allocator = NULL_TREE;
      return false;
    }

  /* Materialize the allocator handle as a gimple value.  Non-constant
     allocators come from the outer context and are evaluated once into
     a temporary.  */
  if (TREE_CODE (allocator) != INTEGER_CST)
    allocator = build_outer_var_ref (allocator, ctx);
  allocator = fold_convert (pointer_sized_int_node, allocator);
  if (TREE_CODE (allocator) != INTEGER_CST)
    {
      tree var = create_tmp_var (TREE_TYPE (allocator));
      gimplify_assign (var, allocator, ilist);
      allocator = var;
    }

  /* Determine the pointer type, alignment and size arguments for the
     GOMP_alloc call, depending on whether NEW_VAR is a bare type, a
     reference or an ordinary decl.  */
  tree ptr_type, align, sz = size;
  if (TYPE_P (new_var))
    {
      ptr_type = build_pointer_type (new_var);
      align = build_int_cst (size_type_node, TYPE_ALIGN_UNIT (new_var));
    }
  else if (is_ref)
    {
      ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
      align = build_int_cst (size_type_node,
			     TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
    }
  else
    {
      ptr_type = build_pointer_type (TREE_TYPE (new_var));
      align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (new_var));
      if (sz == NULL_TREE)
	sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
    }
  /* A non-constant size is evaluated once into a temporary.  */
  if (TREE_CODE (sz) != INTEGER_CST)
    {
      tree szvar = create_tmp_var (size_type_node);
      gimplify_assign (szvar, sz, ilist);
      sz = szvar;
    }
  /* Emit allocate_ptr = GOMP_alloc (align, sz, allocator).  */
  allocate_ptr = create_tmp_var (ptr_type);
  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
  gimple *g = gimple_build_call (a, 3, align, sz, allocator);
  gimple_call_set_lhs (g, allocate_ptr);
  gimple_seq_add_stmt (ilist, g);
  if (!is_ref)
    {
      /* Redirect uses of NEW_VAR to the freshly allocated storage;
	 for references the caller installs the pointer itself.  */
      tree x = build_simple_mem_ref (allocate_ptr);
      TREE_THIS_NOTRAP (x) = 1;
      SET_DECL_VALUE_EXPR (new_var, x);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4679 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4680 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4681 private variables. Initialization statements go in ILIST, while calls
4682 to destructors go in DLIST. */
4685 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4686 omp_context
*ctx
, struct omp_for_data
*fd
)
4688 tree c
, copyin_seq
, x
, ptr
;
4689 bool copyin_by_ref
= false;
4690 bool lastprivate_firstprivate
= false;
4691 bool reduction_omp_orig_ref
= false;
4693 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4694 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4695 omplow_simd_context sctx
= omplow_simd_context ();
4696 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4697 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4698 gimple_seq llist
[4] = { };
4699 tree nonconst_simd_if
= NULL_TREE
;
4702 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4704 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4705 with data sharing clauses referencing variable sized vars. That
4706 is unnecessarily hard to support and very unlikely to result in
4707 vectorized code anyway. */
4709 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4710 switch (OMP_CLAUSE_CODE (c
))
4712 case OMP_CLAUSE_LINEAR
:
4713 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4716 case OMP_CLAUSE_PRIVATE
:
4717 case OMP_CLAUSE_FIRSTPRIVATE
:
4718 case OMP_CLAUSE_LASTPRIVATE
:
4719 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4721 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4723 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4724 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4728 case OMP_CLAUSE_REDUCTION
:
4729 case OMP_CLAUSE_IN_REDUCTION
:
4730 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4731 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4733 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4735 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4736 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4741 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4743 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4744 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4746 case OMP_CLAUSE_SIMDLEN
:
4747 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4750 case OMP_CLAUSE__CONDTEMP_
:
4751 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4759 /* Add a placeholder for simduid. */
4760 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4761 sctx
.simt_eargs
.safe_push (NULL_TREE
);
4763 unsigned task_reduction_cnt
= 0;
4764 unsigned task_reduction_cntorig
= 0;
4765 unsigned task_reduction_cnt_full
= 0;
4766 unsigned task_reduction_cntorig_full
= 0;
4767 unsigned task_reduction_other_cnt
= 0;
4768 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
4769 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
4770 /* Do all the fixed sized types in the first pass, and the variable sized
4771 types in the second pass. This makes sure that the scalar arguments to
4772 the variable sized types are processed before we use them in the
4773 variable sized operations. For task reductions we use 4 passes, in the
4774 first two we ignore them, in the third one gather arguments for
4775 GOMP_task_reduction_remap call and in the last pass actually handle
4776 the task reductions. */
4777 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
4780 if (pass
== 2 && task_reduction_cnt
)
4783 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
4784 + task_reduction_cntorig
);
4785 tskred_avar
= create_tmp_var_raw (tskred_atype
);
4786 gimple_add_tmp_var (tskred_avar
);
4787 TREE_ADDRESSABLE (tskred_avar
) = 1;
4788 task_reduction_cnt_full
= task_reduction_cnt
;
4789 task_reduction_cntorig_full
= task_reduction_cntorig
;
4791 else if (pass
== 3 && task_reduction_cnt
)
4793 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
4795 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
4796 size_int (task_reduction_cntorig
),
4797 build_fold_addr_expr (tskred_avar
));
4798 gimple_seq_add_stmt (ilist
, g
);
4800 if (pass
== 3 && task_reduction_other_cnt
)
4802 /* For reduction clauses, build
4803 tskred_base = (void *) tskred_temp[2]
4804 + omp_get_thread_num () * tskred_temp[1]
4805 or if tskred_temp[1] is known to be constant, that constant
4806 directly. This is the start of the private reduction copy block
4807 for the current thread. */
4808 tree v
= create_tmp_var (integer_type_node
);
4809 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
4810 gimple
*g
= gimple_build_call (x
, 0);
4811 gimple_call_set_lhs (g
, v
);
4812 gimple_seq_add_stmt (ilist
, g
);
4813 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
4814 tskred_temp
= OMP_CLAUSE_DECL (c
);
4815 if (is_taskreg_ctx (ctx
))
4816 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
4817 tree v2
= create_tmp_var (sizetype
);
4818 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
4819 gimple_seq_add_stmt (ilist
, g
);
4820 if (ctx
->task_reductions
[0])
4821 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
4823 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
4824 tree v3
= create_tmp_var (sizetype
);
4825 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
4826 gimple_seq_add_stmt (ilist
, g
);
4827 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
4828 tskred_base
= create_tmp_var (ptr_type_node
);
4829 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
4830 gimple_seq_add_stmt (ilist
, g
);
4832 task_reduction_cnt
= 0;
4833 task_reduction_cntorig
= 0;
4834 task_reduction_other_cnt
= 0;
4835 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4837 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
4840 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4841 bool task_reduction_p
= false;
4842 bool task_reduction_needs_orig_p
= false;
4843 tree cond
= NULL_TREE
;
4844 tree allocator
, allocate_ptr
;
4848 case OMP_CLAUSE_PRIVATE
:
4849 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
4852 case OMP_CLAUSE_SHARED
:
4853 /* Ignore shared directives in teams construct inside
4854 of target construct. */
4855 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4856 && !is_host_teams_ctx (ctx
))
4858 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
4860 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
4861 || is_global_var (OMP_CLAUSE_DECL (c
)));
4864 case OMP_CLAUSE_FIRSTPRIVATE
:
4865 case OMP_CLAUSE_COPYIN
:
4867 case OMP_CLAUSE_LINEAR
:
4868 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
4869 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4870 lastprivate_firstprivate
= true;
4872 case OMP_CLAUSE_REDUCTION
:
4873 case OMP_CLAUSE_IN_REDUCTION
:
4874 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
4875 || is_task_ctx (ctx
)
4876 || OMP_CLAUSE_REDUCTION_TASK (c
))
4878 task_reduction_p
= true;
4879 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4881 task_reduction_other_cnt
++;
4886 task_reduction_cnt
++;
4887 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4889 var
= OMP_CLAUSE_DECL (c
);
4890 /* If var is a global variable that isn't privatized
4891 in outer contexts, we don't need to look up the
4892 original address, it is always the address of the
4893 global variable itself. */
4895 || omp_is_reference (var
)
4897 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4899 task_reduction_needs_orig_p
= true;
4900 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4901 task_reduction_cntorig
++;
4905 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4906 reduction_omp_orig_ref
= true;
4908 case OMP_CLAUSE__REDUCTEMP_
:
4909 if (!is_taskreg_ctx (ctx
))
4912 case OMP_CLAUSE__LOOPTEMP_
:
4913 /* Handle _looptemp_/_reductemp_ clauses only on
4918 case OMP_CLAUSE_LASTPRIVATE
:
4919 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4921 lastprivate_firstprivate
= true;
4922 if (pass
!= 0 || is_taskloop_ctx (ctx
))
4925 /* Even without corresponding firstprivate, if
4926 decl is Fortran allocatable, it needs outer var
4929 && lang_hooks
.decls
.omp_private_outer_ref
4930 (OMP_CLAUSE_DECL (c
)))
4931 lastprivate_firstprivate
= true;
4933 case OMP_CLAUSE_ALIGNED
:
4936 var
= OMP_CLAUSE_DECL (c
);
4937 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
4938 && !is_global_var (var
))
4940 new_var
= maybe_lookup_decl (var
, ctx
);
4941 if (new_var
== NULL_TREE
)
4942 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4943 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4944 tree alarg
= omp_clause_aligned_alignment (c
);
4945 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4946 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
4947 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4948 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4949 gimplify_and_add (x
, ilist
);
4951 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
4952 && is_global_var (var
))
4954 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
4955 new_var
= lookup_decl (var
, ctx
);
4956 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4957 t
= build_fold_addr_expr_loc (clause_loc
, t
);
4958 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4959 tree alarg
= omp_clause_aligned_alignment (c
);
4960 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4961 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
4962 t
= fold_convert_loc (clause_loc
, ptype
, t
);
4963 x
= create_tmp_var (ptype
);
4964 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
4965 gimplify_and_add (t
, ilist
);
4966 t
= build_simple_mem_ref_loc (clause_loc
, x
);
4967 SET_DECL_VALUE_EXPR (new_var
, t
);
4968 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4971 case OMP_CLAUSE__CONDTEMP_
:
4972 if (is_parallel_ctx (ctx
)
4973 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
4980 if (task_reduction_p
!= (pass
>= 2))
4983 allocator
= NULL_TREE
;
4984 allocate_ptr
= NULL_TREE
;
4985 new_var
= var
= OMP_CLAUSE_DECL (c
);
4986 if ((c_kind
== OMP_CLAUSE_REDUCTION
4987 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4988 && TREE_CODE (var
) == MEM_REF
)
4990 var
= TREE_OPERAND (var
, 0);
4991 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
4992 var
= TREE_OPERAND (var
, 0);
4993 if (TREE_CODE (var
) == INDIRECT_REF
4994 || TREE_CODE (var
) == ADDR_EXPR
)
4995 var
= TREE_OPERAND (var
, 0);
4996 if (is_variable_sized (var
))
4998 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
4999 var
= DECL_VALUE_EXPR (var
);
5000 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
5001 var
= TREE_OPERAND (var
, 0);
5002 gcc_assert (DECL_P (var
));
5006 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
&& is_omp_target (ctx
->stmt
))
5008 splay_tree_key key
= (splay_tree_key
) &DECL_CONTEXT (var
);
5009 new_var
= (tree
) splay_tree_lookup (ctx
->field_map
, key
)->value
;
5011 else if (c_kind
!= OMP_CLAUSE_COPYIN
)
5012 new_var
= lookup_decl (var
, ctx
);
5014 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
5019 /* C/C++ array section reductions. */
5020 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5021 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5022 && var
!= OMP_CLAUSE_DECL (c
))
5027 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
5028 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
5030 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
5032 tree b
= TREE_OPERAND (orig_var
, 1);
5033 if (is_omp_target (ctx
->stmt
))
5036 b
= maybe_lookup_decl (b
, ctx
);
5039 b
= TREE_OPERAND (orig_var
, 1);
5040 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
5042 if (integer_zerop (bias
))
5046 bias
= fold_convert_loc (clause_loc
,
5047 TREE_TYPE (b
), bias
);
5048 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5049 TREE_TYPE (b
), b
, bias
);
5051 orig_var
= TREE_OPERAND (orig_var
, 0);
5055 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5056 if (is_global_var (out
)
5057 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
5058 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
5059 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
5062 else if (is_omp_target (ctx
->stmt
))
5066 bool by_ref
= use_pointer_for_field (var
, NULL
);
5067 x
= build_receiver_ref (var
, by_ref
, ctx
);
5068 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
5069 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
5071 x
= build_fold_addr_expr (x
);
5073 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
5074 x
= build_simple_mem_ref (x
);
5075 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
5077 if (var
== TREE_OPERAND (orig_var
, 0))
5078 x
= build_fold_addr_expr (x
);
5080 bias
= fold_convert (sizetype
, bias
);
5081 x
= fold_convert (ptr_type_node
, x
);
5082 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5083 TREE_TYPE (x
), x
, bias
);
5084 unsigned cnt
= task_reduction_cnt
- 1;
5085 if (!task_reduction_needs_orig_p
)
5086 cnt
+= (task_reduction_cntorig_full
5087 - task_reduction_cntorig
);
5089 cnt
= task_reduction_cntorig
- 1;
5090 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5091 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5092 gimplify_assign (r
, x
, ilist
);
5096 if (TREE_CODE (orig_var
) == INDIRECT_REF
5097 || TREE_CODE (orig_var
) == ADDR_EXPR
)
5098 orig_var
= TREE_OPERAND (orig_var
, 0);
5099 tree d
= OMP_CLAUSE_DECL (c
);
5100 tree type
= TREE_TYPE (d
);
5101 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
5102 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
5104 const char *name
= get_name (orig_var
);
5105 if (pass
!= 3 && !TREE_CONSTANT (v
))
5108 if (is_omp_target (ctx
->stmt
))
5111 t
= maybe_lookup_decl (v
, ctx
);
5115 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5116 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
5117 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5119 build_int_cst (TREE_TYPE (v
), 1));
5120 sz
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5122 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5126 tree xv
= create_tmp_var (ptr_type_node
);
5127 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5129 unsigned cnt
= task_reduction_cnt
- 1;
5130 if (!task_reduction_needs_orig_p
)
5131 cnt
+= (task_reduction_cntorig_full
5132 - task_reduction_cntorig
);
5134 cnt
= task_reduction_cntorig
- 1;
5135 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5136 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5138 gimple
*g
= gimple_build_assign (xv
, x
);
5139 gimple_seq_add_stmt (ilist
, g
);
5143 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5145 if (ctx
->task_reductions
[1 + idx
])
5146 off
= fold_convert (sizetype
,
5147 ctx
->task_reductions
[1 + idx
]);
5149 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5151 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
5153 gimple_seq_add_stmt (ilist
, g
);
5155 x
= fold_convert (build_pointer_type (boolean_type_node
),
5157 if (TREE_CONSTANT (v
))
5158 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
5159 TYPE_SIZE_UNIT (type
));
5163 if (is_omp_target (ctx
->stmt
))
5166 t
= maybe_lookup_decl (v
, ctx
);
5170 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5171 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
5173 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5175 build_int_cst (TREE_TYPE (v
), 1));
5176 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5178 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5179 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5181 cond
= create_tmp_var (TREE_TYPE (x
));
5182 gimplify_assign (cond
, x
, ilist
);
5185 else if (lower_private_allocate (var
, type
, allocator
,
5186 allocate_ptr
, ilist
, ctx
,
5189 ? TYPE_SIZE_UNIT (type
)
5192 else if (TREE_CONSTANT (v
))
5194 x
= create_tmp_var_raw (type
, name
);
5195 gimple_add_tmp_var (x
);
5196 TREE_ADDRESSABLE (x
) = 1;
5197 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5202 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5203 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
5204 x
= build_call_expr_loc (clause_loc
, atmp
, 2, sz
, al
);
5207 tree ptype
= build_pointer_type (TREE_TYPE (type
));
5208 x
= fold_convert_loc (clause_loc
, ptype
, x
);
5209 tree y
= create_tmp_var (ptype
, name
);
5210 gimplify_assign (y
, x
, ilist
);
5214 if (!integer_zerop (bias
))
5216 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5218 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5220 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
5221 pointer_sized_int_node
, yb
, bias
);
5222 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
5223 yb
= create_tmp_var (ptype
, name
);
5224 gimplify_assign (yb
, x
, ilist
);
5228 d
= TREE_OPERAND (d
, 0);
5229 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
5230 d
= TREE_OPERAND (d
, 0);
5231 if (TREE_CODE (d
) == ADDR_EXPR
)
5233 if (orig_var
!= var
)
5235 gcc_assert (is_variable_sized (orig_var
));
5236 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
5238 gimplify_assign (new_var
, x
, ilist
);
5239 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
5240 tree t
= build_fold_indirect_ref (new_var
);
5241 DECL_IGNORED_P (new_var
) = 0;
5242 TREE_THIS_NOTRAP (t
) = 1;
5243 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
5244 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
5248 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
5249 build_int_cst (ptype
, 0));
5250 SET_DECL_VALUE_EXPR (new_var
, x
);
5251 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5256 gcc_assert (orig_var
== var
);
5257 if (TREE_CODE (d
) == INDIRECT_REF
)
5259 x
= create_tmp_var (ptype
, name
);
5260 TREE_ADDRESSABLE (x
) = 1;
5261 gimplify_assign (x
, yb
, ilist
);
5262 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5264 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5265 gimplify_assign (new_var
, x
, ilist
);
5267 /* GOMP_taskgroup_reduction_register memsets the whole
5268 array to zero. If the initializer is zero, we don't
5269 need to initialize it again, just mark it as ever
5270 used unconditionally, i.e. cond = true. */
5272 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
5273 && initializer_zerop (omp_reduction_init (c
,
5276 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
5278 gimple_seq_add_stmt (ilist
, g
);
5281 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5285 if (!is_parallel_ctx (ctx
))
5287 tree condv
= create_tmp_var (boolean_type_node
);
5288 g
= gimple_build_assign (condv
,
5289 build_simple_mem_ref (cond
));
5290 gimple_seq_add_stmt (ilist
, g
);
5291 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
5292 g
= gimple_build_cond (NE_EXPR
, condv
,
5293 boolean_false_node
, end
, lab1
);
5294 gimple_seq_add_stmt (ilist
, g
);
5295 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
5297 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5299 gimple_seq_add_stmt (ilist
, g
);
5302 tree y1
= create_tmp_var (ptype
);
5303 gimplify_assign (y1
, y
, ilist
);
5304 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
5305 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
5306 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
5307 if (task_reduction_needs_orig_p
)
5309 y3
= create_tmp_var (ptype
);
5311 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5312 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5313 size_int (task_reduction_cnt_full
5314 + task_reduction_cntorig
- 1),
5315 NULL_TREE
, NULL_TREE
);
5318 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5319 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
5322 gimplify_assign (y3
, ref
, ilist
);
5324 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
5328 y2
= create_tmp_var (ptype
);
5329 gimplify_assign (y2
, y
, ilist
);
5331 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5333 tree ref
= build_outer_var_ref (var
, ctx
);
5334 /* For ref build_outer_var_ref already performs this. */
5335 if (TREE_CODE (d
) == INDIRECT_REF
)
5336 gcc_assert (omp_is_reference (var
));
5337 else if (TREE_CODE (d
) == ADDR_EXPR
)
5338 ref
= build_fold_addr_expr (ref
);
5339 else if (omp_is_reference (var
))
5340 ref
= build_fold_addr_expr (ref
);
5341 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
5342 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
5343 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5345 y3
= create_tmp_var (ptype
);
5346 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
5350 y4
= create_tmp_var (ptype
);
5351 gimplify_assign (y4
, ref
, dlist
);
5355 tree i
= create_tmp_var (TREE_TYPE (v
));
5356 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
5357 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5358 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
5361 i2
= create_tmp_var (TREE_TYPE (v
));
5362 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
5363 body2
= create_artificial_label (UNKNOWN_LOCATION
);
5364 end2
= create_artificial_label (UNKNOWN_LOCATION
);
5365 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
5367 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5369 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5370 tree decl_placeholder
5371 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
5372 SET_DECL_VALUE_EXPR (decl_placeholder
,
5373 build_simple_mem_ref (y1
));
5374 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
5375 SET_DECL_VALUE_EXPR (placeholder
,
5376 y3
? build_simple_mem_ref (y3
)
5378 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5379 x
= lang_hooks
.decls
.omp_clause_default_ctor
5380 (c
, build_simple_mem_ref (y1
),
5381 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
5383 gimplify_and_add (x
, ilist
);
5384 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5386 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5387 lower_omp (&tseq
, ctx
);
5388 gimple_seq_add_seq (ilist
, tseq
);
5390 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5393 SET_DECL_VALUE_EXPR (decl_placeholder
,
5394 build_simple_mem_ref (y2
));
5395 SET_DECL_VALUE_EXPR (placeholder
,
5396 build_simple_mem_ref (y4
));
5397 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5398 lower_omp (&tseq
, ctx
);
5399 gimple_seq_add_seq (dlist
, tseq
);
5400 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5402 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5403 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
5406 x
= lang_hooks
.decls
.omp_clause_dtor
5407 (c
, build_simple_mem_ref (y2
));
5409 gimplify_and_add (x
, dlist
);
5414 x
= omp_reduction_init (c
, TREE_TYPE (type
));
5415 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5417 /* reduction(-:var) sums up the partial results, so it
5418 acts identically to reduction(+:var). */
5419 if (code
== MINUS_EXPR
)
5422 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
5425 x
= build2 (code
, TREE_TYPE (type
),
5426 build_simple_mem_ref (y4
),
5427 build_simple_mem_ref (y2
));
5428 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
5432 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
5433 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5434 gimple_seq_add_stmt (ilist
, g
);
5437 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
5438 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5439 gimple_seq_add_stmt (ilist
, g
);
5441 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
5442 build_int_cst (TREE_TYPE (i
), 1));
5443 gimple_seq_add_stmt (ilist
, g
);
5444 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
5445 gimple_seq_add_stmt (ilist
, g
);
5446 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
5449 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
5450 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5451 gimple_seq_add_stmt (dlist
, g
);
5454 g
= gimple_build_assign
5455 (y4
, POINTER_PLUS_EXPR
, y4
,
5456 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5457 gimple_seq_add_stmt (dlist
, g
);
5459 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
5460 build_int_cst (TREE_TYPE (i2
), 1));
5461 gimple_seq_add_stmt (dlist
, g
);
5462 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
5463 gimple_seq_add_stmt (dlist
, g
);
5464 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
5468 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
5469 g
= gimple_build_call (f
, 2, allocate_ptr
, allocator
);
5470 gimple_seq_add_stmt (dlist
, g
);
5476 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5477 if (is_global_var (out
))
5479 else if (is_omp_target (ctx
->stmt
))
5483 bool by_ref
= use_pointer_for_field (var
, ctx
);
5484 x
= build_receiver_ref (var
, by_ref
, ctx
);
5486 if (!omp_is_reference (var
))
5487 x
= build_fold_addr_expr (x
);
5488 x
= fold_convert (ptr_type_node
, x
);
5489 unsigned cnt
= task_reduction_cnt
- 1;
5490 if (!task_reduction_needs_orig_p
)
5491 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
5493 cnt
= task_reduction_cntorig
- 1;
5494 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5495 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5496 gimplify_assign (r
, x
, ilist
);
5501 tree type
= TREE_TYPE (new_var
);
5502 if (!omp_is_reference (var
))
5503 type
= build_pointer_type (type
);
5504 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5506 unsigned cnt
= task_reduction_cnt
- 1;
5507 if (!task_reduction_needs_orig_p
)
5508 cnt
+= (task_reduction_cntorig_full
5509 - task_reduction_cntorig
);
5511 cnt
= task_reduction_cntorig
- 1;
5512 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5513 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5517 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5519 if (ctx
->task_reductions
[1 + idx
])
5520 off
= fold_convert (sizetype
,
5521 ctx
->task_reductions
[1 + idx
]);
5523 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5525 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
5528 x
= fold_convert (type
, x
);
5530 if (omp_is_reference (var
))
5532 gimplify_assign (new_var
, x
, ilist
);
5534 new_var
= build_simple_mem_ref (new_var
);
5538 t
= create_tmp_var (type
);
5539 gimplify_assign (t
, x
, ilist
);
5540 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
5541 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5543 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
5544 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
5545 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5546 cond
= create_tmp_var (TREE_TYPE (t
));
5547 gimplify_assign (cond
, t
, ilist
);
5549 else if (is_variable_sized (var
))
5551 /* For variable sized types, we need to allocate the
5552 actual storage here. Call alloca and store the
5553 result in the pointer decl that we created elsewhere. */
5557 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
5561 ptr
= DECL_VALUE_EXPR (new_var
);
5562 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
5563 ptr
= TREE_OPERAND (ptr
, 0);
5564 gcc_assert (DECL_P (ptr
));
5565 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5567 if (lower_private_allocate (var
, new_var
, allocator
,
5568 allocate_ptr
, ilist
, ctx
,
5573 /* void *tmp = __builtin_alloca */
5575 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5577 = gimple_build_call (atmp
, 2, x
,
5578 size_int (DECL_ALIGN (var
)));
5579 cfun
->calls_alloca
= 1;
5580 tmp
= create_tmp_var_raw (ptr_type_node
);
5581 gimple_add_tmp_var (tmp
);
5582 gimple_call_set_lhs (stmt
, tmp
);
5584 gimple_seq_add_stmt (ilist
, stmt
);
5587 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5588 gimplify_assign (ptr
, x
, ilist
);
5591 else if (omp_is_reference (var
)
5592 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5593 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5595 /* For references that are being privatized for Fortran,
5596 allocate new backing storage for the new pointer
5597 variable. This allows us to avoid changing all the
5598 code that expects a pointer to something that expects
5599 a direct variable. */
5603 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5604 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5606 x
= build_receiver_ref (var
, false, ctx
);
5607 if (ctx
->allocate_map
)
5608 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
5610 allocator
= *allocatep
;
5611 if (TREE_CODE (allocator
) != INTEGER_CST
)
5612 allocator
= build_outer_var_ref (allocator
, ctx
);
5613 allocator
= fold_convert (pointer_sized_int_node
,
5615 allocate_ptr
= unshare_expr (x
);
5617 if (allocator
== NULL_TREE
)
5618 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5620 else if (lower_private_allocate (var
, new_var
, allocator
,
5622 ilist
, ctx
, true, x
))
5624 else if (TREE_CONSTANT (x
))
5626 /* For reduction in SIMD loop, defer adding the
5627 initialization of the reference, because if we decide
5628 to use SIMD array for it, the initilization could cause
5629 expansion ICE. Ditto for other privatization clauses. */
5634 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5636 gimple_add_tmp_var (x
);
5637 TREE_ADDRESSABLE (x
) = 1;
5638 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5644 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5645 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5646 tree al
= size_int (TYPE_ALIGN (rtype
));
5647 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5652 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5653 gimplify_assign (new_var
, x
, ilist
);
5656 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5658 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5659 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5660 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5668 switch (OMP_CLAUSE_CODE (c
))
5670 case OMP_CLAUSE_SHARED
:
5671 /* Ignore shared directives in teams construct inside
5672 target construct. */
5673 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5674 && !is_host_teams_ctx (ctx
))
5676 /* Shared global vars are just accessed directly. */
5677 if (is_global_var (new_var
))
5679 /* For taskloop firstprivate/lastprivate, represented
5680 as firstprivate and shared clause on the task, new_var
5681 is the firstprivate var. */
5682 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5684 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5685 needs to be delayed until after fixup_child_record_type so
5686 that we get the correct type during the dereference. */
5687 by_ref
= use_pointer_for_field (var
, ctx
);
5688 x
= build_receiver_ref (var
, by_ref
, ctx
);
5689 SET_DECL_VALUE_EXPR (new_var
, x
);
5690 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5692 /* ??? If VAR is not passed by reference, and the variable
5693 hasn't been initialized yet, then we'll get a warning for
5694 the store into the omp_data_s structure. Ideally, we'd be
5695 able to notice this and not store anything at all, but
5696 we're generating code too early. Suppress the warning. */
5698 suppress_warning (var
, OPT_Wuninitialized
);
5701 case OMP_CLAUSE__CONDTEMP_
:
5702 if (is_parallel_ctx (ctx
))
5704 x
= build_receiver_ref (var
, false, ctx
);
5705 SET_DECL_VALUE_EXPR (new_var
, x
);
5706 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5708 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5710 x
= build_zero_cst (TREE_TYPE (var
));
5715 case OMP_CLAUSE_LASTPRIVATE
:
5716 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5720 case OMP_CLAUSE_PRIVATE
:
5721 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5722 x
= build_outer_var_ref (var
, ctx
);
5723 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5725 if (is_task_ctx (ctx
))
5726 x
= build_receiver_ref (var
, false, ctx
);
5728 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5736 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
5737 ilist
, ctx
, false, NULL_TREE
);
5738 nx
= unshare_expr (new_var
);
5740 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5741 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5744 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5746 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5749 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5750 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5751 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5752 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5753 || (gimple_omp_for_index (ctx
->stmt
, 0)
5755 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5756 || omp_is_reference (var
))
5757 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5760 if (omp_is_reference (var
))
5762 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5763 tree new_vard
= TREE_OPERAND (new_var
, 0);
5764 gcc_assert (DECL_P (new_vard
));
5765 SET_DECL_VALUE_EXPR (new_vard
,
5766 build_fold_addr_expr (lvar
));
5767 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5772 tree iv
= unshare_expr (ivar
);
5774 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
5777 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
5781 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
5783 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
5784 unshare_expr (ivar
), x
);
5788 gimplify_and_add (x
, &llist
[0]);
5789 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5790 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5795 gcc_assert (TREE_CODE (v
) == MEM_REF
);
5796 v
= TREE_OPERAND (v
, 0);
5797 gcc_assert (DECL_P (v
));
5799 v
= *ctx
->lastprivate_conditional_map
->get (v
);
5800 tree t
= create_tmp_var (TREE_TYPE (v
));
5801 tree z
= build_zero_cst (TREE_TYPE (v
));
5803 = build_outer_var_ref (var
, ctx
,
5804 OMP_CLAUSE_LASTPRIVATE
);
5805 gimple_seq_add_stmt (dlist
,
5806 gimple_build_assign (t
, z
));
5807 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
5808 tree civar
= DECL_VALUE_EXPR (v
);
5809 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
5810 civar
= unshare_expr (civar
);
5811 TREE_OPERAND (civar
, 1) = sctx
.idx
;
5812 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
5813 unshare_expr (civar
));
5814 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
5815 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
5816 orig_v
, unshare_expr (ivar
)));
5817 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
5819 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
5821 gimple_seq tseq
= NULL
;
5822 gimplify_and_add (x
, &tseq
);
5824 lower_omp (&tseq
, ctx
->outer
);
5825 gimple_seq_add_seq (&llist
[1], tseq
);
5827 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5828 && ctx
->for_simd_scan_phase
)
5830 x
= unshare_expr (ivar
);
5832 = build_outer_var_ref (var
, ctx
,
5833 OMP_CLAUSE_LASTPRIVATE
);
5834 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5836 gimplify_and_add (x
, &llist
[0]);
5840 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5842 gimplify_and_add (y
, &llist
[1]);
5846 if (omp_is_reference (var
))
5848 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5849 tree new_vard
= TREE_OPERAND (new_var
, 0);
5850 gcc_assert (DECL_P (new_vard
));
5851 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5852 x
= TYPE_SIZE_UNIT (type
);
5853 if (TREE_CONSTANT (x
))
5855 x
= create_tmp_var_raw (type
, get_name (var
));
5856 gimple_add_tmp_var (x
);
5857 TREE_ADDRESSABLE (x
) = 1;
5858 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5859 x
= fold_convert_loc (clause_loc
,
5860 TREE_TYPE (new_vard
), x
);
5861 gimplify_assign (new_vard
, x
, ilist
);
5866 gimplify_and_add (nx
, ilist
);
5867 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5869 && ctx
->for_simd_scan_phase
)
5871 tree orig_v
= build_outer_var_ref (var
, ctx
,
5872 OMP_CLAUSE_LASTPRIVATE
);
5873 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
5875 gimplify_and_add (x
, ilist
);
5880 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5882 gimplify_and_add (x
, dlist
);
5885 if (!is_gimple_val (allocator
))
5887 tree avar
= create_tmp_var (TREE_TYPE (allocator
));
5888 gimplify_assign (avar
, allocator
, dlist
);
5891 if (!is_gimple_val (allocate_ptr
))
5893 tree apvar
= create_tmp_var (TREE_TYPE (allocate_ptr
));
5894 gimplify_assign (apvar
, allocate_ptr
, dlist
);
5895 allocate_ptr
= apvar
;
5897 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
5899 = gimple_build_call (f
, 2, allocate_ptr
, allocator
);
5900 gimple_seq_add_stmt (dlist
, g
);
5904 case OMP_CLAUSE_LINEAR
:
5905 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
5906 goto do_firstprivate
;
5907 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5910 x
= build_outer_var_ref (var
, ctx
);
5913 case OMP_CLAUSE_FIRSTPRIVATE
:
5914 if (is_task_ctx (ctx
))
5916 if ((omp_is_reference (var
)
5917 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
5918 || is_variable_sized (var
))
5920 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
5922 || use_pointer_for_field (var
, NULL
))
5924 x
= build_receiver_ref (var
, false, ctx
);
5925 if (ctx
->allocate_map
)
5926 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
5928 allocator
= *allocatep
;
5929 if (TREE_CODE (allocator
) != INTEGER_CST
)
5930 allocator
= build_outer_var_ref (allocator
, ctx
);
5931 allocator
= fold_convert (pointer_sized_int_node
,
5933 allocate_ptr
= unshare_expr (x
);
5934 x
= build_simple_mem_ref (x
);
5935 TREE_THIS_NOTRAP (x
) = 1;
5937 SET_DECL_VALUE_EXPR (new_var
, x
);
5938 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5942 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
5943 && omp_is_reference (var
))
5945 x
= build_outer_var_ref (var
, ctx
);
5946 gcc_assert (TREE_CODE (x
) == MEM_REF
5947 && integer_zerop (TREE_OPERAND (x
, 1)));
5948 x
= TREE_OPERAND (x
, 0);
5949 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5950 (c
, unshare_expr (new_var
), x
);
5951 gimplify_and_add (x
, ilist
);
5955 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
5956 ilist
, ctx
, false, NULL_TREE
);
5957 x
= build_outer_var_ref (var
, ctx
);
5960 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5961 && gimple_omp_for_combined_into_p (ctx
->stmt
))
5963 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5964 tree stept
= TREE_TYPE (t
);
5965 tree ct
= omp_find_clause (clauses
,
5966 OMP_CLAUSE__LOOPTEMP_
);
5968 tree l
= OMP_CLAUSE_DECL (ct
);
5969 tree n1
= fd
->loop
.n1
;
5970 tree step
= fd
->loop
.step
;
5971 tree itype
= TREE_TYPE (l
);
5972 if (POINTER_TYPE_P (itype
))
5973 itype
= signed_type_for (itype
);
5974 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
5975 if (TYPE_UNSIGNED (itype
)
5976 && fd
->loop
.cond_code
== GT_EXPR
)
5977 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
5978 fold_build1 (NEGATE_EXPR
, itype
, l
),
5979 fold_build1 (NEGATE_EXPR
,
5982 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
5983 t
= fold_build2 (MULT_EXPR
, stept
,
5984 fold_convert (stept
, l
), t
);
5986 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
5988 if (omp_is_reference (var
))
5990 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5991 tree new_vard
= TREE_OPERAND (new_var
, 0);
5992 gcc_assert (DECL_P (new_vard
));
5993 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5994 nx
= TYPE_SIZE_UNIT (type
);
5995 if (TREE_CONSTANT (nx
))
5997 nx
= create_tmp_var_raw (type
,
5999 gimple_add_tmp_var (nx
);
6000 TREE_ADDRESSABLE (nx
) = 1;
6001 nx
= build_fold_addr_expr_loc (clause_loc
,
6003 nx
= fold_convert_loc (clause_loc
,
6004 TREE_TYPE (new_vard
),
6006 gimplify_assign (new_vard
, nx
, ilist
);
6010 x
= lang_hooks
.decls
.omp_clause_linear_ctor
6012 gimplify_and_add (x
, ilist
);
6016 if (POINTER_TYPE_P (TREE_TYPE (x
)))
6017 x
= fold_build2 (POINTER_PLUS_EXPR
,
6018 TREE_TYPE (x
), x
, t
);
6020 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
6023 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
6024 || TREE_ADDRESSABLE (new_var
)
6025 || omp_is_reference (var
))
6026 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6029 if (omp_is_reference (var
))
6031 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6032 tree new_vard
= TREE_OPERAND (new_var
, 0);
6033 gcc_assert (DECL_P (new_vard
));
6034 SET_DECL_VALUE_EXPR (new_vard
,
6035 build_fold_addr_expr (lvar
));
6036 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6038 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
6040 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
6041 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
6042 gimplify_and_add (x
, ilist
);
6043 gimple_stmt_iterator gsi
6044 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
6046 = gimple_build_assign (unshare_expr (lvar
), iv
);
6047 gsi_insert_before_without_update (&gsi
, g
,
6049 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6050 enum tree_code code
= PLUS_EXPR
;
6051 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
6052 code
= POINTER_PLUS_EXPR
;
6053 g
= gimple_build_assign (iv
, code
, iv
, t
);
6054 gsi_insert_before_without_update (&gsi
, g
,
6058 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6059 (c
, unshare_expr (ivar
), x
);
6060 gimplify_and_add (x
, &llist
[0]);
6061 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6063 gimplify_and_add (x
, &llist
[1]);
6066 if (omp_is_reference (var
))
6068 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6069 tree new_vard
= TREE_OPERAND (new_var
, 0);
6070 gcc_assert (DECL_P (new_vard
));
6071 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6072 nx
= TYPE_SIZE_UNIT (type
);
6073 if (TREE_CONSTANT (nx
))
6075 nx
= create_tmp_var_raw (type
, get_name (var
));
6076 gimple_add_tmp_var (nx
);
6077 TREE_ADDRESSABLE (nx
) = 1;
6078 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
6079 nx
= fold_convert_loc (clause_loc
,
6080 TREE_TYPE (new_vard
), nx
);
6081 gimplify_assign (new_vard
, nx
, ilist
);
6085 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6086 (c
, unshare_expr (new_var
), x
);
6087 gimplify_and_add (x
, ilist
);
6090 case OMP_CLAUSE__LOOPTEMP_
:
6091 case OMP_CLAUSE__REDUCTEMP_
:
6092 gcc_assert (is_taskreg_ctx (ctx
));
6093 x
= build_outer_var_ref (var
, ctx
);
6094 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
6095 gimplify_and_add (x
, ilist
);
6098 case OMP_CLAUSE_COPYIN
:
6099 by_ref
= use_pointer_for_field (var
, NULL
);
6100 x
= build_receiver_ref (var
, by_ref
, ctx
);
6101 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
6102 append_to_statement_list (x
, ©in_seq
);
6103 copyin_by_ref
|= by_ref
;
6106 case OMP_CLAUSE_REDUCTION
:
6107 case OMP_CLAUSE_IN_REDUCTION
:
6108 /* OpenACC reductions are initialized using the
6109 GOACC_REDUCTION internal function. */
6110 if (is_gimple_omp_oacc (ctx
->stmt
))
6112 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6114 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6116 tree ptype
= TREE_TYPE (placeholder
);
6119 x
= error_mark_node
;
6120 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
6121 && !task_reduction_needs_orig_p
)
6123 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
6125 tree pptype
= build_pointer_type (ptype
);
6126 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
6127 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
6128 size_int (task_reduction_cnt_full
6129 + task_reduction_cntorig
- 1),
6130 NULL_TREE
, NULL_TREE
);
6134 = *ctx
->task_reduction_map
->get (c
);
6135 x
= task_reduction_read (ilist
, tskred_temp
,
6136 pptype
, 7 + 3 * idx
);
6138 x
= fold_convert (pptype
, x
);
6139 x
= build_simple_mem_ref (x
);
6144 lower_private_allocate (var
, new_var
, allocator
,
6145 allocate_ptr
, ilist
, ctx
, false,
6147 x
= build_outer_var_ref (var
, ctx
);
6149 if (omp_is_reference (var
)
6150 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
6151 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6153 SET_DECL_VALUE_EXPR (placeholder
, x
);
6154 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6155 tree new_vard
= new_var
;
6156 if (omp_is_reference (var
))
6158 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6159 new_vard
= TREE_OPERAND (new_var
, 0);
6160 gcc_assert (DECL_P (new_vard
));
6162 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6164 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6165 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6168 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6172 if (new_vard
== new_var
)
6174 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
6175 SET_DECL_VALUE_EXPR (new_var
, ivar
);
6179 SET_DECL_VALUE_EXPR (new_vard
,
6180 build_fold_addr_expr (ivar
));
6181 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6183 x
= lang_hooks
.decls
.omp_clause_default_ctor
6184 (c
, unshare_expr (ivar
),
6185 build_outer_var_ref (var
, ctx
));
6186 if (rvarp
&& ctx
->for_simd_scan_phase
)
6189 gimplify_and_add (x
, &llist
[0]);
6190 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6192 gimplify_and_add (x
, &llist
[1]);
6199 gimplify_and_add (x
, &llist
[0]);
6201 tree ivar2
= unshare_expr (lvar
);
6202 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6203 x
= lang_hooks
.decls
.omp_clause_default_ctor
6204 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
6205 gimplify_and_add (x
, &llist
[0]);
6209 x
= lang_hooks
.decls
.omp_clause_default_ctor
6210 (c
, unshare_expr (rvar2
),
6211 build_outer_var_ref (var
, ctx
));
6212 gimplify_and_add (x
, &llist
[0]);
6215 /* For types that need construction, add another
6216 private var which will be default constructed
6217 and optionally initialized with
6218 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6219 loop we want to assign this value instead of
6220 constructing and destructing it in each
6222 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
6223 gimple_add_tmp_var (nv
);
6224 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
6228 x
= lang_hooks
.decls
.omp_clause_default_ctor
6229 (c
, nv
, build_outer_var_ref (var
, ctx
));
6230 gimplify_and_add (x
, ilist
);
6232 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6234 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6235 x
= DECL_VALUE_EXPR (new_vard
);
6237 if (new_vard
!= new_var
)
6238 vexpr
= build_fold_addr_expr (nv
);
6239 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6240 lower_omp (&tseq
, ctx
);
6241 SET_DECL_VALUE_EXPR (new_vard
, x
);
6242 gimple_seq_add_seq (ilist
, tseq
);
6243 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6246 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6248 gimplify_and_add (x
, dlist
);
6251 tree ref
= build_outer_var_ref (var
, ctx
);
6252 x
= unshare_expr (ivar
);
6253 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
6255 gimplify_and_add (x
, &llist
[0]);
6257 ref
= build_outer_var_ref (var
, ctx
);
6258 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
6260 gimplify_and_add (x
, &llist
[3]);
6262 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6263 if (new_vard
== new_var
)
6264 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6266 SET_DECL_VALUE_EXPR (new_vard
,
6267 build_fold_addr_expr (lvar
));
6269 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6271 gimplify_and_add (x
, &llist
[1]);
6273 tree ivar2
= unshare_expr (lvar
);
6274 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6275 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
6277 gimplify_and_add (x
, &llist
[1]);
6281 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
6283 gimplify_and_add (x
, &llist
[1]);
6288 gimplify_and_add (x
, &llist
[0]);
6289 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6291 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6292 lower_omp (&tseq
, ctx
);
6293 gimple_seq_add_seq (&llist
[0], tseq
);
6295 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6296 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6297 lower_omp (&tseq
, ctx
);
6298 gimple_seq_add_seq (&llist
[1], tseq
);
6299 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6300 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6301 if (new_vard
== new_var
)
6302 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6304 SET_DECL_VALUE_EXPR (new_vard
,
6305 build_fold_addr_expr (lvar
));
6306 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6308 gimplify_and_add (x
, &llist
[1]);
6311 /* If this is a reference to constant size reduction var
6312 with placeholder, we haven't emitted the initializer
6313 for it because it is undesirable if SIMD arrays are used.
6314 But if they aren't used, we need to emit the deferred
6315 initialization now. */
6316 else if (omp_is_reference (var
) && is_simd
)
6317 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6319 tree lab2
= NULL_TREE
;
6323 if (!is_parallel_ctx (ctx
))
6325 tree condv
= create_tmp_var (boolean_type_node
);
6326 tree m
= build_simple_mem_ref (cond
);
6327 g
= gimple_build_assign (condv
, m
);
6328 gimple_seq_add_stmt (ilist
, g
);
6330 = create_artificial_label (UNKNOWN_LOCATION
);
6331 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6332 g
= gimple_build_cond (NE_EXPR
, condv
,
6335 gimple_seq_add_stmt (ilist
, g
);
6336 gimple_seq_add_stmt (ilist
,
6337 gimple_build_label (lab1
));
6339 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6341 gimple_seq_add_stmt (ilist
, g
);
6343 x
= lang_hooks
.decls
.omp_clause_default_ctor
6344 (c
, unshare_expr (new_var
),
6346 : build_outer_var_ref (var
, ctx
));
6348 gimplify_and_add (x
, ilist
);
6350 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6351 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6353 if (ctx
->for_simd_scan_phase
)
6356 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
6358 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
6359 gimple_add_tmp_var (nv
);
6360 ctx
->cb
.decl_map
->put (new_vard
, nv
);
6361 x
= lang_hooks
.decls
.omp_clause_default_ctor
6362 (c
, nv
, build_outer_var_ref (var
, ctx
));
6364 gimplify_and_add (x
, ilist
);
6365 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6367 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6369 if (new_vard
!= new_var
)
6370 vexpr
= build_fold_addr_expr (nv
);
6371 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6372 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6373 lower_omp (&tseq
, ctx
);
6374 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
6375 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
6376 gimple_seq_add_seq (ilist
, tseq
);
6378 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6379 if (is_simd
&& ctx
->scan_exclusive
)
6382 = create_tmp_var_raw (TREE_TYPE (new_var
));
6383 gimple_add_tmp_var (nv2
);
6384 ctx
->cb
.decl_map
->put (nv
, nv2
);
6385 x
= lang_hooks
.decls
.omp_clause_default_ctor
6386 (c
, nv2
, build_outer_var_ref (var
, ctx
));
6387 gimplify_and_add (x
, ilist
);
6388 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6390 gimplify_and_add (x
, dlist
);
6392 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6394 gimplify_and_add (x
, dlist
);
6397 && ctx
->scan_exclusive
6398 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
6400 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
6401 gimple_add_tmp_var (nv2
);
6402 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
6403 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6405 gimplify_and_add (x
, dlist
);
6407 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6411 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6413 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6414 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
6415 && is_omp_target (ctx
->stmt
))
6417 tree d
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
6418 tree oldv
= NULL_TREE
;
6420 if (DECL_HAS_VALUE_EXPR_P (d
))
6421 oldv
= DECL_VALUE_EXPR (d
);
6422 SET_DECL_VALUE_EXPR (d
, new_vard
);
6423 DECL_HAS_VALUE_EXPR_P (d
) = 1;
6424 lower_omp (&tseq
, ctx
);
6426 SET_DECL_VALUE_EXPR (d
, oldv
);
6429 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
6430 DECL_HAS_VALUE_EXPR_P (d
) = 0;
6434 lower_omp (&tseq
, ctx
);
6435 gimple_seq_add_seq (ilist
, tseq
);
6437 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6440 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6441 lower_omp (&tseq
, ctx
);
6442 gimple_seq_add_seq (dlist
, tseq
);
6443 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6445 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6449 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6456 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
6457 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
6458 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6463 tree lab2
= NULL_TREE
;
6464 /* GOMP_taskgroup_reduction_register memsets the whole
6465 array to zero. If the initializer is zero, we don't
6466 need to initialize it again, just mark it as ever
6467 used unconditionally, i.e. cond = true. */
6468 if (initializer_zerop (x
))
6470 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6472 gimple_seq_add_stmt (ilist
, g
);
6477 if (!cond) { cond = true; new_var = x; } */
6478 if (!is_parallel_ctx (ctx
))
6480 tree condv
= create_tmp_var (boolean_type_node
);
6481 tree m
= build_simple_mem_ref (cond
);
6482 g
= gimple_build_assign (condv
, m
);
6483 gimple_seq_add_stmt (ilist
, g
);
6485 = create_artificial_label (UNKNOWN_LOCATION
);
6486 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6487 g
= gimple_build_cond (NE_EXPR
, condv
,
6490 gimple_seq_add_stmt (ilist
, g
);
6491 gimple_seq_add_stmt (ilist
,
6492 gimple_build_label (lab1
));
6494 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6496 gimple_seq_add_stmt (ilist
, g
);
6497 gimplify_assign (new_var
, x
, ilist
);
6499 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6503 /* reduction(-:var) sums up the partial results, so it
6504 acts identically to reduction(+:var). */
6505 if (code
== MINUS_EXPR
)
6508 /* C/C++ permits FP/complex with || and &&. */
6510 = ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6511 && (FLOAT_TYPE_P (TREE_TYPE (new_var
))
6512 || TREE_CODE (TREE_TYPE (new_var
)) == COMPLEX_TYPE
));
6513 tree new_vard
= new_var
;
6514 if (is_simd
&& omp_is_reference (var
))
6516 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6517 new_vard
= TREE_OPERAND (new_var
, 0);
6518 gcc_assert (DECL_P (new_vard
));
6520 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6522 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6523 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6526 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6530 if (new_vard
!= new_var
)
6532 SET_DECL_VALUE_EXPR (new_vard
,
6533 build_fold_addr_expr (lvar
));
6534 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6537 tree ref
= build_outer_var_ref (var
, ctx
);
6541 if (ctx
->for_simd_scan_phase
)
6543 gimplify_assign (ivar
, ref
, &llist
[0]);
6544 ref
= build_outer_var_ref (var
, ctx
);
6545 gimplify_assign (ref
, rvar
, &llist
[3]);
6549 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
6554 simt_lane
= create_tmp_var (unsigned_type_node
);
6555 x
= build_call_expr_internal_loc
6556 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
6557 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
6558 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
6559 gimplify_assign (ivar
, x
, &llist
[2]);
6565 tree zero
= build_zero_cst (TREE_TYPE (ivar
));
6566 ivar2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6567 integer_type_node
, ivar
,
6569 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6570 integer_type_node
, ref
, zero
);
6572 x
= build2 (code
, TREE_TYPE (ref
), ref2
, ivar2
);
6574 x
= fold_convert (TREE_TYPE (ref
), x
);
6575 ref
= build_outer_var_ref (var
, ctx
);
6576 gimplify_assign (ref
, x
, &llist
[1]);
6581 lower_private_allocate (var
, new_var
, allocator
,
6582 allocate_ptr
, ilist
, ctx
,
6584 if (omp_is_reference (var
) && is_simd
)
6585 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6586 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6587 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6589 gimplify_assign (new_var
, x
, ilist
);
6592 tree ref
= build_outer_var_ref (var
, ctx
);
6593 tree new_var2
= new_var
;
6597 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
6599 = fold_build2_loc (clause_loc
, NE_EXPR
,
6600 integer_type_node
, new_var
,
6602 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6603 integer_type_node
, ref
,
6606 x
= build2 (code
, TREE_TYPE (ref2
), ref2
, new_var2
);
6608 x
= fold_convert (TREE_TYPE (new_var
), x
);
6609 ref
= build_outer_var_ref (var
, ctx
);
6610 gimplify_assign (ref
, x
, dlist
);
6625 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
6626 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
6629 if (known_eq (sctx
.max_vf
, 1U))
6631 sctx
.is_simt
= false;
6632 if (ctx
->lastprivate_conditional_map
)
6634 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
6636 /* Signal to lower_omp_1 that it should use parent context. */
6637 ctx
->combined_into_simd_safelen1
= true;
6638 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6639 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6640 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6642 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6643 omp_context
*outer
= ctx
->outer
;
6644 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
6645 outer
= outer
->outer
;
6646 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
6647 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
6648 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
6654 /* When not vectorized, treat lastprivate(conditional:) like
6655 normal lastprivate, as there will be just one simd lane
6656 writing the privatized variable. */
6657 delete ctx
->lastprivate_conditional_map
;
6658 ctx
->lastprivate_conditional_map
= NULL
;
6663 if (nonconst_simd_if
)
6665 if (sctx
.lane
== NULL_TREE
)
6667 sctx
.idx
= create_tmp_var (unsigned_type_node
);
6668 sctx
.lane
= create_tmp_var (unsigned_type_node
);
6670 /* FIXME: For now. */
6671 sctx
.is_simt
= false;
6674 if (sctx
.lane
|| sctx
.is_simt
)
6676 uid
= create_tmp_var (ptr_type_node
, "simduid");
6677 /* Don't want uninit warnings on simduid, it is always uninitialized,
6678 but we use it not for the value, but for the DECL_UID only. */
6679 suppress_warning (uid
, OPT_Wuninitialized
);
6680 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6681 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6682 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6683 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6685 /* Emit calls denoting privatized variables and initializing a pointer to
6686 structure that holds private variables as fields after ompdevlow pass. */
6689 sctx
.simt_eargs
[0] = uid
;
6691 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6692 gimple_call_set_lhs (g
, uid
);
6693 gimple_seq_add_stmt (ilist
, g
);
6694 sctx
.simt_eargs
.release ();
6696 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6697 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6698 gimple_call_set_lhs (g
, simtrec
);
6699 gimple_seq_add_stmt (ilist
, g
);
6703 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6704 2 + (nonconst_simd_if
!= NULL
),
6705 uid
, integer_zero_node
,
6707 gimple_call_set_lhs (g
, sctx
.lane
);
6708 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
6709 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6710 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6711 build_int_cst (unsigned_type_node
, 0));
6712 gimple_seq_add_stmt (ilist
, g
);
6715 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6717 gimple_call_set_lhs (g
, sctx
.lastlane
);
6718 gimple_seq_add_stmt (dlist
, g
);
6719 gimple_seq_add_seq (dlist
, llist
[3]);
6721 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6724 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6725 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6726 gimple_call_set_lhs (g
, simt_vf
);
6727 gimple_seq_add_stmt (dlist
, g
);
6729 tree t
= build_int_cst (unsigned_type_node
, 1);
6730 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6731 gimple_seq_add_stmt (dlist
, g
);
6733 t
= build_int_cst (unsigned_type_node
, 0);
6734 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6735 gimple_seq_add_stmt (dlist
, g
);
6737 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6738 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6739 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6740 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6741 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6743 gimple_seq_add_seq (dlist
, llist
[2]);
6745 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6746 gimple_seq_add_stmt (dlist
, g
);
6748 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6749 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6750 gimple_seq_add_stmt (dlist
, g
);
6752 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6754 for (int i
= 0; i
< 2; i
++)
6757 tree vf
= create_tmp_var (unsigned_type_node
);
6758 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
6759 gimple_call_set_lhs (g
, vf
);
6760 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
6761 gimple_seq_add_stmt (seq
, g
);
6762 tree t
= build_int_cst (unsigned_type_node
, 0);
6763 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6764 gimple_seq_add_stmt (seq
, g
);
6765 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6766 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6767 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6768 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
6769 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
6770 gimple_seq_add_seq (seq
, llist
[i
]);
6771 t
= build_int_cst (unsigned_type_node
, 1);
6772 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
6773 gimple_seq_add_stmt (seq
, g
);
6774 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
6775 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
6776 gimple_seq_add_stmt (seq
, g
);
6777 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
6782 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
6784 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
6785 gimple_seq_add_stmt (dlist
, g
);
6788 /* The copyin sequence is not to be executed by the main thread, since
6789 that would result in self-copies. Perhaps not visible to scalars,
6790 but it certainly is to C++ operator=. */
6793 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
6795 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
6796 build_int_cst (TREE_TYPE (x
), 0));
6797 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
6798 gimplify_and_add (x
, ilist
);
6801 /* If any copyin variable is passed by reference, we must ensure the
6802 master thread doesn't modify it before it is copied over in all
6803 threads. Similarly for variables in both firstprivate and
6804 lastprivate clauses we need to ensure the lastprivate copying
6805 happens after firstprivate copying in all threads. And similarly
6806 for UDRs if initializer expression refers to omp_orig. */
6807 if (copyin_by_ref
|| lastprivate_firstprivate
6808 || (reduction_omp_orig_ref
6809 && !ctx
->scan_inclusive
6810 && !ctx
->scan_exclusive
))
6812 /* Don't add any barrier for #pragma omp simd or
6813 #pragma omp distribute. */
6814 if (!is_task_ctx (ctx
)
6815 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
6816 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
6817 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
6820 /* If max_vf is non-zero, then we can use only a vectorization factor
6821 up to the max_vf we chose. So stick it into the safelen clause. */
6822 if (maybe_ne (sctx
.max_vf
, 0U))
6824 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
6825 OMP_CLAUSE_SAFELEN
);
6826 poly_uint64 safe_len
;
6828 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
6829 && maybe_gt (safe_len
, sctx
.max_vf
)))
6831 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
6832 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
6834 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6835 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6840 /* Create temporary variables for lastprivate(conditional:) implementation
6841 in context CTX with CLAUSES. */
6844 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
6846 tree iter_type
= NULL_TREE
;
6847 tree cond_ptr
= NULL_TREE
;
6848 tree iter_var
= NULL_TREE
;
6849 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6850 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
6851 tree next
= *clauses
;
6852 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6853 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6854 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6858 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
6860 if (iter_type
== NULL_TREE
)
6862 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
6863 iter_var
= create_tmp_var_raw (iter_type
);
6864 DECL_CONTEXT (iter_var
) = current_function_decl
;
6865 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6866 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6867 ctx
->block_vars
= iter_var
;
6869 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6870 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6871 OMP_CLAUSE_DECL (c3
) = iter_var
;
6872 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
6874 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6876 next
= OMP_CLAUSE_CHAIN (cc
);
6877 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6878 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
6879 ctx
->lastprivate_conditional_map
->put (o
, v
);
6882 if (iter_type
== NULL
)
6884 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
6886 struct omp_for_data fd
;
6887 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
6889 iter_type
= unsigned_type_for (fd
.iter_type
);
6891 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
6892 iter_type
= unsigned_type_node
;
6893 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
6897 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
6898 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6902 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
6903 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
6904 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
6905 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
6906 ctx
->block_vars
= cond_ptr
;
6907 c2
= build_omp_clause (UNKNOWN_LOCATION
,
6908 OMP_CLAUSE__CONDTEMP_
);
6909 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6910 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
6913 iter_var
= create_tmp_var_raw (iter_type
);
6914 DECL_CONTEXT (iter_var
) = current_function_decl
;
6915 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6916 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6917 ctx
->block_vars
= iter_var
;
6919 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6920 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6921 OMP_CLAUSE_DECL (c3
) = iter_var
;
6922 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
6923 OMP_CLAUSE_CHAIN (c2
) = c3
;
6924 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6926 tree v
= create_tmp_var_raw (iter_type
);
6927 DECL_CONTEXT (v
) = current_function_decl
;
6928 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
6929 DECL_CHAIN (v
) = ctx
->block_vars
;
6930 ctx
->block_vars
= v
;
6931 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6932 ctx
->lastprivate_conditional_map
->put (o
, v
);
6937 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6938 both parallel and workshare constructs. PREDICATE may be NULL if it's
6939 always true. BODY_P is the sequence to insert early initialization
6940 if needed, STMT_LIST is where the non-conditional lastprivate handling
6941 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6945 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
6946 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
6949 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
6950 bool par_clauses
= false;
6951 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
6952 unsigned HOST_WIDE_INT conditional_off
= 0;
6953 gimple_seq post_stmt_list
= NULL
;
6955 /* Early exit if there are no lastprivate or linear clauses. */
6956 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
6957 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
6958 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
6959 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
6961 if (clauses
== NULL
)
6963 /* If this was a workshare clause, see if it had been combined
6964 with its parallel. In that case, look for the clauses on the
6965 parallel statement itself. */
6966 if (is_parallel_ctx (ctx
))
6970 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6973 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6974 OMP_CLAUSE_LASTPRIVATE
);
6975 if (clauses
== NULL
)
6980 bool maybe_simt
= false;
6981 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6982 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
6984 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
6985 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
6987 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
6993 tree label_true
, arm1
, arm2
;
6994 enum tree_code pred_code
= TREE_CODE (predicate
);
6996 label
= create_artificial_label (UNKNOWN_LOCATION
);
6997 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
6998 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
7000 arm1
= TREE_OPERAND (predicate
, 0);
7001 arm2
= TREE_OPERAND (predicate
, 1);
7002 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7003 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7008 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7009 arm2
= boolean_false_node
;
7010 pred_code
= NE_EXPR
;
7014 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
7015 c
= fold_convert (integer_type_node
, c
);
7016 simtcond
= create_tmp_var (integer_type_node
);
7017 gimplify_assign (simtcond
, c
, stmt_list
);
7018 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
7020 c
= create_tmp_var (integer_type_node
);
7021 gimple_call_set_lhs (g
, c
);
7022 gimple_seq_add_stmt (stmt_list
, g
);
7023 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
7027 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
7028 gimple_seq_add_stmt (stmt_list
, stmt
);
7029 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
7032 tree cond_ptr
= NULL_TREE
;
7033 for (c
= clauses
; c
;)
7036 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7037 gimple_seq
*this_stmt_list
= stmt_list
;
7038 tree lab2
= NULL_TREE
;
7040 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7041 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7042 && ctx
->lastprivate_conditional_map
7043 && !ctx
->combined_into_simd_safelen1
)
7045 gcc_assert (body_p
);
7048 if (cond_ptr
== NULL_TREE
)
7050 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
7051 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
7053 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
7054 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7055 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
7056 gimplify_assign (v
, build_zero_cst (type
), body_p
);
7057 this_stmt_list
= cstmt_list
;
7059 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
7061 mem
= build2 (MEM_REF
, type
, cond_ptr
,
7062 build_int_cst (TREE_TYPE (cond_ptr
),
7064 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
7067 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
7068 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
7069 tree mem2
= copy_node (mem
);
7070 gimple_seq seq
= NULL
;
7071 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
7072 gimple_seq_add_seq (this_stmt_list
, seq
);
7073 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
7074 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
7075 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
7076 gimple_seq_add_stmt (this_stmt_list
, g
);
7077 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
7078 gimplify_assign (mem2
, v
, this_stmt_list
);
7081 && ctx
->combined_into_simd_safelen1
7082 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7083 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7084 && ctx
->lastprivate_conditional_map
)
7085 this_stmt_list
= &post_stmt_list
;
7087 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7088 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7089 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
7091 var
= OMP_CLAUSE_DECL (c
);
7092 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7093 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
7094 && is_taskloop_ctx (ctx
))
7096 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
7097 new_var
= lookup_decl (var
, ctx
->outer
);
7101 new_var
= lookup_decl (var
, ctx
);
7102 /* Avoid uninitialized warnings for lastprivate and
7103 for linear iterators. */
7105 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7106 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
7107 suppress_warning (new_var
, OPT_Wuninitialized
);
7110 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
7112 tree val
= DECL_VALUE_EXPR (new_var
);
7113 if (TREE_CODE (val
) == ARRAY_REF
7114 && VAR_P (TREE_OPERAND (val
, 0))
7115 && lookup_attribute ("omp simd array",
7116 DECL_ATTRIBUTES (TREE_OPERAND (val
,
7119 if (lastlane
== NULL
)
7121 lastlane
= create_tmp_var (unsigned_type_node
);
7123 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
7125 TREE_OPERAND (val
, 1));
7126 gimple_call_set_lhs (g
, lastlane
);
7127 gimple_seq_add_stmt (this_stmt_list
, g
);
7129 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
7130 TREE_OPERAND (val
, 0), lastlane
,
7131 NULL_TREE
, NULL_TREE
);
7132 TREE_THIS_NOTRAP (new_var
) = 1;
7135 else if (maybe_simt
)
7137 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
7138 ? DECL_VALUE_EXPR (new_var
)
7140 if (simtlast
== NULL
)
7142 simtlast
= create_tmp_var (unsigned_type_node
);
7143 gcall
*g
= gimple_build_call_internal
7144 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
7145 gimple_call_set_lhs (g
, simtlast
);
7146 gimple_seq_add_stmt (this_stmt_list
, g
);
7148 x
= build_call_expr_internal_loc
7149 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
7150 TREE_TYPE (val
), 2, val
, simtlast
);
7151 new_var
= unshare_expr (new_var
);
7152 gimplify_assign (new_var
, x
, this_stmt_list
);
7153 new_var
= unshare_expr (new_var
);
7156 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7157 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
7159 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
7160 gimple_seq_add_seq (this_stmt_list
,
7161 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
7162 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
7164 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7165 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
7167 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
7168 gimple_seq_add_seq (this_stmt_list
,
7169 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
7170 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
7174 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7175 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)
7176 && is_taskloop_ctx (ctx
))
7178 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
7180 if (is_global_var (ovar
))
7184 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
7185 if (omp_is_reference (var
))
7186 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7187 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
7188 gimplify_and_add (x
, this_stmt_list
);
7191 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
7195 c
= OMP_CLAUSE_CHAIN (c
);
7196 if (c
== NULL
&& !par_clauses
)
7198 /* If this was a workshare clause, see if it had been combined
7199 with its parallel. In that case, continue looking for the
7200 clauses also on the parallel statement itself. */
7201 if (is_parallel_ctx (ctx
))
7205 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7208 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7209 OMP_CLAUSE_LASTPRIVATE
);
7215 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
7216 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
7219 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7220 (which might be a placeholder). INNER is true if this is an inner
7221 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7222 join markers. Generate the before-loop forking sequence in
7223 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7224 general form of these sequences is
7226 GOACC_REDUCTION_SETUP
7228 GOACC_REDUCTION_INIT
7230 GOACC_REDUCTION_FINI
7232 GOACC_REDUCTION_TEARDOWN. */
7235 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
7236 gcall
*fork
, gcall
*private_marker
, gcall
*join
,
7237 gimple_seq
*fork_seq
, gimple_seq
*join_seq
,
7240 gimple_seq before_fork
= NULL
;
7241 gimple_seq after_fork
= NULL
;
7242 gimple_seq before_join
= NULL
;
7243 gimple_seq after_join
= NULL
;
7244 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
7245 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
7246 unsigned offset
= 0;
7248 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7249 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
7251 /* No 'reduction' clauses on OpenACC 'kernels'. */
7252 gcc_checking_assert (!is_oacc_kernels (ctx
));
7253 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7254 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
7256 tree orig
= OMP_CLAUSE_DECL (c
);
7257 tree var
= maybe_lookup_decl (orig
, ctx
);
7258 tree ref_to_res
= NULL_TREE
;
7259 tree incoming
, outgoing
, v1
, v2
, v3
;
7260 bool is_private
= false;
7262 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
7263 if (rcode
== MINUS_EXPR
)
7265 else if (rcode
== TRUTH_ANDIF_EXPR
)
7266 rcode
= BIT_AND_EXPR
;
7267 else if (rcode
== TRUTH_ORIF_EXPR
)
7268 rcode
= BIT_IOR_EXPR
;
7269 tree op
= build_int_cst (unsigned_type_node
, rcode
);
7274 incoming
= outgoing
= var
;
7278 /* See if an outer construct also reduces this variable. */
7279 omp_context
*outer
= ctx
;
7281 while (omp_context
*probe
= outer
->outer
)
7283 enum gimple_code type
= gimple_code (probe
->stmt
);
7288 case GIMPLE_OMP_FOR
:
7289 cls
= gimple_omp_for_clauses (probe
->stmt
);
7292 case GIMPLE_OMP_TARGET
:
7293 /* No 'reduction' clauses inside OpenACC 'kernels'
7295 gcc_checking_assert (!is_oacc_kernels (probe
));
7297 if (!is_gimple_omp_offloaded (probe
->stmt
))
7300 cls
= gimple_omp_target_clauses (probe
->stmt
);
7308 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
7309 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
7310 && orig
== OMP_CLAUSE_DECL (cls
))
7312 incoming
= outgoing
= lookup_decl (orig
, probe
);
7313 goto has_outer_reduction
;
7315 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
7316 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
7317 && orig
== OMP_CLAUSE_DECL (cls
))
7325 /* This is the outermost construct with this reduction,
7326 see if there's a mapping for it. */
7327 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
7328 && maybe_lookup_field (orig
, outer
) && !is_private
)
7330 ref_to_res
= build_receiver_ref (orig
, false, outer
);
7331 if (omp_is_reference (orig
))
7332 ref_to_res
= build_simple_mem_ref (ref_to_res
);
7334 tree type
= TREE_TYPE (var
);
7335 if (POINTER_TYPE_P (type
))
7336 type
= TREE_TYPE (type
);
7339 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
7343 /* Try to look at enclosing contexts for reduction var,
7344 use original if no mapping found. */
7346 omp_context
*c
= ctx
->outer
;
7349 t
= maybe_lookup_decl (orig
, c
);
7352 incoming
= outgoing
= (t
? t
: orig
);
7355 has_outer_reduction
:;
7359 ref_to_res
= integer_zero_node
;
7361 if (omp_is_reference (orig
))
7363 tree type
= TREE_TYPE (var
);
7364 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
7368 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
7369 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
7372 v1
= create_tmp_var (type
, id
);
7373 v2
= create_tmp_var (type
, id
);
7374 v3
= create_tmp_var (type
, id
);
7376 gimplify_assign (v1
, var
, fork_seq
);
7377 gimplify_assign (v2
, var
, fork_seq
);
7378 gimplify_assign (v3
, var
, fork_seq
);
7380 var
= build_simple_mem_ref (var
);
7381 v1
= build_simple_mem_ref (v1
);
7382 v2
= build_simple_mem_ref (v2
);
7383 v3
= build_simple_mem_ref (v3
);
7384 outgoing
= build_simple_mem_ref (outgoing
);
7386 if (!TREE_CONSTANT (incoming
))
7387 incoming
= build_simple_mem_ref (incoming
);
7392 /* Determine position in reduction buffer, which may be used
7393 by target. The parser has ensured that this is not a
7394 variable-sized type. */
7395 fixed_size_mode mode
7396 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
7397 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
7398 offset
= (offset
+ align
- 1) & ~(align
- 1);
7399 tree off
= build_int_cst (sizetype
, offset
);
7400 offset
+= GET_MODE_SIZE (mode
);
7404 init_code
= build_int_cst (integer_type_node
,
7405 IFN_GOACC_REDUCTION_INIT
);
7406 fini_code
= build_int_cst (integer_type_node
,
7407 IFN_GOACC_REDUCTION_FINI
);
7408 setup_code
= build_int_cst (integer_type_node
,
7409 IFN_GOACC_REDUCTION_SETUP
);
7410 teardown_code
= build_int_cst (integer_type_node
,
7411 IFN_GOACC_REDUCTION_TEARDOWN
);
7415 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7416 TREE_TYPE (var
), 6, setup_code
,
7417 unshare_expr (ref_to_res
),
7418 incoming
, level
, op
, off
);
7420 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7421 TREE_TYPE (var
), 6, init_code
,
7422 unshare_expr (ref_to_res
),
7423 v1
, level
, op
, off
);
7425 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7426 TREE_TYPE (var
), 6, fini_code
,
7427 unshare_expr (ref_to_res
),
7428 v2
, level
, op
, off
);
7430 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7431 TREE_TYPE (var
), 6, teardown_code
,
7432 ref_to_res
, v3
, level
, op
, off
);
7434 gimplify_assign (v1
, setup_call
, &before_fork
);
7435 gimplify_assign (v2
, init_call
, &after_fork
);
7436 gimplify_assign (v3
, fini_call
, &before_join
);
7437 gimplify_assign (outgoing
, teardown_call
, &after_join
);
7440 /* Now stitch things together. */
7441 gimple_seq_add_seq (fork_seq
, before_fork
);
7443 gimple_seq_add_stmt (fork_seq
, private_marker
);
7445 gimple_seq_add_stmt (fork_seq
, fork
);
7446 gimple_seq_add_seq (fork_seq
, after_fork
);
7448 gimple_seq_add_seq (join_seq
, before_join
);
7450 gimple_seq_add_stmt (join_seq
, join
);
7451 gimple_seq_add_seq (join_seq
, after_join
);
7454 /* Generate code to implement the REDUCTION clauses, append it
7455 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7456 that should be emitted also inside of the critical section,
7457 in that case clear *CLIST afterwards, otherwise leave it as is
7458 and let the caller emit it itself. */
7461 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
7462 gimple_seq
*clist
, omp_context
*ctx
)
7464 gimple_seq sub_seq
= NULL
;
7469 /* OpenACC loop reductions are handled elsewhere. */
7470 if (is_gimple_omp_oacc (ctx
->stmt
))
7473 /* SIMD reductions are handled in lower_rec_input_clauses. */
7474 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7475 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7478 /* inscan reductions are handled elsewhere. */
7479 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
7482 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7483 update in that case, otherwise use a lock. */
7484 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
7485 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7486 && !OMP_CLAUSE_REDUCTION_TASK (c
))
7488 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
7489 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7491 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7501 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7503 tree var
, ref
, new_var
, orig_var
;
7504 enum tree_code code
;
7505 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7507 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7508 || OMP_CLAUSE_REDUCTION_TASK (c
))
7511 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
7512 orig_var
= var
= OMP_CLAUSE_DECL (c
);
7513 if (TREE_CODE (var
) == MEM_REF
)
7515 var
= TREE_OPERAND (var
, 0);
7516 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7517 var
= TREE_OPERAND (var
, 0);
7518 if (TREE_CODE (var
) == ADDR_EXPR
)
7519 var
= TREE_OPERAND (var
, 0);
7522 /* If this is a pointer or referenced based array
7523 section, the var could be private in the outer
7524 context e.g. on orphaned loop construct. Pretend this
7525 is private variable's outer reference. */
7526 ccode
= OMP_CLAUSE_PRIVATE
;
7527 if (TREE_CODE (var
) == INDIRECT_REF
)
7528 var
= TREE_OPERAND (var
, 0);
7531 if (is_variable_sized (var
))
7533 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7534 var
= DECL_VALUE_EXPR (var
);
7535 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
7536 var
= TREE_OPERAND (var
, 0);
7537 gcc_assert (DECL_P (var
));
7540 new_var
= lookup_decl (var
, ctx
);
7541 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
7542 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7543 ref
= build_outer_var_ref (var
, ctx
, ccode
);
7544 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
7546 /* reduction(-:var) sums up the partial results, so it acts
7547 identically to reduction(+:var). */
7548 if (code
== MINUS_EXPR
)
7551 /* C/C++ permits FP/complex with || and &&. */
7552 bool is_fp_and_or
= ((code
== TRUTH_ANDIF_EXPR
7553 || code
== TRUTH_ORIF_EXPR
)
7554 && (FLOAT_TYPE_P (TREE_TYPE (new_var
))
7555 || (TREE_CODE (TREE_TYPE (new_var
))
7559 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
7561 addr
= save_expr (addr
);
7562 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
7563 tree new_var2
= new_var
;
7567 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7568 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7569 integer_type_node
, new_var
, zero
);
7570 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, integer_type_node
,
7573 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (new_var2
), ref2
,
7576 x
= fold_convert (TREE_TYPE (new_var
), x
);
7577 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
7578 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
7579 gimplify_and_add (x
, stmt_seqp
);
7582 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7584 tree d
= OMP_CLAUSE_DECL (c
);
7585 tree type
= TREE_TYPE (d
);
7586 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7587 tree i
= create_tmp_var (TREE_TYPE (v
));
7588 tree ptype
= build_pointer_type (TREE_TYPE (type
));
7589 tree bias
= TREE_OPERAND (d
, 1);
7590 d
= TREE_OPERAND (d
, 0);
7591 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
7593 tree b
= TREE_OPERAND (d
, 1);
7594 b
= maybe_lookup_decl (b
, ctx
);
7597 b
= TREE_OPERAND (d
, 1);
7598 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
7600 if (integer_zerop (bias
))
7604 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
7605 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
7606 TREE_TYPE (b
), b
, bias
);
7608 d
= TREE_OPERAND (d
, 0);
7610 /* For ref build_outer_var_ref already performs this, so
7611 only new_var needs a dereference. */
7612 if (TREE_CODE (d
) == INDIRECT_REF
)
7614 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7615 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
7617 else if (TREE_CODE (d
) == ADDR_EXPR
)
7619 if (orig_var
== var
)
7621 new_var
= build_fold_addr_expr (new_var
);
7622 ref
= build_fold_addr_expr (ref
);
7627 gcc_assert (orig_var
== var
);
7628 if (omp_is_reference (var
))
7629 ref
= build_fold_addr_expr (ref
);
7633 tree t
= maybe_lookup_decl (v
, ctx
);
7637 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7638 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
7640 if (!integer_zerop (bias
))
7642 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
7643 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7644 TREE_TYPE (new_var
), new_var
,
7645 unshare_expr (bias
));
7646 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7647 TREE_TYPE (ref
), ref
, bias
);
7649 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
7650 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
7651 tree m
= create_tmp_var (ptype
);
7652 gimplify_assign (m
, new_var
, stmt_seqp
);
7654 m
= create_tmp_var (ptype
);
7655 gimplify_assign (m
, ref
, stmt_seqp
);
7657 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
7658 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7659 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7660 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
7661 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7662 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
7663 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7665 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7666 tree decl_placeholder
7667 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7668 SET_DECL_VALUE_EXPR (placeholder
, out
);
7669 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7670 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7671 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7672 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7673 gimple_seq_add_seq (&sub_seq
,
7674 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7675 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7676 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7677 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7685 tree zero
= build_zero_cst (TREE_TYPE (out
));
7686 out2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7687 integer_type_node
, out
, zero
);
7688 priv2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7689 integer_type_node
, priv
, zero
);
7691 x
= build2 (code
, TREE_TYPE (out2
), out2
, priv2
);
7693 x
= fold_convert (TREE_TYPE (out
), x
);
7694 out
= unshare_expr (out
);
7695 gimplify_assign (out
, x
, &sub_seq
);
7697 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7698 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7699 gimple_seq_add_stmt (&sub_seq
, g
);
7700 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7701 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7702 gimple_seq_add_stmt (&sub_seq
, g
);
7703 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7704 build_int_cst (TREE_TYPE (i
), 1));
7705 gimple_seq_add_stmt (&sub_seq
, g
);
7706 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
7707 gimple_seq_add_stmt (&sub_seq
, g
);
7708 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
7710 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7712 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7714 if (omp_is_reference (var
)
7715 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7717 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7718 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7719 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7720 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7721 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7722 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7723 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7727 tree new_var2
= new_var
;
7731 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7732 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7733 integer_type_node
, new_var
, zero
);
7734 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, integer_type_node
,
7737 x
= build2 (code
, TREE_TYPE (ref
), ref2
, new_var2
);
7739 x
= fold_convert (TREE_TYPE (new_var
), x
);
7740 ref
= build_outer_var_ref (var
, ctx
);
7741 gimplify_assign (ref
, x
, &sub_seq
);
7745 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7747 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7749 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7753 gimple_seq_add_seq (stmt_seqp
, *clist
);
7757 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
7759 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7763 /* Generate code to implement the COPYPRIVATE clauses. */
7766 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
7771 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7773 tree var
, new_var
, ref
, x
;
7775 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7777 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
7780 var
= OMP_CLAUSE_DECL (c
);
7781 by_ref
= use_pointer_for_field (var
, NULL
);
7783 ref
= build_sender_ref (var
, ctx
);
7784 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
7787 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
7788 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
7790 gimplify_assign (ref
, x
, slist
);
7792 ref
= build_receiver_ref (var
, false, ctx
);
7795 ref
= fold_convert_loc (clause_loc
,
7796 build_pointer_type (TREE_TYPE (new_var
)),
7798 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
7800 if (omp_is_reference (var
))
7802 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
7803 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
7804 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7806 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
7807 gimplify_and_add (x
, rlist
);
7812 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7813 and REDUCTION from the sender (aka parent) side. */
/* NOTE(review): fragmented extract -- original line numbers are
   non-contiguous; case-body statements, 'break's and braces between the
   visible lines are missing.  Verify against upstream omp-low.c.  */
/* *ILIST receives pre-construct (send) statements, *OLIST post-construct
   (receive-back) statements; the remaining parameter (ctx) is on a
   dropped line.  */
7816 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
7820 int ignored_looptemp
= 0;
7821 bool is_taskloop
= false;
7823 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7824 by GOMP_taskloop. */
7825 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
7827 ignored_looptemp
= 2;
/* First switch: decide per clause kind whether to send in (do_in),
   receive back (do_out), or skip entirely.  */
7831 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7833 tree val
, ref
, x
, var
;
7834 bool by_ref
, do_in
= false, do_out
= false;
7835 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7837 switch (OMP_CLAUSE_CODE (c
))
7839 case OMP_CLAUSE_PRIVATE
:
7840 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
7843 case OMP_CLAUSE_FIRSTPRIVATE
:
7844 case OMP_CLAUSE_COPYIN
:
7845 case OMP_CLAUSE_LASTPRIVATE
:
7846 case OMP_CLAUSE_IN_REDUCTION
:
7847 case OMP_CLAUSE__REDUCTEMP_
:
7849 case OMP_CLAUSE_REDUCTION
:
7850 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
7853 case OMP_CLAUSE_SHARED
:
7854 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7857 case OMP_CLAUSE__LOOPTEMP_
:
7858 if (ignored_looptemp
)
7868 val
= OMP_CLAUSE_DECL (c
);
/* (IN_)REDUCTION clauses over array sections carry a MEM_REF decl;
   peel MEM_REF / POINTER_PLUS_EXPR / INDIRECT_REF / ADDR_EXPR wrappers
   to reach the underlying base variable.  */
7869 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7870 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
7871 && TREE_CODE (val
) == MEM_REF
)
7873 val
= TREE_OPERAND (val
, 0);
7874 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
7875 val
= TREE_OPERAND (val
, 0);
7876 if (TREE_CODE (val
) == INDIRECT_REF
7877 || TREE_CODE (val
) == ADDR_EXPR
)
7878 val
= TREE_OPERAND (val
, 0);
7879 if (is_variable_sized (val
))
7883 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7884 outer taskloop region. */
7885 omp_context
*ctx_for_o
= ctx
;
7887 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
7888 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
7889 ctx_for_o
= ctx
->outer
;
7891 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
/* Global variables generally need no marshalling -- the child can see
   them directly -- except in the pointer-typed task cases below.  */
7893 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
7894 && is_global_var (var
)
7895 && (val
== OMP_CLAUSE_DECL (c
)
7896 || !is_task_ctx (ctx
)
7897 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
7898 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
7899 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
7900 != POINTER_TYPE
)))))
/* Member-access dummy vars stand in for C++ 'this->field'; remap the
   DECL_VALUE_EXPR into the outer context before sending.  */
7903 t
= omp_member_access_dummy_var (var
);
7906 var
= DECL_VALUE_EXPR (var
);
7907 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
7909 var
= unshare_and_remap (var
, t
, o
);
7911 var
= unshare_expr (var
);
7914 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
7916 /* Handle taskloop firstprivate/lastprivate, where the
7917 lastprivate on GIMPLE_OMP_TASK is represented as
7918 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7919 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
7920 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
7921 if (use_pointer_for_field (val
, ctx
))
7922 var
= build_fold_addr_expr (var
);
7923 gimplify_assign (x
, var
, ilist
);
/* Clearing DECL_ABSTRACT_ORIGIN marks the field as already handled --
   TODO confirm against lower_send_shared_vars, which skips fields
   whose origin is gone.  */
7924 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
7928 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7929 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
7930 || val
== OMP_CLAUSE_DECL (c
))
7931 && is_variable_sized (val
))
7933 by_ref
= use_pointer_for_field (val
, NULL
);
/* Second switch: refine do_in/do_out/by_ref per clause kind.  */
7935 switch (OMP_CLAUSE_CODE (c
))
7937 case OMP_CLAUSE_FIRSTPRIVATE
:
7938 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
7940 && is_task_ctx (ctx
))
7941 suppress_warning (var
);
7945 case OMP_CLAUSE_PRIVATE
:
7946 case OMP_CLAUSE_COPYIN
:
7947 case OMP_CLAUSE__LOOPTEMP_
:
7948 case OMP_CLAUSE__REDUCTEMP_
:
7952 case OMP_CLAUSE_LASTPRIVATE
:
7953 if (by_ref
|| omp_is_reference (val
))
7955 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
7962 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
7967 case OMP_CLAUSE_REDUCTION
:
7968 case OMP_CLAUSE_IN_REDUCTION
:
7970 if (val
== OMP_CLAUSE_DECL (c
))
7972 if (is_task_ctx (ctx
))
7973 by_ref
= use_pointer_for_field (val
, ctx
);
7975 do_out
= !(by_ref
|| omp_is_reference (val
));
7978 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
/* Emit the send: store the value (or its address when by_ref) into the
   sender record before the construct.  */
7987 ref
= build_sender_ref (val
, ctx
);
7988 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
7989 gimplify_assign (ref
, x
, ilist
);
7990 if (is_task_ctx (ctx
))
7991 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
/* Emit the receive-back: copy the field out after the construct.  */
7996 ref
= build_sender_ref (val
, ctx
);
7997 gimplify_assign (var
, ref
, olist
);
8002 /* Generate code to implement SHARED from the sender (aka parent)
8003 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8004 list things that got automatically shared. */
/* NOTE(review): fragmented extract -- non-contiguous original line
   numbers mean braces and several 'continue'/'else' lines are missing.
   Verify against upstream omp-low.c before editing.  */
/* Walks the fields of the context's (s)record_type rather than a clause
   chain: each field's DECL_ABSTRACT_ORIGIN points back at the shared
   variable it marshals.  */
8007 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
8009 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
8011 if (ctx
->record_type
== NULL
)
8014 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
8015 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
8017 ovar
= DECL_ABSTRACT_ORIGIN (f
);
/* Fields whose origin was cleared (handled elsewhere) or is itself a
   FIELD_DECL are skipped.  */
8018 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
8021 nvar
= maybe_lookup_decl (ovar
, ctx
);
8023 || !DECL_HAS_VALUE_EXPR_P (nvar
)
8024 || (ctx
->allocate_map
8025 && ctx
->allocate_map
->get (ovar
)))
8028 /* If CTX is a nested parallel directive. Find the immediately
8029 enclosing parallel or workshare construct that contains a
8030 mapping for OVAR. */
8031 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
8033 t
= omp_member_access_dummy_var (var
);
8036 var
= DECL_VALUE_EXPR (var
);
8037 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
8039 var
= unshare_and_remap (var
, t
, o
);
8041 var
= unshare_expr (var
);
8044 if (use_pointer_for_field (ovar
, ctx
))
/* By-reference field: normally send the variable's address; the
   array-typed special case below is the _condtemp_ clause, which gets
   an empty CONSTRUCTOR instead.  */
8046 x
= build_sender_ref (ovar
, ctx
);
8047 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
8048 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
8050 gcc_assert (is_parallel_ctx (ctx
)
8051 && DECL_ARTIFICIAL (ovar
));
8052 /* _condtemp_ clause. */
8053 var
= build_constructor (TREE_TYPE (x
), NULL
);
8056 var
= build_fold_addr_expr (var
);
8057 gimplify_assign (x
, var
, ilist
);
/* By-value field: copy in before the region and -- unless read-only or
   a by-reference RESULT/PARM decl -- copy back out after it.  */
8061 x
= build_sender_ref (ovar
, ctx
);
8062 gimplify_assign (x
, var
, ilist
);
8064 if (!TREE_READONLY (var
)
8065 /* We don't need to receive a new reference to a result
8066 or parm decl. In fact we may not store to it as we will
8067 invalidate any pending RSO and generate wrong gimple
8069 && !((TREE_CODE (var
) == RESULT_DECL
8070 || TREE_CODE (var
) == PARM_DECL
)
8071 && DECL_BY_REFERENCE (var
)))
8073 x
= build_sender_ref (ovar
, ctx
);
8074 gimplify_assign (var
, x
, olist
);
8080 /* Emit an OpenACC head marker call, encapsulating the partitioning and
8081 other information that must be processed by the target compiler.
8082 Return the maximum number of dimensions the associated loop might
8083 be partitioned over. */
/* NOTE(review): fragmented extract -- the declaration/initialization of
   'tag', several case bodies, 'break's and the final 'return levels;'
   fall in dropped lines.  Verify against upstream omp-low.c.  */
/* Builds an IFN_UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static])
   internal call and appends it to *SEQ (grounded by the quick_push /
   gimple_build_call_internal_vec sequence below).  */
8086 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
8087 gimple_seq
*seq
, omp_context
*ctx
)
8089 unsigned levels
= 0;
8091 tree gang_static
= NULL_TREE
;
8092 auto_vec
<tree
, 5> args
;
8094 args
.quick_push (build_int_cst
8095 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
8096 args
.quick_push (ddvar
);
/* Accumulate partitioning flags from the loop clauses into 'tag' and
   count the partitioning levels.  */
8097 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8099 switch (OMP_CLAUSE_CODE (c
))
8101 case OMP_CLAUSE_GANG
:
8102 tag
|= OLF_DIM_GANG
;
8103 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
8104 /* static:* is represented by -1, and we can ignore it, as
8105 scheduling is always static. */
8106 if (gang_static
&& integer_minus_onep (gang_static
))
8107 gang_static
= NULL_TREE
;
8111 case OMP_CLAUSE_WORKER
:
8112 tag
|= OLF_DIM_WORKER
;
8116 case OMP_CLAUSE_VECTOR
:
8117 tag
|= OLF_DIM_VECTOR
;
8121 case OMP_CLAUSE_SEQ
:
8125 case OMP_CLAUSE_AUTO
:
8129 case OMP_CLAUSE_INDEPENDENT
:
8130 tag
|= OLF_INDEPENDENT
;
8133 case OMP_CLAUSE_TILE
:
8144 if (DECL_P (gang_static
))
8145 gang_static
= build_outer_var_ref (gang_static
, ctx
);
8146 tag
|= OLF_GANG_STATIC
;
/* Adjust the tag according to the kind of enclosing offload region.  */
8149 omp_context
*tgt
= enclosing_target_ctx (ctx
);
8150 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8152 else if (is_oacc_kernels (tgt
))
8153 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8155 else if (is_oacc_kernels_decomposed_part (tgt
))
8160 /* In a parallel region, loops are implicitly INDEPENDENT. */
8161 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8162 tag
|= OLF_INDEPENDENT
;
8164 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8165 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8166 if (tgt
&& is_oacc_kernels_decomposed_part (tgt
))
8168 gcc_assert (tag
& (OLF_SEQ
| OLF_INDEPENDENT
));
8169 gcc_assert (!(tag
& OLF_AUTO
));
8173 /* Tiling could use all 3 levels. */
8177 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8178 Ensure at least one level, or 2 for possible auto
8180 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
8181 << OLF_DIM_BASE
) | OLF_SEQ
));
8183 if (levels
< 1u + maybe_auto
)
8184 levels
= 1u + maybe_auto
;
/* Finish the argument vector and emit the marker call, whose result is
   threaded through DDVAR to serialize with neighbouring markers.  */
8187 args
.quick_push (build_int_cst (integer_type_node
, levels
));
8188 args
.quick_push (build_int_cst (integer_type_node
, tag
));
8190 args
.quick_push (gang_static
);
8192 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
8193 gimple_set_location (call
, loc
);
8194 gimple_set_lhs (call
, ddvar
);
8195 gimple_seq_add_stmt (seq
, call
);
8200 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8201 partitioning level of the enclosed region. */
8204 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
8205 tree tofollow
, gimple_seq
*seq
)
8207 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
8208 : IFN_UNIQUE_OACC_TAIL_MARK
);
8209 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
8210 int nargs
= 2 + (tofollow
!= NULL_TREE
);
8211 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
8212 marker
, ddvar
, tofollow
);
8213 gimple_set_location (call
, loc
);
8214 gimple_set_lhs (call
, ddvar
);
8215 gimple_seq_add_stmt (seq
, call
);
8218 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8219 the loop clauses, from which we extract reductions. Initialize
/* NOTE(review): fragmented extract -- the tail of this comment, the
   'if (private_marker)' guard, and the *SEQ arguments of the two
   per-level lower_oacc_loop_marker calls are on dropped lines.  Verify
   against upstream omp-low.c before editing.  */
8223 lower_oacc_head_tail (location_t loc
, tree clauses
, gcall
*private_marker
,
8224 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
/* DDVAR is the data-dependency temp threaded through every marker and
   fork/join call to keep them ordered.  */
8227 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
8228 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
/* COUNT is the number of partitioning levels returned by the head-mark
   emission.  */
8230 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
8234 gimple_set_location (private_marker
, loc
);
8235 gimple_call_set_lhs (private_marker
, ddvar
);
8236 gimple_call_set_arg (private_marker
, 1, ddvar
);
8239 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
8240 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
/* One iteration per partitioning level: build a matched fork/join pair
   plus begin/end markers.  */
8243 for (unsigned done
= 1; count
; count
--, done
++)
8245 gimple_seq fork_seq
= NULL
;
8246 gimple_seq join_seq
= NULL
;
8248 tree place
= build_int_cst (integer_type_node
, -1);
8249 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8250 fork_kind
, ddvar
, place
);
8251 gimple_set_location (fork
, loc
);
8252 gimple_set_lhs (fork
, ddvar
);
8254 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8255 join_kind
, ddvar
, place
);
8256 gimple_set_location (join
, loc
);
8257 gimple_set_lhs (join
, ddvar
);
8259 /* Mark the beginning of this level sequence. */
8261 lower_oacc_loop_marker (loc
, ddvar
, true,
8262 build_int_cst (integer_type_node
, count
),
8264 lower_oacc_loop_marker (loc
, ddvar
, false,
8265 build_int_cst (integer_type_node
, done
),
/* The innermost level (count == 1) also carries the private marker.  */
8268 lower_oacc_reductions (loc
, clauses
, place
, inner
,
8269 fork
, (count
== 1) ? private_marker
: NULL
,
8270 join
, &fork_seq
, &join_seq
, ctx
);
8272 /* Append this level to head. */
8273 gimple_seq_add_seq (head
, fork_seq
);
8274 /* Prepend it to tail. */
8275 gimple_seq_add_seq (&join_seq
, *tail
);
8281 /* Mark the end of the sequence. */
8282 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
8283 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
8286 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8287 catch handler and return it. This prevents programs from violating the
8288 structured block semantics with throws. */
8291 maybe_catch_exception (gimple_seq body
)
8296 if (!flag_exceptions
)
8299 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
8300 decl
= lang_hooks
.eh_protect_cleanup_actions ();
8302 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
8304 g
= gimple_build_eh_must_not_throw (decl
);
8305 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
8308 return gimple_seq_alloc_with_stmt (g
);
8312 /* Routines to lower OMP directives into OMP-GIMPLE. */
8314 /* If ctx is a worksharing context inside of a cancellable parallel
8315 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8316 and conditional branch to parallel's cancel_label to handle
8317 cancellation in the implicit barrier. */
/* NOTE(review): fragmented extract -- the third parameter (the gimple_seq
   *body used below), the early 'return' after the nowait test, and the
   loop's 'break'/'return' statements are on dropped lines.  Verify
   against upstream omp-low.c before editing.  */
8320 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
8323 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
/* nowait regions have no implicit barrier, hence nothing to cancel.  */
8324 if (gimple_omp_return_nowait_p (omp_return
))
/* Walk outward looking for a cancellable enclosing parallel.  */
8326 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8327 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8328 && outer
->cancellable
)
/* BUILT_IN_GOMP_CANCEL is fetched only for its return type, which
   gives the C bool type used for the barrier's cancel flag.  */
8330 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
8331 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
8332 tree lhs
= create_tmp_var (c_bool_type
);
8333 gimple_omp_return_set_lhs (omp_return
, lhs
);
8334 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
/* Branch to the parallel's cancel label when the barrier reports
   cancellation; otherwise fall through.  */
8335 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
8336 fold_convert (c_bool_type
,
8337 boolean_false_node
),
8338 outer
->cancel_label
, fallthru_label
);
8339 gimple_seq_add_stmt (body
, g
);
8340 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
/* Any other non-TASKGROUP enclosing construct stops the search --
   presumably via a dropped 'return'.  */
8342 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
8346 /* Find the first task_reduction or reduction clause or return NULL
8347 if there are none. */
8350 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
8351 enum omp_clause_code ccode
)
8355 clauses
= omp_find_clause (clauses
, ccode
);
8356 if (clauses
== NULL_TREE
)
8358 if (ccode
!= OMP_CLAUSE_REDUCTION
8359 || code
== OMP_TASKLOOP
8360 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
8362 clauses
= OMP_CLAUSE_CHAIN (clauses
);
8366 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
8367 gimple_seq
*, gimple_seq
*);
8369 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8370 CTX is the enclosing OMP context for the current statement. */
/* NOTE(review): fragmented extract -- non-contiguous original line
   numbers; braces, several declarations (rclauses, t) and guard
   conditions are on dropped lines.  Verify against upstream omp-low.c
   before editing.  */
8373 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8375 tree block
, control
;
8376 gimple_stmt_iterator tgsi
;
8377 gomp_sections
*stmt
;
8379 gbind
*new_stmt
, *bind
;
8380 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
8382 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
8384 push_gimplify_context ();
/* If task reductions are present, thread a _reductemp_ clause and
   register/unregister sequences through the construct.  */
8390 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
8391 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
8392 tree rtmp
= NULL_TREE
;
8395 tree type
= build_pointer_type (pointer_sized_int_node
);
8396 tree temp
= create_tmp_var (type
);
8397 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8398 OMP_CLAUSE_DECL (c
) = temp
;
8399 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
8400 gimple_omp_sections_set_clauses (stmt
, c
);
8401 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
8402 gimple_omp_sections_clauses (stmt
),
8403 &ilist
, &tred_dlist
);
8405 rtmp
= make_ssa_name (type
);
8406 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
8409 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
8410 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
8412 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
8413 &ilist
, &dlist
, ctx
, NULL
);
/* The control variable drives the sections switch at expansion time.  */
8415 control
= create_tmp_var (unsigned_type_node
, ".section");
8416 gimple_omp_sections_set_control (stmt
, control
);
/* Lower each contained GIMPLE_OMP_SECTION in its own context, splicing
   the lowered body back after the section statement.  */
8418 new_body
= gimple_omp_body (stmt
);
8419 gimple_omp_set_body (stmt
, NULL
);
8420 tgsi
= gsi_start (new_body
);
8421 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
8426 sec_start
= gsi_stmt (tgsi
);
8427 sctx
= maybe_lookup_ctx (sec_start
);
8430 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
8431 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
8432 GSI_CONTINUE_LINKING
);
8433 gimple_omp_set_body (sec_start
, NULL
);
/* Lastprivate handling is attached to the final section.  */
8435 if (gsi_one_before_end_p (tgsi
))
8437 gimple_seq l
= NULL
;
8438 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
8439 &ilist
, &l
, &clist
, ctx
);
8440 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
8441 gimple_omp_section_set_last (sec_start
);
8444 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
8445 GSI_CONTINUE_LINKING
);
8448 block
= make_node (BLOCK
);
8449 bind
= gimple_build_bind (NULL
, new_body
, block
);
8452 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
/* When conditional-lastprivate statements (clist) exist, wrap them in
   GOMP_atomic_start/GOMP_atomic_end.  */
8456 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8457 gcall
*g
= gimple_build_call (fndecl
, 0);
8458 gimple_seq_add_stmt (&olist
, g
);
8459 gimple_seq_add_seq (&olist
, clist
);
8460 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8461 g
= gimple_build_call (fndecl
, 0);
8462 gimple_seq_add_stmt (&olist
, g
);
/* Replace the sections statement with a fresh bind holding the whole
   lowered construct.  */
8465 block
= make_node (BLOCK
);
8466 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
8467 gsi_replace (gsi_p
, new_stmt
, true);
8469 pop_gimplify_context (new_stmt
);
8470 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
8471 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8472 if (BLOCK_VARS (block
))
8473 TREE_USED (block
) = 1;
/* Assemble: input clauses, the sections stmt, the switch, the sections
   body, the continue, output clauses, destructors.  */
8476 gimple_seq_add_seq (&new_body
, ilist
);
8477 gimple_seq_add_stmt (&new_body
, stmt
);
8478 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
8479 gimple_seq_add_stmt (&new_body
, bind
);
8481 t
= gimple_build_omp_continue (control
, control
);
8482 gimple_seq_add_stmt (&new_body
, t
);
8484 gimple_seq_add_seq (&new_body
, olist
);
8485 if (ctx
->cancellable
)
8486 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
8487 gimple_seq_add_seq (&new_body
, dlist
);
8489 new_body
= maybe_catch_exception (new_body
);
8491 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
8492 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8493 t
= gimple_build_omp_return (nowait
);
8494 gimple_seq_add_stmt (&new_body
, t
);
8495 gimple_seq_add_seq (&new_body
, tred_dlist
);
8496 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
8499 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
8501 gimple_bind_set_body (new_stmt
, new_body
);
8505 /* A subroutine of lower_omp_single. Expand the simple form of
8506 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8508 if (GOMP_single_start ())
8510 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8512 FIXME. It may be better to delay expanding the logic of this until
8513 pass_expand_omp. The expanded logic may make the job more difficult
8514 to a synchronization analysis pass. */
/* NOTE(review): fragmented extract -- the declarations of 'decl'/'lhs'
   and the remaining operands of the gimple_build_cond call (presumably
   the comparison constant and the tlabel/flabel targets) are on dropped
   lines.  Verify against upstream omp-low.c before editing.  */
8517 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
8519 location_t loc
= gimple_location (single_stmt
);
8520 tree tlabel
= create_artificial_label (loc
);
8521 tree flabel
= create_artificial_label (loc
);
8522 gimple
*call
, *cond
;
/* Call GOMP_single_start (); its boolean result decides whether this
   thread executes the single body.  */
8525 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
8526 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
8527 call
= gimple_build_call (decl
, 0);
8528 gimple_call_set_lhs (call
, lhs
);
8529 gimple_seq_add_stmt (pre_p
, call
);
8531 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
8532 fold_convert_loc (loc
, TREE_TYPE (lhs
),
/* Emit: cond; tlabel: body; flabel: -- the body runs only on the
   winning thread.  */
8535 gimple_seq_add_stmt (pre_p
, cond
);
8536 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
8537 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8538 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
8542 /* A subroutine of lower_omp_single. Expand the simple form of
8543 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8545 #pragma omp single copyprivate (a, b, c)
8547 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8550 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8556 GOMP_single_copy_end (&copyout);
8567 FIXME. It may be better to delay expanding the logic of this until
8568 pass_expand_omp. The expanded logic may make the job more difficult
8569 to a synchronization analysis pass. */
/* NOTE(review): fragmented extract -- part of the illustrative pseudo
   code above, the third parameter (ctx, used below), and the trailing
   arguments of the lower_copyprivate_clauses call are on dropped lines.
   Verify against upstream omp-low.c before editing.  */
8572 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
8575 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
8576 gimple_seq copyin_seq
;
8577 location_t loc
= gimple_location (single_stmt
);
/* The record holds the copied-out values; the receiver is a pointer to
   the winning thread's record.  */
8579 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
8581 ptr_type
= build_pointer_type (ctx
->record_type
);
8582 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
8584 l0
= create_artificial_label (loc
);
8585 l1
= create_artificial_label (loc
);
8586 l2
= create_artificial_label (loc
);
/* receiver = GOMP_single_copy_start (); NULL means this thread won and
   must run the body (branch to l0), otherwise copy in (branch to l1).  */
8588 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
8589 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
8590 t
= fold_convert_loc (loc
, ptr_type
, t
);
8591 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
8593 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
8594 build_int_cst (ptr_type
, 0));
8595 t
= build3 (COND_EXPR
, void_type_node
, t
,
8596 build_and_jump (&l0
), build_and_jump (&l1
));
8597 gimplify_and_add (t
, pre_p
);
/* l0: winning thread -- run the body, fill the record, publish it.  */
8599 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
8601 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8604 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
8607 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
8608 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
8609 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
8610 gimplify_and_add (t
, pre_p
);
8612 t
= build_and_jump (&l2
);
8613 gimplify_and_add (t
, pre_p
);
/* l1: losing threads -- copy the published values into the locals.  */
8615 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
8617 gimple_seq_add_seq (pre_p
, copyin_seq
);
8619 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
8623 /* Expand code for an OpenMP single directive. */
/* NOTE(review): fragmented extract -- declarations of 'block'/'bind',
   the initialization of bind_body, and the 'else' that selects the
   simple form are on dropped lines.  Verify against upstream omp-low.c
   before editing.  */
8626 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8629 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
8631 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8633 push_gimplify_context ();
8635 block
= make_node (BLOCK
);
8636 bind
= gimple_build_bind (NULL
, NULL
, block
);
8637 gsi_replace (gsi_p
, bind
, true);
8640 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
8641 &bind_body
, &dlist
, ctx
, NULL
);
8642 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
8644 gimple_seq_add_stmt (&bind_body
, single_stmt
);
/* record_type is only set when copyprivate clauses exist, which selects
   the copy-out expansion over the simple one.  */
8646 if (ctx
->record_type
)
8647 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
8649 lower_omp_single_simple (single_stmt
, &bind_body
);
8651 gimple_omp_set_body (single_stmt
, NULL
);
8653 gimple_seq_add_seq (&bind_body
, dlist
);
8655 bind_body
= maybe_catch_exception (bind_body
);
8657 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
8658 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8659 gimple
*g
= gimple_build_omp_return (nowait
);
8660 gimple_seq_add_stmt (&bind_body_tail
, g
);
8661 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
/* Clobber the copy-out record once it is dead so later passes can
   reuse its storage.  */
8662 if (ctx
->record_type
)
8664 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8665 tree clobber
= build_clobber (ctx
->record_type
);
8666 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8667 clobber
), GSI_SAME_STMT
);
8669 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8670 gimple_bind_set_body (bind
, bind_body
);
8672 pop_gimplify_context (bind
);
8674 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8675 BLOCK_VARS (block
) = ctx
->block_vars
;
8676 if (BLOCK_VARS (block
))
8677 TREE_USED (block
) = 1;
8681 /* Expand code for an OpenMP master directive. */
/* NOTE(review): fragmented extract -- declarations of 'bind' and 'tseq'
   are on dropped lines.  Verify against upstream omp-low.c before
   editing.  */
8684 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8686 tree block
, lab
= NULL
, x
, bfn_decl
;
8687 gimple
*stmt
= gsi_stmt (*gsi_p
);
8689 location_t loc
= gimple_location (stmt
);
8692 push_gimplify_context ();
8694 block
= make_node (BLOCK
);
8695 bind
= gimple_build_bind (NULL
, NULL
, block
);
8696 gsi_replace (gsi_p
, bind
, true);
8697 gimple_bind_add_stmt (bind
, stmt
);
/* Guard the body with 'if (omp_get_thread_num () == 0)': only the
   master thread executes it; others jump past to LAB.  */
8699 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8700 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
8701 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
8702 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
8704 gimplify_and_add (x
, &tseq
);
8705 gimple_bind_add_seq (bind
, tseq
);
8707 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
8708 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
8709 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
8710 gimple_omp_set_body (stmt
, NULL
);
8712 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
/* master has no implicit barrier: the OMP return is nowait (true).  */
8714 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
8716 pop_gimplify_context (bind
);
8718 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8719 BLOCK_VARS (block
) = ctx
->block_vars
;
8722 /* Helper function for lower_omp_task_reductions. For a specific PASS,
8723 find the current clause that should be processed, or return false
8724 if all have been processed already. */
/* NOTE(review): fragmented extract -- the body of the MEM_REF branch
   (lines 8740-8745 of the original) and the 'return true'/'return
   false' statements are dropped.  Verify against upstream omp-low.c
   before editing.  */
/* Outputs: *C is the current clause, *DECL its variable, *TYPE the
   element type, *NEXT the following matching clause.  */
8727 omp_task_reduction_iterate (int pass
, enum tree_code code
,
8728 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
8729 tree
*type
, tree
*next
)
8731 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
/* Plain (non-'task') reductions on non-taskloop constructs are not
   task reductions -- skip them.  */
8733 if (ccode
== OMP_CLAUSE_REDUCTION
8734 && code
!= OMP_TASKLOOP
8735 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
8737 *decl
= OMP_CLAUSE_DECL (*c
);
8738 *type
= TREE_TYPE (*decl
);
8739 if (TREE_CODE (*decl
) == MEM_REF
)
8746 if (omp_is_reference (*decl
))
8747 *type
= TREE_TYPE (*type
);
/* PASS 0 handles constant-sized types, PASS 1 variable-sized ones:
   the clause only matches when the pass parity agrees with
   TREE_CONSTANT (TYPE_SIZE_UNIT (*type)).  */
8748 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
8751 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
8760 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8761 OMP_TASKGROUP only with task modifier). Register mapping of those in
8762 START sequence and reducing them and unregister them in the END sequence. */
8765 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
8766 gimple_seq
*start
, gimple_seq
*end
)
8768 enum omp_clause_code ccode
8769 = (code
== OMP_TASKGROUP
8770 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
8771 tree cancellable
= NULL_TREE
;
8772 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
8773 if (clauses
== NULL_TREE
)
8775 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8777 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8778 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8779 && outer
->cancellable
)
8781 cancellable
= error_mark_node
;
8784 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
8787 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
8788 tree
*last
= &TYPE_FIELDS (record_type
);
8792 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8794 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
8797 DECL_CHAIN (field
) = ifield
;
8798 last
= &DECL_CHAIN (ifield
);
8799 DECL_CONTEXT (field
) = record_type
;
8800 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8801 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8802 DECL_CONTEXT (ifield
) = record_type
;
8803 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
8804 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
8806 for (int pass
= 0; pass
< 2; pass
++)
8808 tree decl
, type
, next
;
8809 for (tree c
= clauses
;
8810 omp_task_reduction_iterate (pass
, code
, ccode
,
8811 &c
, &decl
, &type
, &next
); c
= next
)
8814 tree new_type
= type
;
8816 new_type
= remap_type (type
, &ctx
->outer
->cb
);
8818 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
8819 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
8821 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
8823 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
8824 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
8825 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
8828 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
8829 DECL_CONTEXT (field
) = record_type
;
8830 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
8831 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
8833 last
= &DECL_CHAIN (field
);
8835 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
8837 DECL_CONTEXT (bfield
) = record_type
;
8838 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
8839 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
8841 last
= &DECL_CHAIN (bfield
);
8845 layout_type (record_type
);
8847 /* Build up an array which registers with the runtime all the reductions
8848 and deregisters them at the end. Format documented in libgomp/task.c. */
8849 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
8850 tree avar
= create_tmp_var_raw (atype
);
8851 gimple_add_tmp_var (avar
);
8852 TREE_ADDRESSABLE (avar
) = 1;
8853 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
8854 NULL_TREE
, NULL_TREE
);
8855 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
8856 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8857 gimple_seq seq
= NULL
;
8858 tree sz
= fold_convert (pointer_sized_int_node
,
8859 TYPE_SIZE_UNIT (record_type
));
8861 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
8862 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
8863 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
8864 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
8865 ctx
->task_reductions
.create (1 + cnt
);
8866 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
8867 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
8869 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
8870 gimple_seq_add_seq (start
, seq
);
8871 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
8872 NULL_TREE
, NULL_TREE
);
8873 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
8874 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8875 NULL_TREE
, NULL_TREE
);
8876 t
= build_int_cst (pointer_sized_int_node
,
8877 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
8878 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8879 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
8880 NULL_TREE
, NULL_TREE
);
8881 t
= build_int_cst (pointer_sized_int_node
, -1);
8882 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8883 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
8884 NULL_TREE
, NULL_TREE
);
8885 t
= build_int_cst (pointer_sized_int_node
, 0);
8886 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8888 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8889 and for each task reduction checks a bool right after the private variable
8890 within that thread's chunk; if the bool is clear, it hasn't been
8891 initialized and thus isn't going to be reduced nor destructed, otherwise
8892 reduce and destruct it. */
8893 tree idx
= create_tmp_var (size_type_node
);
8894 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
8895 tree num_thr_sz
= create_tmp_var (size_type_node
);
8896 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
8897 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
8898 tree lab3
= NULL_TREE
, lab7
= NULL_TREE
;
8900 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8902 /* For worksharing constructs, only perform it in the master thread,
8903 with the exception of cancelled implicit barriers - then only handle
8904 the current thread. */
8905 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8906 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
8907 tree thr_num
= create_tmp_var (integer_type_node
);
8908 g
= gimple_build_call (t
, 0);
8909 gimple_call_set_lhs (g
, thr_num
);
8910 gimple_seq_add_stmt (end
, g
);
8914 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8915 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8916 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8917 if (code
== OMP_FOR
)
8918 c
= gimple_omp_for_clauses (ctx
->stmt
);
8919 else /* if (code == OMP_SECTIONS) */
8920 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8921 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
8923 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
8925 gimple_seq_add_stmt (end
, g
);
8926 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8927 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
8928 gimple_seq_add_stmt (end
, g
);
8929 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
8930 build_one_cst (TREE_TYPE (idx
)));
8931 gimple_seq_add_stmt (end
, g
);
8932 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
8933 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8935 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
8936 gimple_seq_add_stmt (end
, g
);
8937 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8939 if (code
!= OMP_PARALLEL
)
8941 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
8942 tree num_thr
= create_tmp_var (integer_type_node
);
8943 g
= gimple_build_call (t
, 0);
8944 gimple_call_set_lhs (g
, num_thr
);
8945 gimple_seq_add_stmt (end
, g
);
8946 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
8947 gimple_seq_add_stmt (end
, g
);
8949 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8953 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
8954 OMP_CLAUSE__REDUCTEMP_
);
8955 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
8956 t
= fold_convert (size_type_node
, t
);
8957 gimplify_assign (num_thr_sz
, t
, end
);
8959 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
8960 NULL_TREE
, NULL_TREE
);
8961 tree data
= create_tmp_var (pointer_sized_int_node
);
8962 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
8963 if (code
== OMP_TASKLOOP
)
8965 lab7
= create_artificial_label (UNKNOWN_LOCATION
);
8966 g
= gimple_build_cond (NE_EXPR
, data
,
8967 build_zero_cst (pointer_sized_int_node
),
8969 gimple_seq_add_stmt (end
, g
);
8971 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
8973 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
8974 ptr
= create_tmp_var (build_pointer_type (record_type
));
8976 ptr
= create_tmp_var (ptr_type_node
);
8977 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
8979 tree field
= TYPE_FIELDS (record_type
);
8982 field
= DECL_CHAIN (DECL_CHAIN (field
));
8983 for (int pass
= 0; pass
< 2; pass
++)
8985 tree decl
, type
, next
;
8986 for (tree c
= clauses
;
8987 omp_task_reduction_iterate (pass
, code
, ccode
,
8988 &c
, &decl
, &type
, &next
); c
= next
)
8990 tree var
= decl
, ref
;
8991 if (TREE_CODE (decl
) == MEM_REF
)
8993 var
= TREE_OPERAND (var
, 0);
8994 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
8995 var
= TREE_OPERAND (var
, 0);
8997 if (TREE_CODE (var
) == ADDR_EXPR
)
8998 var
= TREE_OPERAND (var
, 0);
8999 else if (TREE_CODE (var
) == INDIRECT_REF
)
9000 var
= TREE_OPERAND (var
, 0);
9001 tree orig_var
= var
;
9002 if (is_variable_sized (var
))
9004 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
9005 var
= DECL_VALUE_EXPR (var
);
9006 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
9007 var
= TREE_OPERAND (var
, 0);
9008 gcc_assert (DECL_P (var
));
9010 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9011 if (orig_var
!= var
)
9012 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
9013 else if (TREE_CODE (v
) == ADDR_EXPR
)
9014 t
= build_fold_addr_expr (t
);
9015 else if (TREE_CODE (v
) == INDIRECT_REF
)
9016 t
= build_fold_indirect_ref (t
);
9017 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
9019 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
9020 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
9021 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
9023 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
9024 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
9025 fold_convert (size_type_node
,
9026 TREE_OPERAND (decl
, 1)));
9030 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9031 if (!omp_is_reference (decl
))
9032 t
= build_fold_addr_expr (t
);
9034 t
= fold_convert (pointer_sized_int_node
, t
);
9036 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9037 gimple_seq_add_seq (start
, seq
);
9038 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9039 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9040 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9041 t
= unshare_expr (byte_position (field
));
9042 t
= fold_convert (pointer_sized_int_node
, t
);
9043 ctx
->task_reduction_map
->put (c
, cnt
);
9044 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
9047 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9048 gimple_seq_add_seq (start
, seq
);
9049 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9050 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
9051 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9053 tree bfield
= DECL_CHAIN (field
);
9055 if (code
== OMP_PARALLEL
|| code
== OMP_FOR
|| code
== OMP_SECTIONS
)
9056 /* In parallel or worksharing all threads unconditionally
9057 initialize all their task reduction private variables. */
9058 cond
= boolean_true_node
;
9059 else if (TREE_TYPE (ptr
) == ptr_type_node
)
9061 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9062 unshare_expr (byte_position (bfield
)));
9064 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
9065 gimple_seq_add_seq (end
, seq
);
9066 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
9067 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
9068 build_int_cst (pbool
, 0));
9071 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
9072 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
9073 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9074 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9075 tree condv
= create_tmp_var (boolean_type_node
);
9076 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
9077 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
9079 gimple_seq_add_stmt (end
, g
);
9080 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
9081 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
9083 /* If this reduction doesn't need destruction and parallel
9084 has been cancelled, there is nothing to do for this
9085 reduction, so jump around the merge operation. */
9086 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9087 g
= gimple_build_cond (NE_EXPR
, cancellable
,
9088 build_zero_cst (TREE_TYPE (cancellable
)),
9090 gimple_seq_add_stmt (end
, g
);
9091 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9095 if (TREE_TYPE (ptr
) == ptr_type_node
)
9097 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9098 unshare_expr (byte_position (field
)));
9100 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
9101 gimple_seq_add_seq (end
, seq
);
9102 tree pbool
= build_pointer_type (TREE_TYPE (field
));
9103 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
9104 build_int_cst (pbool
, 0));
9107 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
9108 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
9110 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
9111 if (TREE_CODE (decl
) != MEM_REF
&& omp_is_reference (decl
))
9112 ref
= build_simple_mem_ref (ref
);
9113 /* reduction(-:var) sums up the partial results, so it acts
9114 identically to reduction(+:var). */
9115 if (rcode
== MINUS_EXPR
)
9117 if (TREE_CODE (decl
) == MEM_REF
)
9119 tree type
= TREE_TYPE (new_var
);
9120 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
9121 tree i
= create_tmp_var (TREE_TYPE (v
));
9122 tree ptype
= build_pointer_type (TREE_TYPE (type
));
9125 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
9126 tree vv
= create_tmp_var (TREE_TYPE (v
));
9127 gimplify_assign (vv
, v
, start
);
9130 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9131 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9132 new_var
= build_fold_addr_expr (new_var
);
9133 new_var
= fold_convert (ptype
, new_var
);
9134 ref
= fold_convert (ptype
, ref
);
9135 tree m
= create_tmp_var (ptype
);
9136 gimplify_assign (m
, new_var
, end
);
9138 m
= create_tmp_var (ptype
);
9139 gimplify_assign (m
, ref
, end
);
9141 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
9142 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
9143 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
9144 gimple_seq_add_stmt (end
, gimple_build_label (body
));
9145 tree priv
= build_simple_mem_ref (new_var
);
9146 tree out
= build_simple_mem_ref (ref
);
9147 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9149 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9150 tree decl_placeholder
9151 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
9152 tree lab6
= NULL_TREE
;
9155 /* If this reduction needs destruction and parallel
9156 has been cancelled, jump around the merge operation
9157 to the destruction. */
9158 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9159 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9160 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9161 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9163 gimple_seq_add_stmt (end
, g
);
9164 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9166 SET_DECL_VALUE_EXPR (placeholder
, out
);
9167 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9168 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
9169 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
9170 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9171 gimple_seq_add_seq (end
,
9172 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9173 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9174 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9176 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9177 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
9180 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9181 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
9184 gimple_seq tseq
= NULL
;
9185 gimplify_stmt (&x
, &tseq
);
9186 gimple_seq_add_seq (end
, tseq
);
9191 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
9192 out
= unshare_expr (out
);
9193 gimplify_assign (out
, x
, end
);
9196 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
9197 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9198 gimple_seq_add_stmt (end
, g
);
9199 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
9200 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9201 gimple_seq_add_stmt (end
, g
);
9202 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
9203 build_int_cst (TREE_TYPE (i
), 1));
9204 gimple_seq_add_stmt (end
, g
);
9205 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
9206 gimple_seq_add_stmt (end
, g
);
9207 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
9209 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9211 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9212 tree oldv
= NULL_TREE
;
9213 tree lab6
= NULL_TREE
;
9216 /* If this reduction needs destruction and parallel
9217 has been cancelled, jump around the merge operation
9218 to the destruction. */
9219 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9220 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9221 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9222 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9224 gimple_seq_add_stmt (end
, g
);
9225 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9227 if (omp_is_reference (decl
)
9228 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
9230 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9231 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9232 tree refv
= create_tmp_var (TREE_TYPE (ref
));
9233 gimplify_assign (refv
, ref
, end
);
9234 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
9235 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9236 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9237 tree d
= maybe_lookup_decl (decl
, ctx
);
9239 if (DECL_HAS_VALUE_EXPR_P (d
))
9240 oldv
= DECL_VALUE_EXPR (d
);
9241 if (omp_is_reference (var
))
9243 tree v
= fold_convert (TREE_TYPE (d
),
9244 build_fold_addr_expr (new_var
));
9245 SET_DECL_VALUE_EXPR (d
, v
);
9248 SET_DECL_VALUE_EXPR (d
, new_var
);
9249 DECL_HAS_VALUE_EXPR_P (d
) = 1;
9250 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9252 SET_DECL_VALUE_EXPR (d
, oldv
);
9255 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
9256 DECL_HAS_VALUE_EXPR_P (d
) = 0;
9258 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9259 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9260 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9261 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9263 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9264 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
9267 gimple_seq tseq
= NULL
;
9268 gimplify_stmt (&x
, &tseq
);
9269 gimple_seq_add_seq (end
, tseq
);
9274 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
9275 ref
= unshare_expr (ref
);
9276 gimplify_assign (ref
, x
, end
);
9278 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
9280 field
= DECL_CHAIN (bfield
);
9284 if (code
== OMP_TASKGROUP
)
9286 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
9287 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9288 gimple_seq_add_stmt (start
, g
);
9293 if (code
== OMP_FOR
)
9294 c
= gimple_omp_for_clauses (ctx
->stmt
);
9295 else if (code
== OMP_SECTIONS
)
9296 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9298 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
9299 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
9300 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
9301 build_fold_addr_expr (avar
));
9302 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
9305 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
9306 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
9308 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
9309 gimple_seq_add_stmt (end
, g
);
9310 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
9311 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
9313 enum built_in_function bfn
9314 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
9315 t
= builtin_decl_explicit (bfn
);
9316 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
9320 arg
= create_tmp_var (c_bool_type
);
9321 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
9325 arg
= build_int_cst (c_bool_type
, 0);
9326 g
= gimple_build_call (t
, 1, arg
);
9330 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
9331 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9333 gimple_seq_add_stmt (end
, g
);
9335 gimple_seq_add_stmt (end
, gimple_build_label (lab7
));
9336 t
= build_constructor (atype
, NULL
);
9337 TREE_THIS_VOLATILE (t
) = 1;
9338 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
9341 /* Expand code for an OpenMP taskgroup directive. */
9344 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9346 gimple
*stmt
= gsi_stmt (*gsi_p
);
9349 gimple_seq dseq
= NULL
;
9350 tree block
= make_node (BLOCK
);
9352 bind
= gimple_build_bind (NULL
, NULL
, block
);
9353 gsi_replace (gsi_p
, bind
, true);
9354 gimple_bind_add_stmt (bind
, stmt
);
9356 push_gimplify_context ();
9358 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
9360 gimple_bind_add_stmt (bind
, x
);
9362 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
9363 gimple_omp_taskgroup_clauses (stmt
),
9364 gimple_bind_body_ptr (bind
), &dseq
);
9366 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9367 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9368 gimple_omp_set_body (stmt
, NULL
);
9370 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9371 gimple_bind_add_seq (bind
, dseq
);
9373 pop_gimplify_context (bind
);
9375 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9376 BLOCK_VARS (block
) = ctx
->block_vars
;
9380 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9383 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
9386 struct omp_for_data fd
;
9387 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
9390 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
9391 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
9392 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
9396 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9397 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
9398 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
9399 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
9401 /* Merge depend clauses from multiple adjacent
9402 #pragma omp ordered depend(sink:...) constructs
9403 into one #pragma omp ordered depend(sink:...), so that
9404 we can optimize them together. */
9405 gimple_stmt_iterator gsi
= *gsi_p
;
9407 while (!gsi_end_p (gsi
))
9409 gimple
*stmt
= gsi_stmt (gsi
);
9410 if (is_gimple_debug (stmt
)
9411 || gimple_code (stmt
) == GIMPLE_NOP
)
9416 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
9418 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
9419 c
= gimple_omp_ordered_clauses (ord_stmt2
);
9421 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
9422 || OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
9425 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
9427 gsi_remove (&gsi
, true);
9431 /* Canonicalize sink dependence clauses into one folded clause if
9434 The basic algorithm is to create a sink vector whose first
9435 element is the GCD of all the first elements, and whose remaining
9436 elements are the minimum of the subsequent columns.
9438 We ignore dependence vectors whose first element is zero because
9439 such dependencies are known to be executed by the same thread.
9441 We take into account the direction of the loop, so a minimum
9442 becomes a maximum if the loop is iterating forwards. We also
9443 ignore sink clauses where the loop direction is unknown, or where
9444 the offsets are clearly invalid because they are not a multiple
9445 of the loop increment.
9449 #pragma omp for ordered(2)
9450 for (i=0; i < N; ++i)
9451 for (j=0; j < M; ++j)
9453 #pragma omp ordered \
9454 depend(sink:i-8,j-2) \
9455 depend(sink:i,j-1) \ // Completely ignored because i+0.
9456 depend(sink:i-4,j-3) \
9457 depend(sink:i-6,j-4)
9458 #pragma omp ordered depend(source)
9463 depend(sink:-gcd(8,4,6),-min(2,3,4))
9468 /* FIXME: Computing GCD's where the first element is zero is
9469 non-trivial in the presence of collapsed loops. Do this later. */
9470 if (fd
.collapse
> 1)
9473 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
9475 /* wide_int is not a POD so it must be default-constructed. */
9476 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
9477 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
9479 tree folded_dep
= NULL_TREE
;
9480 /* TRUE if the first dimension's offset is negative. */
9481 bool neg_offset_p
= false;
9483 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9485 while ((c
= *list_p
) != NULL
)
9487 bool remove
= false;
9489 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
);
9490 if (OMP_CLAUSE_DEPEND_KIND (c
) != OMP_CLAUSE_DEPEND_SINK
)
9491 goto next_ordered_clause
;
9494 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
9495 vec
&& TREE_CODE (vec
) == TREE_LIST
;
9496 vec
= TREE_CHAIN (vec
), ++i
)
9498 gcc_assert (i
< len
);
9500 /* omp_extract_for_data has canonicalized the condition. */
9501 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
9502 || fd
.loops
[i
].cond_code
== GT_EXPR
);
9503 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
9504 bool maybe_lexically_later
= true;
9506 /* While the committee makes up its mind, bail if we have any
9507 non-constant steps. */
9508 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
9509 goto lower_omp_ordered_ret
;
9511 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
9512 if (POINTER_TYPE_P (itype
))
9514 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
9515 TYPE_PRECISION (itype
),
9518 /* Ignore invalid offsets that are not multiples of the step. */
9519 if (!wi::multiple_of_p (wi::abs (offset
),
9520 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
9523 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
9524 "ignoring sink clause with offset that is not "
9525 "a multiple of the loop step");
9527 goto next_ordered_clause
;
9530 /* Calculate the first dimension. The first dimension of
9531 the folded dependency vector is the GCD of the first
9532 elements, while ignoring any first elements whose offset
9536 /* Ignore dependence vectors whose first dimension is 0. */
9540 goto next_ordered_clause
;
9544 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
9546 error_at (OMP_CLAUSE_LOCATION (c
),
9547 "first offset must be in opposite direction "
9548 "of loop iterations");
9549 goto lower_omp_ordered_ret
;
9553 neg_offset_p
= forward
;
9554 /* Initialize the first time around. */
9555 if (folded_dep
== NULL_TREE
)
9558 folded_deps
[0] = offset
;
9561 folded_deps
[0] = wi::gcd (folded_deps
[0],
9565 /* Calculate minimum for the remaining dimensions. */
9568 folded_deps
[len
+ i
- 1] = offset
;
9569 if (folded_dep
== c
)
9570 folded_deps
[i
] = offset
;
9571 else if (maybe_lexically_later
9572 && !wi::eq_p (folded_deps
[i
], offset
))
9574 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
9578 for (j
= 1; j
<= i
; j
++)
9579 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
9582 maybe_lexically_later
= false;
9586 gcc_assert (i
== len
);
9590 next_ordered_clause
:
9592 *list_p
= OMP_CLAUSE_CHAIN (c
);
9594 list_p
= &OMP_CLAUSE_CHAIN (c
);
9600 folded_deps
[0] = -folded_deps
[0];
9602 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
9603 if (POINTER_TYPE_P (itype
))
9606 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
9607 = wide_int_to_tree (itype
, folded_deps
[0]);
9608 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
9609 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
9612 lower_omp_ordered_ret
:
9614 /* Ordered without clauses is #pragma omp threads, while we want
9615 a nop instead if we remove all clauses. */
9616 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
9617 gsi_replace (gsi_p
, gimple_build_nop (), true);
9621 /* Expand code for an OpenMP ordered directive. */
9624 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9627 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
9628 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
9631 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9633 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9636 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
9637 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9638 OMP_CLAUSE_THREADS
);
9640 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9643 /* FIXME: This is needs to be moved to the expansion to verify various
9644 conditions only testable on cfg with dominators computed, and also
9645 all the depend clauses to be merged still might need to be available
9646 for the runtime checks. */
9648 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
9652 push_gimplify_context ();
9654 block
= make_node (BLOCK
);
9655 bind
= gimple_build_bind (NULL
, NULL
, block
);
9656 gsi_replace (gsi_p
, bind
, true);
9657 gimple_bind_add_stmt (bind
, stmt
);
9661 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
9662 build_int_cst (NULL_TREE
, threads
));
9663 cfun
->has_simduid_loops
= true;
9666 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
9668 gimple_bind_add_stmt (bind
, x
);
9670 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
9673 counter
= create_tmp_var (integer_type_node
);
9674 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
9675 gimple_call_set_lhs (g
, counter
);
9676 gimple_bind_add_stmt (bind
, g
);
9678 body
= create_artificial_label (UNKNOWN_LOCATION
);
9679 test
= create_artificial_label (UNKNOWN_LOCATION
);
9680 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
9682 tree simt_pred
= create_tmp_var (integer_type_node
);
9683 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
9684 gimple_call_set_lhs (g
, simt_pred
);
9685 gimple_bind_add_stmt (bind
, g
);
9687 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
9688 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
9689 gimple_bind_add_stmt (bind
, g
);
9691 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
9693 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9694 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9695 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9696 gimple_omp_set_body (stmt
, NULL
);
9700 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
9701 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
9702 gimple_bind_add_stmt (bind
, g
);
9704 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
9705 tree nonneg
= create_tmp_var (integer_type_node
);
9706 gimple_seq tseq
= NULL
;
9707 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
9708 gimple_bind_add_seq (bind
, tseq
);
9710 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
9711 gimple_call_set_lhs (g
, nonneg
);
9712 gimple_bind_add_stmt (bind
, g
);
9714 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
9715 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
9716 gimple_bind_add_stmt (bind
, g
);
9718 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
9721 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
9722 build_int_cst (NULL_TREE
, threads
));
9724 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
9726 gimple_bind_add_stmt (bind
, x
);
9728 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9730 pop_gimplify_context (bind
);
9732 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9733 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
9737 /* Expand code for an OpenMP scan directive and the structured block
9738 before the scan directive. */
9741 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9743 gimple
*stmt
= gsi_stmt (*gsi_p
);
9745 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
9746 tree lane
= NULL_TREE
;
9747 gimple_seq before
= NULL
;
9748 omp_context
*octx
= ctx
->outer
;
9750 if (octx
->scan_exclusive
&& !has_clauses
)
9752 gimple_stmt_iterator gsi2
= *gsi_p
;
9754 gimple
*stmt2
= gsi_stmt (gsi2
);
9755 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9756 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9757 the one with exclusive clause(s), comes first. */
9759 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
9760 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
9762 gsi_remove (gsi_p
, false);
9763 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
9764 ctx
= maybe_lookup_ctx (stmt2
);
9766 lower_omp_scan (gsi_p
, ctx
);
9771 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
9772 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
9773 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
9774 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
9775 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
9776 && !gimple_omp_for_combined_p (octx
->stmt
));
9777 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
9778 if (is_for_simd
&& octx
->for_simd_scan_phase
)
9781 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
9782 OMP_CLAUSE__SIMDUID_
))
9784 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
9785 lane
= create_tmp_var (unsigned_type_node
);
9786 tree t
= build_int_cst (integer_type_node
,
9788 : octx
->scan_inclusive
? 2 : 3);
9790 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
9791 gimple_call_set_lhs (g
, lane
);
9792 gimple_seq_add_stmt (&before
, g
);
9795 if (is_simd
|| is_for
)
9797 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
9798 c
; c
= OMP_CLAUSE_CHAIN (c
))
9799 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9800 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
9802 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9803 tree var
= OMP_CLAUSE_DECL (c
);
9804 tree new_var
= lookup_decl (var
, octx
);
9806 tree var2
= NULL_TREE
;
9807 tree var3
= NULL_TREE
;
9808 tree var4
= NULL_TREE
;
9809 tree lane0
= NULL_TREE
;
9810 tree new_vard
= new_var
;
9811 if (omp_is_reference (var
))
9813 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
9816 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
9818 val
= DECL_VALUE_EXPR (new_vard
);
9819 if (new_vard
!= new_var
)
9821 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
9822 val
= TREE_OPERAND (val
, 0);
9824 if (TREE_CODE (val
) == ARRAY_REF
9825 && VAR_P (TREE_OPERAND (val
, 0)))
9827 tree v
= TREE_OPERAND (val
, 0);
9828 if (lookup_attribute ("omp simd array",
9829 DECL_ATTRIBUTES (v
)))
9831 val
= unshare_expr (val
);
9832 lane0
= TREE_OPERAND (val
, 1);
9833 TREE_OPERAND (val
, 1) = lane
;
9834 var2
= lookup_decl (v
, octx
);
9835 if (octx
->scan_exclusive
)
9836 var4
= lookup_decl (var2
, octx
);
9838 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9839 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
9842 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
9843 var2
, lane
, NULL_TREE
, NULL_TREE
);
9844 TREE_THIS_NOTRAP (var2
) = 1;
9845 if (octx
->scan_exclusive
)
9847 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
9848 var4
, lane
, NULL_TREE
,
9850 TREE_THIS_NOTRAP (var4
) = 1;
9861 var2
= build_outer_var_ref (var
, octx
);
9862 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9864 var3
= maybe_lookup_decl (new_vard
, octx
);
9865 if (var3
== new_vard
|| var3
== NULL_TREE
)
9867 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
9869 var4
= maybe_lookup_decl (var3
, octx
);
9870 if (var4
== var3
|| var4
== NULL_TREE
)
9872 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
9883 && octx
->scan_exclusive
9885 && var4
== NULL_TREE
)
9886 var4
= create_tmp_var (TREE_TYPE (val
));
9888 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9890 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9895 /* If we've added a separate identity element
9896 variable, copy it over into val. */
9897 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9899 gimplify_and_add (x
, &before
);
9901 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
9903 /* Otherwise, assign to it the identity element. */
9904 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9906 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9907 tree ref
= build_outer_var_ref (var
, octx
);
9908 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9909 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9912 if (new_vard
!= new_var
)
9913 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9914 SET_DECL_VALUE_EXPR (new_vard
, val
);
9916 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9917 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9918 lower_omp (&tseq
, octx
);
9920 SET_DECL_VALUE_EXPR (new_vard
, x
);
9921 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9922 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9923 gimple_seq_add_seq (&before
, tseq
);
9925 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9931 if (octx
->scan_exclusive
)
9933 tree v4
= unshare_expr (var4
);
9934 tree v2
= unshare_expr (var2
);
9935 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
9936 gimplify_and_add (x
, &before
);
9938 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9939 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9940 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9942 if (x
&& new_vard
!= new_var
)
9943 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
9945 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
9946 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9947 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9948 lower_omp (&tseq
, octx
);
9949 gimple_seq_add_seq (&before
, tseq
);
9950 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9952 SET_DECL_VALUE_EXPR (new_vard
, x
);
9953 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9954 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9955 if (octx
->scan_inclusive
)
9957 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9959 gimplify_and_add (x
, &before
);
9961 else if (lane0
== NULL_TREE
)
9963 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
9965 gimplify_and_add (x
, &before
);
9973 /* input phase. Set val to initializer before
9975 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
9976 gimplify_assign (val
, x
, &before
);
9981 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
9982 if (code
== MINUS_EXPR
)
9985 tree x
= build2 (code
, TREE_TYPE (var2
),
9986 unshare_expr (var2
), unshare_expr (val
));
9987 if (octx
->scan_inclusive
)
9989 gimplify_assign (unshare_expr (var2
), x
, &before
);
9990 gimplify_assign (val
, var2
, &before
);
9994 gimplify_assign (unshare_expr (var4
),
9995 unshare_expr (var2
), &before
);
9996 gimplify_assign (var2
, x
, &before
);
9997 if (lane0
== NULL_TREE
)
9998 gimplify_assign (val
, var4
, &before
);
10002 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
10004 tree vexpr
= unshare_expr (var4
);
10005 TREE_OPERAND (vexpr
, 1) = lane0
;
10006 if (new_vard
!= new_var
)
10007 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
10008 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10012 if (is_simd
&& !is_for_simd
)
10014 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
10015 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
10016 gsi_replace (gsi_p
, gimple_build_nop (), true);
10019 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
10022 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (stmt
));
10023 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
/* NOTE(review): this chunk is a line-shredded extraction of GCC omp-low.c.
   The leading integers (10028, 10036, ...) are the ORIGINAL source line
   numbers embedded as text; many original lines (braces, declarations,
   the `static void` return-type line, the `if (n == NULL)` guard around
   mutex creation, etc.) were dropped by the extraction.  Comments below
   describe only what the visible fragments show — confirm every claim
   against upstream omp-low.c before editing.  */
10028 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10029 substitution of a couple of function calls. But in the NAMED case,
10030 requires that languages coordinate a symbol name. It is therefore
10031 best put here in common code. */
/* Garbage-collected map from critical-section name (IDENTIFIER tree) to
   the artificial mutex symbol created for it; shared across all
   translation of this function body.  */
10033 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
/* Lower one GIMPLE_OMP_CRITICAL at *GSI_P inside context CTX into paired
   GOMP_critical_{name_,}start/end runtime calls wrapped in a GIMPLE_BIND.
   (Return-type line of the definition is missing from this extraction;
   upstream it is `static void`.)  */
10036 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10039 tree name
, lock
, unlock
;
10040 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
10042 location_t loc
= gimple_location (stmt
);
/* NAMED critical: look up / lazily create a public, common, artificial
   mutex variable ".gomp_critical_user_<name>" so that all TUs using the
   same name share one lock.  */
10045 name
= gimple_omp_critical_name (stmt
);
10050 if (!critical_name_mutexes
)
10051 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
10053 tree
*n
= critical_name_mutexes
->get (name
);
/* NOTE(review): the guard that distinguishes a cache hit from a miss is
   missing from this extraction; the lines below presumably run only when
   no mutex exists yet for NAME — verify upstream.  */
10058 decl
= create_tmp_var_raw (ptr_type_node
);
10060 new_str
= ACONCAT ((".gomp_critical_user_",
10061 IDENTIFIER_POINTER (name
), NULL
));
10062 DECL_NAME (decl
) = get_identifier (new_str
);
10063 TREE_PUBLIC (decl
) = 1;
10064 TREE_STATIC (decl
) = 1;
10065 DECL_COMMON (decl
) = 1;
10066 DECL_ARTIFICIAL (decl
) = 1;
10067 DECL_IGNORED_P (decl
) = 1;
10069 varpool_node::finalize_decl (decl
);
10071 critical_name_mutexes
->put (name
, decl
);
10076 /* If '#pragma omp critical' is inside offloaded region or
10077 inside function marked as offloadable, the symbol must be
10078 marked as offloadable too. */
10080 if (cgraph_node::get (current_function_decl
)->offloadable
)
10081 varpool_node::get_create (decl
)->offloadable
= 1;
10083 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
10084 if (is_gimple_omp_offloaded (octx
->stmt
))
10086 varpool_node::get_create (decl
)->offloadable
= 1;
/* Build the named-critical entry/exit calls, passing &mutex.  */
10090 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
10091 lock
= build_call_expr_loc (loc
, lock
, 1,
10092 build_fold_addr_expr_loc (loc
, decl
));
10094 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
10095 unlock
= build_call_expr_loc (loc
, unlock
, 1,
10096 build_fold_addr_expr_loc (loc
, decl
));
/* Unnamed critical: zero-argument GOMP_critical_start/end.  (The `else`
   introducing this branch is among the dropped lines.)  */
10100 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
10101 lock
= build_call_expr_loc (loc
, lock
, 0);
10103 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
10104 unlock
= build_call_expr_loc (loc
, unlock
, 0);
/* Replace the OMP_CRITICAL statement with a bind:
     { lock(); <lowered body, wrapped for EH>; unlock(); OMP_RETURN }.  */
10107 push_gimplify_context ();
10109 block
= make_node (BLOCK
);
10110 bind
= gimple_build_bind (NULL
, NULL
, block
);
10111 gsi_replace (gsi_p
, bind
, true);
10112 gimple_bind_add_stmt (bind
, stmt
);
10114 tbody
= gimple_bind_body (bind
);
10115 gimplify_and_add (lock
, &tbody
);
10116 gimple_bind_set_body (bind
, tbody
);
10118 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10119 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
10120 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
10121 gimple_omp_set_body (stmt
, NULL
);
10123 tbody
= gimple_bind_body (bind
);
10124 gimplify_and_add (unlock
, &tbody
);
10125 gimple_bind_set_body (bind
, tbody
);
10127 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
10129 pop_gimplify_context (bind
);
10130 gimple_bind_append_vars (bind
, ctx
->block_vars
);
10131 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
10134 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10135 for a lastprivate clause. Given a loop control predicate of (V
10136 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10137 is appended to *DLIST, iterator initialization is appended to
10138 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10139 to be emitted in a critical section. */
/* NOTE(review): shredded extraction — embedded integers are original
   omp-low.c line numbers; braces, the `static void` line, and several
   guards are missing.  Comments describe visible fragments only; verify
   against upstream before editing.
   Builds the predicate gating lastprivate copy-out for an OMP for-loop:
   given loop condition (V cond N2), the clauses fire on !(V cond N2).  */
10142 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
10143 gimple_seq
*dlist
, gimple_seq
*clist
,
10144 struct omp_context
*ctx
)
10146 tree clauses
, cond
, vinit
;
10147 enum tree_code cond_code
;
/* Invert the loop condition: LT -> GE, otherwise (GT) -> LE.  */
10150 cond_code
= fd
->loop
.cond_code
;
10151 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
10153 /* When possible, use a strict equality expression. This can let VRP
10154 type optimizations deduce the value and remove a copy. */
10155 if (tree_fits_shwi_p (fd
->loop
.step
))
10157 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
10158 if (step
== 1 || step
== -1)
10159 cond_code
= EQ_EXPR
;
/* For a collapsed loop combined into an outer construct, the true upper
   bound lives in the outer construct's _looptemp_ clauses; recover it.  */
10162 tree n2
= fd
->loop
.n2
;
10163 if (fd
->collapse
> 1
10164 && TREE_CODE (n2
) != INTEGER_CST
10165 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
10167 struct omp_context
*taskreg_ctx
= NULL
;
10168 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
10170 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
10171 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
10172 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
/* gfor itself combined into a parallel: the looptemps are on the
   grandparent parallel context.  */
10174 if (gimple_omp_for_combined_into_p (gfor
))
10176 gcc_assert (ctx
->outer
->outer
10177 && is_parallel_ctx (ctx
->outer
->outer
));
10178 taskreg_ctx
= ctx
->outer
->outer
;
/* Otherwise take N2 straight from the outer for's extracted data.  */
10182 struct omp_for_data outer_fd
;
10183 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
10184 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
10187 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
10188 taskreg_ctx
= ctx
->outer
->outer
;
10190 else if (is_taskreg_ctx (ctx
->outer
))
10191 taskreg_ctx
= ctx
->outer
;
/* Walk the _looptemp_ clause chain to the entry holding the bound.
   NOTE(review): the `if (taskreg_ctx)` guard and parts of the
   non-rectangular-loop adjustment are missing from this extraction.  */
10195 tree taskreg_clauses
10196 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
10197 tree innerc
= omp_find_clause (taskreg_clauses
,
10198 OMP_CLAUSE__LOOPTEMP_
);
10199 gcc_assert (innerc
);
10200 int count
= fd
->collapse
;
10202 && fd
->last_nonrect
== fd
->first_nonrect
+ 1)
10203 if (tree v
= gimple_omp_for_index (fd
->for_stmt
, fd
->last_nonrect
))
10204 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
10206 for (i
= 0; i
< count
; i
++)
10208 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10209 OMP_CLAUSE__LOOPTEMP_
);
10210 gcc_assert (innerc
);
10212 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10213 OMP_CLAUSE__LOOPTEMP_
);
10215 n2
= fold_convert (TREE_TYPE (n2
),
10216 lookup_decl (OMP_CLAUSE_DECL (innerc
),
/* Gate: fire lastprivate only for the thread whose iterator ended the
   loop, i.e. when the negated condition holds.  */
10220 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
10222 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
10224 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
10225 if (!gimple_seq_empty_p (stmts
))
10227 gimple_seq_add_seq (&stmts
, *dlist
);
10230 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10231 vinit
= fd
->loop
.n1
;
10232 if (cond_code
== EQ_EXPR
10233 && tree_fits_shwi_p (fd
->loop
.n2
)
10234 && ! integer_zerop (fd
->loop
.n2
))
10235 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
10237 vinit
= unshare_expr (vinit
);
10239 /* Initialize the iterator variable, so that threads that don't execute
10240 any iterations don't execute the lastprivate clauses by accident. */
10241 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
10245 /* OpenACC privatization.
10247 Or, in other words, *sharing* at the respective OpenACC level of
10250 From a correctness perspective, a non-addressable variable can't be accessed
10251 outside the current thread, so it can go in a (faster than shared memory)
10252 register -- though that register may need to be broadcast in some
10253 circumstances. A variable can only meaningfully be "shared" across workers
10254 or vector lanes if its address is taken, e.g. by a call to an atomic
10257 From an optimisation perspective, the answer might be fuzzier: maybe
10258 sometimes, using shared memory directly would be faster than
/* NOTE(review): shredded extraction; embedded integers are original line
   numbers and several lines (return type, `decl` parameter, the clause
   vs. block conditional) are missing — verify upstream before editing.
   Emits the common "variable %T ..." prefix of an OpenACC-privatization
   diagnostic via the dump machinery, then says whether DECL came from a
   clause (C non-null) or a block.  */
10262 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags
,
10263 const location_t loc
, const tree c
,
10266 const dump_user_location_t d_u_loc
10267 = dump_user_location_t::from_location_t (loc
);
10268 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10270 # pragma GCC diagnostic push
10271 # pragma GCC diagnostic ignored "-Wformat"
10273 dump_printf_loc (l_dump_flags
, d_u_loc
,
10274 "variable %<%T%> ", decl
);
10276 # pragma GCC diagnostic pop
/* Clause case: name the originating OMP clause.  */
10279 dump_printf (l_dump_flags
,
10281 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
/* Block case.  */
10283 dump_printf (l_dump_flags
,
10284 "declared in block ");
/* NOTE(review): shredded extraction; `static bool` line, `decl`/`block`
   parameters, `res = false;` updates and the final `return res;` are
   among the dropped lines — verify upstream before editing.
   Predicate: is DECL a candidate for adjusting its OpenACC privatization
   level?  Each rejecting condition below clears the result and, when
   dumping is enabled, explains why.  */
10288 oacc_privatization_candidate_p (const location_t loc
, const tree c
,
10291 dump_flags_t l_dump_flags
= get_openacc_privatization_dump_flags ();
10293 /* There is some differentiation depending on block vs. clause. */
/* Reject anything that is not a VAR_DECL.  */
10298 if (res
&& !VAR_P (decl
))
10302 if (dump_enabled_p ())
10304 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10305 dump_printf (l_dump_flags
,
10306 "potentially has improper OpenACC privatization level: %qs\n",
10307 get_tree_code_name (TREE_CODE (decl
)));
/* Block-scope statics are not privatizable per-gang/worker/vector.  */
10311 if (res
&& block
&& TREE_STATIC (decl
))
10315 if (dump_enabled_p ())
10317 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10318 dump_printf (l_dump_flags
,
10319 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
/* External declarations likewise.  */
10324 if (res
&& block
&& DECL_EXTERNAL (decl
))
10328 if (dump_enabled_p ())
10330 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10331 dump_printf (l_dump_flags
,
10332 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
/* Non-addressable variables need no adjustment (already thread-local
   by construction — see the file comment above this function).  */
10337 if (res
&& !TREE_ADDRESSABLE (decl
))
10341 if (dump_enabled_p ())
10343 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10344 dump_printf (l_dump_flags
,
10345 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10346 "not addressable");
/* Accepted: report, and with -fdump-...-details also print the decl.  */
10352 if (dump_enabled_p ())
10354 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10355 dump_printf (l_dump_flags
,
10356 "is candidate for adjusting OpenACC privatization level\n");
10360 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10362 print_generic_decl (dump_file
, decl
, dump_flags
);
10363 fprintf (dump_file
, "\n");
/* NOTE(review): shredded extraction; `static void` line, braces and the
   `continue;` after the failed candidate check are missing — verify
   upstream.  Walks CLAUSES and records each OMP_CLAUSE_PRIVATE decl
   that passes oacc_privatization_candidate_p into
   CTX->oacc_privatization_candidates (asserting no duplicates).  */
10369 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10373 oacc_privatization_scan_clause_chain (omp_context
*ctx
, tree clauses
)
10375 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10376 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
)
10378 tree decl
= OMP_CLAUSE_DECL (c
);
10380 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c
), c
, decl
))
10383 gcc_checking_assert (!ctx
->oacc_privatization_candidates
.contains (decl
));
10384 ctx
->oacc_privatization_candidates
.safe_push (decl
);
/* NOTE(review): shredded extraction; `static void` line, braces and the
   `continue;` are missing — verify upstream.  Same as the clause-chain
   scanner above, but walks a DECL_CHAIN of block-local decls (clause
   argument NULL marks the block case for diagnostics).  */
10388 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10392 oacc_privatization_scan_decl_chain (omp_context
*ctx
, tree decls
)
10394 for (tree decl
= decls
; decl
; decl
= DECL_CHAIN (decl
))
10396 if (!oacc_privatization_candidate_p (gimple_location (ctx
->stmt
), NULL
, decl
))
10399 gcc_checking_assert (!ctx
->oacc_privatization_candidates
.contains (decl
));
10400 ctx
->oacc_privatization_candidates
.safe_push (decl
);
/* NOTE(review): shredded extraction; the `static tree` line, switch
   braces, `break;` statements, the default case and the trailing
   `return NULL;` are missing — verify upstream.
   walk_gimple_seq callback: stores the iterator of the first
   GIMPLE_OMP_SCAN found into *(gimple_stmt_iterator *) wi->info and
   stops the walk by returning non-NULL.  Descends into a combined SIMD
   for-loop body (handled_ops_p = false) instead of skipping it.  */
10404 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10407 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
10408 struct walk_stmt_info
*wi
)
10410 gimple
*stmt
= gsi_stmt (*gsi_p
);
10412 *handled_ops_p
= true;
10413 switch (gimple_code (stmt
))
10417 case GIMPLE_OMP_FOR
:
10418 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
10419 && gimple_omp_for_combined_into_p (stmt
))
10420 *handled_ops_p
= false;
10423 case GIMPLE_OMP_SCAN
:
/* Record where the scan directive lives and terminate the walk.  */
10424 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
10425 return integer_zero_node
;
10432 /* Helper function for lower_omp_for, add transformations for a worksharing
10433 loop with scan directives inside of it.
10434 For worksharing loop not combined with simd, transform:
10435 #pragma omp for reduction(inscan,+:r) private(i)
10436 for (i = 0; i < n; i = i + 1)
10441 #pragma omp scan inclusive(r)
10447 into two worksharing loops + code to merge results:
10449 num_threads = omp_get_num_threads ();
10450 thread_num = omp_get_thread_num ();
10451 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10456 // For UDRs this is UDR init, or if ctors are needed, copy from
10457 // var3 that has been constructed to contain the neutral element.
10461 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10462 // a shared array with num_threads elements and rprivb to a local array
10463 // number of elements equal to the number of (contiguous) iterations the
10464 // current thread will perform. controlb and controlp variables are
10465 // temporaries to handle deallocation of rprivb at the end of second
10467 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10468 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10469 for (i = 0; i < n; i = i + 1)
10472 // For UDRs this is UDR init or copy from var3.
10474 // This is the input phase from user code.
10478 // For UDRs this is UDR merge.
10480 // Rather than handing it over to the user, save to local thread's
10482 rprivb[ivar] = var2;
10483 // For exclusive scan, the above two statements are swapped.
10487 // And remember the final value from this thread's into the shared
10489 rpriva[(sizetype) thread_num] = var2;
10490 // If more than one thread, compute using Work-Efficient prefix sum
10491 // the inclusive parallel scan of the rpriva array.
10492 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10497 num_threadsu = (unsigned int) num_threads;
10498 thread_numup1 = (unsigned int) thread_num + 1;
10501 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10505 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10510 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10511 mul = REALPART_EXPR <cplx>;
10512 ovf = IMAGPART_EXPR <cplx>;
10513 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10516 andvm1 = andv + 4294967295;
10518 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10520 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10521 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10522 rpriva[l] = rpriva[l - k] + rpriva[l];
10524 if (down == 0) goto <D.2121>; else goto <D.2122>;
10532 if (k != 0) goto <D.2108>; else goto <D.2103>;
10534 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10536 // For UDRs this is UDR init or copy from var3.
10540 var2 = rpriva[thread_num - 1];
10543 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10544 reduction(inscan,+:r) private(i)
10545 for (i = 0; i < n; i = i + 1)
10548 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10549 r = var2 + rprivb[ivar];
10552 // This is the scan phase from user code.
10554 // Plus a bump of the iterator.
10560 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
10561 struct omp_for_data
*fd
, omp_context
*ctx
)
10563 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
10564 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
10566 gimple_seq body
= gimple_omp_body (stmt
);
10567 gimple_stmt_iterator input1_gsi
= gsi_none ();
10568 struct walk_stmt_info wi
;
10569 memset (&wi
, 0, sizeof (wi
));
10570 wi
.val_only
= true;
10571 wi
.info
= (void *) &input1_gsi
;
10572 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
10573 gcc_assert (!gsi_end_p (input1_gsi
));
10575 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
10576 gimple_stmt_iterator gsi
= input1_gsi
;
10578 gimple_stmt_iterator scan1_gsi
= gsi
;
10579 gimple
*scan_stmt1
= gsi_stmt (gsi
);
10580 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
10582 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
10583 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
10584 gimple_omp_set_body (input_stmt1
, NULL
);
10585 gimple_omp_set_body (scan_stmt1
, NULL
);
10586 gimple_omp_set_body (stmt
, NULL
);
10588 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
10589 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
10590 gimple_omp_set_body (stmt
, body
);
10591 gimple_omp_set_body (input_stmt1
, input_body
);
10593 gimple_stmt_iterator input2_gsi
= gsi_none ();
10594 memset (&wi
, 0, sizeof (wi
));
10595 wi
.val_only
= true;
10596 wi
.info
= (void *) &input2_gsi
;
10597 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
10598 gcc_assert (!gsi_end_p (input2_gsi
));
10600 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
10603 gimple_stmt_iterator scan2_gsi
= gsi
;
10604 gimple
*scan_stmt2
= gsi_stmt (gsi
);
10605 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
10606 gimple_omp_set_body (scan_stmt2
, scan_body
);
10608 gimple_stmt_iterator input3_gsi
= gsi_none ();
10609 gimple_stmt_iterator scan3_gsi
= gsi_none ();
10610 gimple_stmt_iterator input4_gsi
= gsi_none ();
10611 gimple_stmt_iterator scan4_gsi
= gsi_none ();
10612 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
10613 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
10614 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
10617 memset (&wi
, 0, sizeof (wi
));
10618 wi
.val_only
= true;
10619 wi
.info
= (void *) &input3_gsi
;
10620 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
10621 gcc_assert (!gsi_end_p (input3_gsi
));
10623 input_stmt3
= gsi_stmt (input3_gsi
);
10627 scan_stmt3
= gsi_stmt (gsi
);
10628 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
10630 memset (&wi
, 0, sizeof (wi
));
10631 wi
.val_only
= true;
10632 wi
.info
= (void *) &input4_gsi
;
10633 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
10634 gcc_assert (!gsi_end_p (input4_gsi
));
10636 input_stmt4
= gsi_stmt (input4_gsi
);
10640 scan_stmt4
= gsi_stmt (gsi
);
10641 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
10643 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
10644 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
10647 tree num_threads
= create_tmp_var (integer_type_node
);
10648 tree thread_num
= create_tmp_var (integer_type_node
);
10649 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
10650 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
10651 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
10652 gimple_call_set_lhs (g
, num_threads
);
10653 gimple_seq_add_stmt (body_p
, g
);
10654 g
= gimple_build_call (threadnum_decl
, 0);
10655 gimple_call_set_lhs (g
, thread_num
);
10656 gimple_seq_add_stmt (body_p
, g
);
10658 tree ivar
= create_tmp_var (sizetype
);
10659 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
10660 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
10661 tree k
= create_tmp_var (unsigned_type_node
);
10662 tree l
= create_tmp_var (unsigned_type_node
);
10664 gimple_seq clist
= NULL
, mdlist
= NULL
;
10665 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
10666 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
10667 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
10668 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
10669 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10670 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10671 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10673 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10674 tree var
= OMP_CLAUSE_DECL (c
);
10675 tree new_var
= lookup_decl (var
, ctx
);
10676 tree var3
= NULL_TREE
;
10677 tree new_vard
= new_var
;
10678 if (omp_is_reference (var
))
10679 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10680 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10682 var3
= maybe_lookup_decl (new_vard
, ctx
);
10683 if (var3
== new_vard
)
10687 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
10688 tree rpriva
= create_tmp_var (ptype
);
10689 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
10690 OMP_CLAUSE_DECL (nc
) = rpriva
;
10692 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10694 tree rprivb
= create_tmp_var (ptype
);
10695 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
10696 OMP_CLAUSE_DECL (nc
) = rprivb
;
10697 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
10699 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10701 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
10702 if (new_vard
!= new_var
)
10703 TREE_ADDRESSABLE (var2
) = 1;
10704 gimple_add_tmp_var (var2
);
10706 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
10707 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10708 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10709 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10710 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10712 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
10713 thread_num
, integer_minus_one_node
);
10714 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
10715 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10716 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10717 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10718 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10720 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
10721 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10722 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10723 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10724 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10726 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
10727 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
10728 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
10729 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10730 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
10731 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10733 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
10734 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
10735 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
10736 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
10738 tree var4
= is_for_simd
? new_var
: var2
;
10739 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
10742 var5
= lookup_decl (var
, input_simd_ctx
);
10743 var6
= lookup_decl (var
, scan_simd_ctx
);
10744 if (new_vard
!= new_var
)
10746 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
10747 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
10750 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10752 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
10755 x
= lang_hooks
.decls
.omp_clause_default_ctor
10756 (c
, var2
, build_outer_var_ref (var
, ctx
));
10758 gimplify_and_add (x
, &clist
);
10760 x
= build_outer_var_ref (var
, ctx
);
10761 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
10763 gimplify_and_add (x
, &thr01_list
);
10765 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10766 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10769 x
= unshare_expr (var4
);
10770 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
10771 gimplify_and_add (x
, &thrn1_list
);
10772 x
= unshare_expr (var4
);
10773 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
10774 gimplify_and_add (x
, &thr02_list
);
10776 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
10778 /* Otherwise, assign to it the identity element. */
10779 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10780 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10783 if (new_vard
!= new_var
)
10784 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10785 SET_DECL_VALUE_EXPR (new_vard
, val
);
10786 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10788 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
10789 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10790 lower_omp (&tseq
, ctx
);
10791 gimple_seq_add_seq (&thrn1_list
, tseq
);
10792 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10793 lower_omp (&tseq
, ctx
);
10794 gimple_seq_add_seq (&thr02_list
, tseq
);
10795 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10796 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10797 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
10799 SET_DECL_VALUE_EXPR (new_vard
, y
);
10802 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10803 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10807 x
= unshare_expr (var4
);
10808 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
10809 gimplify_and_add (x
, &thrn2_list
);
10813 x
= unshare_expr (rprivb_ref
);
10814 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
10815 gimplify_and_add (x
, &scan1_list
);
10819 if (ctx
->scan_exclusive
)
10821 x
= unshare_expr (rprivb_ref
);
10822 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
10823 gimplify_and_add (x
, &scan1_list
);
10826 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10827 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10828 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10829 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10830 lower_omp (&tseq
, ctx
);
10831 gimple_seq_add_seq (&scan1_list
, tseq
);
10833 if (ctx
->scan_inclusive
)
10835 x
= unshare_expr (rprivb_ref
);
10836 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
10837 gimplify_and_add (x
, &scan1_list
);
10841 x
= unshare_expr (rpriva_ref
);
10842 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
10843 unshare_expr (var4
));
10844 gimplify_and_add (x
, &mdlist
);
10846 x
= unshare_expr (is_for_simd
? var6
: new_var
);
10847 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
10848 gimplify_and_add (x
, &input2_list
);
10851 if (new_vard
!= new_var
)
10852 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10854 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10855 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10856 SET_DECL_VALUE_EXPR (new_vard
, val
);
10857 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10860 SET_DECL_VALUE_EXPR (placeholder
, var6
);
10861 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10864 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10865 lower_omp (&tseq
, ctx
);
10867 SET_DECL_VALUE_EXPR (new_vard
, y
);
10870 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10871 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10875 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
10876 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10877 lower_omp (&tseq
, ctx
);
10879 gimple_seq_add_seq (&input2_list
, tseq
);
10881 x
= build_outer_var_ref (var
, ctx
);
10882 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
10883 gimplify_and_add (x
, &last_list
);
10885 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
10886 gimplify_and_add (x
, &reduc_list
);
10887 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10888 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10890 if (new_vard
!= new_var
)
10891 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10892 SET_DECL_VALUE_EXPR (new_vard
, val
);
10893 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
10894 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10895 lower_omp (&tseq
, ctx
);
10896 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10897 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10898 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10900 SET_DECL_VALUE_EXPR (new_vard
, y
);
10903 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
10904 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
10906 gimple_seq_add_seq (&reduc_list
, tseq
);
10907 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
10908 gimplify_and_add (x
, &reduc_list
);
10910 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
10912 gimplify_and_add (x
, dlist
);
10916 x
= build_outer_var_ref (var
, ctx
);
10917 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
10919 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
10920 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
10922 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
10924 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
10926 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
10927 if (code
== MINUS_EXPR
)
10931 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
10934 if (ctx
->scan_exclusive
)
10935 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
10937 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
10938 gimplify_assign (var2
, x
, &scan1_list
);
10939 if (ctx
->scan_inclusive
)
10940 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
10944 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
10947 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
10948 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
10950 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
10953 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
10954 unshare_expr (rprival_ref
));
10955 gimplify_assign (rprival_ref
, x
, &reduc_list
);
10959 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10960 gimple_seq_add_stmt (&scan1_list
, g
);
10961 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
10962 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10963 ? scan_stmt4
: scan_stmt2
), g
);
10965 tree controlb
= create_tmp_var (boolean_type_node
);
10966 tree controlp
= create_tmp_var (ptr_type_node
);
10967 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10968 OMP_CLAUSE_DECL (nc
) = controlb
;
10969 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10971 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10972 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10973 OMP_CLAUSE_DECL (nc
) = controlp
;
10974 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10976 cp1
= &OMP_CLAUSE_CHAIN (nc
);
10977 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10978 OMP_CLAUSE_DECL (nc
) = controlb
;
10979 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10981 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10982 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
10983 OMP_CLAUSE_DECL (nc
) = controlp
;
10984 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
10986 cp2
= &OMP_CLAUSE_CHAIN (nc
);
10988 *cp1
= gimple_omp_for_clauses (stmt
);
10989 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
10990 *cp2
= gimple_omp_for_clauses (new_stmt
);
10991 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
10995 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
10996 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
10998 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
11000 gsi_remove (&input3_gsi
, true);
11001 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
11003 gsi_remove (&scan3_gsi
, true);
11004 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
11006 gsi_remove (&input4_gsi
, true);
11007 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
11009 gsi_remove (&scan4_gsi
, true);
11013 gimple_omp_set_body (scan_stmt1
, scan1_list
);
11014 gimple_omp_set_body (input_stmt2
, input2_list
);
11017 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
11019 gsi_remove (&input1_gsi
, true);
11020 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
11022 gsi_remove (&scan1_gsi
, true);
11023 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
11025 gsi_remove (&input2_gsi
, true);
11026 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
11028 gsi_remove (&scan2_gsi
, true);
11030 gimple_seq_add_seq (body_p
, clist
);
11032 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11033 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11034 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11035 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11036 gimple_seq_add_stmt (body_p
, g
);
11037 g
= gimple_build_label (lab1
);
11038 gimple_seq_add_stmt (body_p
, g
);
11039 gimple_seq_add_seq (body_p
, thr01_list
);
11040 g
= gimple_build_goto (lab3
);
11041 gimple_seq_add_stmt (body_p
, g
);
11042 g
= gimple_build_label (lab2
);
11043 gimple_seq_add_stmt (body_p
, g
);
11044 gimple_seq_add_seq (body_p
, thrn1_list
);
11045 g
= gimple_build_label (lab3
);
11046 gimple_seq_add_stmt (body_p
, g
);
11048 g
= gimple_build_assign (ivar
, size_zero_node
);
11049 gimple_seq_add_stmt (body_p
, g
);
11051 gimple_seq_add_stmt (body_p
, stmt
);
11052 gimple_seq_add_seq (body_p
, body
);
11053 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
11056 g
= gimple_build_omp_return (true);
11057 gimple_seq_add_stmt (body_p
, g
);
11058 gimple_seq_add_seq (body_p
, mdlist
);
11060 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11061 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11062 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
11063 gimple_seq_add_stmt (body_p
, g
);
11064 g
= gimple_build_label (lab1
);
11065 gimple_seq_add_stmt (body_p
, g
);
11067 g
= omp_build_barrier (NULL
);
11068 gimple_seq_add_stmt (body_p
, g
);
11070 tree down
= create_tmp_var (unsigned_type_node
);
11071 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
11072 gimple_seq_add_stmt (body_p
, g
);
11074 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
11075 gimple_seq_add_stmt (body_p
, g
);
11077 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
11078 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
11079 gimple_seq_add_stmt (body_p
, g
);
11081 tree thread_numu
= create_tmp_var (unsigned_type_node
);
11082 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
11083 gimple_seq_add_stmt (body_p
, g
);
11085 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
11086 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
11087 build_int_cst (unsigned_type_node
, 1));
11088 gimple_seq_add_stmt (body_p
, g
);
11090 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11091 g
= gimple_build_label (lab3
);
11092 gimple_seq_add_stmt (body_p
, g
);
11094 tree twok
= create_tmp_var (unsigned_type_node
);
11095 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11096 gimple_seq_add_stmt (body_p
, g
);
11098 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
11099 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
11100 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
11101 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
11102 gimple_seq_add_stmt (body_p
, g
);
11103 g
= gimple_build_label (lab4
);
11104 gimple_seq_add_stmt (body_p
, g
);
11105 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
11106 gimple_seq_add_stmt (body_p
, g
);
11107 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11108 gimple_seq_add_stmt (body_p
, g
);
11110 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
11111 gimple_seq_add_stmt (body_p
, g
);
11112 g
= gimple_build_label (lab6
);
11113 gimple_seq_add_stmt (body_p
, g
);
11115 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11116 gimple_seq_add_stmt (body_p
, g
);
11118 g
= gimple_build_label (lab5
);
11119 gimple_seq_add_stmt (body_p
, g
);
11121 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11122 gimple_seq_add_stmt (body_p
, g
);
11124 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
11125 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
11126 gimple_call_set_lhs (g
, cplx
);
11127 gimple_seq_add_stmt (body_p
, g
);
11128 tree mul
= create_tmp_var (unsigned_type_node
);
11129 g
= gimple_build_assign (mul
, REALPART_EXPR
,
11130 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
11131 gimple_seq_add_stmt (body_p
, g
);
11132 tree ovf
= create_tmp_var (unsigned_type_node
);
11133 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
11134 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
11135 gimple_seq_add_stmt (body_p
, g
);
11137 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
11138 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
11139 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
11141 gimple_seq_add_stmt (body_p
, g
);
11142 g
= gimple_build_label (lab7
);
11143 gimple_seq_add_stmt (body_p
, g
);
11145 tree andv
= create_tmp_var (unsigned_type_node
);
11146 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
11147 gimple_seq_add_stmt (body_p
, g
);
11148 tree andvm1
= create_tmp_var (unsigned_type_node
);
11149 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
11150 build_minus_one_cst (unsigned_type_node
));
11151 gimple_seq_add_stmt (body_p
, g
);
11153 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
11154 gimple_seq_add_stmt (body_p
, g
);
11156 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
11157 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
11158 gimple_seq_add_stmt (body_p
, g
);
11159 g
= gimple_build_label (lab9
);
11160 gimple_seq_add_stmt (body_p
, g
);
11161 gimple_seq_add_seq (body_p
, reduc_list
);
11162 g
= gimple_build_label (lab8
);
11163 gimple_seq_add_stmt (body_p
, g
);
11165 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
11166 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
11167 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
11168 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
11170 gimple_seq_add_stmt (body_p
, g
);
11171 g
= gimple_build_label (lab10
);
11172 gimple_seq_add_stmt (body_p
, g
);
11173 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
11174 gimple_seq_add_stmt (body_p
, g
);
11175 g
= gimple_build_goto (lab12
);
11176 gimple_seq_add_stmt (body_p
, g
);
11177 g
= gimple_build_label (lab11
);
11178 gimple_seq_add_stmt (body_p
, g
);
11179 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11180 gimple_seq_add_stmt (body_p
, g
);
11181 g
= gimple_build_label (lab12
);
11182 gimple_seq_add_stmt (body_p
, g
);
11184 g
= omp_build_barrier (NULL
);
11185 gimple_seq_add_stmt (body_p
, g
);
11187 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
11189 gimple_seq_add_stmt (body_p
, g
);
11191 g
= gimple_build_label (lab2
);
11192 gimple_seq_add_stmt (body_p
, g
);
11194 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11195 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11196 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11197 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11198 gimple_seq_add_stmt (body_p
, g
);
11199 g
= gimple_build_label (lab1
);
11200 gimple_seq_add_stmt (body_p
, g
);
11201 gimple_seq_add_seq (body_p
, thr02_list
);
11202 g
= gimple_build_goto (lab3
);
11203 gimple_seq_add_stmt (body_p
, g
);
11204 g
= gimple_build_label (lab2
);
11205 gimple_seq_add_stmt (body_p
, g
);
11206 gimple_seq_add_seq (body_p
, thrn2_list
);
11207 g
= gimple_build_label (lab3
);
11208 gimple_seq_add_stmt (body_p
, g
);
11210 g
= gimple_build_assign (ivar
, size_zero_node
);
11211 gimple_seq_add_stmt (body_p
, g
);
11212 gimple_seq_add_stmt (body_p
, new_stmt
);
11213 gimple_seq_add_seq (body_p
, new_body
);
11215 gimple_seq new_dlist
= NULL
;
11216 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11217 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11218 tree num_threadsm1
= create_tmp_var (integer_type_node
);
11219 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
11220 integer_minus_one_node
);
11221 gimple_seq_add_stmt (&new_dlist
, g
);
11222 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
11223 gimple_seq_add_stmt (&new_dlist
, g
);
11224 g
= gimple_build_label (lab1
);
11225 gimple_seq_add_stmt (&new_dlist
, g
);
11226 gimple_seq_add_seq (&new_dlist
, last_list
);
11227 g
= gimple_build_label (lab2
);
11228 gimple_seq_add_stmt (&new_dlist
, g
);
11229 gimple_seq_add_seq (&new_dlist
, *dlist
);
11230 *dlist
= new_dlist
;
11233 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11234 the addresses of variables to be made private at the surrounding
11235 parallelism level. Such functions appear in the gimple code stream in two
11236 forms, e.g. for a partitioned loop:
11238 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11239 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11240 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11241 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11243 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11244 not as part of a HEAD_MARK sequence:
11246 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11248 For such stand-alone appearances, the 3rd argument is always 0, denoting
11249 gang partitioning. */
11252 lower_oacc_private_marker (omp_context
*ctx
)
11254 if (ctx
->oacc_privatization_candidates
.length () == 0)
11257 auto_vec
<tree
, 5> args
;
11259 args
.quick_push (build_int_cst (integer_type_node
, IFN_UNIQUE_OACC_PRIVATE
));
11260 args
.quick_push (integer_zero_node
);
11261 args
.quick_push (integer_minus_one_node
);
11265 FOR_EACH_VEC_ELT (ctx
->oacc_privatization_candidates
, i
, decl
)
11267 for (omp_context
*thisctx
= ctx
; thisctx
; thisctx
= thisctx
->outer
)
11269 tree inner_decl
= maybe_lookup_decl (decl
, thisctx
);
11276 gcc_checking_assert (decl
);
11278 tree addr
= build_fold_addr_expr (decl
);
11279 args
.safe_push (addr
);
11282 return gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
11285 /* Lower code for an OMP loop directive. */
11288 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11290 tree
*rhs_p
, block
;
11291 struct omp_for_data fd
, *fdp
= NULL
;
11292 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
11294 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
11295 gimple_seq cnt_list
= NULL
, clist
= NULL
;
11296 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
11299 push_gimplify_context ();
11301 if (is_gimple_omp_oacc (ctx
->stmt
))
11302 oacc_privatization_scan_clause_chain (ctx
, gimple_omp_for_clauses (stmt
));
11304 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
11306 block
= make_node (BLOCK
);
11307 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
11308 /* Replace at gsi right away, so that 'stmt' is no member
11309 of a sequence anymore as we're going to add to a different
11311 gsi_replace (gsi_p
, new_stmt
, true);
11313 /* Move declaration of temporaries in the loop body before we make
11315 omp_for_body
= gimple_omp_body (stmt
);
11316 if (!gimple_seq_empty_p (omp_for_body
)
11317 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
11320 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
11321 tree vars
= gimple_bind_vars (inner_bind
);
11322 if (is_gimple_omp_oacc (ctx
->stmt
))
11323 oacc_privatization_scan_decl_chain (ctx
, vars
);
11324 gimple_bind_append_vars (new_stmt
, vars
);
11325 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11326 keep them on the inner_bind and it's block. */
11327 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
11328 if (gimple_bind_block (inner_bind
))
11329 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
11332 if (gimple_omp_for_combined_into_p (stmt
))
11334 omp_extract_for_data (stmt
, &fd
, NULL
);
11337 /* We need two temporaries with fd.loop.v type (istart/iend)
11338 and then (fd.collapse - 1) temporaries with the same
11339 type for count2 ... countN-1 vars if not constant. */
11341 tree type
= fd
.iter_type
;
11342 if (fd
.collapse
> 1
11343 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11344 count
+= fd
.collapse
- 1;
11346 tree type2
= NULL_TREE
;
11348 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
11349 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
11350 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
11352 tree clauses
= *pc
;
11353 if (fd
.collapse
> 1
11355 && fd
.last_nonrect
== fd
.first_nonrect
+ 1
11356 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11357 if (tree v
= gimple_omp_for_index (stmt
, fd
.last_nonrect
))
11358 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
11360 v
= gimple_omp_for_index (stmt
, fd
.first_nonrect
);
11361 type2
= TREE_TYPE (v
);
11367 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
11368 OMP_CLAUSE__LOOPTEMP_
);
11369 if (ctx
->simt_stmt
)
11370 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
11371 OMP_CLAUSE__LOOPTEMP_
);
11372 for (i
= 0; i
< count
+ count2
; i
++)
11377 gcc_assert (outerc
);
11378 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
11379 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
11380 OMP_CLAUSE__LOOPTEMP_
);
11384 /* If there are 2 adjacent SIMD stmts, one with _simt_
11385 clause, another without, make sure they have the same
11386 decls in _looptemp_ clauses, because the outer stmt
11387 they are combined into will look up just one inner_stmt. */
11388 if (ctx
->simt_stmt
)
11389 temp
= OMP_CLAUSE_DECL (simtc
);
11391 temp
= create_tmp_var (i
>= count
? type2
: type
);
11392 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
11394 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
11395 OMP_CLAUSE_DECL (*pc
) = temp
;
11396 pc
= &OMP_CLAUSE_CHAIN (*pc
);
11397 if (ctx
->simt_stmt
)
11398 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
11399 OMP_CLAUSE__LOOPTEMP_
);
11404 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11408 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
11409 OMP_CLAUSE_REDUCTION
);
11410 tree rtmp
= NULL_TREE
;
11413 tree type
= build_pointer_type (pointer_sized_int_node
);
11414 tree temp
= create_tmp_var (type
);
11415 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
11416 OMP_CLAUSE_DECL (c
) = temp
;
11417 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
11418 gimple_omp_for_set_clauses (stmt
, c
);
11419 lower_omp_task_reductions (ctx
, OMP_FOR
,
11420 gimple_omp_for_clauses (stmt
),
11421 &tred_ilist
, &tred_dlist
);
11423 rtmp
= make_ssa_name (type
);
11424 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
11427 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
11430 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
11432 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
11433 gimple_omp_for_pre_body (stmt
));
11435 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
11437 gcall
*private_marker
= NULL
;
11438 if (is_gimple_omp_oacc (ctx
->stmt
)
11439 && !gimple_seq_empty_p (omp_for_body
))
11440 private_marker
= lower_oacc_private_marker (ctx
);
11442 /* Lower the header expressions. At this point, we can assume that
11443 the header is of the form:
11445 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11447 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11448 using the .omp_data_s mapping, if needed. */
11449 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
11451 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
11452 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11454 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11455 TREE_VEC_ELT (*rhs_p
, 1)
11456 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11457 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11458 TREE_VEC_ELT (*rhs_p
, 2)
11459 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11461 else if (!is_gimple_min_invariant (*rhs_p
))
11462 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11463 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11464 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11466 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
11467 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11469 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11470 TREE_VEC_ELT (*rhs_p
, 1)
11471 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11472 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11473 TREE_VEC_ELT (*rhs_p
, 2)
11474 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11476 else if (!is_gimple_min_invariant (*rhs_p
))
11477 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11478 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11479 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11481 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
11482 if (!is_gimple_min_invariant (*rhs_p
))
11483 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11486 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
11488 gimple_seq_add_seq (&body
, cnt_list
);
11490 /* Once lowered, extract the bounds and clauses. */
11491 omp_extract_for_data (stmt
, &fd
, NULL
);
11493 if (is_gimple_omp_oacc (ctx
->stmt
)
11494 && !ctx_in_oacc_kernels_region (ctx
))
11495 lower_oacc_head_tail (gimple_location (stmt
),
11496 gimple_omp_for_clauses (stmt
), private_marker
,
11497 &oacc_head
, &oacc_tail
, ctx
);
11499 /* Add OpenACC partitioning and reduction markers just before the loop. */
11501 gimple_seq_add_seq (&body
, oacc_head
);
11503 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
11505 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11506 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11507 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11508 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11510 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
11511 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
11512 OMP_CLAUSE_LINEAR_STEP (c
)
11513 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
11517 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
11518 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11519 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
11522 gimple_seq_add_stmt (&body
, stmt
);
11523 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
11526 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
11529 /* After the loop, add exit clauses. */
11530 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
11534 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
11535 gcall
*g
= gimple_build_call (fndecl
, 0);
11536 gimple_seq_add_stmt (&body
, g
);
11537 gimple_seq_add_seq (&body
, clist
);
11538 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
11539 g
= gimple_build_call (fndecl
, 0);
11540 gimple_seq_add_stmt (&body
, g
);
11543 if (ctx
->cancellable
)
11544 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
11546 gimple_seq_add_seq (&body
, dlist
);
11550 gimple_seq_add_seq (&tred_ilist
, body
);
11554 body
= maybe_catch_exception (body
);
11556 /* Region exit marker goes at the end of the loop body. */
11557 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
11558 gimple_seq_add_stmt (&body
, g
);
11560 gimple_seq_add_seq (&body
, tred_dlist
);
11562 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
11565 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
11567 /* Add OpenACC joining and reduction markers just after the loop. */
11569 gimple_seq_add_seq (&body
, oacc_tail
);
11571 pop_gimplify_context (new_stmt
);
11573 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
11574 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
11575 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
11576 if (BLOCK_VARS (block
))
11577 TREE_USED (block
) = 1;
11579 gimple_bind_set_body (new_stmt
, body
);
11580 gimple_omp_set_body (stmt
, NULL
);
11581 gimple_omp_for_set_pre_body (stmt
, NULL
);
11584 /* Callback for walk_stmts. Check if the current statement only contains
11585 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11588 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
11589 bool *handled_ops_p
,
11590 struct walk_stmt_info
*wi
)
11592 int *info
= (int *) wi
->info
;
11593 gimple
*stmt
= gsi_stmt (*gsi_p
);
11595 *handled_ops_p
= true;
11596 switch (gimple_code (stmt
))
11602 case GIMPLE_OMP_FOR
:
11603 case GIMPLE_OMP_SECTIONS
:
11604 *info
= *info
== 0 ? 1 : -1;
11613 struct omp_taskcopy_context
11615 /* This field must be at the beginning, as we do "inheritance": Some
11616 callback functions for tree-inline.c (e.g., omp_copy_decl)
11617 receive a copy_body_data pointer that is up-casted to an
11618 omp_context pointer. */
11624 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
11626 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
11628 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
11629 return create_tmp_var (TREE_TYPE (var
));
11635 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
11637 tree name
, new_fields
= NULL
, type
, f
;
11639 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
11640 name
= DECL_NAME (TYPE_NAME (orig_type
));
11641 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
11642 TYPE_DECL
, name
, type
);
11643 TYPE_NAME (type
) = name
;
11645 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
11647 tree new_f
= copy_node (f
);
11648 DECL_CONTEXT (new_f
) = type
;
11649 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
11650 TREE_CHAIN (new_f
) = new_fields
;
11651 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
11652 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
11653 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
11655 new_fields
= new_f
;
11656 tcctx
->cb
.decl_map
->put (f
, new_f
);
11658 TYPE_FIELDS (type
) = nreverse (new_fields
);
11659 layout_type (type
);
11663 /* Create task copyfn. */
11666 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
11668 struct function
*child_cfun
;
11669 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
11670 tree record_type
, srecord_type
, bind
, list
;
11671 bool record_needs_remap
= false, srecord_needs_remap
= false;
11673 struct omp_taskcopy_context tcctx
;
11674 location_t loc
= gimple_location (task_stmt
);
11675 size_t looptempno
= 0;
11677 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
11678 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
11679 gcc_assert (child_cfun
->cfg
== NULL
);
11680 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
11682 /* Reset DECL_CONTEXT on function arguments. */
11683 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
11684 DECL_CONTEXT (t
) = child_fn
;
11686 /* Populate the function. */
11687 push_gimplify_context ();
11688 push_cfun (child_cfun
);
11690 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
11691 TREE_SIDE_EFFECTS (bind
) = 1;
11693 DECL_SAVED_TREE (child_fn
) = bind
;
11694 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
11696 /* Remap src and dst argument types if needed. */
11697 record_type
= ctx
->record_type
;
11698 srecord_type
= ctx
->srecord_type
;
11699 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
11700 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
11702 record_needs_remap
= true;
11705 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
11706 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
11708 srecord_needs_remap
= true;
11712 if (record_needs_remap
|| srecord_needs_remap
)
11714 memset (&tcctx
, '\0', sizeof (tcctx
));
11715 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
11716 tcctx
.cb
.dst_fn
= child_fn
;
11717 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
11718 gcc_checking_assert (tcctx
.cb
.src_node
);
11719 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
11720 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
11721 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
11722 tcctx
.cb
.eh_lp_nr
= 0;
11723 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
11724 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
11727 if (record_needs_remap
)
11728 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
11729 if (srecord_needs_remap
)
11730 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
11733 tcctx
.cb
.decl_map
= NULL
;
11735 arg
= DECL_ARGUMENTS (child_fn
);
11736 TREE_TYPE (arg
) = build_pointer_type (record_type
);
11737 sarg
= DECL_CHAIN (arg
);
11738 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
11740 /* First pass: initialize temporaries used in record_type and srecord_type
11741 sizes and field offsets. */
11742 if (tcctx
.cb
.decl_map
)
11743 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11744 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11748 decl
= OMP_CLAUSE_DECL (c
);
11749 p
= tcctx
.cb
.decl_map
->get (decl
);
11752 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
11753 sf
= (tree
) n
->value
;
11754 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11755 src
= build_simple_mem_ref_loc (loc
, sarg
);
11756 src
= omp_build_component_ref (src
, sf
);
11757 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
11758 append_to_statement_list (t
, &list
);
11761 /* Second pass: copy shared var pointers and copy construct non-VLA
11762 firstprivate vars. */
11763 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11764 switch (OMP_CLAUSE_CODE (c
))
11766 splay_tree_key key
;
11767 case OMP_CLAUSE_SHARED
:
11768 decl
= OMP_CLAUSE_DECL (c
);
11769 key
= (splay_tree_key
) decl
;
11770 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
11771 key
= (splay_tree_key
) &DECL_UID (decl
);
11772 n
= splay_tree_lookup (ctx
->field_map
, key
);
11775 f
= (tree
) n
->value
;
11776 if (tcctx
.cb
.decl_map
)
11777 f
= *tcctx
.cb
.decl_map
->get (f
);
11778 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
11779 sf
= (tree
) n
->value
;
11780 if (tcctx
.cb
.decl_map
)
11781 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11782 src
= build_simple_mem_ref_loc (loc
, sarg
);
11783 src
= omp_build_component_ref (src
, sf
);
11784 dst
= build_simple_mem_ref_loc (loc
, arg
);
11785 dst
= omp_build_component_ref (dst
, f
);
11786 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
11787 append_to_statement_list (t
, &list
);
11789 case OMP_CLAUSE_REDUCTION
:
11790 case OMP_CLAUSE_IN_REDUCTION
:
11791 decl
= OMP_CLAUSE_DECL (c
);
11792 if (TREE_CODE (decl
) == MEM_REF
)
11794 decl
= TREE_OPERAND (decl
, 0);
11795 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
11796 decl
= TREE_OPERAND (decl
, 0);
11797 if (TREE_CODE (decl
) == INDIRECT_REF
11798 || TREE_CODE (decl
) == ADDR_EXPR
)
11799 decl
= TREE_OPERAND (decl
, 0);
11801 key
= (splay_tree_key
) decl
;
11802 n
= splay_tree_lookup (ctx
->field_map
, key
);
11805 f
= (tree
) n
->value
;
11806 if (tcctx
.cb
.decl_map
)
11807 f
= *tcctx
.cb
.decl_map
->get (f
);
11808 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
11809 sf
= (tree
) n
->value
;
11810 if (tcctx
.cb
.decl_map
)
11811 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11812 src
= build_simple_mem_ref_loc (loc
, sarg
);
11813 src
= omp_build_component_ref (src
, sf
);
11814 if (decl
!= OMP_CLAUSE_DECL (c
)
11815 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
11816 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
11817 src
= build_simple_mem_ref_loc (loc
, src
);
11818 dst
= build_simple_mem_ref_loc (loc
, arg
);
11819 dst
= omp_build_component_ref (dst
, f
);
11820 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
11821 append_to_statement_list (t
, &list
);
11823 case OMP_CLAUSE__LOOPTEMP_
:
11824 /* Fields for first two _looptemp_ clauses are initialized by
11825 GOMP_taskloop*, the rest are handled like firstprivate. */
11826 if (looptempno
< 2)
11832 case OMP_CLAUSE__REDUCTEMP_
:
11833 case OMP_CLAUSE_FIRSTPRIVATE
:
11834 decl
= OMP_CLAUSE_DECL (c
);
11835 if (is_variable_sized (decl
))
11837 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
11840 f
= (tree
) n
->value
;
11841 if (tcctx
.cb
.decl_map
)
11842 f
= *tcctx
.cb
.decl_map
->get (f
);
11843 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
11846 sf
= (tree
) n
->value
;
11847 if (tcctx
.cb
.decl_map
)
11848 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11849 src
= build_simple_mem_ref_loc (loc
, sarg
);
11850 src
= omp_build_component_ref (src
, sf
);
11851 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
11852 src
= build_simple_mem_ref_loc (loc
, src
);
11856 dst
= build_simple_mem_ref_loc (loc
, arg
);
11857 dst
= omp_build_component_ref (dst
, f
);
11858 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
11859 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
11862 if (ctx
->allocate_map
)
11863 if (tree
*allocatorp
= ctx
->allocate_map
->get (decl
))
11865 tree allocator
= *allocatorp
;
11866 if (TREE_CODE (allocator
) != INTEGER_CST
)
11868 n
= splay_tree_lookup (ctx
->sfield_map
,
11869 (splay_tree_key
) allocator
);
11870 allocator
= (tree
) n
->value
;
11871 if (tcctx
.cb
.decl_map
)
11872 allocator
= *tcctx
.cb
.decl_map
->get (allocator
);
11873 tree a
= build_simple_mem_ref_loc (loc
, sarg
);
11874 allocator
= omp_build_component_ref (a
, allocator
);
11876 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
11877 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
11878 tree align
= build_int_cst (size_type_node
,
11879 DECL_ALIGN_UNIT (decl
));
11880 tree sz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst
)));
11881 tree ptr
= build_call_expr_loc (loc
, a
, 3, align
, sz
,
11883 ptr
= fold_convert (TREE_TYPE (dst
), ptr
);
11884 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, ptr
);
11885 append_to_statement_list (t
, &list
);
11886 dst
= build_simple_mem_ref_loc (loc
, dst
);
11888 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
11890 append_to_statement_list (t
, &list
);
11892 case OMP_CLAUSE_PRIVATE
:
11893 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
11895 decl
= OMP_CLAUSE_DECL (c
);
11896 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
11897 f
= (tree
) n
->value
;
11898 if (tcctx
.cb
.decl_map
)
11899 f
= *tcctx
.cb
.decl_map
->get (f
);
11900 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
11903 sf
= (tree
) n
->value
;
11904 if (tcctx
.cb
.decl_map
)
11905 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11906 src
= build_simple_mem_ref_loc (loc
, sarg
);
11907 src
= omp_build_component_ref (src
, sf
);
11908 if (use_pointer_for_field (decl
, NULL
))
11909 src
= build_simple_mem_ref_loc (loc
, src
);
11913 dst
= build_simple_mem_ref_loc (loc
, arg
);
11914 dst
= omp_build_component_ref (dst
, f
);
11915 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
11916 append_to_statement_list (t
, &list
);
11922 /* Last pass: handle VLA firstprivates. */
11923 if (tcctx
.cb
.decl_map
)
11924 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11925 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11929 decl
= OMP_CLAUSE_DECL (c
);
11930 if (!is_variable_sized (decl
))
11932 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
11935 f
= (tree
) n
->value
;
11936 f
= *tcctx
.cb
.decl_map
->get (f
);
11937 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
11938 ind
= DECL_VALUE_EXPR (decl
);
11939 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
11940 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
11941 n
= splay_tree_lookup (ctx
->sfield_map
,
11942 (splay_tree_key
) TREE_OPERAND (ind
, 0));
11943 sf
= (tree
) n
->value
;
11944 sf
= *tcctx
.cb
.decl_map
->get (sf
);
11945 src
= build_simple_mem_ref_loc (loc
, sarg
);
11946 src
= omp_build_component_ref (src
, sf
);
11947 src
= build_simple_mem_ref_loc (loc
, src
);
11948 dst
= build_simple_mem_ref_loc (loc
, arg
);
11949 dst
= omp_build_component_ref (dst
, f
);
11950 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
11951 append_to_statement_list (t
, &list
);
11952 n
= splay_tree_lookup (ctx
->field_map
,
11953 (splay_tree_key
) TREE_OPERAND (ind
, 0));
11954 df
= (tree
) n
->value
;
11955 df
= *tcctx
.cb
.decl_map
->get (df
);
11956 ptr
= build_simple_mem_ref_loc (loc
, arg
);
11957 ptr
= omp_build_component_ref (ptr
, df
);
11958 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
11959 build_fold_addr_expr_loc (loc
, dst
));
11960 append_to_statement_list (t
, &list
);
11963 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
11964 append_to_statement_list (t
, &list
);
11966 if (tcctx
.cb
.decl_map
)
11967 delete tcctx
.cb
.decl_map
;
11968 pop_gimplify_context (NULL
);
11969 BIND_EXPR_BODY (bind
) = list
;
/* Lower OMP_CLAUSE_DEPEND clauses of a task into an explicit address
   array handed to the runtime: the depend clauses are counted by kind,
   an array of ptr_type_node is created and initialized in *ISEQ with a
   small header (slot 0, slot 1 = total clause count, plus per-kind
   counts when the extended 5-slot header is used, i.e. idx == 5)
   followed by the gimplified address of each depend clause's decl.
   The DEPEND clauses in *PCLAUSES are then replaced by a single
   OMP_CLAUSE_DEPEND_LAST clause whose decl is the address of that
   array, and *OSEQ receives a clobber of the array once the construct
   is done with it.
   NOTE(review): this text is a line-wrapped extraction with the
   original file's line numbers embedded in it; braces, `break`s, the
   cnt[] increment statements and some declarations were dropped.  The
   comments below document the surviving fragment only — confirm any
   detail against upstream gcc/omp-low.c before relying on it.  */
11974 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
11978 size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
11980 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
11981 gcc_assert (clauses
);
/* First pass: count the depend clauses by kind.  The per-case cnt[]
   increments and `break`s were lost in this extraction — TODO confirm
   which cnt[] slot each kind maps to against upstream omp-low.c.  */
11982 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11983 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
11984 switch (OMP_CLAUSE_DEPEND_KIND (c
))
11986 case OMP_CLAUSE_DEPEND_LAST
:
11987 /* Lowering already done at gimplification. */
11989 case OMP_CLAUSE_DEPEND_IN
:
11992 case OMP_CLAUSE_DEPEND_OUT
:
11993 case OMP_CLAUSE_DEPEND_INOUT
:
11996 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
11999 case OMP_CLAUSE_DEPEND_DEPOBJ
:
/* SOURCE/SINK are ordered-depend forms that must not reach this
   lowering path.  */
12002 case OMP_CLAUSE_DEPEND_SOURCE
:
12003 case OMP_CLAUSE_DEPEND_SINK
:
12006 gcc_unreachable ();
/* The presence of mutexinoutset (cnt[1]) or depobj (cnt[3]) clauses
   selects the extended array header; the statement that widens idx
   (presumably `idx = 5;`) was dropped by the extraction.  */
12008 if (cnt
[1] || cnt
[3])
/* Build the runtime depend array: total clause count plus the 2- or
   5-entry header selected by idx.  */
12010 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
12011 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
12012 tree array
= create_tmp_var (type
);
12013 TREE_ADDRESSABLE (array
) = 1;
/* Slot 0 is initialized to a null pointer constant.  */
12014 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
12018 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
12019 gimple_seq_add_stmt (iseq
, g
);
/* Slot 1 holds the total number of depend clauses.  */
12020 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
12023 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
12024 gimple_seq_add_stmt (iseq
, g
);
/* Emit the per-kind counts: three slots with the extended (idx == 5)
   header, a single slot otherwise.  */
12025 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
12027 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
12028 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
12029 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
12030 gimple_seq_add_stmt (iseq
, g
);
/* Second phase: for each kind bucket in turn, store the gimplified
   address of every matching depend clause decl into consecutive array
   slots (idx is post-incremented per store).  The inner switch's
   filtering/`continue` statements were dropped by the extraction.  */
12032 for (i
= 0; i
< 4; i
++)
12036 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12037 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
12041 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12043 case OMP_CLAUSE_DEPEND_IN
:
12047 case OMP_CLAUSE_DEPEND_OUT
:
12048 case OMP_CLAUSE_DEPEND_INOUT
:
12052 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12056 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12061 gcc_unreachable ();
12063 tree t
= OMP_CLAUSE_DECL (c
);
12064 t
= fold_convert (ptr_type_node
, t
);
12065 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
12066 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12067 NULL_TREE
, NULL_TREE
);
12068 g
= gimple_build_assign (r
, t
);
12069 gimple_seq_add_stmt (iseq
, g
);
/* Replace the original depend clauses with a single DEPEND_LAST
   clause whose decl is the address of the freshly built array; the
   old *PCLAUSES chain is hung off the new clause.  */
12072 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
12073 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
12074 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
12075 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
/* After the construct, the array is dead: clobber it in *OSEQ so
   later passes can reuse its stack slot.  */
12077 tree clobber
= build_clobber (type
);
12078 g
= gimple_build_assign (array
, clobber
);
12079 gimple_seq_add_stmt (oseq
, g
);
12082 /* Lower the OpenMP parallel or task directive in the current statement
12083 in GSI_P. CTX holds context information for the directive. */
/* Lower a GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK statement at *GSI_P
   using the directive's context CTX: lower depend and (task)
   reduction clauses, generate the data-sharing send/receive code,
   lower the construct body, and rewrap everything in GIMPLE_BIND
   statements that replace the original statement in the stream.
   NOTE(review): this is a line-wrapped extraction with the original
   file's line numbers embedded; braces and a number of statements
   (early `return`s, `else` arms, assignment targets) are missing.
   The comments describe the surviving fragment only — verify against
   upstream gcc/omp-low.c.  */
12086 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12090 gimple
*stmt
= gsi_stmt (*gsi_p
);
12091 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
12092 gimple_seq par_body
;
12093 location_t loc
= gimple_location (stmt
);
12095 clauses
= gimple_omp_taskreg_clauses (stmt
);
/* A taskwait-with-depend is represented as a bodyless task; the code
   taken on this condition (presumably skipping body extraction) was
   dropped by the extraction — see the second taskwait check below.  */
12096 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12097 && gimple_omp_task_taskwait_p (stmt
))
/* Extract the single GIMPLE_BIND that wraps the construct body.  */
12105 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
12106 par_body
= gimple_bind_body (par_bind
);
12108 child_fn
= ctx
->cb
.dst_fn
;
/* For a parallel not already marked combined, walk its body with
   check_combined_parallel; the guard that inspects the walk's result
   before calling gimple_omp_parallel_set_combined_p was dropped by
   the extraction.  */
12109 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
12110 && !gimple_omp_parallel_combined_p (stmt
))
12112 struct walk_stmt_info wi
;
12115 memset (&wi
, 0, sizeof (wi
));
12117 wi
.val_only
= true;
12118 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
12120 gimple_omp_parallel_set_combined_p (stmt
, true);
/* Depend clauses on a task are lowered into an address array inside a
   wrapper bind (dep_bind); dep_ilist/dep_olist receive the array
   setup and teardown.  */
12122 gimple_seq dep_ilist
= NULL
;
12123 gimple_seq dep_olist
= NULL
;
12124 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12125 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
12127 push_gimplify_context ();
12128 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
)));
12129 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
12130 &dep_ilist
, &dep_olist
);
/* Early path for taskwait-with-depend: the bodyless task is simply
   sandwiched between the depend setup/teardown inside dep_bind and
   that bind replaces the original statement (the early `return` that
   presumably follows was dropped by the extraction).  */
12133 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12134 && gimple_omp_task_taskwait_p (stmt
))
12138 gsi_replace (gsi_p
, dep_bind
, true);
12139 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12140 gimple_bind_add_stmt (dep_bind
, stmt
);
12141 gimple_bind_add_seq (dep_bind
, dep_olist
);
12142 pop_gimplify_context (dep_bind
);
/* If a sender record exists for the task, build the copy function the
   runtime uses to duplicate firstprivate data (create_task_copyfn).  */
12147 if (ctx
->srecord_type
)
12148 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
/* Task reductions: for a taskloop with reduction clauses or a
   parallel with a _REDUCTEMP_ clause, lower them into
   tskred_ilist/tskred_olist, creating dep_bind if not already made.  */
12150 gimple_seq tskred_ilist
= NULL
;
12151 gimple_seq tskred_olist
= NULL
;
12152 if ((is_task_ctx (ctx
)
12153 && gimple_omp_task_taskloop_p (ctx
->stmt
)
12154 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
12155 OMP_CLAUSE_REDUCTION
))
12156 || (is_parallel_ctx (ctx
)
12157 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
12158 OMP_CLAUSE__REDUCTEMP_
)))
12160 if (dep_bind
== NULL
)
12162 push_gimplify_context ();
12163 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12165 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
12167 gimple_omp_taskreg_clauses (ctx
->stmt
),
12168 &tskred_ilist
, &tskred_olist
);
/* Lower the data-sharing clauses and the construct body itself;
   reduction clauses are lowered here only for parallel (tasks handle
   them differently).  */
12171 push_gimplify_context ();
12173 gimple_seq par_olist
= NULL
;
12174 gimple_seq par_ilist
= NULL
;
12175 gimple_seq par_rlist
= NULL
;
12176 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
12177 lower_omp (&par_body
, ctx
);
12178 if (gimple_code (stmt
) != GIMPLE_OMP_TASK
)
12179 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
12181 /* Declare all the variables created by mapping and the variables
12182 declared in the scope of the parallel body. */
12183 record_vars_into (ctx
->block_vars
, child_fn
);
12184 maybe_remove_omp_member_access_dummy_vars (par_bind
);
12185 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
/* Create the .omp_data_o sender variable and attach it to the
   statement as the data argument (the assignment target
   ctx->sender_decl on line 12190 was split off by the extraction).  */
12187 if (ctx
->record_type
)
12190 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
12191 : ctx
->record_type
, ".omp_data_o");
12192 DECL_NAMELESS (ctx
->sender_decl
) = 1;
12193 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
12194 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
/* ilist/olist receive the code that fills in / reads back the
   data-sharing record around the construct.  */
12197 gimple_seq olist
= NULL
;
12198 gimple_seq ilist
= NULL
;
12199 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
12200 lower_send_shared_vars (&ilist
, &olist
, ctx
);
/* The sender record is dead after the construct: clobber it.  */
12202 if (ctx
->record_type
)
12204 tree clobber
= build_clobber (TREE_TYPE (ctx
->sender_decl
));
12205 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
12209 /* Once all the expansions are done, sequence all the different
12210 fragments inside gimple_omp_body. */
12212 gimple_seq new_body
= NULL
;
/* Inside the child, initialize receiver_decl from the address of the
   sender record.  */
12214 if (ctx
->record_type
)
12216 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
12217 /* fixup_child_record_type might have changed receiver_decl's type. */
12218 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
12219 gimple_seq_add_stmt (&new_body
,
12220 gimple_build_assign (ctx
->receiver_decl
, t
));
/* Body order: receive/init, lowered body, reductions, optional cancel
   label, copy-out, then exception wrapping and the OMP return.  */
12223 gimple_seq_add_seq (&new_body
, par_ilist
);
12224 gimple_seq_add_seq (&new_body
, par_body
);
12225 gimple_seq_add_seq (&new_body
, par_rlist
);
12226 if (ctx
->cancellable
)
12227 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
12228 gimple_seq_add_seq (&new_body
, par_olist
);
12229 new_body
= maybe_catch_exception (new_body
);
12230 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
12231 gimple_seq_add_stmt (&new_body
,
12232 gimple_build_omp_continue (integer_zero_node
,
12233 integer_zero_node
));
12234 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12235 gimple_omp_set_body (stmt
, new_body
);
/* Wrap the statement plus its send code in a fresh bind, reusing
   par_bind's BLOCK when safe; splice it (or dep_bind, when one
   exists) back into the statement stream.  */
12237 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
12238 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12240 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
12241 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12242 gimple_bind_add_seq (bind
, ilist
);
12243 gimple_bind_add_stmt (bind
, stmt
);
12244 gimple_bind_add_seq (bind
, olist
);
12246 pop_gimplify_context (NULL
);
/* When a dep_bind wrapper exists, nest the bind inside it together
   with the depend / task-reduction setup and teardown sequences.  */
12250 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12251 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
12252 gimple_bind_add_stmt (dep_bind
, bind
);
12253 gimple_bind_add_seq (dep_bind
, tskred_olist
);
12254 gimple_bind_add_seq (dep_bind
, dep_olist
);
12255 pop_gimplify_context (dep_bind
);
12259 /* Lower the GIMPLE_OMP_TARGET in the current statement
12260 in GSI_P. CTX holds context information for the directive. */
12263 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12266 tree child_fn
, t
, c
;
12267 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
12268 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
12269 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
12270 location_t loc
= gimple_location (stmt
);
12271 bool offloaded
, data_region
;
12272 unsigned int map_cnt
= 0;
12273 tree in_reduction_clauses
= NULL_TREE
;
12275 offloaded
= is_gimple_omp_offloaded (stmt
);
12276 switch (gimple_omp_target_kind (stmt
))
12278 case GF_OMP_TARGET_KIND_REGION
:
12280 q
= &in_reduction_clauses
;
12281 for (p
= gimple_omp_target_clauses_ptr (stmt
); *p
; )
12282 if (OMP_CLAUSE_CODE (*p
) == OMP_CLAUSE_IN_REDUCTION
)
12285 q
= &OMP_CLAUSE_CHAIN (*q
);
12286 *p
= OMP_CLAUSE_CHAIN (*p
);
12289 p
= &OMP_CLAUSE_CHAIN (*p
);
12291 *p
= in_reduction_clauses
;
12293 case GF_OMP_TARGET_KIND_UPDATE
:
12294 case GF_OMP_TARGET_KIND_ENTER_DATA
:
12295 case GF_OMP_TARGET_KIND_EXIT_DATA
:
12296 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
12297 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
12298 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
12299 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
12300 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
12301 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
12302 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
12303 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
12304 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
12305 data_region
= false;
12307 case GF_OMP_TARGET_KIND_DATA
:
12308 case GF_OMP_TARGET_KIND_OACC_DATA
:
12309 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
12310 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
12311 data_region
= true;
12314 gcc_unreachable ();
12317 clauses
= gimple_omp_target_clauses (stmt
);
12319 gimple_seq dep_ilist
= NULL
;
12320 gimple_seq dep_olist
= NULL
;
12321 bool has_depend
= omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
) != NULL_TREE
;
12322 if (has_depend
|| in_reduction_clauses
)
12324 push_gimplify_context ();
12325 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12327 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
12328 &dep_ilist
, &dep_olist
);
12329 if (in_reduction_clauses
)
12330 lower_rec_input_clauses (in_reduction_clauses
, &dep_ilist
, &dep_olist
,
12338 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
12339 tgt_body
= gimple_bind_body (tgt_bind
);
12341 else if (data_region
)
12342 tgt_body
= gimple_omp_body (stmt
);
12343 child_fn
= ctx
->cb
.dst_fn
;
12345 push_gimplify_context ();
12348 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12349 switch (OMP_CLAUSE_CODE (c
))
12355 case OMP_CLAUSE_MAP
:
12357 /* First check what we're prepared to handle in the following. */
12358 switch (OMP_CLAUSE_MAP_KIND (c
))
12360 case GOMP_MAP_ALLOC
:
12362 case GOMP_MAP_FROM
:
12363 case GOMP_MAP_TOFROM
:
12364 case GOMP_MAP_POINTER
:
12365 case GOMP_MAP_TO_PSET
:
12366 case GOMP_MAP_DELETE
:
12367 case GOMP_MAP_RELEASE
:
12368 case GOMP_MAP_ALWAYS_TO
:
12369 case GOMP_MAP_ALWAYS_FROM
:
12370 case GOMP_MAP_ALWAYS_TOFROM
:
12371 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
12372 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
12373 case GOMP_MAP_STRUCT
:
12374 case GOMP_MAP_ALWAYS_POINTER
:
12375 case GOMP_MAP_ATTACH
:
12376 case GOMP_MAP_DETACH
:
12378 case GOMP_MAP_IF_PRESENT
:
12379 case GOMP_MAP_FORCE_ALLOC
:
12380 case GOMP_MAP_FORCE_TO
:
12381 case GOMP_MAP_FORCE_FROM
:
12382 case GOMP_MAP_FORCE_TOFROM
:
12383 case GOMP_MAP_FORCE_PRESENT
:
12384 case GOMP_MAP_FORCE_DEVICEPTR
:
12385 case GOMP_MAP_DEVICE_RESIDENT
:
12386 case GOMP_MAP_LINK
:
12387 case GOMP_MAP_FORCE_DETACH
:
12388 gcc_assert (is_gimple_omp_oacc (stmt
));
12391 gcc_unreachable ();
12395 case OMP_CLAUSE_TO
:
12396 case OMP_CLAUSE_FROM
:
12398 var
= OMP_CLAUSE_DECL (c
);
12401 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
12402 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12403 && (OMP_CLAUSE_MAP_KIND (c
)
12404 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
12409 if (DECL_SIZE (var
)
12410 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12412 tree var2
= DECL_VALUE_EXPR (var
);
12413 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12414 var2
= TREE_OPERAND (var2
, 0);
12415 gcc_assert (DECL_P (var2
));
12420 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12421 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12422 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12424 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12426 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
12427 && varpool_node::get_create (var
)->offloadable
)
12430 tree type
= build_pointer_type (TREE_TYPE (var
));
12431 tree new_var
= lookup_decl (var
, ctx
);
12432 x
= create_tmp_var_raw (type
, get_name (new_var
));
12433 gimple_add_tmp_var (x
);
12434 x
= build_simple_mem_ref (x
);
12435 SET_DECL_VALUE_EXPR (new_var
, x
);
12436 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12441 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12442 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12443 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
12444 && is_omp_target (stmt
))
12446 gcc_assert (maybe_lookup_field (c
, ctx
));
12451 if (!maybe_lookup_field (var
, ctx
))
12454 /* Don't remap compute constructs' reduction variables, because the
12455 intermediate result must be local to each gang. */
12456 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12457 && is_gimple_omp_oacc (ctx
->stmt
)
12458 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
12460 x
= build_receiver_ref (var
, true, ctx
);
12461 tree new_var
= lookup_decl (var
, ctx
);
12463 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12464 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
12465 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12466 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12467 x
= build_simple_mem_ref (x
);
12468 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12470 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
12471 if (omp_is_reference (new_var
)
12472 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
12473 || DECL_BY_REFERENCE (var
)))
12475 /* Create a local object to hold the instance
12477 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
12478 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
12479 tree inst
= create_tmp_var (type
, id
);
12480 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
12481 x
= build_fold_addr_expr (inst
);
12483 gimplify_assign (new_var
, x
, &fplist
);
12485 else if (DECL_P (new_var
))
12487 SET_DECL_VALUE_EXPR (new_var
, x
);
12488 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12491 gcc_unreachable ();
12496 case OMP_CLAUSE_FIRSTPRIVATE
:
12497 gcc_checking_assert (offloaded
);
12498 if (is_gimple_omp_oacc (ctx
->stmt
))
12500 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12501 gcc_checking_assert (!is_oacc_kernels (ctx
));
12502 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12503 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12505 goto oacc_firstprivate
;
12508 var
= OMP_CLAUSE_DECL (c
);
12509 if (!omp_is_reference (var
)
12510 && !is_gimple_reg_type (TREE_TYPE (var
)))
12512 tree new_var
= lookup_decl (var
, ctx
);
12513 if (is_variable_sized (var
))
12515 tree pvar
= DECL_VALUE_EXPR (var
);
12516 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12517 pvar
= TREE_OPERAND (pvar
, 0);
12518 gcc_assert (DECL_P (pvar
));
12519 tree new_pvar
= lookup_decl (pvar
, ctx
);
12520 x
= build_fold_indirect_ref (new_pvar
);
12521 TREE_THIS_NOTRAP (x
) = 1;
12524 x
= build_receiver_ref (var
, true, ctx
);
12525 SET_DECL_VALUE_EXPR (new_var
, x
);
12526 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12530 case OMP_CLAUSE_PRIVATE
:
12531 gcc_checking_assert (offloaded
);
12532 if (is_gimple_omp_oacc (ctx
->stmt
))
12534 /* No 'private' clauses on OpenACC 'kernels'. */
12535 gcc_checking_assert (!is_oacc_kernels (ctx
));
12536 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12537 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12541 var
= OMP_CLAUSE_DECL (c
);
12542 if (is_variable_sized (var
))
12544 tree new_var
= lookup_decl (var
, ctx
);
12545 tree pvar
= DECL_VALUE_EXPR (var
);
12546 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12547 pvar
= TREE_OPERAND (pvar
, 0);
12548 gcc_assert (DECL_P (pvar
));
12549 tree new_pvar
= lookup_decl (pvar
, ctx
);
12550 x
= build_fold_indirect_ref (new_pvar
);
12551 TREE_THIS_NOTRAP (x
) = 1;
12552 SET_DECL_VALUE_EXPR (new_var
, x
);
12553 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12557 case OMP_CLAUSE_USE_DEVICE_PTR
:
12558 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12559 case OMP_CLAUSE_IS_DEVICE_PTR
:
12560 var
= OMP_CLAUSE_DECL (c
);
12562 if (is_variable_sized (var
))
12564 tree new_var
= lookup_decl (var
, ctx
);
12565 tree pvar
= DECL_VALUE_EXPR (var
);
12566 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12567 pvar
= TREE_OPERAND (pvar
, 0);
12568 gcc_assert (DECL_P (pvar
));
12569 tree new_pvar
= lookup_decl (pvar
, ctx
);
12570 x
= build_fold_indirect_ref (new_pvar
);
12571 TREE_THIS_NOTRAP (x
) = 1;
12572 SET_DECL_VALUE_EXPR (new_var
, x
);
12573 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12575 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12576 && !omp_is_reference (var
)
12577 && !omp_is_allocatable_or_ptr (var
)
12578 && !lang_hooks
.decls
.omp_array_data (var
, true))
12579 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12581 tree new_var
= lookup_decl (var
, ctx
);
12582 tree type
= build_pointer_type (TREE_TYPE (var
));
12583 x
= create_tmp_var_raw (type
, get_name (new_var
));
12584 gimple_add_tmp_var (x
);
12585 x
= build_simple_mem_ref (x
);
12586 SET_DECL_VALUE_EXPR (new_var
, x
);
12587 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12591 tree new_var
= lookup_decl (var
, ctx
);
12592 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
12593 gimple_add_tmp_var (x
);
12594 SET_DECL_VALUE_EXPR (new_var
, x
);
12595 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12602 target_nesting_level
++;
12603 lower_omp (&tgt_body
, ctx
);
12604 target_nesting_level
--;
12606 else if (data_region
)
12607 lower_omp (&tgt_body
, ctx
);
12611 /* Declare all the variables created by mapping and the variables
12612 declared in the scope of the target body. */
12613 record_vars_into (ctx
->block_vars
, child_fn
);
12614 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
12615 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
12620 if (ctx
->record_type
)
12623 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
12624 DECL_NAMELESS (ctx
->sender_decl
) = 1;
12625 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
12626 t
= make_tree_vec (3);
12627 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
12628 TREE_VEC_ELT (t
, 1)
12629 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
12630 ".omp_data_sizes");
12631 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
12632 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
12633 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
12634 tree tkind_type
= short_unsigned_type_node
;
12635 int talign_shift
= 8;
12636 TREE_VEC_ELT (t
, 2)
12637 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
12638 ".omp_data_kinds");
12639 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
12640 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
12641 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
12642 gimple_omp_target_set_data_arg (stmt
, t
);
12644 vec
<constructor_elt
, va_gc
> *vsize
;
12645 vec
<constructor_elt
, va_gc
> *vkind
;
12646 vec_alloc (vsize
, map_cnt
);
12647 vec_alloc (vkind
, map_cnt
);
12648 unsigned int map_idx
= 0;
12650 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12651 switch (OMP_CLAUSE_CODE (c
))
12653 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
12654 unsigned int talign
;
12659 case OMP_CLAUSE_MAP
:
12660 case OMP_CLAUSE_TO
:
12661 case OMP_CLAUSE_FROM
:
12662 oacc_firstprivate_map
:
12664 ovar
= OMP_CLAUSE_DECL (c
);
12665 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12666 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12667 || (OMP_CLAUSE_MAP_KIND (c
)
12668 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
12670 if (!DECL_P (ovar
))
12672 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12673 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
12675 nc
= OMP_CLAUSE_CHAIN (c
);
12676 gcc_checking_assert (OMP_CLAUSE_DECL (nc
)
12677 == get_base_address (ovar
));
12678 ovar
= OMP_CLAUSE_DECL (nc
);
12682 tree x
= build_sender_ref (ovar
, ctx
);
12684 if (in_reduction_clauses
12685 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12686 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
12688 v
= unshare_expr (v
);
12690 while (handled_component_p (*p
)
12691 || TREE_CODE (*p
) == INDIRECT_REF
12692 || TREE_CODE (*p
) == ADDR_EXPR
12693 || TREE_CODE (*p
) == MEM_REF
12694 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
12695 p
= &TREE_OPERAND (*p
, 0);
12697 if (is_variable_sized (d
))
12699 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
12700 d
= DECL_VALUE_EXPR (d
);
12701 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
12702 d
= TREE_OPERAND (d
, 0);
12703 gcc_assert (DECL_P (d
));
12706 = (splay_tree_key
) &DECL_CONTEXT (d
);
12707 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
12712 *p
= build_fold_indirect_ref (nd
);
12714 v
= build_fold_addr_expr_with_type (v
, ptr_type_node
);
12715 gimplify_assign (x
, v
, &ilist
);
12721 if (DECL_SIZE (ovar
)
12722 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
12724 tree ovar2
= DECL_VALUE_EXPR (ovar
);
12725 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
12726 ovar2
= TREE_OPERAND (ovar2
, 0);
12727 gcc_assert (DECL_P (ovar2
));
12730 if (!maybe_lookup_field (ovar
, ctx
)
12731 && !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12732 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12733 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)))
12737 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
12738 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
12739 talign
= DECL_ALIGN_UNIT (ovar
);
12744 if (in_reduction_clauses
12745 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12746 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
12749 if (is_variable_sized (d
))
12751 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
12752 d
= DECL_VALUE_EXPR (d
);
12753 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
12754 d
= TREE_OPERAND (d
, 0);
12755 gcc_assert (DECL_P (d
));
12758 = (splay_tree_key
) &DECL_CONTEXT (d
);
12759 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
12764 var
= build_fold_indirect_ref (nd
);
12767 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
12770 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12771 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12772 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
12773 && is_omp_target (stmt
))
12775 x
= build_sender_ref (c
, ctx
);
12776 gimplify_assign (x
, build_fold_addr_expr (var
), &ilist
);
12780 x
= build_sender_ref (ovar
, ctx
);
12782 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12783 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
12784 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12785 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
12787 gcc_assert (offloaded
);
12789 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
12790 mark_addressable (avar
);
12791 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
12792 talign
= DECL_ALIGN_UNIT (avar
);
12793 avar
= build_fold_addr_expr (avar
);
12794 gimplify_assign (x
, avar
, &ilist
);
12796 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12798 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
12799 if (!omp_is_reference (var
))
12801 if (is_gimple_reg (var
)
12802 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
12803 suppress_warning (var
);
12804 var
= build_fold_addr_expr (var
);
12807 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
12808 gimplify_assign (x
, var
, &ilist
);
12810 else if (is_gimple_reg (var
))
12812 gcc_assert (offloaded
);
12813 tree avar
= create_tmp_var (TREE_TYPE (var
));
12814 mark_addressable (avar
);
12815 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
12816 if (GOMP_MAP_COPY_TO_P (map_kind
)
12817 || map_kind
== GOMP_MAP_POINTER
12818 || map_kind
== GOMP_MAP_TO_PSET
12819 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
12821 /* If we need to initialize a temporary
12822 with VAR because it is not addressable, and
12823 the variable hasn't been initialized yet, then
12824 we'll get a warning for the store to avar.
12825 Don't warn in that case, the mapping might
12827 suppress_warning (var
, OPT_Wuninitialized
);
12828 gimplify_assign (avar
, var
, &ilist
);
12830 avar
= build_fold_addr_expr (avar
);
12831 gimplify_assign (x
, avar
, &ilist
);
12832 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
12833 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
12834 && !TYPE_READONLY (TREE_TYPE (var
)))
12836 x
= unshare_expr (x
);
12837 x
= build_simple_mem_ref (x
);
12838 gimplify_assign (var
, x
, &olist
);
12843 /* While MAP is handled explicitly by the FE,
12844 for 'target update', only the identified is passed. */
12845 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
12846 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
12847 && (omp_is_allocatable_or_ptr (var
)
12848 && omp_check_optional_argument (var
, false)))
12849 var
= build_fold_indirect_ref (var
);
12850 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
12851 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
12852 || (!omp_is_allocatable_or_ptr (var
)
12853 && !omp_check_optional_argument (var
, false)))
12854 var
= build_fold_addr_expr (var
);
12855 gimplify_assign (x
, var
, &ilist
);
12859 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12861 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
12862 s
= TREE_TYPE (ovar
);
12863 if (TREE_CODE (s
) == REFERENCE_TYPE
12864 || omp_check_optional_argument (ovar
, false))
12866 s
= TYPE_SIZE_UNIT (s
);
12869 s
= OMP_CLAUSE_SIZE (c
);
12870 if (s
== NULL_TREE
)
12871 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
12872 s
= fold_convert (size_type_node
, s
);
12873 purpose
= size_int (map_idx
++);
12874 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
12875 if (TREE_CODE (s
) != INTEGER_CST
)
12876 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
12878 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
12879 switch (OMP_CLAUSE_CODE (c
))
12881 case OMP_CLAUSE_MAP
:
12882 tkind
= OMP_CLAUSE_MAP_KIND (c
);
12883 tkind_zero
= tkind
;
12884 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
12887 case GOMP_MAP_ALLOC
:
12888 case GOMP_MAP_IF_PRESENT
:
12890 case GOMP_MAP_FROM
:
12891 case GOMP_MAP_TOFROM
:
12892 case GOMP_MAP_ALWAYS_TO
:
12893 case GOMP_MAP_ALWAYS_FROM
:
12894 case GOMP_MAP_ALWAYS_TOFROM
:
12895 case GOMP_MAP_RELEASE
:
12896 case GOMP_MAP_FORCE_TO
:
12897 case GOMP_MAP_FORCE_FROM
:
12898 case GOMP_MAP_FORCE_TOFROM
:
12899 case GOMP_MAP_FORCE_PRESENT
:
12900 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
12902 case GOMP_MAP_DELETE
:
12903 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
12907 if (tkind_zero
!= tkind
)
12909 if (integer_zerop (s
))
12910 tkind
= tkind_zero
;
12911 else if (integer_nonzerop (s
))
12912 tkind_zero
= tkind
;
12915 case OMP_CLAUSE_FIRSTPRIVATE
:
12916 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
12917 tkind
= GOMP_MAP_TO
;
12918 tkind_zero
= tkind
;
12920 case OMP_CLAUSE_TO
:
12921 tkind
= GOMP_MAP_TO
;
12922 tkind_zero
= tkind
;
12924 case OMP_CLAUSE_FROM
:
12925 tkind
= GOMP_MAP_FROM
;
12926 tkind_zero
= tkind
;
12929 gcc_unreachable ();
12931 gcc_checking_assert (tkind
12932 < (HOST_WIDE_INT_C (1U) << talign_shift
));
12933 gcc_checking_assert (tkind_zero
12934 < (HOST_WIDE_INT_C (1U) << talign_shift
));
12935 talign
= ceil_log2 (talign
);
12936 tkind
|= talign
<< talign_shift
;
12937 tkind_zero
|= talign
<< talign_shift
;
12938 gcc_checking_assert (tkind
12939 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
12940 gcc_checking_assert (tkind_zero
12941 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
12942 if (tkind
== tkind_zero
)
12943 x
= build_int_cstu (tkind_type
, tkind
);
12946 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
12947 x
= build3 (COND_EXPR
, tkind_type
,
12948 fold_build2 (EQ_EXPR
, boolean_type_node
,
12949 unshare_expr (s
), size_zero_node
),
12950 build_int_cstu (tkind_type
, tkind_zero
),
12951 build_int_cstu (tkind_type
, tkind
));
12953 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
12958 case OMP_CLAUSE_FIRSTPRIVATE
:
12959 if (is_gimple_omp_oacc (ctx
->stmt
))
12960 goto oacc_firstprivate_map
;
12961 ovar
= OMP_CLAUSE_DECL (c
);
12962 if (omp_is_reference (ovar
))
12963 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
12965 talign
= DECL_ALIGN_UNIT (ovar
);
12966 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
12967 x
= build_sender_ref (ovar
, ctx
);
12968 tkind
= GOMP_MAP_FIRSTPRIVATE
;
12969 type
= TREE_TYPE (ovar
);
12970 if (omp_is_reference (ovar
))
12971 type
= TREE_TYPE (type
);
12972 if ((INTEGRAL_TYPE_P (type
)
12973 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
12974 || TREE_CODE (type
) == POINTER_TYPE
)
12976 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
12978 if (omp_is_reference (var
))
12979 t
= build_simple_mem_ref (var
);
12980 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
12981 suppress_warning (var
);
12982 if (TREE_CODE (type
) != POINTER_TYPE
)
12983 t
= fold_convert (pointer_sized_int_node
, t
);
12984 t
= fold_convert (TREE_TYPE (x
), t
);
12985 gimplify_assign (x
, t
, &ilist
);
12987 else if (omp_is_reference (var
))
12988 gimplify_assign (x
, var
, &ilist
);
12989 else if (is_gimple_reg (var
))
12991 tree avar
= create_tmp_var (TREE_TYPE (var
));
12992 mark_addressable (avar
);
12993 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
12994 suppress_warning (var
);
12995 gimplify_assign (avar
, var
, &ilist
);
12996 avar
= build_fold_addr_expr (avar
);
12997 gimplify_assign (x
, avar
, &ilist
);
13001 var
= build_fold_addr_expr (var
);
13002 gimplify_assign (x
, var
, &ilist
);
13004 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
13006 else if (omp_is_reference (ovar
))
13007 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13009 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13010 s
= fold_convert (size_type_node
, s
);
13011 purpose
= size_int (map_idx
++);
13012 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13013 if (TREE_CODE (s
) != INTEGER_CST
)
13014 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13016 gcc_checking_assert (tkind
13017 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13018 talign
= ceil_log2 (talign
);
13019 tkind
|= talign
<< talign_shift
;
13020 gcc_checking_assert (tkind
13021 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13022 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13023 build_int_cstu (tkind_type
, tkind
));
13026 case OMP_CLAUSE_USE_DEVICE_PTR
:
13027 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13028 case OMP_CLAUSE_IS_DEVICE_PTR
:
13029 ovar
= OMP_CLAUSE_DECL (c
);
13030 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13032 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13034 tkind
= (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13035 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
13036 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
13038 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
13040 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
13041 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
13045 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13046 x
= build_sender_ref (ovar
, ctx
);
13049 if (is_gimple_omp_oacc (ctx
->stmt
))
13051 gcc_assert (tkind
== GOMP_MAP_USE_DEVICE_PTR
);
13053 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
))
13054 tkind
= GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT
;
13057 type
= TREE_TYPE (ovar
);
13058 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13059 var
= lang_hooks
.decls
.omp_array_data (ovar
, false);
13060 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13061 && !omp_is_reference (ovar
)
13062 && !omp_is_allocatable_or_ptr (ovar
))
13063 || TREE_CODE (type
) == ARRAY_TYPE
)
13064 var
= build_fold_addr_expr (var
);
13067 if (omp_is_reference (ovar
)
13068 || omp_check_optional_argument (ovar
, false)
13069 || omp_is_allocatable_or_ptr (ovar
))
13071 type
= TREE_TYPE (type
);
13072 if (POINTER_TYPE_P (type
)
13073 && TREE_CODE (type
) != ARRAY_TYPE
13074 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13075 && !omp_is_allocatable_or_ptr (ovar
))
13076 || (omp_is_reference (ovar
)
13077 && omp_is_allocatable_or_ptr (ovar
))))
13078 var
= build_simple_mem_ref (var
);
13079 var
= fold_convert (TREE_TYPE (x
), var
);
13083 present
= omp_check_optional_argument (ovar
, true);
13086 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13087 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13088 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13089 tree new_x
= unshare_expr (x
);
13090 gimplify_expr (&present
, &ilist
, NULL
, is_gimple_val
,
13092 gcond
*cond
= gimple_build_cond_from_tree (present
,
13095 gimple_seq_add_stmt (&ilist
, cond
);
13096 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_label
));
13097 gimplify_assign (new_x
, null_pointer_node
, &ilist
);
13098 gimple_seq_add_stmt (&ilist
, gimple_build_goto (opt_arg_label
));
13099 gimple_seq_add_stmt (&ilist
,
13100 gimple_build_label (notnull_label
));
13101 gimplify_assign (x
, var
, &ilist
);
13102 gimple_seq_add_stmt (&ilist
,
13103 gimple_build_label (opt_arg_label
));
13106 gimplify_assign (x
, var
, &ilist
);
13108 purpose
= size_int (map_idx
++);
13109 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13110 gcc_checking_assert (tkind
13111 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13112 gcc_checking_assert (tkind
13113 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13114 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13115 build_int_cstu (tkind_type
, tkind
));
13119 gcc_assert (map_idx
== map_cnt
);
13121 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
13122 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
13123 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
13124 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
13125 for (int i
= 1; i
<= 2; i
++)
13126 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
13128 gimple_seq initlist
= NULL
;
13129 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
13130 TREE_VEC_ELT (t
, i
)),
13131 &initlist
, true, NULL_TREE
);
13132 gimple_seq_add_seq (&ilist
, initlist
);
13134 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
13135 gimple_seq_add_stmt (&olist
,
13136 gimple_build_assign (TREE_VEC_ELT (t
, i
),
13139 else if (omp_maybe_offloaded_ctx (ctx
->outer
))
13141 tree id
= get_identifier ("omp declare target");
13142 tree decl
= TREE_VEC_ELT (t
, i
);
13143 DECL_ATTRIBUTES (decl
)
13144 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
13145 varpool_node
*node
= varpool_node::get (decl
);
13148 node
->offloadable
= 1;
13149 if (ENABLE_OFFLOADING
)
13151 g
->have_offload
= true;
13152 vec_safe_push (offload_vars
, t
);
13157 tree clobber
= build_clobber (ctx
->record_type
);
13158 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
13162 /* Once all the expansions are done, sequence all the different
13163 fragments inside gimple_omp_body. */
13168 && ctx
->record_type
)
13170 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
13171 /* fixup_child_record_type might have changed receiver_decl's type. */
13172 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
13173 gimple_seq_add_stmt (&new_body
,
13174 gimple_build_assign (ctx
->receiver_decl
, t
));
13176 gimple_seq_add_seq (&new_body
, fplist
);
13178 if (offloaded
|| data_region
)
13180 tree prev
= NULL_TREE
;
13181 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13182 switch (OMP_CLAUSE_CODE (c
))
13187 case OMP_CLAUSE_FIRSTPRIVATE
:
13188 if (is_gimple_omp_oacc (ctx
->stmt
))
13190 var
= OMP_CLAUSE_DECL (c
);
13191 if (omp_is_reference (var
)
13192 || is_gimple_reg_type (TREE_TYPE (var
)))
13194 tree new_var
= lookup_decl (var
, ctx
);
13196 type
= TREE_TYPE (var
);
13197 if (omp_is_reference (var
))
13198 type
= TREE_TYPE (type
);
13199 if ((INTEGRAL_TYPE_P (type
)
13200 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13201 || TREE_CODE (type
) == POINTER_TYPE
)
13203 x
= build_receiver_ref (var
, false, ctx
);
13204 if (TREE_CODE (type
) != POINTER_TYPE
)
13205 x
= fold_convert (pointer_sized_int_node
, x
);
13206 x
= fold_convert (type
, x
);
13207 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13209 if (omp_is_reference (var
))
13211 tree v
= create_tmp_var_raw (type
, get_name (var
));
13212 gimple_add_tmp_var (v
);
13213 TREE_ADDRESSABLE (v
) = 1;
13214 gimple_seq_add_stmt (&new_body
,
13215 gimple_build_assign (v
, x
));
13216 x
= build_fold_addr_expr (v
);
13218 gimple_seq_add_stmt (&new_body
,
13219 gimple_build_assign (new_var
, x
));
13223 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
13224 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13226 gimple_seq_add_stmt (&new_body
,
13227 gimple_build_assign (new_var
, x
));
13230 else if (is_variable_sized (var
))
13232 tree pvar
= DECL_VALUE_EXPR (var
);
13233 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13234 pvar
= TREE_OPERAND (pvar
, 0);
13235 gcc_assert (DECL_P (pvar
));
13236 tree new_var
= lookup_decl (pvar
, ctx
);
13237 x
= build_receiver_ref (var
, false, ctx
);
13238 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13239 gimple_seq_add_stmt (&new_body
,
13240 gimple_build_assign (new_var
, x
));
13243 case OMP_CLAUSE_PRIVATE
:
13244 if (is_gimple_omp_oacc (ctx
->stmt
))
13246 var
= OMP_CLAUSE_DECL (c
);
13247 if (omp_is_reference (var
))
13249 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13250 tree new_var
= lookup_decl (var
, ctx
);
13251 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
13252 if (TREE_CONSTANT (x
))
13254 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
13256 gimple_add_tmp_var (x
);
13257 TREE_ADDRESSABLE (x
) = 1;
13258 x
= build_fold_addr_expr_loc (clause_loc
, x
);
13263 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13264 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13265 gimple_seq_add_stmt (&new_body
,
13266 gimple_build_assign (new_var
, x
));
13269 case OMP_CLAUSE_USE_DEVICE_PTR
:
13270 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13271 case OMP_CLAUSE_IS_DEVICE_PTR
:
13273 gimple_seq assign_body
;
13274 bool is_array_data
;
13275 bool do_optional_check
;
13276 assign_body
= NULL
;
13277 do_optional_check
= false;
13278 var
= OMP_CLAUSE_DECL (c
);
13279 is_array_data
= lang_hooks
.decls
.omp_array_data (var
, true) != NULL
;
13281 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
)
13282 x
= build_sender_ref (is_array_data
13283 ? (splay_tree_key
) &DECL_NAME (var
)
13284 : (splay_tree_key
) &DECL_UID (var
), ctx
);
13286 x
= build_receiver_ref (var
, false, ctx
);
13290 bool is_ref
= omp_is_reference (var
);
13291 do_optional_check
= true;
13292 /* First, we copy the descriptor data from the host; then
13293 we update its data to point to the target address. */
13294 new_var
= lookup_decl (var
, ctx
);
13295 new_var
= DECL_VALUE_EXPR (new_var
);
13300 var
= build_fold_indirect_ref (var
);
13301 gimplify_expr (&var
, &assign_body
, NULL
, is_gimple_val
,
13303 v
= create_tmp_var_raw (TREE_TYPE (var
), get_name (var
));
13304 gimple_add_tmp_var (v
);
13305 TREE_ADDRESSABLE (v
) = 1;
13306 gimple_seq_add_stmt (&assign_body
,
13307 gimple_build_assign (v
, var
));
13308 tree rhs
= build_fold_addr_expr (v
);
13309 gimple_seq_add_stmt (&assign_body
,
13310 gimple_build_assign (new_var
, rhs
));
13313 gimple_seq_add_stmt (&assign_body
,
13314 gimple_build_assign (new_var
, var
));
13316 tree v2
= lang_hooks
.decls
.omp_array_data (unshare_expr (v
), false);
13318 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13319 gimple_seq_add_stmt (&assign_body
,
13320 gimple_build_assign (v2
, x
));
13322 else if (is_variable_sized (var
))
13324 tree pvar
= DECL_VALUE_EXPR (var
);
13325 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13326 pvar
= TREE_OPERAND (pvar
, 0);
13327 gcc_assert (DECL_P (pvar
));
13328 new_var
= lookup_decl (pvar
, ctx
);
13329 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13330 gimple_seq_add_stmt (&assign_body
,
13331 gimple_build_assign (new_var
, x
));
13333 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13334 && !omp_is_reference (var
)
13335 && !omp_is_allocatable_or_ptr (var
))
13336 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
13338 new_var
= lookup_decl (var
, ctx
);
13339 new_var
= DECL_VALUE_EXPR (new_var
);
13340 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
13341 new_var
= TREE_OPERAND (new_var
, 0);
13342 gcc_assert (DECL_P (new_var
));
13343 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13344 gimple_seq_add_stmt (&assign_body
,
13345 gimple_build_assign (new_var
, x
));
13349 tree type
= TREE_TYPE (var
);
13350 new_var
= lookup_decl (var
, ctx
);
13351 if (omp_is_reference (var
))
13353 type
= TREE_TYPE (type
);
13354 if (POINTER_TYPE_P (type
)
13355 && TREE_CODE (type
) != ARRAY_TYPE
13356 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13357 || (omp_is_reference (var
)
13358 && omp_is_allocatable_or_ptr (var
))))
13360 tree v
= create_tmp_var_raw (type
, get_name (var
));
13361 gimple_add_tmp_var (v
);
13362 TREE_ADDRESSABLE (v
) = 1;
13363 x
= fold_convert (type
, x
);
13364 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
,
13366 gimple_seq_add_stmt (&assign_body
,
13367 gimple_build_assign (v
, x
));
13368 x
= build_fold_addr_expr (v
);
13369 do_optional_check
= true;
13372 new_var
= DECL_VALUE_EXPR (new_var
);
13373 x
= fold_convert (TREE_TYPE (new_var
), x
);
13374 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13375 gimple_seq_add_stmt (&assign_body
,
13376 gimple_build_assign (new_var
, x
));
13379 present
= (do_optional_check
13380 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c
), true)
13384 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13385 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13386 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13387 glabel
*null_glabel
= gimple_build_label (null_label
);
13388 glabel
*notnull_glabel
= gimple_build_label (notnull_label
);
13389 ggoto
*opt_arg_ggoto
= gimple_build_goto (opt_arg_label
);
13390 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13392 gimplify_expr (&present
, &new_body
, NULL
, is_gimple_val
,
13394 gcond
*cond
= gimple_build_cond_from_tree (present
,
13397 gimple_seq_add_stmt (&new_body
, cond
);
13398 gimple_seq_add_stmt (&new_body
, null_glabel
);
13399 gimplify_assign (new_var
, null_pointer_node
, &new_body
);
13400 gimple_seq_add_stmt (&new_body
, opt_arg_ggoto
);
13401 gimple_seq_add_stmt (&new_body
, notnull_glabel
);
13402 gimple_seq_add_seq (&new_body
, assign_body
);
13403 gimple_seq_add_stmt (&new_body
,
13404 gimple_build_label (opt_arg_label
));
13407 gimple_seq_add_seq (&new_body
, assign_body
);
13410 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13411 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13412 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13413 or references to VLAs. */
13414 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13415 switch (OMP_CLAUSE_CODE (c
))
13420 case OMP_CLAUSE_MAP
:
13421 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13422 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
13424 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13425 poly_int64 offset
= 0;
13427 var
= OMP_CLAUSE_DECL (c
);
13429 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
13430 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
13432 && varpool_node::get_create (var
)->offloadable
)
13434 if (TREE_CODE (var
) == INDIRECT_REF
13435 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
13436 var
= TREE_OPERAND (var
, 0);
13437 if (TREE_CODE (var
) == COMPONENT_REF
)
13439 var
= get_addr_base_and_unit_offset (var
, &offset
);
13440 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
13442 else if (DECL_SIZE (var
)
13443 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
13445 tree var2
= DECL_VALUE_EXPR (var
);
13446 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
13447 var2
= TREE_OPERAND (var2
, 0);
13448 gcc_assert (DECL_P (var2
));
13451 tree new_var
= lookup_decl (var
, ctx
), x
;
13452 tree type
= TREE_TYPE (new_var
);
13454 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
13455 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
13458 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
13460 new_var
= build2 (MEM_REF
, type
,
13461 build_fold_addr_expr (new_var
),
13462 build_int_cst (build_pointer_type (type
),
13465 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
13467 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
13468 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
13469 new_var
= build2 (MEM_REF
, type
,
13470 build_fold_addr_expr (new_var
),
13471 build_int_cst (build_pointer_type (type
),
13475 is_ref
= omp_is_reference (var
);
13476 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
13478 bool ref_to_array
= false;
13481 type
= TREE_TYPE (type
);
13482 if (TREE_CODE (type
) == ARRAY_TYPE
)
13484 type
= build_pointer_type (type
);
13485 ref_to_array
= true;
13488 else if (TREE_CODE (type
) == ARRAY_TYPE
)
13490 tree decl2
= DECL_VALUE_EXPR (new_var
);
13491 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
13492 decl2
= TREE_OPERAND (decl2
, 0);
13493 gcc_assert (DECL_P (decl2
));
13495 type
= TREE_TYPE (new_var
);
13497 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
13498 x
= fold_convert_loc (clause_loc
, type
, x
);
13499 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
13501 tree bias
= OMP_CLAUSE_SIZE (c
);
13503 bias
= lookup_decl (bias
, ctx
);
13504 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
13505 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
13507 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
13508 TREE_TYPE (x
), x
, bias
);
13511 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13512 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13513 if (is_ref
&& !ref_to_array
)
13515 tree t
= create_tmp_var_raw (type
, get_name (var
));
13516 gimple_add_tmp_var (t
);
13517 TREE_ADDRESSABLE (t
) = 1;
13518 gimple_seq_add_stmt (&new_body
,
13519 gimple_build_assign (t
, x
));
13520 x
= build_fold_addr_expr_loc (clause_loc
, t
);
13522 gimple_seq_add_stmt (&new_body
,
13523 gimple_build_assign (new_var
, x
));
13526 else if (OMP_CLAUSE_CHAIN (c
)
13527 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
13529 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
13530 == GOMP_MAP_FIRSTPRIVATE_POINTER
13531 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
13532 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
13535 case OMP_CLAUSE_PRIVATE
:
13536 var
= OMP_CLAUSE_DECL (c
);
13537 if (is_variable_sized (var
))
13539 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13540 tree new_var
= lookup_decl (var
, ctx
);
13541 tree pvar
= DECL_VALUE_EXPR (var
);
13542 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13543 pvar
= TREE_OPERAND (pvar
, 0);
13544 gcc_assert (DECL_P (pvar
));
13545 tree new_pvar
= lookup_decl (pvar
, ctx
);
13546 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
13547 tree al
= size_int (DECL_ALIGN (var
));
13548 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
13549 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
13550 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
13551 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13552 gimple_seq_add_stmt (&new_body
,
13553 gimple_build_assign (new_pvar
, x
));
13555 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
13557 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13558 tree new_var
= lookup_decl (var
, ctx
);
13559 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
13560 if (TREE_CONSTANT (x
))
13565 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
13566 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
13567 tree al
= size_int (TYPE_ALIGN (rtype
));
13568 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
13571 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13572 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13573 gimple_seq_add_stmt (&new_body
,
13574 gimple_build_assign (new_var
, x
));
13579 gimple_seq fork_seq
= NULL
;
13580 gimple_seq join_seq
= NULL
;
13582 if (offloaded
&& is_gimple_omp_oacc (ctx
->stmt
))
13584 /* If there are reductions on the offloaded region itself, treat
13585 them as a dummy GANG loop. */
13586 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
13588 gcall
*private_marker
= lower_oacc_private_marker (ctx
);
13590 if (private_marker
)
13591 gimple_call_set_arg (private_marker
, 2, level
);
13593 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
13594 false, NULL
, private_marker
, NULL
, &fork_seq
,
13598 gimple_seq_add_seq (&new_body
, fork_seq
);
13599 gimple_seq_add_seq (&new_body
, tgt_body
);
13600 gimple_seq_add_seq (&new_body
, join_seq
);
13604 new_body
= maybe_catch_exception (new_body
);
13605 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
13607 gimple_omp_set_body (stmt
, new_body
);
13610 bind
= gimple_build_bind (NULL
, NULL
,
13611 tgt_bind
? gimple_bind_block (tgt_bind
)
13613 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
13614 gimple_bind_add_seq (bind
, ilist
);
13615 gimple_bind_add_stmt (bind
, stmt
);
13616 gimple_bind_add_seq (bind
, olist
);
13618 pop_gimplify_context (NULL
);
13622 gimple_bind_add_seq (dep_bind
, dep_ilist
);
13623 gimple_bind_add_stmt (dep_bind
, bind
);
13624 gimple_bind_add_seq (dep_bind
, dep_olist
);
13625 pop_gimplify_context (dep_bind
);
13629 /* Expand code for an OpenMP teams directive. */
/* NOTE(review): this chunk appears to be a lossy extraction of GCC's
   omp-low.c -- original line numbers are fused into the text and some
   lines (braces, blanks) are missing.  Only comments are added here;
   all code tokens are left untouched.  */
13632 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
13634 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
13635 push_gimplify_context ();
/* Replace the teams statement with a fresh GIMPLE_BIND that will hold
   the lowered sequence; the teams statement itself is re-added to
   bind_body below.  */
13637 tree block
= make_node (BLOCK
);
13638 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
13639 gsi_replace (gsi_p
, bind
, true);
13640 gimple_seq bind_body
= NULL
;
13641 gimple_seq dlist
= NULL
;
13642 gimple_seq olist
= NULL
;
/* Gimplify the num_teams clause expression; 0 is used when the clause
   is absent.  */
13644 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
13645 OMP_CLAUSE_NUM_TEAMS
);
13646 if (num_teams
== NULL_TREE
)
13647 num_teams
= build_int_cst (unsigned_type_node
, 0);
13650 num_teams
= OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams
);
13651 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
13652 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
/* Likewise for the thread_limit clause.  */
13654 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
13655 OMP_CLAUSE_THREAD_LIMIT
);
13656 if (thread_limit
== NULL_TREE
)
13657 thread_limit
= build_int_cst (unsigned_type_node
, 0);
13660 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
13661 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
13662 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
/* Lower data-sharing clauses, the teams body, and reduction clauses.  */
13666 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
13667 &bind_body
, &dlist
, ctx
, NULL
);
13668 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
13669 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
,
13671 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
/* Emit the GOMP_teams runtime call with the two gimplified arguments.  */
13673 location_t loc
= gimple_location (teams_stmt
);
13674 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS
);
13675 gimple
*call
= gimple_build_call (decl
, 2, num_teams
, thread_limit
);
13676 gimple_set_location (call
, loc
);
13677 gimple_seq_add_stmt (&bind_body
, call
);
/* Stitch together: body, reduction epilogue (olist), destructor
   sequence (dlist), then the closing OMP return.  */
13679 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
13680 gimple_omp_set_body (teams_stmt
, NULL
);
13681 gimple_seq_add_seq (&bind_body
, olist
);
13682 gimple_seq_add_seq (&bind_body
, dlist
);
13683 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
13684 gimple_bind_set_body (bind
, bind_body
);
13686 pop_gimplify_context (bind
);
/* Hand the context's block-local variables over to the new bind.  */
13688 gimple_bind_append_vars (bind
, ctx
->block_vars
);
13689 BLOCK_VARS (block
) = ctx
->block_vars
;
13690 if (BLOCK_VARS (block
))
13691 TREE_USED (block
) = 1;
13694 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
13695 regimplified. If DATA is non-NULL, lower_omp_1 is outside
13696 of OMP context, but with task_shared_vars set. */
13699 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
/* NOTE(review): lossy extraction -- the declaration of T (presumably
   tree t = *tp) and the actual return statements have been dropped;
   comments only are added, code tokens untouched.  */
13704 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
13705 if ((VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
13707 && DECL_HAS_VALUE_EXPR_P (t
))
/* Outside OMP contexts (DATA non-NULL), decls recorded in
   task_shared_vars also need regimplification.  */
13710 if (task_shared_vars
13712 && bitmap_bit_p (task_shared_vars
, DECL_UID (t
)))
13715 /* If a global variable has been privatized, TREE_CONSTANT on
13716 ADDR_EXPR might be wrong. */
13717 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
13718 recompute_tree_invariant_for_addr_expr (t
);
/* Do not descend into types or declarations.  */
13720 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
13724 /* Data to be communicated between lower_omp_regimplify_operands and
13725 lower_omp_regimplify_operands_p. */
13727 struct lower_omp_regimplify_operands_data
13733 /* Helper function for lower_omp_regimplify_operands. Find
13734 omp_member_access_dummy_var vars and adjust temporarily their
13735 DECL_VALUE_EXPRs if needed. */
13738 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
/* If *TP is a dummy var standing in for an OMP member access whose
   underlying decl T maps to decl O in LDATA->ctx, temporarily remap
   *TP's DECL_VALUE_EXPR to reference O.  The old value-expr and *TP
   are pushed (in that order) onto LDATA->decls so the caller can
   restore them afterwards.  NOTE(review): the guarding conditionals
   appear to have been dropped by the extraction.  */
13741 tree t
= omp_member_access_dummy_var (*tp
);
13744 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
13745 lower_omp_regimplify_operands_data
*ldata
13746 = (lower_omp_regimplify_operands_data
*) wi
->info
;
13747 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
/* Save (old DECL_VALUE_EXPR, decl) for restoration by the caller.  */
13750 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
13751 ldata
->decls
->safe_push (*tp
);
13752 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
13753 SET_DECL_VALUE_EXPR (*tp
, v
);
/* Do not descend into types or declarations.  */
13756 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
13760 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
13761 of omp_member_access_dummy_var vars during regimplification. */
13764 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
13765 gimple_stmt_iterator
*gsi_p
)
/* DECLS collects (old DECL_VALUE_EXPR, decl) pairs pushed by
   lower_omp_regimplify_operands_p; they are popped and restored after
   regimplification below.  */
13767 auto_vec
<tree
, 10> decls
;
13770 struct walk_stmt_info wi
;
13771 memset (&wi
, '\0', sizeof (wi
));
13772 struct lower_omp_regimplify_operands_data data
;
13774 data
.decls
= &decls
;
/* Temporarily remap DECL_VALUE_EXPRs of member-access dummy vars
   occurring in STMT's operands.  */
13776 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
13778 gimple_regimplify_operands (stmt
, gsi_p
);
/* Undo the temporary remapping: each pair pops as (decl, old
   value-expr) since they were pushed value-expr first.  */
13779 while (!decls
.is_empty ())
13781 tree t
= decls
.pop ();
13782 tree v
= decls
.pop ();
13783 SET_DECL_VALUE_EXPR (t
, v
);
/* Lower the single statement at *GSI_P inside OMP context CTX (CTX may
   be NULL when lowering outside any OMP region but with
   task_shared_vars set).  Dispatches on the GIMPLE code: container
   statements recurse via lower_omp, OMP directives are handed to their
   specific lowering routines, and ordinary statements are regimplified
   when they mention variables with DECL_VALUE_EXPRs or task-shared
   variables.  NOTE(review): this chunk looks like a lossy extraction of
   GCC's omp-low.c -- original line numbers are fused in and several
   lines (braces, case labels, returns) are missing; only comments are
   added here, code tokens are untouched.  */
13788 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
13790 gimple
*stmt
= gsi_stmt (*gsi_p
);
13791 struct walk_stmt_info wi
;
13794 if (gimple_has_location (stmt
))
13795 input_location
= gimple_location (stmt
);
13797 if (task_shared_vars
)
13798 memset (&wi
, '\0', sizeof (wi
));
13800 /* If we have issued syntax errors, avoid doing any heavy lifting.
13801 Just replace the OMP directives with a NOP to avoid
13802 confusing RTL expansion. */
13803 if (seen_error () && is_gimple_omp (stmt
))
13805 gsi_replace (gsi_p
, gimple_build_nop (), true);
13809 switch (gimple_code (stmt
))
/* GIMPLE_COND: regimplify the condition operands if either side
   mentions a variable that lower_omp_regimplify_p flags.  */
13813 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
13814 if ((ctx
|| task_shared_vars
)
13815 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
13816 lower_omp_regimplify_p
,
13817 ctx
? NULL
: &wi
, NULL
)
13818 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
13819 lower_omp_regimplify_p
,
13820 ctx
? NULL
: &wi
, NULL
)))
13821 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
/* Container statements (catch/EH-filter/try/transaction): recurse
   into their nested sequences.  */
13825 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
13827 case GIMPLE_EH_FILTER
:
13828 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
13831 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
13832 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
13834 case GIMPLE_TRANSACTION
:
13835 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
/* GIMPLE_BIND: under OpenACC, first scan the bind-local decls for
   privatization candidates, then lower the bind body and drop any
   now-unused member-access dummy vars.  */
13839 if (ctx
&& is_gimple_omp_oacc (ctx
->stmt
))
13841 tree vars
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
13842 oacc_privatization_scan_decl_chain (ctx
, vars
);
13844 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
13845 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
/* OMP directives proper: look up the context built by the scan
   phase and dispatch to the directive-specific lowering routine;
   cancellable regions get a label to branch to on cancellation.  */
13847 case GIMPLE_OMP_PARALLEL
:
13848 case GIMPLE_OMP_TASK
:
13849 ctx
= maybe_lookup_ctx (stmt
);
13851 if (ctx
->cancellable
)
13852 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
13853 lower_omp_taskreg (gsi_p
, ctx
);
13855 case GIMPLE_OMP_FOR
:
13856 ctx
= maybe_lookup_ctx (stmt
);
13858 if (ctx
->cancellable
)
13859 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
13860 lower_omp_for (gsi_p
, ctx
);
13862 case GIMPLE_OMP_SECTIONS
:
13863 ctx
= maybe_lookup_ctx (stmt
);
13865 if (ctx
->cancellable
)
13866 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
13867 lower_omp_sections (gsi_p
, ctx
);
13869 case GIMPLE_OMP_SINGLE
:
13870 ctx
= maybe_lookup_ctx (stmt
);
13872 lower_omp_single (gsi_p
, ctx
);
13874 case GIMPLE_OMP_MASTER
:
13875 ctx
= maybe_lookup_ctx (stmt
);
13877 lower_omp_master (gsi_p
, ctx
);
13879 case GIMPLE_OMP_TASKGROUP
:
13880 ctx
= maybe_lookup_ctx (stmt
);
13882 lower_omp_taskgroup (gsi_p
, ctx
);
13884 case GIMPLE_OMP_ORDERED
:
13885 ctx
= maybe_lookup_ctx (stmt
);
13887 lower_omp_ordered (gsi_p
, ctx
);
13889 case GIMPLE_OMP_SCAN
:
13890 ctx
= maybe_lookup_ctx (stmt
);
13892 lower_omp_scan (gsi_p
, ctx
);
13894 case GIMPLE_OMP_CRITICAL
:
13895 ctx
= maybe_lookup_ctx (stmt
);
13897 lower_omp_critical (gsi_p
, ctx
);
/* Atomic load: only the rhs address operand may need
   regimplification.  */
13899 case GIMPLE_OMP_ATOMIC_LOAD
:
13900 if ((ctx
|| task_shared_vars
)
13901 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
13902 as_a
<gomp_atomic_load
*> (stmt
)),
13903 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
13904 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
13906 case GIMPLE_OMP_TARGET
:
13907 ctx
= maybe_lookup_ctx (stmt
);
13909 lower_omp_target (gsi_p
, ctx
);
/* Host teams constructs are lowered like task regions; device teams
   get the dedicated teams lowering.  */
13911 case GIMPLE_OMP_TEAMS
:
13912 ctx
= maybe_lookup_ctx (stmt
);
13914 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
13915 lower_omp_taskreg (gsi_p
, ctx
);
13917 lower_omp_teams (gsi_p
, ctx
);
/* Calls: rewrite GOMP barrier/cancel built-ins found inside a
   cancellable region to their *_CANCEL variants and branch to the
   region's cancel label when the runtime reports cancellation.  */
13921 call_stmt
= as_a
<gcall
*> (stmt
);
13922 fndecl
= gimple_call_fndecl (call_stmt
);
13924 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
13925 switch (DECL_FUNCTION_CODE (fndecl
))
13927 case BUILT_IN_GOMP_BARRIER
:
13931 case BUILT_IN_GOMP_CANCEL
:
13932 case BUILT_IN_GOMP_CANCELLATION_POINT
:
13935 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
13936 cctx
= cctx
->outer
;
13937 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
/* In a non-cancellable region a cancellation point is a no-op;
   replace it with a NOP.  */
13938 if (!cctx
->cancellable
)
13940 if (DECL_FUNCTION_CODE (fndecl
)
13941 == BUILT_IN_GOMP_CANCELLATION_POINT
)
13943 stmt
= gimple_build_nop ();
13944 gsi_replace (gsi_p
, stmt
, false);
13948 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
13950 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
13951 gimple_call_set_fndecl (call_stmt
, fndecl
);
13952 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
/* Capture the runtime result and branch to the cancel label when
   it is nonzero, else fall through.  */
13955 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
13956 gimple_call_set_lhs (call_stmt
, lhs
);
13957 tree fallthru_label
;
13958 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
13960 g
= gimple_build_label (fallthru_label
);
13961 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
13962 g
= gimple_build_cond (NE_EXPR
, lhs
,
13963 fold_convert (TREE_TYPE (lhs
),
13964 boolean_false_node
),
13965 cctx
->cancel_label
, fallthru_label
);
13966 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
/* Assignments: inside constructs with lastprivate(conditional)
   mappings, record the condition-tracking temporary after stores to
   a mapped lhs.  Certain enclosing constructs stop the search.  */
13973 case GIMPLE_ASSIGN
:
13974 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
13976 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
13977 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
13978 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
13979 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
13980 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
13981 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
13982 && (gimple_omp_target_kind (up
->stmt
)
13983 == GF_OMP_TARGET_KIND_DATA
)))
13985 else if (!up
->lastprivate_conditional_map
)
/* Stores through a reference are looked up by the underlying
   decl.  */
13987 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
13988 if (TREE_CODE (lhs
) == MEM_REF
13989 && DECL_P (TREE_OPERAND (lhs
, 0))
13990 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
13991 0))) == REFERENCE_TYPE
)
13992 lhs
= TREE_OPERAND (lhs
, 0);
13994 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
13997 if (up
->combined_into_simd_safelen1
)
14000 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
14003 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
14004 clauses
= gimple_omp_for_clauses (up
->stmt
);
14006 clauses
= gimple_omp_sections_clauses (up
->stmt
);
/* Find the iterator-carrying _condtemp_ clause and assign its
   decl to the mapped temporary right after the store.  */
14007 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
14008 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
14009 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
14010 OMP_CLAUSE__CONDTEMP_
);
14011 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
14012 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
14013 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
/* Default: regimplify operands when needed; bare clobbers of
   privatized variables are simply dropped instead.  */
14020 if ((ctx
|| task_shared_vars
)
14021 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
14024 /* Just remove clobbers, this should happen only if we have
14025 "privatized" local addressable variables in SIMD regions,
14026 the clobber isn't needed in that case and gimplifying address
14027 of the ARRAY_REF into a pointer and creating MEM_REF based
14028 clobber would create worse code than we get with the clobber
14030 if (gimple_clobber_p (stmt
))
14032 gsi_replace (gsi_p
, gimple_build_nop (), true);
14035 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14042 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
14044 location_t saved_location
= input_location
;
14045 gimple_stmt_iterator gsi
;
14046 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14047 lower_omp_1 (&gsi
, ctx
);
14048 /* During gimplification, we haven't folded statments inside offloading
14049 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
14050 if (target_nesting_level
|| taskreg_nesting_level
)
14051 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14053 input_location
= saved_location
;
/* Main entry point of the "omplower" pass.  Scans the current function
   for OMP regions, then lowers them to explicit libgomp runtime calls
   and data-sharing code.  Always returns 0 (no TODO flags).  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  /* Map from OMP statements to their omp_context; contexts are freed
     by delete_omp_context when the tree is deleted below.  */
  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* Phase 1: find OMP statements and variables needing replacement.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  /* Finish scanning parallel/task regions only after the whole function
     has been scanned, since inner regions can affect outer record types.  */
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Phase 2: lower, but only if any OMP context was actually created.  */
  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
/* Pass descriptor for the "omplower" pass; registered via
   make_pass_lower_omp below.  */

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass object wrapping execute_lower_omp; no gate, the pass always runs
   so that PROP_gimple_lomp is always provided.  */

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp
14138 } // anon namespace
/* Factory for the "omplower" pass; caller (passes.c) owns the result.  */

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
14146 /* The following is a utility to diagnose structured block violations.
14147 It is not part of the "omplower" pass, as that's invoked too late. It
14148 should be invoked by the respective front ends after gimplification. */
/* Map from LABEL_DECL (splay_tree_key) to the innermost enclosing OMP
   construct (splay_tree_value, a gimple *); built by diagnose_sb_1 and
   consumed by diagnose_sb_2.  */
static splay_tree all_labels;
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX and LABEL_CTX are the
   innermost enclosing OMP constructs of the branch and of its target
   label (NULL when outside any construct); on mismatch the offending
   statement at *GSI_P is replaced by a GIMPLE_NOP.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* Same construct on both ends: the branch is fine.  */
  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  /* Pick the wording for the diagnostic: "OpenACC" if either end is
     inside an OpenACC construct, otherwise "OpenMP".  */
  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing and error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#else
  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    /* Otherwise, be vague and lazy, but efficient.  */
    error ("invalid branch to/from %s structured block", kind);
#endif

  /* Remove the offending branch so later passes see valid IL.  */
  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  Callback for walk_gimple_seq; WI->info
   carries the innermost enclosing OMP construct (NULL at top level).
   Always returns NULL_TREE so the walk continues.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
    	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Remember the enclosing construct of every label for pass 2.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  Callback for walk_gimple_seq_mod;
   WI->info carries the innermost enclosing OMP construct.  Offending
   branches are diagnosed and nop'ed out by diagnose_sb_0.  Always
   returns NULL_TREE so the walk continues.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
    	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* Descend with this construct as the new branch context.  */
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
	{
	  /* Check both edges of the conditional against the contexts
	     recorded for their target labels in pass 1.  */
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  tree lab = gimple_cond_true_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	  lab = gimple_cond_false_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	}
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos (non-LABEL_DECL destinations) can't be checked.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    /* Stop at the first diagnosed label: diagnose_sb_0 has
	       already replaced the whole switch with a nop.  */
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return inside a construct always leaves it.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
14392 static unsigned int
14393 diagnose_omp_structured_block_errors (void)
14395 struct walk_stmt_info wi
;
14396 gimple_seq body
= gimple_body (current_function_decl
);
14398 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
14400 memset (&wi
, 0, sizeof (wi
));
14401 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
14403 memset (&wi
, 0, sizeof (wi
));
14404 wi
.want_locations
= true;
14405 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
14407 gimple_set_body (current_function_decl
, body
);
14409 splay_tree_delete (all_labels
);
/* Pass descriptor for the structured-block diagnostic pass; registered
   via make_pass_diagnose_omp_blocks below.  */

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass object wrapping diagnose_omp_structured_block_errors; gated on
   any of the OpenACC/OpenMP/OpenMP-SIMD front-end options.  */

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks
14449 } // anon namespace
/* Factory for the "*diagnose_omp_blocks" pass; caller owns the result.  */

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
14458 #include "gt-omp-low.h"