1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* A hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
130 /* Nesting depth of this context. Used to beautify error messages re
131 invalid gotos. The outermost ctx is depth 1, with depth 0 being
132 reserved for the main body of the function. */
135 /* True if this parallel directive is nested within another. */
138 /* True if this construct can be cancelled. */
141 /* True if lower_omp_1 should look up lastprivate conditional in parent
143 bool combined_into_simd_safelen1
;
145 /* True if there is nested scan context with inclusive clause. */
148 /* True if there is nested scan context with exclusive clause. */
151 /* True in the second simd loop of for simd with inscan reductions. */
152 bool for_simd_scan_phase
;
155 static splay_tree all_contexts
;
156 static int taskreg_nesting_level
;
157 static int target_nesting_level
;
158 static bitmap task_shared_vars
;
159 static vec
<omp_context
*> taskreg_contexts
;
161 static void scan_omp (gimple_seq
*, omp_context
*);
162 static tree
scan_omp_1_op (tree
*, int *, void *);
164 #define WALK_SUBSTMTS \
168 case GIMPLE_EH_FILTER: \
169 case GIMPLE_TRANSACTION: \
170 /* The sub-statements for these should be walked. */ \
171 *handled_ops_p = false; \
174 /* Return true if CTX corresponds to an oacc parallel region. */
177 is_oacc_parallel (omp_context
*ctx
)
179 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
180 return ((outer_type
== GIMPLE_OMP_TARGET
)
181 && (gimple_omp_target_kind (ctx
->stmt
)
182 == GF_OMP_TARGET_KIND_OACC_PARALLEL
));
185 /* Return true if CTX corresponds to an oacc kernels region. */
188 is_oacc_kernels (omp_context
*ctx
)
190 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
191 return ((outer_type
== GIMPLE_OMP_TARGET
)
192 && (gimple_omp_target_kind (ctx
->stmt
)
193 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
196 /* If DECL is the artificial dummy VAR_DECL created for non-static
197 data member privatization, return the underlying "this" parameter,
198 otherwise return NULL. */
201 omp_member_access_dummy_var (tree decl
)
204 || !DECL_ARTIFICIAL (decl
)
205 || !DECL_IGNORED_P (decl
)
206 || !DECL_HAS_VALUE_EXPR_P (decl
)
207 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
210 tree v
= DECL_VALUE_EXPR (decl
);
211 if (TREE_CODE (v
) != COMPONENT_REF
)
215 switch (TREE_CODE (v
))
221 case POINTER_PLUS_EXPR
:
222 v
= TREE_OPERAND (v
, 0);
225 if (DECL_CONTEXT (v
) == current_function_decl
226 && DECL_ARTIFICIAL (v
)
227 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
235 /* Helper for unshare_and_remap, called through walk_tree. */
238 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
240 tree
*pair
= (tree
*) data
;
243 *tp
= unshare_expr (pair
[1]);
246 else if (IS_TYPE_OR_DECL_P (*tp
))
251 /* Return unshare_expr (X) with all occurrences of FROM
255 unshare_and_remap (tree x
, tree from
, tree to
)
257 tree pair
[2] = { from
, to
};
258 x
= unshare_expr (x
);
259 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
263 /* Convenience function for calling scan_omp_1_op on tree operands. */
266 scan_omp_op (tree
*tp
, omp_context
*ctx
)
268 struct walk_stmt_info wi
;
270 memset (&wi
, 0, sizeof (wi
));
272 wi
.want_locations
= true;
274 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
277 static void lower_omp (gimple_seq
*, omp_context
*);
278 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
279 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
281 /* Return true if CTX is for an omp parallel. */
284 is_parallel_ctx (omp_context
*ctx
)
286 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
290 /* Return true if CTX is for an omp task. */
293 is_task_ctx (omp_context
*ctx
)
295 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
299 /* Return true if CTX is for an omp taskloop. */
302 is_taskloop_ctx (omp_context
*ctx
)
304 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
305 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
309 /* Return true if CTX is for a host omp teams. */
312 is_host_teams_ctx (omp_context
*ctx
)
314 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
315 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
318 /* Return true if CTX is for an omp parallel or omp task or host omp teams
319 (the last one is strictly not a task region in OpenMP speak, but we
320 need to treat it similarly). */
323 is_taskreg_ctx (omp_context
*ctx
)
325 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
328 /* Return true if EXPR is variable sized. */
331 is_variable_sized (const_tree expr
)
333 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
336 /* Lookup variables. The "maybe" form
337 allows for the variable form to not have been entered, otherwise we
338 assert that the variable must have been entered. */
341 lookup_decl (tree var
, omp_context
*ctx
)
343 tree
*n
= ctx
->cb
.decl_map
->get (var
);
348 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
350 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
351 return n
? *n
: NULL_TREE
;
355 lookup_field (tree var
, omp_context
*ctx
)
358 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
359 return (tree
) n
->value
;
363 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
366 n
= splay_tree_lookup (ctx
->sfield_map
367 ? ctx
->sfield_map
: ctx
->field_map
, key
);
368 return (tree
) n
->value
;
372 lookup_sfield (tree var
, omp_context
*ctx
)
374 return lookup_sfield ((splay_tree_key
) var
, ctx
);
378 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
381 n
= splay_tree_lookup (ctx
->field_map
, key
);
382 return n
? (tree
) n
->value
: NULL_TREE
;
386 maybe_lookup_field (tree var
, omp_context
*ctx
)
388 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
391 /* Return true if DECL should be copied by pointer. SHARED_CTX is
392 the parallel context if DECL is to be shared. */
395 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
397 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
398 || TYPE_ATOMIC (TREE_TYPE (decl
)))
401 /* We can only use copy-in/copy-out semantics for shared variables
402 when we know the value is not accessible from an outer scope. */
405 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
407 /* ??? Trivially accessible from anywhere. But why would we even
408 be passing an address in this case? Should we simply assert
409 this to be false, or should we have a cleanup pass that removes
410 these from the list of mappings? */
411 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
414 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
415 without analyzing the expression whether or not its location
416 is accessible to anyone else. In the case of nested parallel
417 regions it certainly may be. */
418 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
421 /* Do not use copy-in/copy-out for variables that have their
423 if (TREE_ADDRESSABLE (decl
))
426 /* lower_send_shared_vars only uses copy-in, but not copy-out
428 if (TREE_READONLY (decl
)
429 || ((TREE_CODE (decl
) == RESULT_DECL
430 || TREE_CODE (decl
) == PARM_DECL
)
431 && DECL_BY_REFERENCE (decl
)))
434 /* Disallow copy-in/out in nested parallel if
435 decl is shared in outer parallel, otherwise
436 each thread could store the shared variable
437 in its own copy-in location, making the
438 variable no longer really shared. */
439 if (shared_ctx
->is_nested
)
443 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
444 if (is_taskreg_ctx (up
) && maybe_lookup_decl (decl
, up
))
451 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
452 c
; c
= OMP_CLAUSE_CHAIN (c
))
453 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
454 && OMP_CLAUSE_DECL (c
) == decl
)
458 goto maybe_mark_addressable_and_ret
;
462 /* For tasks avoid using copy-in/out. As tasks can be
463 deferred or executed in different thread, when GOMP_task
464 returns, the task hasn't necessarily terminated. */
465 if (is_task_ctx (shared_ctx
))
468 maybe_mark_addressable_and_ret
:
469 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
470 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
472 /* Taking address of OUTER in lower_send_shared_vars
473 might need regimplification of everything that uses the
475 if (!task_shared_vars
)
476 task_shared_vars
= BITMAP_ALLOC (NULL
);
477 bitmap_set_bit (task_shared_vars
, DECL_UID (outer
));
478 TREE_ADDRESSABLE (outer
) = 1;
487 /* Construct a new automatic decl similar to VAR. */
490 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
492 tree copy
= copy_var_decl (var
, name
, type
);
494 DECL_CONTEXT (copy
) = current_function_decl
;
495 DECL_CHAIN (copy
) = ctx
->block_vars
;
496 /* If VAR is listed in task_shared_vars, it means it wasn't
497 originally addressable and is just because task needs to take
498 it's address. But we don't need to take address of privatizations
500 if (TREE_ADDRESSABLE (var
)
502 && bitmap_bit_p (task_shared_vars
, DECL_UID (var
)))
503 TREE_ADDRESSABLE (copy
) = 0;
504 ctx
->block_vars
= copy
;
510 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
512 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
515 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
518 omp_build_component_ref (tree obj
, tree field
)
520 tree ret
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL
);
521 if (TREE_THIS_VOLATILE (field
))
522 TREE_THIS_VOLATILE (ret
) |= 1;
523 if (TREE_READONLY (field
))
524 TREE_READONLY (ret
) |= 1;
528 /* Build tree nodes to access the field for VAR on the receiver side. */
531 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
533 tree x
, field
= lookup_field (var
, ctx
);
535 /* If the receiver record type was remapped in the child function,
536 remap the field into the new record type. */
537 x
= maybe_lookup_field (field
, ctx
);
541 x
= build_simple_mem_ref (ctx
->receiver_decl
);
542 TREE_THIS_NOTRAP (x
) = 1;
543 x
= omp_build_component_ref (x
, field
);
546 x
= build_simple_mem_ref (x
);
547 TREE_THIS_NOTRAP (x
) = 1;
553 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
554 of a parallel, this is a component reference; for workshare constructs
555 this is some variable. */
558 build_outer_var_ref (tree var
, omp_context
*ctx
,
559 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
562 omp_context
*outer
= ctx
->outer
;
563 while (outer
&& gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
564 outer
= outer
->outer
;
566 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
568 else if (is_variable_sized (var
))
570 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
571 x
= build_outer_var_ref (x
, ctx
, code
);
572 x
= build_simple_mem_ref (x
);
574 else if (is_taskreg_ctx (ctx
))
576 bool by_ref
= use_pointer_for_field (var
, NULL
);
577 x
= build_receiver_ref (var
, by_ref
, ctx
);
579 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
580 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
581 || (code
== OMP_CLAUSE_PRIVATE
582 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
583 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
584 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
586 /* #pragma omp simd isn't a worksharing construct, and can reference
587 even private vars in its linear etc. clauses.
588 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
589 to private vars in all worksharing constructs. */
591 if (outer
&& is_taskreg_ctx (outer
))
592 x
= lookup_decl (var
, outer
);
594 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
598 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
602 = splay_tree_lookup (outer
->field_map
,
603 (splay_tree_key
) &DECL_UID (var
));
606 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
609 x
= lookup_decl (var
, outer
);
613 tree field
= (tree
) n
->value
;
614 /* If the receiver record type was remapped in the child function,
615 remap the field into the new record type. */
616 x
= maybe_lookup_field (field
, outer
);
620 x
= build_simple_mem_ref (outer
->receiver_decl
);
621 x
= omp_build_component_ref (x
, field
);
622 if (use_pointer_for_field (var
, outer
))
623 x
= build_simple_mem_ref (x
);
628 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_GRID_BODY
)
630 outer
= outer
->outer
;
632 && gimple_code (outer
->stmt
) != GIMPLE_OMP_GRID_BODY
);
634 x
= lookup_decl (var
, outer
);
636 else if (omp_is_reference (var
))
637 /* This can happen with orphaned constructs. If var is reference, it is
638 possible it is shared and as such valid. */
640 else if (omp_member_access_dummy_var (var
))
647 tree t
= omp_member_access_dummy_var (var
);
650 x
= DECL_VALUE_EXPR (var
);
651 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
653 x
= unshare_and_remap (x
, t
, o
);
655 x
= unshare_expr (x
);
659 if (omp_is_reference (var
))
660 x
= build_simple_mem_ref (x
);
665 /* Build tree nodes to access the field for VAR on the sender side. */
668 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
670 tree field
= lookup_sfield (key
, ctx
);
671 return omp_build_component_ref (ctx
->sender_decl
, field
);
675 build_sender_ref (tree var
, omp_context
*ctx
)
677 return build_sender_ref ((splay_tree_key
) var
, ctx
);
680 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
681 BASE_POINTERS_RESTRICT, declare the field with restrict. */
684 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
686 tree field
, type
, sfield
= NULL_TREE
;
687 splay_tree_key key
= (splay_tree_key
) var
;
691 key
= (splay_tree_key
) &DECL_UID (var
);
692 gcc_checking_assert (key
!= (splay_tree_key
) var
);
694 gcc_assert ((mask
& 1) == 0
695 || !splay_tree_lookup (ctx
->field_map
, key
));
696 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
697 || !splay_tree_lookup (ctx
->sfield_map
, key
));
698 gcc_assert ((mask
& 3) == 3
699 || !is_gimple_omp_oacc (ctx
->stmt
));
701 type
= TREE_TYPE (var
);
702 /* Prevent redeclaring the var in the split-off function with a restrict
703 pointer type. Note that we only clear type itself, restrict qualifiers in
704 the pointed-to type will be ignored by points-to analysis. */
705 if (POINTER_TYPE_P (type
)
706 && TYPE_RESTRICT (type
))
707 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
711 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
712 type
= build_pointer_type (build_pointer_type (type
));
715 type
= build_pointer_type (type
);
716 else if ((mask
& 3) == 1 && omp_is_reference (var
))
717 type
= TREE_TYPE (type
);
719 field
= build_decl (DECL_SOURCE_LOCATION (var
),
720 FIELD_DECL
, DECL_NAME (var
), type
);
722 /* Remember what variable this field was created for. This does have a
723 side effect of making dwarf2out ignore this member, so for helpful
724 debugging we clear it later in delete_omp_context. */
725 DECL_ABSTRACT_ORIGIN (field
) = var
;
726 if (type
== TREE_TYPE (var
))
728 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
729 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
730 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
733 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
737 insert_field_into_struct (ctx
->record_type
, field
);
738 if (ctx
->srecord_type
)
740 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
741 FIELD_DECL
, DECL_NAME (var
), type
);
742 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
743 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
744 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
745 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
746 insert_field_into_struct (ctx
->srecord_type
, sfield
);
751 if (ctx
->srecord_type
== NULL_TREE
)
755 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
756 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
757 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
759 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
760 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
761 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
762 insert_field_into_struct (ctx
->srecord_type
, sfield
);
763 splay_tree_insert (ctx
->sfield_map
,
764 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
765 (splay_tree_value
) sfield
);
769 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
770 : ctx
->srecord_type
, field
);
774 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
775 if ((mask
& 2) && ctx
->sfield_map
)
776 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
780 install_var_local (tree var
, omp_context
*ctx
)
782 tree new_var
= omp_copy_decl_1 (var
, ctx
);
783 insert_decl_map (&ctx
->cb
, var
, new_var
);
787 /* Adjust the replacement for DECL in CTX for the new context. This means
788 copying the DECL_VALUE_EXPR, and fixing up the type. */
791 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
795 new_decl
= lookup_decl (decl
, ctx
);
797 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
799 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
800 && DECL_HAS_VALUE_EXPR_P (decl
))
802 tree ve
= DECL_VALUE_EXPR (decl
);
803 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
804 SET_DECL_VALUE_EXPR (new_decl
, ve
);
805 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
808 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
810 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
811 if (size
== error_mark_node
)
812 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
813 DECL_SIZE (new_decl
) = size
;
815 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
816 if (size
== error_mark_node
)
817 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
818 DECL_SIZE_UNIT (new_decl
) = size
;
822 /* The callback for remap_decl. Search all containing contexts for a
823 mapping of the variable; this avoids having to duplicate the splay
824 tree ahead of time. We know a mapping doesn't already exist in the
825 given context. Create new mappings to implement default semantics. */
828 omp_copy_decl (tree var
, copy_body_data
*cb
)
830 omp_context
*ctx
= (omp_context
*) cb
;
833 if (TREE_CODE (var
) == LABEL_DECL
)
835 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
837 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
838 DECL_CONTEXT (new_var
) = current_function_decl
;
839 insert_decl_map (&ctx
->cb
, var
, new_var
);
843 while (!is_taskreg_ctx (ctx
))
848 new_var
= maybe_lookup_decl (var
, ctx
);
853 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
856 return error_mark_node
;
859 /* Create a new context, with OUTER_CTX being the surrounding context. */
862 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
864 omp_context
*ctx
= XCNEW (omp_context
);
866 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
867 (splay_tree_value
) ctx
);
872 ctx
->outer
= outer_ctx
;
873 ctx
->cb
= outer_ctx
->cb
;
874 ctx
->cb
.block
= NULL
;
875 ctx
->depth
= outer_ctx
->depth
+ 1;
879 ctx
->cb
.src_fn
= current_function_decl
;
880 ctx
->cb
.dst_fn
= current_function_decl
;
881 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
882 gcc_checking_assert (ctx
->cb
.src_node
);
883 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
884 ctx
->cb
.src_cfun
= cfun
;
885 ctx
->cb
.copy_decl
= omp_copy_decl
;
886 ctx
->cb
.eh_lp_nr
= 0;
887 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
888 ctx
->cb
.adjust_array_error_bounds
= true;
889 ctx
->cb
.dont_remap_vla_if_no_change
= true;
893 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
898 static gimple_seq
maybe_catch_exception (gimple_seq
);
900 /* Finalize task copyfn. */
903 finalize_task_copyfn (gomp_task
*task_stmt
)
905 struct function
*child_cfun
;
907 gimple_seq seq
= NULL
, new_seq
;
910 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
911 if (child_fn
== NULL_TREE
)
914 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
915 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
917 push_cfun (child_cfun
);
918 bind
= gimplify_body (child_fn
, false);
919 gimple_seq_add_stmt (&seq
, bind
);
920 new_seq
= maybe_catch_exception (seq
);
923 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
925 gimple_seq_add_stmt (&seq
, bind
);
927 gimple_set_body (child_fn
, seq
);
930 /* Inform the callgraph about the new function. */
931 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
932 node
->parallelized_function
= 1;
933 cgraph_node::add_new_function (child_fn
, false);
936 /* Destroy a omp_context data structures. Called through the splay tree
937 value delete callback. */
940 delete_omp_context (splay_tree_value value
)
942 omp_context
*ctx
= (omp_context
*) value
;
944 delete ctx
->cb
.decl_map
;
947 splay_tree_delete (ctx
->field_map
);
949 splay_tree_delete (ctx
->sfield_map
);
951 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
952 it produces corrupt debug information. */
953 if (ctx
->record_type
)
956 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
957 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
959 if (ctx
->srecord_type
)
962 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
963 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
966 if (is_task_ctx (ctx
))
967 finalize_task_copyfn (as_a
<gomp_task
*> (ctx
->stmt
));
969 if (ctx
->task_reduction_map
)
971 ctx
->task_reductions
.release ();
972 delete ctx
->task_reduction_map
;
975 delete ctx
->lastprivate_conditional_map
;
980 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
984 fixup_child_record_type (omp_context
*ctx
)
986 tree f
, type
= ctx
->record_type
;
988 if (!ctx
->receiver_decl
)
990 /* ??? It isn't sufficient to just call remap_type here, because
991 variably_modified_type_p doesn't work the way we expect for
992 record types. Testing each field for whether it needs remapping
993 and creating a new record by hand works, however. */
994 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
995 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
999 tree name
, new_fields
= NULL
;
1001 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1002 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1003 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1004 TYPE_DECL
, name
, type
);
1005 TYPE_NAME (type
) = name
;
1007 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1009 tree new_f
= copy_node (f
);
1010 DECL_CONTEXT (new_f
) = type
;
1011 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1012 DECL_CHAIN (new_f
) = new_fields
;
1013 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1014 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1016 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1020 /* Arrange to be able to look up the receiver field
1021 given the sender field. */
1022 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1023 (splay_tree_value
) new_f
);
1025 TYPE_FIELDS (type
) = nreverse (new_fields
);
1029 /* In a target region we never modify any of the pointers in *.omp_data_i,
1030 so attempt to help the optimizers. */
1031 if (is_gimple_omp_offloaded (ctx
->stmt
))
1032 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1034 TREE_TYPE (ctx
->receiver_decl
)
1035 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1038 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1039 specified by CLAUSES. */
1042 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1045 bool scan_array_reductions
= false;
1047 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1051 switch (OMP_CLAUSE_CODE (c
))
1053 case OMP_CLAUSE_PRIVATE
:
1054 decl
= OMP_CLAUSE_DECL (c
);
1055 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1057 else if (!is_variable_sized (decl
))
1058 install_var_local (decl
, ctx
);
1061 case OMP_CLAUSE_SHARED
:
1062 decl
= OMP_CLAUSE_DECL (c
);
1063 /* Ignore shared directives in teams construct inside of
1064 target construct. */
1065 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1066 && !is_host_teams_ctx (ctx
))
1068 /* Global variables don't need to be copied,
1069 the receiver side will use them directly. */
1070 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1071 if (is_global_var (odecl
))
1073 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1076 gcc_assert (is_taskreg_ctx (ctx
));
1077 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1078 || !is_variable_sized (decl
));
1079 /* Global variables don't need to be copied,
1080 the receiver side will use them directly. */
1081 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1083 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1085 use_pointer_for_field (decl
, ctx
);
1088 by_ref
= use_pointer_for_field (decl
, NULL
);
1089 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1090 || TREE_ADDRESSABLE (decl
)
1092 || omp_is_reference (decl
))
1094 by_ref
= use_pointer_for_field (decl
, ctx
);
1095 install_var_field (decl
, by_ref
, 3, ctx
);
1096 install_var_local (decl
, ctx
);
1099 /* We don't need to copy const scalar vars back. */
1100 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1103 case OMP_CLAUSE_REDUCTION
:
1104 case OMP_CLAUSE_IN_REDUCTION
:
1105 decl
= OMP_CLAUSE_DECL (c
);
1106 if (TREE_CODE (decl
) == MEM_REF
)
1108 tree t
= TREE_OPERAND (decl
, 0);
1109 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1110 t
= TREE_OPERAND (t
, 0);
1111 if (TREE_CODE (t
) == INDIRECT_REF
1112 || TREE_CODE (t
) == ADDR_EXPR
)
1113 t
= TREE_OPERAND (t
, 0);
1114 install_var_local (t
, ctx
);
1115 if (is_taskreg_ctx (ctx
)
1116 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1117 || (is_task_ctx (ctx
)
1118 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1119 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1120 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1121 == POINTER_TYPE
)))))
1122 && !is_variable_sized (t
)
1123 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1124 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1125 && !is_task_ctx (ctx
))))
1127 by_ref
= use_pointer_for_field (t
, NULL
);
1128 if (is_task_ctx (ctx
)
1129 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1130 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1132 install_var_field (t
, false, 1, ctx
);
1133 install_var_field (t
, by_ref
, 2, ctx
);
1136 install_var_field (t
, by_ref
, 3, ctx
);
1140 if (is_task_ctx (ctx
)
1141 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1142 && OMP_CLAUSE_REDUCTION_TASK (c
)
1143 && is_parallel_ctx (ctx
)))
1145 /* Global variables don't need to be copied,
1146 the receiver side will use them directly. */
1147 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1149 by_ref
= use_pointer_for_field (decl
, ctx
);
1150 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1151 install_var_field (decl
, by_ref
, 3, ctx
);
1153 install_var_local (decl
, ctx
);
1156 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1157 && OMP_CLAUSE_REDUCTION_TASK (c
))
1159 install_var_local (decl
, ctx
);
1164 case OMP_CLAUSE_LASTPRIVATE
:
1165 /* Let the corresponding firstprivate clause create
1167 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1171 case OMP_CLAUSE_FIRSTPRIVATE
:
1172 case OMP_CLAUSE_LINEAR
:
1173 decl
= OMP_CLAUSE_DECL (c
);
1175 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1176 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1177 && is_gimple_omp_offloaded (ctx
->stmt
))
1179 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1180 install_var_field (decl
, !omp_is_reference (decl
), 3, ctx
);
1181 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1182 install_var_field (decl
, true, 3, ctx
);
1184 install_var_field (decl
, false, 3, ctx
);
1186 if (is_variable_sized (decl
))
1188 if (is_task_ctx (ctx
))
1189 install_var_field (decl
, false, 1, ctx
);
1192 else if (is_taskreg_ctx (ctx
))
1195 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1196 by_ref
= use_pointer_for_field (decl
, NULL
);
1198 if (is_task_ctx (ctx
)
1199 && (global
|| by_ref
|| omp_is_reference (decl
)))
1201 install_var_field (decl
, false, 1, ctx
);
1203 install_var_field (decl
, by_ref
, 2, ctx
);
1206 install_var_field (decl
, by_ref
, 3, ctx
);
1208 install_var_local (decl
, ctx
);
1211 case OMP_CLAUSE_USE_DEVICE_PTR
:
1212 decl
= OMP_CLAUSE_DECL (c
);
1213 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1214 install_var_field (decl
, true, 3, ctx
);
1216 install_var_field (decl
, false, 3, ctx
);
1217 if (DECL_SIZE (decl
)
1218 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1220 tree decl2
= DECL_VALUE_EXPR (decl
);
1221 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1222 decl2
= TREE_OPERAND (decl2
, 0);
1223 gcc_assert (DECL_P (decl2
));
1224 install_var_local (decl2
, ctx
);
1226 install_var_local (decl
, ctx
);
1229 case OMP_CLAUSE_IS_DEVICE_PTR
:
1230 decl
= OMP_CLAUSE_DECL (c
);
1233 case OMP_CLAUSE__LOOPTEMP_
:
1234 case OMP_CLAUSE__REDUCTEMP_
:
1235 gcc_assert (is_taskreg_ctx (ctx
));
1236 decl
= OMP_CLAUSE_DECL (c
);
1237 install_var_field (decl
, false, 3, ctx
);
1238 install_var_local (decl
, ctx
);
1241 case OMP_CLAUSE_COPYPRIVATE
:
1242 case OMP_CLAUSE_COPYIN
:
1243 decl
= OMP_CLAUSE_DECL (c
);
1244 by_ref
= use_pointer_for_field (decl
, NULL
);
1245 install_var_field (decl
, by_ref
, 3, ctx
);
1248 case OMP_CLAUSE_FINAL
:
1250 case OMP_CLAUSE_NUM_THREADS
:
1251 case OMP_CLAUSE_NUM_TEAMS
:
1252 case OMP_CLAUSE_THREAD_LIMIT
:
1253 case OMP_CLAUSE_DEVICE
:
1254 case OMP_CLAUSE_SCHEDULE
:
1255 case OMP_CLAUSE_DIST_SCHEDULE
:
1256 case OMP_CLAUSE_DEPEND
:
1257 case OMP_CLAUSE_PRIORITY
:
1258 case OMP_CLAUSE_GRAINSIZE
:
1259 case OMP_CLAUSE_NUM_TASKS
:
1260 case OMP_CLAUSE_NUM_GANGS
:
1261 case OMP_CLAUSE_NUM_WORKERS
:
1262 case OMP_CLAUSE_VECTOR_LENGTH
:
1264 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1268 case OMP_CLAUSE_FROM
:
1269 case OMP_CLAUSE_MAP
:
1271 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1272 decl
= OMP_CLAUSE_DECL (c
);
1273 /* Global variables with "omp declare target" attribute
1274 don't need to be copied, the receiver side will use them
1275 directly. However, global variables with "omp declare target link"
1276 attribute need to be copied. Or when ALWAYS modifier is used. */
1277 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1279 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1280 && (OMP_CLAUSE_MAP_KIND (c
)
1281 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1282 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1283 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1284 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1285 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1286 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1287 && varpool_node::get_create (decl
)->offloadable
1288 && !lookup_attribute ("omp declare target link",
1289 DECL_ATTRIBUTES (decl
)))
1291 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1292 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1294 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1295 not offloaded; there is nothing to map for those. */
1296 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1297 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1298 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1301 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1302 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1303 || (OMP_CLAUSE_MAP_KIND (c
)
1304 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1306 if (TREE_CODE (decl
) == COMPONENT_REF
1307 || (TREE_CODE (decl
) == INDIRECT_REF
1308 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1309 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1310 == REFERENCE_TYPE
)))
1312 if (DECL_SIZE (decl
)
1313 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1315 tree decl2
= DECL_VALUE_EXPR (decl
);
1316 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1317 decl2
= TREE_OPERAND (decl2
, 0);
1318 gcc_assert (DECL_P (decl2
));
1319 install_var_local (decl2
, ctx
);
1321 install_var_local (decl
, ctx
);
1326 if (DECL_SIZE (decl
)
1327 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1329 tree decl2
= DECL_VALUE_EXPR (decl
);
1330 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1331 decl2
= TREE_OPERAND (decl2
, 0);
1332 gcc_assert (DECL_P (decl2
));
1333 install_var_field (decl2
, true, 3, ctx
);
1334 install_var_local (decl2
, ctx
);
1335 install_var_local (decl
, ctx
);
1339 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1340 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1341 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1342 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1343 install_var_field (decl
, true, 7, ctx
);
1345 install_var_field (decl
, true, 3, ctx
);
1346 if (is_gimple_omp_offloaded (ctx
->stmt
)
1347 && !OMP_CLAUSE_MAP_IN_REDUCTION (c
))
1348 install_var_local (decl
, ctx
);
1353 tree base
= get_base_address (decl
);
1354 tree nc
= OMP_CLAUSE_CHAIN (c
);
1357 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1358 && OMP_CLAUSE_DECL (nc
) == base
1359 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1360 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1362 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1363 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1369 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1370 decl
= OMP_CLAUSE_DECL (c
);
1372 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1373 (splay_tree_key
) decl
));
1375 = build_decl (OMP_CLAUSE_LOCATION (c
),
1376 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1377 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1378 insert_field_into_struct (ctx
->record_type
, field
);
1379 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1380 (splay_tree_value
) field
);
1385 case OMP_CLAUSE__GRIDDIM_
:
1388 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c
), ctx
->outer
);
1389 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c
), ctx
->outer
);
1393 case OMP_CLAUSE_NOWAIT
:
1394 case OMP_CLAUSE_ORDERED
:
1395 case OMP_CLAUSE_COLLAPSE
:
1396 case OMP_CLAUSE_UNTIED
:
1397 case OMP_CLAUSE_MERGEABLE
:
1398 case OMP_CLAUSE_PROC_BIND
:
1399 case OMP_CLAUSE_SAFELEN
:
1400 case OMP_CLAUSE_SIMDLEN
:
1401 case OMP_CLAUSE_THREADS
:
1402 case OMP_CLAUSE_SIMD
:
1403 case OMP_CLAUSE_NOGROUP
:
1404 case OMP_CLAUSE_DEFAULTMAP
:
1405 case OMP_CLAUSE_ASYNC
:
1406 case OMP_CLAUSE_WAIT
:
1407 case OMP_CLAUSE_GANG
:
1408 case OMP_CLAUSE_WORKER
:
1409 case OMP_CLAUSE_VECTOR
:
1410 case OMP_CLAUSE_INDEPENDENT
:
1411 case OMP_CLAUSE_AUTO
:
1412 case OMP_CLAUSE_SEQ
:
1413 case OMP_CLAUSE_TILE
:
1414 case OMP_CLAUSE__SIMT_
:
1415 case OMP_CLAUSE_DEFAULT
:
1416 case OMP_CLAUSE_NONTEMPORAL
:
1417 case OMP_CLAUSE_IF_PRESENT
:
1418 case OMP_CLAUSE_FINALIZE
:
1419 case OMP_CLAUSE_TASK_REDUCTION
:
1422 case OMP_CLAUSE_ALIGNED
:
1423 decl
= OMP_CLAUSE_DECL (c
);
1424 if (is_global_var (decl
)
1425 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1426 install_var_local (decl
, ctx
);
1429 case OMP_CLAUSE__CONDTEMP_
:
1430 decl
= OMP_CLAUSE_DECL (c
);
1431 if (is_parallel_ctx (ctx
))
1433 install_var_field (decl
, false, 3, ctx
);
1434 install_var_local (decl
, ctx
);
1436 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1437 && (gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
1438 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1439 install_var_local (decl
, ctx
);
1442 case OMP_CLAUSE__CACHE_
:
1448 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1450 switch (OMP_CLAUSE_CODE (c
))
1452 case OMP_CLAUSE_LASTPRIVATE
:
1453 /* Let the corresponding firstprivate clause create
1455 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1456 scan_array_reductions
= true;
1457 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1461 case OMP_CLAUSE_FIRSTPRIVATE
:
1462 case OMP_CLAUSE_PRIVATE
:
1463 case OMP_CLAUSE_LINEAR
:
1464 case OMP_CLAUSE_IS_DEVICE_PTR
:
1465 decl
= OMP_CLAUSE_DECL (c
);
1466 if (is_variable_sized (decl
))
1468 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1469 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
)
1470 && is_gimple_omp_offloaded (ctx
->stmt
))
1472 tree decl2
= DECL_VALUE_EXPR (decl
);
1473 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1474 decl2
= TREE_OPERAND (decl2
, 0);
1475 gcc_assert (DECL_P (decl2
));
1476 install_var_local (decl2
, ctx
);
1477 fixup_remapped_decl (decl2
, ctx
, false);
1479 install_var_local (decl
, ctx
);
1481 fixup_remapped_decl (decl
, ctx
,
1482 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1483 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1484 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1485 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1486 scan_array_reductions
= true;
1489 case OMP_CLAUSE_REDUCTION
:
1490 case OMP_CLAUSE_IN_REDUCTION
:
1491 decl
= OMP_CLAUSE_DECL (c
);
1492 if (TREE_CODE (decl
) != MEM_REF
)
1494 if (is_variable_sized (decl
))
1495 install_var_local (decl
, ctx
);
1496 fixup_remapped_decl (decl
, ctx
, false);
1498 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1499 scan_array_reductions
= true;
1502 case OMP_CLAUSE_TASK_REDUCTION
:
1503 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1504 scan_array_reductions
= true;
1507 case OMP_CLAUSE_SHARED
:
1508 /* Ignore shared directives in teams construct inside of
1509 target construct. */
1510 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1511 && !is_host_teams_ctx (ctx
))
1513 decl
= OMP_CLAUSE_DECL (c
);
1514 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1516 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1518 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1521 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1522 install_var_field (decl
, by_ref
, 11, ctx
);
1525 fixup_remapped_decl (decl
, ctx
, false);
1528 case OMP_CLAUSE_MAP
:
1529 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1531 decl
= OMP_CLAUSE_DECL (c
);
1533 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1534 && (OMP_CLAUSE_MAP_KIND (c
)
1535 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1536 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1537 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1538 && varpool_node::get_create (decl
)->offloadable
)
1542 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1543 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1544 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1545 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1547 tree new_decl
= lookup_decl (decl
, ctx
);
1548 TREE_TYPE (new_decl
)
1549 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1551 else if (DECL_SIZE (decl
)
1552 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1554 tree decl2
= DECL_VALUE_EXPR (decl
);
1555 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1556 decl2
= TREE_OPERAND (decl2
, 0);
1557 gcc_assert (DECL_P (decl2
));
1558 fixup_remapped_decl (decl2
, ctx
, false);
1559 fixup_remapped_decl (decl
, ctx
, true);
1562 fixup_remapped_decl (decl
, ctx
, false);
1566 case OMP_CLAUSE_COPYPRIVATE
:
1567 case OMP_CLAUSE_COPYIN
:
1568 case OMP_CLAUSE_DEFAULT
:
1570 case OMP_CLAUSE_NUM_THREADS
:
1571 case OMP_CLAUSE_NUM_TEAMS
:
1572 case OMP_CLAUSE_THREAD_LIMIT
:
1573 case OMP_CLAUSE_DEVICE
:
1574 case OMP_CLAUSE_SCHEDULE
:
1575 case OMP_CLAUSE_DIST_SCHEDULE
:
1576 case OMP_CLAUSE_NOWAIT
:
1577 case OMP_CLAUSE_ORDERED
:
1578 case OMP_CLAUSE_COLLAPSE
:
1579 case OMP_CLAUSE_UNTIED
:
1580 case OMP_CLAUSE_FINAL
:
1581 case OMP_CLAUSE_MERGEABLE
:
1582 case OMP_CLAUSE_PROC_BIND
:
1583 case OMP_CLAUSE_SAFELEN
:
1584 case OMP_CLAUSE_SIMDLEN
:
1585 case OMP_CLAUSE_ALIGNED
:
1586 case OMP_CLAUSE_DEPEND
:
1587 case OMP_CLAUSE__LOOPTEMP_
:
1588 case OMP_CLAUSE__REDUCTEMP_
:
1590 case OMP_CLAUSE_FROM
:
1591 case OMP_CLAUSE_PRIORITY
:
1592 case OMP_CLAUSE_GRAINSIZE
:
1593 case OMP_CLAUSE_NUM_TASKS
:
1594 case OMP_CLAUSE_THREADS
:
1595 case OMP_CLAUSE_SIMD
:
1596 case OMP_CLAUSE_NOGROUP
:
1597 case OMP_CLAUSE_DEFAULTMAP
:
1598 case OMP_CLAUSE_USE_DEVICE_PTR
:
1599 case OMP_CLAUSE_NONTEMPORAL
:
1600 case OMP_CLAUSE_ASYNC
:
1601 case OMP_CLAUSE_WAIT
:
1602 case OMP_CLAUSE_NUM_GANGS
:
1603 case OMP_CLAUSE_NUM_WORKERS
:
1604 case OMP_CLAUSE_VECTOR_LENGTH
:
1605 case OMP_CLAUSE_GANG
:
1606 case OMP_CLAUSE_WORKER
:
1607 case OMP_CLAUSE_VECTOR
:
1608 case OMP_CLAUSE_INDEPENDENT
:
1609 case OMP_CLAUSE_AUTO
:
1610 case OMP_CLAUSE_SEQ
:
1611 case OMP_CLAUSE_TILE
:
1612 case OMP_CLAUSE__GRIDDIM_
:
1613 case OMP_CLAUSE__SIMT_
:
1614 case OMP_CLAUSE_IF_PRESENT
:
1615 case OMP_CLAUSE_FINALIZE
:
1616 case OMP_CLAUSE__CONDTEMP_
:
1619 case OMP_CLAUSE__CACHE_
:
1625 gcc_checking_assert (!scan_array_reductions
1626 || !is_gimple_omp_oacc (ctx
->stmt
));
1627 if (scan_array_reductions
)
1629 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1630 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1631 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1632 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1633 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1635 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), ctx
);
1636 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
1638 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1639 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1640 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1641 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1642 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1643 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
1647 /* Create a new name for omp child function. Returns an identifier. */
1650 create_omp_child_function_name (bool task_copy
)
1652 return clone_function_name_numbered (current_function_decl
,
1653 task_copy
? "_omp_cpyfn" : "_omp_fn");
1656 /* Return true if CTX may belong to offloaded code: either if current function
1657 is offloaded, or any enclosing context corresponds to a target region. */
1660 omp_maybe_offloaded_ctx (omp_context
*ctx
)
1662 if (cgraph_node::get (current_function_decl
)->offloadable
)
1664 for (; ctx
; ctx
= ctx
->outer
)
1665 if (is_gimple_omp_offloaded (ctx
->stmt
))
1670 /* Build a decl for the omp child function. It'll not contain a body
1671 yet, just the bare decl. */
1674 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
1676 tree decl
, type
, name
, t
;
1678 name
= create_omp_child_function_name (task_copy
);
1680 type
= build_function_type_list (void_type_node
, ptr_type_node
,
1681 ptr_type_node
, NULL_TREE
);
1683 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
1685 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
1687 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
1690 ctx
->cb
.dst_fn
= decl
;
1692 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
1694 TREE_STATIC (decl
) = 1;
1695 TREE_USED (decl
) = 1;
1696 DECL_ARTIFICIAL (decl
) = 1;
1697 DECL_IGNORED_P (decl
) = 0;
1698 TREE_PUBLIC (decl
) = 0;
1699 DECL_UNINLINABLE (decl
) = 1;
1700 DECL_EXTERNAL (decl
) = 0;
1701 DECL_CONTEXT (decl
) = NULL_TREE
;
1702 DECL_INITIAL (decl
) = make_node (BLOCK
);
1703 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
1704 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
1705 /* Remove omp declare simd attribute from the new attributes. */
1706 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
1708 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
1711 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
1712 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
1713 *p
= TREE_CHAIN (*p
);
1716 tree chain
= TREE_CHAIN (*p
);
1717 *p
= copy_node (*p
);
1718 p
= &TREE_CHAIN (*p
);
1722 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
1723 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
1724 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
1725 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
1726 DECL_FUNCTION_VERSIONED (decl
)
1727 = DECL_FUNCTION_VERSIONED (current_function_decl
);
1729 if (omp_maybe_offloaded_ctx (ctx
))
1731 cgraph_node::get_create (decl
)->offloadable
= 1;
1732 if (ENABLE_OFFLOADING
)
1733 g
->have_offload
= true;
1736 if (cgraph_node::get_create (decl
)->offloadable
1737 && !lookup_attribute ("omp declare target",
1738 DECL_ATTRIBUTES (current_function_decl
)))
1740 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
1741 ? "omp target entrypoint"
1742 : "omp declare target");
1743 DECL_ATTRIBUTES (decl
)
1744 = tree_cons (get_identifier (target_attr
),
1745 NULL_TREE
, DECL_ATTRIBUTES (decl
));
1748 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1749 RESULT_DECL
, NULL_TREE
, void_type_node
);
1750 DECL_ARTIFICIAL (t
) = 1;
1751 DECL_IGNORED_P (t
) = 1;
1752 DECL_CONTEXT (t
) = decl
;
1753 DECL_RESULT (decl
) = t
;
1755 tree data_name
= get_identifier (".omp_data_i");
1756 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
1758 DECL_ARTIFICIAL (t
) = 1;
1759 DECL_NAMELESS (t
) = 1;
1760 DECL_ARG_TYPE (t
) = ptr_type_node
;
1761 DECL_CONTEXT (t
) = current_function_decl
;
1763 TREE_READONLY (t
) = 1;
1764 DECL_ARGUMENTS (decl
) = t
;
1766 ctx
->receiver_decl
= t
;
1769 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
1770 PARM_DECL
, get_identifier (".omp_data_o"),
1772 DECL_ARTIFICIAL (t
) = 1;
1773 DECL_NAMELESS (t
) = 1;
1774 DECL_ARG_TYPE (t
) = ptr_type_node
;
1775 DECL_CONTEXT (t
) = current_function_decl
;
1777 TREE_ADDRESSABLE (t
) = 1;
1778 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
1779 DECL_ARGUMENTS (decl
) = t
;
1782 /* Allocate memory for the function structure. The call to
1783 allocate_struct_function clobbers CFUN, so we need to restore
1785 push_struct_function (decl
);
1786 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
1787 init_tree_ssa (cfun
);
1791 /* Callback for walk_gimple_seq. Check if combined parallel
1792 contains gimple_omp_for_combined_into_p OMP_FOR. */
1795 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
1796 bool *handled_ops_p
,
1797 struct walk_stmt_info
*wi
)
1799 gimple
*stmt
= gsi_stmt (*gsi_p
);
1801 *handled_ops_p
= true;
1802 switch (gimple_code (stmt
))
1806 case GIMPLE_OMP_FOR
:
1807 if (gimple_omp_for_combined_into_p (stmt
)
1808 && gimple_omp_for_kind (stmt
)
1809 == *(const enum gf_mask
*) (wi
->info
))
1812 return integer_zero_node
;
1821 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1824 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
1825 omp_context
*outer_ctx
)
1827 struct walk_stmt_info wi
;
1829 memset (&wi
, 0, sizeof (wi
));
1831 wi
.info
= (void *) &msk
;
1832 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
1833 if (wi
.info
!= (void *) &msk
)
1835 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
1836 struct omp_for_data fd
;
1837 omp_extract_for_data (for_stmt
, &fd
, NULL
);
1838 /* We need two temporaries with fd.loop.v type (istart/iend)
1839 and then (fd.collapse - 1) temporaries with the same
1840 type for count2 ... countN-1 vars if not constant. */
1841 size_t count
= 2, i
;
1842 tree type
= fd
.iter_type
;
1844 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
1846 count
+= fd
.collapse
- 1;
1847 /* If there are lastprivate clauses on the inner
1848 GIMPLE_OMP_FOR, add one more temporaries for the total number
1849 of iterations (product of count1 ... countN-1). */
1850 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
1851 OMP_CLAUSE_LASTPRIVATE
))
1853 else if (msk
== GF_OMP_FOR_KIND_FOR
1854 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1855 OMP_CLAUSE_LASTPRIVATE
))
1858 for (i
= 0; i
< count
; i
++)
1860 tree temp
= create_tmp_var (type
);
1861 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
1862 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1863 OMP_CLAUSE_DECL (c
) = temp
;
1864 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1865 gimple_omp_taskreg_set_clauses (stmt
, c
);
1868 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
1869 && omp_find_clause (gimple_omp_task_clauses (stmt
),
1870 OMP_CLAUSE_REDUCTION
))
1872 tree type
= build_pointer_type (pointer_sized_int_node
);
1873 tree temp
= create_tmp_var (type
);
1874 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1875 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1876 OMP_CLAUSE_DECL (c
) = temp
;
1877 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
1878 gimple_omp_task_set_clauses (stmt
, c
);
1882 /* Scan an OpenMP parallel directive. */
1885 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1889 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
1891 /* Ignore parallel directives with empty bodies, unless there
1892 are copyin clauses. */
1894 && empty_body_p (gimple_omp_body (stmt
))
1895 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1896 OMP_CLAUSE_COPYIN
) == NULL
)
1898 gsi_replace (gsi
, gimple_build_nop (), false);
1902 if (gimple_omp_parallel_combined_p (stmt
))
1903 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
1904 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
1905 OMP_CLAUSE_REDUCTION
);
1906 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
1907 if (OMP_CLAUSE_REDUCTION_TASK (c
))
1909 tree type
= build_pointer_type (pointer_sized_int_node
);
1910 tree temp
= create_tmp_var (type
);
1911 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
1913 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
1914 OMP_CLAUSE_DECL (c
) = temp
;
1915 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
1916 gimple_omp_parallel_set_clauses (stmt
, c
);
1919 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
1922 ctx
= new_omp_context (stmt
, outer_ctx
);
1923 taskreg_contexts
.safe_push (ctx
);
1924 if (taskreg_nesting_level
> 1)
1925 ctx
->is_nested
= true;
1926 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1927 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1928 name
= create_tmp_var_name (".omp_data_s");
1929 name
= build_decl (gimple_location (stmt
),
1930 TYPE_DECL
, name
, ctx
->record_type
);
1931 DECL_ARTIFICIAL (name
) = 1;
1932 DECL_NAMELESS (name
) = 1;
1933 TYPE_NAME (ctx
->record_type
) = name
;
1934 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1935 if (!gimple_omp_parallel_grid_phony (stmt
))
1937 create_omp_child_function (ctx
, false);
1938 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1941 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
1942 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
1944 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
1945 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
1948 /* Scan an OpenMP task directive. */
1951 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
1955 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
1957 /* Ignore task directives with empty bodies, unless they have depend
1960 && gimple_omp_body (stmt
)
1961 && empty_body_p (gimple_omp_body (stmt
))
1962 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
1964 gsi_replace (gsi
, gimple_build_nop (), false);
1968 if (gimple_omp_task_taskloop_p (stmt
))
1969 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
1971 ctx
= new_omp_context (stmt
, outer_ctx
);
1973 if (gimple_omp_task_taskwait_p (stmt
))
1975 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1979 taskreg_contexts
.safe_push (ctx
);
1980 if (taskreg_nesting_level
> 1)
1981 ctx
->is_nested
= true;
1982 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
1983 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1984 name
= create_tmp_var_name (".omp_data_s");
1985 name
= build_decl (gimple_location (stmt
),
1986 TYPE_DECL
, name
, ctx
->record_type
);
1987 DECL_ARTIFICIAL (name
) = 1;
1988 DECL_NAMELESS (name
) = 1;
1989 TYPE_NAME (ctx
->record_type
) = name
;
1990 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
1991 create_omp_child_function (ctx
, false);
1992 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
1994 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
1996 if (ctx
->srecord_type
)
1998 name
= create_tmp_var_name (".omp_data_a");
1999 name
= build_decl (gimple_location (stmt
),
2000 TYPE_DECL
, name
, ctx
->srecord_type
);
2001 DECL_ARTIFICIAL (name
) = 1;
2002 DECL_NAMELESS (name
) = 1;
2003 TYPE_NAME (ctx
->srecord_type
) = name
;
2004 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2005 create_omp_child_function (ctx
, true);
2008 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2010 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2012 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2013 t
= build_int_cst (long_integer_type_node
, 0);
2014 gimple_omp_task_set_arg_size (stmt
, t
);
2015 t
= build_int_cst (long_integer_type_node
, 1);
2016 gimple_omp_task_set_arg_align (stmt
, t
);
2020 /* Helper function for finish_taskreg_scan, called through walk_tree.
2021 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2022 tree, replace it in the expression. */
2025 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2029 omp_context
*ctx
= (omp_context
*) data
;
2030 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2033 if (DECL_HAS_VALUE_EXPR_P (t
))
2034 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2039 else if (IS_TYPE_OR_DECL_P (*tp
))
2044 /* If any decls have been made addressable during scan_omp,
2045 adjust their fields if needed, and layout record types
2046 of parallel/task constructs. */
2049 finish_taskreg_scan (omp_context
*ctx
)
2051 if (ctx
->record_type
== NULL_TREE
)
2054 /* If any task_shared_vars were needed, verify all
2055 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2056 statements if use_pointer_for_field hasn't changed
2057 because of that. If it did, update field types now. */
2058 if (task_shared_vars
)
2062 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2063 c
; c
= OMP_CLAUSE_CHAIN (c
))
2064 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2065 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2067 tree decl
= OMP_CLAUSE_DECL (c
);
2069 /* Global variables don't need to be copied,
2070 the receiver side will use them directly. */
2071 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2073 if (!bitmap_bit_p (task_shared_vars
, DECL_UID (decl
))
2074 || !use_pointer_for_field (decl
, ctx
))
2076 tree field
= lookup_field (decl
, ctx
);
2077 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2078 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2080 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2081 TREE_THIS_VOLATILE (field
) = 0;
2082 DECL_USER_ALIGN (field
) = 0;
2083 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2084 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2085 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2086 if (ctx
->srecord_type
)
2088 tree sfield
= lookup_sfield (decl
, ctx
);
2089 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2090 TREE_THIS_VOLATILE (sfield
) = 0;
2091 DECL_USER_ALIGN (sfield
) = 0;
2092 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2093 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2094 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2099 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2101 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2102 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2105 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2106 expects to find it at the start of data. */
2107 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2108 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2112 *p
= DECL_CHAIN (*p
);
2116 p
= &DECL_CHAIN (*p
);
2117 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2118 TYPE_FIELDS (ctx
->record_type
) = f
;
2120 layout_type (ctx
->record_type
);
2121 fixup_child_record_type (ctx
);
2123 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2125 layout_type (ctx
->record_type
);
2126 fixup_child_record_type (ctx
);
2130 location_t loc
= gimple_location (ctx
->stmt
);
2131 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2132 /* Move VLA fields to the end. */
2133 p
= &TYPE_FIELDS (ctx
->record_type
);
2135 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2136 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2139 *p
= TREE_CHAIN (*p
);
2140 TREE_CHAIN (*q
) = NULL_TREE
;
2141 q
= &TREE_CHAIN (*q
);
2144 p
= &DECL_CHAIN (*p
);
2146 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2148 /* Move fields corresponding to first and second _looptemp_
2149 clause first. There are filled by GOMP_taskloop
2150 and thus need to be in specific positions. */
2151 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2152 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2153 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2154 OMP_CLAUSE__LOOPTEMP_
);
2155 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2156 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2157 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2158 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
2159 p
= &TYPE_FIELDS (ctx
->record_type
);
2161 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2162 *p
= DECL_CHAIN (*p
);
2164 p
= &DECL_CHAIN (*p
);
2165 DECL_CHAIN (f1
) = f2
;
2168 DECL_CHAIN (f2
) = f3
;
2169 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2172 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2173 TYPE_FIELDS (ctx
->record_type
) = f1
;
2174 if (ctx
->srecord_type
)
2176 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2177 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2179 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2180 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2182 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2183 *p
= DECL_CHAIN (*p
);
2185 p
= &DECL_CHAIN (*p
);
2186 DECL_CHAIN (f1
) = f2
;
2187 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2190 DECL_CHAIN (f2
) = f3
;
2191 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2194 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2195 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2198 layout_type (ctx
->record_type
);
2199 fixup_child_record_type (ctx
);
2200 if (ctx
->srecord_type
)
2201 layout_type (ctx
->srecord_type
);
2202 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2203 TYPE_SIZE_UNIT (ctx
->record_type
));
2204 if (TREE_CODE (t
) != INTEGER_CST
)
2206 t
= unshare_expr (t
);
2207 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2209 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2210 t
= build_int_cst (long_integer_type_node
,
2211 TYPE_ALIGN_UNIT (ctx
->record_type
));
2212 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2216 /* Find the enclosing offload context. */
2218 static omp_context
*
2219 enclosing_target_ctx (omp_context
*ctx
)
2221 for (; ctx
; ctx
= ctx
->outer
)
2222 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2228 /* Return true if ctx is part of an oacc kernels region. */
2231 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2233 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2235 gimple
*stmt
= ctx
->stmt
;
2236 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2237 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2244 /* Check the parallelism clauses inside a kernels regions.
2245 Until kernels handling moves to use the same loop indirection
2246 scheme as parallel, we need to do this checking early. */
2249 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2251 bool checking
= true;
2252 unsigned outer_mask
= 0;
2253 unsigned this_mask
= 0;
2254 bool has_seq
= false, has_auto
= false;
2257 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2261 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2263 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2266 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2268 switch (OMP_CLAUSE_CODE (c
))
2270 case OMP_CLAUSE_GANG
:
2271 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2273 case OMP_CLAUSE_WORKER
:
2274 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2276 case OMP_CLAUSE_VECTOR
:
2277 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2279 case OMP_CLAUSE_SEQ
:
2282 case OMP_CLAUSE_AUTO
:
2292 if (has_seq
&& (this_mask
|| has_auto
))
2293 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2294 " OpenACC loop specifiers");
2295 else if (has_auto
&& this_mask
)
2296 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2297 " OpenACC loop specifiers");
2299 if (this_mask
& outer_mask
)
2300 error_at (gimple_location (stmt
), "inner loop uses same"
2301 " OpenACC parallelism as containing loop");
2304 return outer_mask
| this_mask
;
2307 /* Scan a GIMPLE_OMP_FOR. */
2309 static omp_context
*
2310 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2314 tree clauses
= gimple_omp_for_clauses (stmt
);
2316 ctx
= new_omp_context (stmt
, outer_ctx
);
2318 if (is_gimple_omp_oacc (stmt
))
2320 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2322 if (!tgt
|| is_oacc_parallel (tgt
))
2323 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2325 char const *check
= NULL
;
2327 switch (OMP_CLAUSE_CODE (c
))
2329 case OMP_CLAUSE_GANG
:
2333 case OMP_CLAUSE_WORKER
:
2337 case OMP_CLAUSE_VECTOR
:
2345 if (check
&& OMP_CLAUSE_OPERAND (c
, 0))
2346 error_at (gimple_location (stmt
),
2347 "argument not permitted on %qs clause in"
2348 " OpenACC %<parallel%>", check
);
2351 if (tgt
&& is_oacc_kernels (tgt
))
2353 /* Strip out reductions, as they are not handled yet. */
2354 tree
*prev_ptr
= &clauses
;
2356 while (tree probe
= *prev_ptr
)
2358 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2360 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2361 *prev_ptr
= *next_ptr
;
2363 prev_ptr
= next_ptr
;
2366 gimple_omp_for_set_clauses (stmt
, clauses
);
2367 check_oacc_kernel_gwv (stmt
, ctx
);
2371 scan_sharing_clauses (clauses
, ctx
);
2373 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2374 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2376 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2377 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2378 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2379 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2381 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2385 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2388 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2389 omp_context
*outer_ctx
)
2391 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2392 gsi_replace (gsi
, bind
, false);
2393 gimple_seq seq
= NULL
;
2394 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2395 tree cond
= create_tmp_var_raw (integer_type_node
);
2396 DECL_CONTEXT (cond
) = current_function_decl
;
2397 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2398 gimple_bind_set_vars (bind
, cond
);
2399 gimple_call_set_lhs (g
, cond
);
2400 gimple_seq_add_stmt (&seq
, g
);
2401 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2402 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2403 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2404 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2405 gimple_seq_add_stmt (&seq
, g
);
2406 g
= gimple_build_label (lab1
);
2407 gimple_seq_add_stmt (&seq
, g
);
2408 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2409 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2410 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2411 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2412 gimple_omp_for_set_clauses (new_stmt
, clause
);
2413 gimple_seq_add_stmt (&seq
, new_stmt
);
2414 g
= gimple_build_goto (lab3
);
2415 gimple_seq_add_stmt (&seq
, g
);
2416 g
= gimple_build_label (lab2
);
2417 gimple_seq_add_stmt (&seq
, g
);
2418 gimple_seq_add_stmt (&seq
, stmt
);
2419 g
= gimple_build_label (lab3
);
2420 gimple_seq_add_stmt (&seq
, g
);
2421 gimple_bind_set_body (bind
, seq
);
2423 scan_omp_for (new_stmt
, outer_ctx
);
2424 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2427 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2428 struct walk_stmt_info
*);
2429 static omp_context
*maybe_lookup_ctx (gimple
*);
2431 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2432 for scan phase loop. */
2435 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2436 omp_context
*outer_ctx
)
2438 /* The only change between inclusive and exclusive scan will be
2439 within the first simd loop, so just use inclusive in the
2440 worksharing loop. */
2441 outer_ctx
->scan_inclusive
= true;
2442 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2443 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
2445 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2446 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2447 gsi_replace (gsi
, input_stmt
, false);
2448 gimple_seq input_body
= NULL
;
2449 gimple_seq_add_stmt (&input_body
, stmt
);
2450 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
2452 gimple_stmt_iterator input1_gsi
= gsi_none ();
2453 struct walk_stmt_info wi
;
2454 memset (&wi
, 0, sizeof (wi
));
2456 wi
.info
= (void *) &input1_gsi
;
2457 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2458 gcc_assert (!gsi_end_p (input1_gsi
));
2460 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
2461 gsi_next (&input1_gsi
);
2462 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
2463 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
2464 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
2465 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2466 std::swap (input_stmt1
, scan_stmt1
);
2468 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
2469 gimple_omp_set_body (input_stmt1
, NULL
);
2471 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
2472 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
2474 gimple_omp_set_body (input_stmt1
, input_body1
);
2475 gimple_omp_set_body (scan_stmt1
, NULL
);
2477 gimple_stmt_iterator input2_gsi
= gsi_none ();
2478 memset (&wi
, 0, sizeof (wi
));
2480 wi
.info
= (void *) &input2_gsi
;
2481 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
2483 gcc_assert (!gsi_end_p (input2_gsi
));
2485 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
2486 gsi_next (&input2_gsi
);
2487 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
2488 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
2489 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
2490 std::swap (input_stmt2
, scan_stmt2
);
2492 gimple_omp_set_body (input_stmt2
, NULL
);
2494 gimple_omp_set_body (input_stmt
, input_body
);
2495 gimple_omp_set_body (scan_stmt
, scan_body
);
2497 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
2498 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
2500 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
2501 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
2503 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
2506 /* Scan an OpenMP sections directive. */
2509 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
2513 ctx
= new_omp_context (stmt
, outer_ctx
);
2514 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
2515 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2518 /* Scan an OpenMP single directive. */
2521 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
2526 ctx
= new_omp_context (stmt
, outer_ctx
);
2527 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2528 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2529 name
= create_tmp_var_name (".omp_copy_s");
2530 name
= build_decl (gimple_location (stmt
),
2531 TYPE_DECL
, name
, ctx
->record_type
);
2532 TYPE_NAME (ctx
->record_type
) = name
;
2534 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
2535 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2537 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2538 ctx
->record_type
= NULL
;
2540 layout_type (ctx
->record_type
);
2543 /* Scan a GIMPLE_OMP_TARGET. */
2546 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
2550 bool offloaded
= is_gimple_omp_offloaded (stmt
);
2551 tree clauses
= gimple_omp_target_clauses (stmt
);
2553 ctx
= new_omp_context (stmt
, outer_ctx
);
2554 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2555 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2556 name
= create_tmp_var_name (".omp_data_t");
2557 name
= build_decl (gimple_location (stmt
),
2558 TYPE_DECL
, name
, ctx
->record_type
);
2559 DECL_ARTIFICIAL (name
) = 1;
2560 DECL_NAMELESS (name
) = 1;
2561 TYPE_NAME (ctx
->record_type
) = name
;
2562 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2566 create_omp_child_function (ctx
, false);
2567 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2570 scan_sharing_clauses (clauses
, ctx
);
2571 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2573 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2574 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2577 TYPE_FIELDS (ctx
->record_type
)
2578 = nreverse (TYPE_FIELDS (ctx
->record_type
));
2581 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
2582 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
2584 field
= DECL_CHAIN (field
))
2585 gcc_assert (DECL_ALIGN (field
) == align
);
2587 layout_type (ctx
->record_type
);
2589 fixup_child_record_type (ctx
);
2593 /* Scan an OpenMP teams directive. */
2596 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
2598 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
2600 if (!gimple_omp_teams_host (stmt
))
2602 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2603 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2606 taskreg_contexts
.safe_push (ctx
);
2607 gcc_assert (taskreg_nesting_level
== 1);
2608 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2609 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2610 tree name
= create_tmp_var_name (".omp_data_s");
2611 name
= build_decl (gimple_location (stmt
),
2612 TYPE_DECL
, name
, ctx
->record_type
);
2613 DECL_ARTIFICIAL (name
) = 1;
2614 DECL_NAMELESS (name
) = 1;
2615 TYPE_NAME (ctx
->record_type
) = name
;
2616 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2617 create_omp_child_function (ctx
, false);
2618 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2620 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
2621 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2623 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2624 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2627 /* Check nesting restrictions. */
2629 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
2633 if (ctx
&& gimple_code (ctx
->stmt
) == GIMPLE_OMP_GRID_BODY
)
2634 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2635 the original copy of its contents. */
2638 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2639 inside an OpenACC CTX. */
2640 if (!(is_gimple_omp (stmt
)
2641 && is_gimple_omp_oacc (stmt
))
2642 /* Except for atomic codes that we share with OpenMP. */
2643 && !(gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2644 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
2646 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
2648 error_at (gimple_location (stmt
),
2649 "non-OpenACC construct inside of OpenACC routine");
2653 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
2654 if (is_gimple_omp (octx
->stmt
)
2655 && is_gimple_omp_oacc (octx
->stmt
))
2657 error_at (gimple_location (stmt
),
2658 "non-OpenACC construct inside of OpenACC region");
2665 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
2667 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
2669 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
2670 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
2673 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
2675 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
2676 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
2678 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
2679 && (ctx
->outer
== NULL
2680 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
2681 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
2682 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
2683 != GF_OMP_FOR_KIND_FOR
)
2684 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
2686 error_at (gimple_location (stmt
),
2687 "%<ordered simd threads%> must be closely "
2688 "nested inside of %<for simd%> region");
2694 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
2695 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
2696 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
2698 error_at (gimple_location (stmt
),
2699 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2700 " or %<#pragma omp atomic%> may not be nested inside"
2701 " %<simd%> region");
2704 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2706 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
2707 || ((gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
)
2708 && (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
)))
2709 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
2711 error_at (gimple_location (stmt
),
2712 "only %<distribute%> or %<parallel%> regions are "
2713 "allowed to be strictly nested inside %<teams%> "
2719 switch (gimple_code (stmt
))
2721 case GIMPLE_OMP_FOR
:
2722 if (gimple_omp_for_kind (stmt
) & GF_OMP_FOR_SIMD
)
2724 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
2726 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
2728 error_at (gimple_location (stmt
),
2729 "%<distribute%> region must be strictly nested "
2730 "inside %<teams%> construct");
2735 /* We split taskloop into task and nested taskloop in it. */
2736 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
2738 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
2743 switch (gimple_code (ctx
->stmt
))
2745 case GIMPLE_OMP_FOR
:
2746 ok
= (gimple_omp_for_kind (ctx
->stmt
)
2747 == GF_OMP_FOR_KIND_OACC_LOOP
);
2750 case GIMPLE_OMP_TARGET
:
2751 switch (gimple_omp_target_kind (ctx
->stmt
))
2753 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
2754 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
2765 else if (oacc_get_fn_attrib (current_function_decl
))
2769 error_at (gimple_location (stmt
),
2770 "OpenACC loop directive must be associated with"
2771 " an OpenACC compute region");
2777 if (is_gimple_call (stmt
)
2778 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2779 == BUILT_IN_GOMP_CANCEL
2780 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2781 == BUILT_IN_GOMP_CANCELLATION_POINT
))
2783 const char *bad
= NULL
;
2784 const char *kind
= NULL
;
2785 const char *construct
2786 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2787 == BUILT_IN_GOMP_CANCEL
)
2788 ? "#pragma omp cancel"
2789 : "#pragma omp cancellation point";
2792 error_at (gimple_location (stmt
), "orphaned %qs construct",
2796 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
2797 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
2801 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
2802 bad
= "#pragma omp parallel";
2803 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2804 == BUILT_IN_GOMP_CANCEL
2805 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2806 ctx
->cancellable
= true;
2810 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
2811 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
2812 bad
= "#pragma omp for";
2813 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2814 == BUILT_IN_GOMP_CANCEL
2815 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2817 ctx
->cancellable
= true;
2818 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2820 warning_at (gimple_location (stmt
), 0,
2821 "%<#pragma omp cancel for%> inside "
2822 "%<nowait%> for construct");
2823 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
2824 OMP_CLAUSE_ORDERED
))
2825 warning_at (gimple_location (stmt
), 0,
2826 "%<#pragma omp cancel for%> inside "
2827 "%<ordered%> for construct");
2832 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
2833 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
2834 bad
= "#pragma omp sections";
2835 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2836 == BUILT_IN_GOMP_CANCEL
2837 && !integer_zerop (gimple_call_arg (stmt
, 1)))
2839 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
2841 ctx
->cancellable
= true;
2842 if (omp_find_clause (gimple_omp_sections_clauses
2845 warning_at (gimple_location (stmt
), 0,
2846 "%<#pragma omp cancel sections%> inside "
2847 "%<nowait%> sections construct");
2851 gcc_assert (ctx
->outer
2852 && gimple_code (ctx
->outer
->stmt
)
2853 == GIMPLE_OMP_SECTIONS
);
2854 ctx
->outer
->cancellable
= true;
2855 if (omp_find_clause (gimple_omp_sections_clauses
2858 warning_at (gimple_location (stmt
), 0,
2859 "%<#pragma omp cancel sections%> inside "
2860 "%<nowait%> sections construct");
2866 if (!is_task_ctx (ctx
)
2867 && (!is_taskloop_ctx (ctx
)
2868 || ctx
->outer
== NULL
2869 || !is_task_ctx (ctx
->outer
)))
2870 bad
= "#pragma omp task";
2873 for (omp_context
*octx
= ctx
->outer
;
2874 octx
; octx
= octx
->outer
)
2876 switch (gimple_code (octx
->stmt
))
2878 case GIMPLE_OMP_TASKGROUP
:
2880 case GIMPLE_OMP_TARGET
:
2881 if (gimple_omp_target_kind (octx
->stmt
)
2882 != GF_OMP_TARGET_KIND_REGION
)
2885 case GIMPLE_OMP_PARALLEL
:
2886 case GIMPLE_OMP_TEAMS
:
2887 error_at (gimple_location (stmt
),
2888 "%<%s taskgroup%> construct not closely "
2889 "nested inside of %<taskgroup%> region",
2892 case GIMPLE_OMP_TASK
:
2893 if (gimple_omp_task_taskloop_p (octx
->stmt
)
2895 && is_taskloop_ctx (octx
->outer
))
2898 = gimple_omp_for_clauses (octx
->outer
->stmt
);
2899 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
2908 ctx
->cancellable
= true;
2913 error_at (gimple_location (stmt
), "invalid arguments");
2918 error_at (gimple_location (stmt
),
2919 "%<%s %s%> construct not closely nested inside of %qs",
2920 construct
, kind
, bad
);
2925 case GIMPLE_OMP_SECTIONS
:
2926 case GIMPLE_OMP_SINGLE
:
2927 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2928 switch (gimple_code (ctx
->stmt
))
2930 case GIMPLE_OMP_FOR
:
2931 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
2932 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
2935 case GIMPLE_OMP_SECTIONS
:
2936 case GIMPLE_OMP_SINGLE
:
2937 case GIMPLE_OMP_ORDERED
:
2938 case GIMPLE_OMP_MASTER
:
2939 case GIMPLE_OMP_TASK
:
2940 case GIMPLE_OMP_CRITICAL
:
2941 if (is_gimple_call (stmt
))
2943 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
2944 != BUILT_IN_GOMP_BARRIER
)
2946 error_at (gimple_location (stmt
),
2947 "barrier region may not be closely nested inside "
2948 "of work-sharing, %<critical%>, %<ordered%>, "
2949 "%<master%>, explicit %<task%> or %<taskloop%> "
2953 error_at (gimple_location (stmt
),
2954 "work-sharing region may not be closely nested inside "
2955 "of work-sharing, %<critical%>, %<ordered%>, "
2956 "%<master%>, explicit %<task%> or %<taskloop%> region");
2958 case GIMPLE_OMP_PARALLEL
:
2959 case GIMPLE_OMP_TEAMS
:
2961 case GIMPLE_OMP_TARGET
:
2962 if (gimple_omp_target_kind (ctx
->stmt
)
2963 == GF_OMP_TARGET_KIND_REGION
)
2970 case GIMPLE_OMP_MASTER
:
2971 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
2972 switch (gimple_code (ctx
->stmt
))
2974 case GIMPLE_OMP_FOR
:
2975 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
2976 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
2979 case GIMPLE_OMP_SECTIONS
:
2980 case GIMPLE_OMP_SINGLE
:
2981 case GIMPLE_OMP_TASK
:
2982 error_at (gimple_location (stmt
),
2983 "%<master%> region may not be closely nested inside "
2984 "of work-sharing, explicit %<task%> or %<taskloop%> "
2987 case GIMPLE_OMP_PARALLEL
:
2988 case GIMPLE_OMP_TEAMS
:
2990 case GIMPLE_OMP_TARGET
:
2991 if (gimple_omp_target_kind (ctx
->stmt
)
2992 == GF_OMP_TARGET_KIND_REGION
)
2999 case GIMPLE_OMP_TASK
:
3000 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3001 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3002 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3003 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3005 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3006 error_at (OMP_CLAUSE_LOCATION (c
),
3007 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3008 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3012 case GIMPLE_OMP_ORDERED
:
3013 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3014 c
; c
= OMP_CLAUSE_CHAIN (c
))
3016 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
3018 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3019 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3022 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3023 if (kind
== OMP_CLAUSE_DEPEND_SOURCE
3024 || kind
== OMP_CLAUSE_DEPEND_SINK
)
3027 /* Look for containing ordered(N) loop. */
3029 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3031 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3032 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3034 error_at (OMP_CLAUSE_LOCATION (c
),
3035 "%<ordered%> construct with %<depend%> clause "
3036 "must be closely nested inside an %<ordered%> "
3040 else if (OMP_CLAUSE_ORDERED_EXPR (oclause
) == NULL_TREE
)
3042 error_at (OMP_CLAUSE_LOCATION (c
),
3043 "%<ordered%> construct with %<depend%> clause "
3044 "must be closely nested inside a loop with "
3045 "%<ordered%> clause with a parameter");
3051 error_at (OMP_CLAUSE_LOCATION (c
),
3052 "invalid depend kind in omp %<ordered%> %<depend%>");
3056 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3057 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3059 /* ordered simd must be closely nested inside of simd region,
3060 and simd region must not encounter constructs other than
3061 ordered simd, therefore ordered simd may be either orphaned,
3062 or ctx->stmt must be simd. The latter case is handled already
3066 error_at (gimple_location (stmt
),
3067 "%<ordered%> %<simd%> must be closely nested inside "
3072 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3073 switch (gimple_code (ctx
->stmt
))
3075 case GIMPLE_OMP_CRITICAL
:
3076 case GIMPLE_OMP_TASK
:
3077 case GIMPLE_OMP_ORDERED
:
3078 ordered_in_taskloop
:
3079 error_at (gimple_location (stmt
),
3080 "%<ordered%> region may not be closely nested inside "
3081 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3082 "%<taskloop%> region");
3084 case GIMPLE_OMP_FOR
:
3085 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3086 goto ordered_in_taskloop
;
3088 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3089 OMP_CLAUSE_ORDERED
);
3092 error_at (gimple_location (stmt
),
3093 "%<ordered%> region must be closely nested inside "
3094 "a loop region with an %<ordered%> clause");
3097 if (OMP_CLAUSE_ORDERED_EXPR (o
) != NULL_TREE
3098 && omp_find_clause (c
, OMP_CLAUSE_DEPEND
) == NULL_TREE
)
3100 error_at (gimple_location (stmt
),
3101 "%<ordered%> region without %<depend%> clause may "
3102 "not be closely nested inside a loop region with "
3103 "an %<ordered%> clause with a parameter");
3107 case GIMPLE_OMP_TARGET
:
3108 if (gimple_omp_target_kind (ctx
->stmt
)
3109 != GF_OMP_TARGET_KIND_REGION
)
3112 case GIMPLE_OMP_PARALLEL
:
3113 case GIMPLE_OMP_TEAMS
:
3114 error_at (gimple_location (stmt
),
3115 "%<ordered%> region must be closely nested inside "
3116 "a loop region with an %<ordered%> clause");
3122 case GIMPLE_OMP_CRITICAL
:
3125 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3126 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3127 if (gomp_critical
*other_crit
3128 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3129 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3131 error_at (gimple_location (stmt
),
3132 "%<critical%> region may not be nested inside "
3133 "a %<critical%> region with the same name");
3138 case GIMPLE_OMP_TEAMS
:
3141 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3142 || (gimple_omp_target_kind (ctx
->stmt
)
3143 != GF_OMP_TARGET_KIND_REGION
))
3145 /* Teams construct can appear either strictly nested inside of
3146 target construct with no intervening stmts, or can be encountered
3147 only by initial task (so must not appear inside any OpenMP
3149 error_at (gimple_location (stmt
),
3150 "%<teams%> construct must be closely nested inside of "
3151 "%<target%> construct or not nested in any OpenMP "
3156 case GIMPLE_OMP_TARGET
:
3157 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3158 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
3159 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
3160 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
))
3162 enum omp_clause_depend_kind kind
= OMP_CLAUSE_DEPEND_KIND (c
);
3163 error_at (OMP_CLAUSE_LOCATION (c
),
3164 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3165 kind
== OMP_CLAUSE_DEPEND_SOURCE
? "source" : "sink");
3168 if (is_gimple_omp_offloaded (stmt
)
3169 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3171 error_at (gimple_location (stmt
),
3172 "OpenACC region inside of OpenACC routine, nested "
3173 "parallelism not supported yet");
3176 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3178 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3180 if (is_gimple_omp (stmt
)
3181 && is_gimple_omp_oacc (stmt
)
3182 && is_gimple_omp (ctx
->stmt
))
3184 error_at (gimple_location (stmt
),
3185 "OpenACC construct inside of non-OpenACC region");
3191 const char *stmt_name
, *ctx_stmt_name
;
3192 switch (gimple_omp_target_kind (stmt
))
3194 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3195 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3196 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3197 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3198 stmt_name
= "target enter data"; break;
3199 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3200 stmt_name
= "target exit data"; break;
3201 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3202 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3203 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3204 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3205 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
3206 stmt_name
= "enter/exit data"; break;
3207 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3208 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3210 default: gcc_unreachable ();
3212 switch (gimple_omp_target_kind (ctx
->stmt
))
3214 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3215 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3216 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3217 ctx_stmt_name
= "parallel"; break;
3218 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3219 ctx_stmt_name
= "kernels"; break;
3220 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3221 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3222 ctx_stmt_name
= "host_data"; break;
3223 default: gcc_unreachable ();
3226 /* OpenACC/OpenMP mismatch? */
3227 if (is_gimple_omp_oacc (stmt
)
3228 != is_gimple_omp_oacc (ctx
->stmt
))
3230 error_at (gimple_location (stmt
),
3231 "%s %qs construct inside of %s %qs region",
3232 (is_gimple_omp_oacc (stmt
)
3233 ? "OpenACC" : "OpenMP"), stmt_name
,
3234 (is_gimple_omp_oacc (ctx
->stmt
)
3235 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3238 if (is_gimple_omp_offloaded (ctx
->stmt
))
3240 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3241 if (is_gimple_omp_oacc (ctx
->stmt
))
3243 error_at (gimple_location (stmt
),
3244 "%qs construct inside of %qs region",
3245 stmt_name
, ctx_stmt_name
);
3250 warning_at (gimple_location (stmt
), 0,
3251 "%qs construct inside of %qs region",
3252 stmt_name
, ctx_stmt_name
);
3264 /* Helper function scan_omp.
3266 Callback for walk_tree or operators in walk_gimple_stmt used to
3267 scan for OMP directives in TP. */
3270 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3272 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3273 omp_context
*ctx
= (omp_context
*) wi
->info
;
3276 switch (TREE_CODE (t
))
3284 tree repl
= remap_decl (t
, &ctx
->cb
);
3285 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3291 if (ctx
&& TYPE_P (t
))
3292 *tp
= remap_type (t
, &ctx
->cb
);
3293 else if (!DECL_P (t
))
3298 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3299 if (tem
!= TREE_TYPE (t
))
3301 if (TREE_CODE (t
) == INTEGER_CST
)
3302 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3304 TREE_TYPE (t
) = tem
;
3314 /* Return true if FNDECL is a setjmp or a longjmp. */
3317 setjmp_or_longjmp_p (const_tree fndecl
)
3319 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3320 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3323 tree declname
= DECL_NAME (fndecl
);
3326 const char *name
= IDENTIFIER_POINTER (declname
);
3327 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
3331 /* Helper function for scan_omp.
3333 Callback for walk_gimple_stmt used to scan for OMP directives in
3334 the current statement in GSI. */
3337 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3338 struct walk_stmt_info
*wi
)
3340 gimple
*stmt
= gsi_stmt (*gsi
);
3341 omp_context
*ctx
= (omp_context
*) wi
->info
;
3343 if (gimple_has_location (stmt
))
3344 input_location
= gimple_location (stmt
);
3346 /* Check the nesting restrictions. */
3347 bool remove
= false;
3348 if (is_gimple_omp (stmt
))
3349 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3350 else if (is_gimple_call (stmt
))
3352 tree fndecl
= gimple_call_fndecl (stmt
);
3355 if (setjmp_or_longjmp_p (fndecl
)
3357 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3358 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
3361 error_at (gimple_location (stmt
),
3362 "setjmp/longjmp inside simd construct");
3364 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3365 switch (DECL_FUNCTION_CODE (fndecl
))
3367 case BUILT_IN_GOMP_BARRIER
:
3368 case BUILT_IN_GOMP_CANCEL
:
3369 case BUILT_IN_GOMP_CANCELLATION_POINT
:
3370 case BUILT_IN_GOMP_TASKYIELD
:
3371 case BUILT_IN_GOMP_TASKWAIT
:
3372 case BUILT_IN_GOMP_TASKGROUP_START
:
3373 case BUILT_IN_GOMP_TASKGROUP_END
:
3374 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
3383 stmt
= gimple_build_nop ();
3384 gsi_replace (gsi
, stmt
, false);
3387 *handled_ops_p
= true;
3389 switch (gimple_code (stmt
))
3391 case GIMPLE_OMP_PARALLEL
:
3392 taskreg_nesting_level
++;
3393 scan_omp_parallel (gsi
, ctx
);
3394 taskreg_nesting_level
--;
3397 case GIMPLE_OMP_TASK
:
3398 taskreg_nesting_level
++;
3399 scan_omp_task (gsi
, ctx
);
3400 taskreg_nesting_level
--;
3403 case GIMPLE_OMP_FOR
:
3404 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3405 == GF_OMP_FOR_KIND_SIMD
)
3406 && gimple_omp_for_combined_into_p (stmt
)
3407 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
3409 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
3410 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
);
3411 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
3413 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3417 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
3418 == GF_OMP_FOR_KIND_SIMD
)
3419 && omp_maybe_offloaded_ctx (ctx
)
3420 && omp_max_simt_vf ())
3421 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
3423 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
3426 case GIMPLE_OMP_SECTIONS
:
3427 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
3430 case GIMPLE_OMP_SINGLE
:
3431 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
3434 case GIMPLE_OMP_SCAN
:
3435 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
3437 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
3438 ctx
->scan_inclusive
= true;
3439 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
3440 ctx
->scan_exclusive
= true;
3443 case GIMPLE_OMP_SECTION
:
3444 case GIMPLE_OMP_MASTER
:
3445 case GIMPLE_OMP_ORDERED
:
3446 case GIMPLE_OMP_CRITICAL
:
3447 case GIMPLE_OMP_GRID_BODY
:
3448 ctx
= new_omp_context (stmt
, ctx
);
3449 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3452 case GIMPLE_OMP_TASKGROUP
:
3453 ctx
= new_omp_context (stmt
, ctx
);
3454 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
3455 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3458 case GIMPLE_OMP_TARGET
:
3459 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
3462 case GIMPLE_OMP_TEAMS
:
3463 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
3465 taskreg_nesting_level
++;
3466 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3467 taskreg_nesting_level
--;
3470 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
3477 *handled_ops_p
= false;
3479 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
3481 var
= DECL_CHAIN (var
))
3482 insert_decl_map (&ctx
->cb
, var
, var
);
3486 *handled_ops_p
= false;
3494 /* Scan all the statements starting at the current statement. CTX
3495 contains context information about the OMP directives and
3496 clauses found during the scan. */
3499 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
3501 location_t saved_location
;
3502 struct walk_stmt_info wi
;
3504 memset (&wi
, 0, sizeof (wi
));
3506 wi
.want_locations
= true;
3508 saved_location
= input_location
;
3509 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
3510 input_location
= saved_location
;
3513 /* Re-gimplification and code generation routines. */
3515 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3516 of BIND if in a method. */
3519 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
3521 if (DECL_ARGUMENTS (current_function_decl
)
3522 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
3523 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
3526 tree vars
= gimple_bind_vars (bind
);
3527 for (tree
*pvar
= &vars
; *pvar
; )
3528 if (omp_member_access_dummy_var (*pvar
))
3529 *pvar
= DECL_CHAIN (*pvar
);
3531 pvar
= &DECL_CHAIN (*pvar
);
3532 gimple_bind_set_vars (bind
, vars
);
3536 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3537 block and its subblocks. */
3540 remove_member_access_dummy_vars (tree block
)
3542 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
3543 if (omp_member_access_dummy_var (*pvar
))
3544 *pvar
= DECL_CHAIN (*pvar
);
3546 pvar
= &DECL_CHAIN (*pvar
);
3548 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
3549 remove_member_access_dummy_vars (block
);
3552 /* If a context was created for STMT when it was scanned, return it. */
3554 static omp_context
*
3555 maybe_lookup_ctx (gimple
*stmt
)
3558 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
3559 return n
? (omp_context
*) n
->value
: NULL
;
3563 /* Find the mapping for DECL in CTX or the immediately enclosing
3564 context that has a mapping for DECL.
3566 If CTX is a nested parallel directive, we may have to use the decl
3567 mappings created in CTX's parent context. Suppose that we have the
3568 following parallel nesting (variable UIDs showed for clarity):
3571 #omp parallel shared(iD.1562) -> outer parallel
3572 iD.1562 = iD.1562 + 1;
3574 #omp parallel shared (iD.1562) -> inner parallel
3575 iD.1562 = iD.1562 - 1;
3577 Each parallel structure will create a distinct .omp_data_s structure
3578 for copying iD.1562 in/out of the directive:
3580 outer parallel .omp_data_s.1.i -> iD.1562
3581 inner parallel .omp_data_s.2.i -> iD.1562
3583 A shared variable mapping will produce a copy-out operation before
3584 the parallel directive and a copy-in operation after it. So, in
3585 this case we would have:
3588 .omp_data_o.1.i = iD.1562;
3589 #omp parallel shared(iD.1562) -> outer parallel
3590 .omp_data_i.1 = &.omp_data_o.1
3591 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3593 .omp_data_o.2.i = iD.1562; -> **
3594 #omp parallel shared(iD.1562) -> inner parallel
3595 .omp_data_i.2 = &.omp_data_o.2
3596 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3599 ** This is a problem. The symbol iD.1562 cannot be referenced
3600 inside the body of the outer parallel region. But since we are
3601 emitting this copy operation while expanding the inner parallel
3602 directive, we need to access the CTX structure of the outer
3603 parallel directive to get the correct mapping:
3605 .omp_data_o.2.i = .omp_data_i.1->i
3607 Since there may be other workshare or parallel directives enclosing
3608 the parallel directive, it may be necessary to walk up the context
3609 parent chain. This is not a problem in general because nested
3610 parallelism happens only rarely. */
3613 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3618 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3619 t
= maybe_lookup_decl (decl
, up
);
3621 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
3623 return t
? t
: decl
;
3627 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3628 in outer contexts. */
3631 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
3636 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
3637 t
= maybe_lookup_decl (decl
, up
);
3639 return t
? t
: decl
;
3643 /* Construct the initialization value for reduction operation OP. */
3646 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
3655 case TRUTH_ORIF_EXPR
:
3656 case TRUTH_XOR_EXPR
:
3658 return build_zero_cst (type
);
3661 case TRUTH_AND_EXPR
:
3662 case TRUTH_ANDIF_EXPR
:
3664 return fold_convert_loc (loc
, type
, integer_one_node
);
3667 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
3670 if (SCALAR_FLOAT_TYPE_P (type
))
3672 REAL_VALUE_TYPE max
, min
;
3673 if (HONOR_INFINITIES (type
))
3676 real_arithmetic (&min
, NEGATE_EXPR
, &max
, NULL
);
3679 real_maxval (&min
, 1, TYPE_MODE (type
));
3680 return build_real (type
, min
);
3682 else if (POINTER_TYPE_P (type
))
3685 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3686 return wide_int_to_tree (type
, min
);
3690 gcc_assert (INTEGRAL_TYPE_P (type
));
3691 return TYPE_MIN_VALUE (type
);
3695 if (SCALAR_FLOAT_TYPE_P (type
))
3697 REAL_VALUE_TYPE max
;
3698 if (HONOR_INFINITIES (type
))
3701 real_maxval (&max
, 0, TYPE_MODE (type
));
3702 return build_real (type
, max
);
3704 else if (POINTER_TYPE_P (type
))
3707 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
3708 return wide_int_to_tree (type
, max
);
3712 gcc_assert (INTEGRAL_TYPE_P (type
));
3713 return TYPE_MAX_VALUE (type
);
3721 /* Construct the initialization value for reduction CLAUSE. */
3724 omp_reduction_init (tree clause
, tree type
)
3726 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
3727 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
3730 /* Return alignment to be assumed for var in CLAUSE, which should be
3731 OMP_CLAUSE_ALIGNED. */
3734 omp_clause_aligned_alignment (tree clause
)
3736 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
3737 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
3739 /* Otherwise return implementation defined alignment. */
3740 unsigned int al
= 1;
3741 opt_scalar_mode mode_iter
;
3742 auto_vector_sizes sizes
;
3743 targetm
.vectorize
.autovectorize_vector_sizes (&sizes
, true);
3745 for (unsigned int i
= 0; i
< sizes
.length (); ++i
)
3746 vs
= ordered_max (vs
, sizes
[i
]);
3747 static enum mode_class classes
[]
3748 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
3749 for (int i
= 0; i
< 4; i
+= 2)
3750 /* The for loop above dictates that we only walk through scalar classes. */
3751 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
3753 scalar_mode mode
= mode_iter
.require ();
3754 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
3755 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
3757 while (maybe_ne (vs
, 0U)
3758 && known_lt (GET_MODE_SIZE (vmode
), vs
)
3759 && GET_MODE_2XWIDER_MODE (vmode
).exists ())
3760 vmode
= GET_MODE_2XWIDER_MODE (vmode
).require ();
3762 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
3763 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
3765 poly_uint64 nelts
= exact_div (GET_MODE_SIZE (vmode
),
3766 GET_MODE_SIZE (mode
));
3767 type
= build_vector_type (type
, nelts
);
3768 if (TYPE_MODE (type
) != vmode
)
3770 if (TYPE_ALIGN_UNIT (type
) > al
)
3771 al
= TYPE_ALIGN_UNIT (type
);
3773 return build_int_cst (integer_type_node
, al
);
3777 /* This structure is part of the interface between lower_rec_simd_input_clauses
3778 and lower_rec_input_clauses. */
3780 class omplow_simd_context
{
3782 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3786 vec
<tree
, va_heap
> simt_eargs
;
3787 gimple_seq simt_dlist
;
3788 poly_uint64_pod max_vf
;
3792 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3796 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
3797 omplow_simd_context
*sctx
, tree
&ivar
,
3798 tree
&lvar
, tree
*rvar
= NULL
,
3801 if (known_eq (sctx
->max_vf
, 0U))
3803 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
3804 if (maybe_gt (sctx
->max_vf
, 1U))
3806 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3807 OMP_CLAUSE_SAFELEN
);
3810 poly_uint64 safe_len
;
3811 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
3812 || maybe_lt (safe_len
, 1U))
3815 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
3818 if (maybe_gt (sctx
->max_vf
, 1U))
3820 sctx
->idx
= create_tmp_var (unsigned_type_node
);
3821 sctx
->lane
= create_tmp_var (unsigned_type_node
);
3824 if (known_eq (sctx
->max_vf
, 1U))
3829 if (is_gimple_reg (new_var
))
3831 ivar
= lvar
= new_var
;
3834 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
3835 ivar
= lvar
= create_tmp_var (type
);
3836 TREE_ADDRESSABLE (ivar
) = 1;
3837 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
3838 NULL
, DECL_ATTRIBUTES (ivar
));
3839 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
3840 tree clobber
= build_constructor (type
, NULL
);
3841 TREE_THIS_VOLATILE (clobber
) = 1;
3842 gimple
*g
= gimple_build_assign (ivar
, clobber
);
3843 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
3847 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
3848 tree avar
= create_tmp_var_raw (atype
);
3849 if (TREE_ADDRESSABLE (new_var
))
3850 TREE_ADDRESSABLE (avar
) = 1;
3851 DECL_ATTRIBUTES (avar
)
3852 = tree_cons (get_identifier ("omp simd array"), NULL
,
3853 DECL_ATTRIBUTES (avar
));
3854 gimple_add_tmp_var (avar
);
3856 if (rvar
&& !ctx
->for_simd_scan_phase
)
3858 /* For inscan reductions, create another array temporary,
3859 which will hold the reduced value. */
3860 iavar
= create_tmp_var_raw (atype
);
3861 if (TREE_ADDRESSABLE (new_var
))
3862 TREE_ADDRESSABLE (iavar
) = 1;
3863 DECL_ATTRIBUTES (iavar
)
3864 = tree_cons (get_identifier ("omp simd array"), NULL
,
3865 tree_cons (get_identifier ("omp simd inscan"), NULL
,
3866 DECL_ATTRIBUTES (iavar
)));
3867 gimple_add_tmp_var (iavar
);
3868 ctx
->cb
.decl_map
->put (avar
, iavar
);
3869 if (sctx
->lastlane
== NULL_TREE
)
3870 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
3871 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
3872 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
3873 TREE_THIS_NOTRAP (*rvar
) = 1;
3875 if (ctx
->scan_exclusive
)
3877 /* And for exclusive scan yet another one, which will
3878 hold the value during the scan phase. */
3879 tree savar
= create_tmp_var_raw (atype
);
3880 if (TREE_ADDRESSABLE (new_var
))
3881 TREE_ADDRESSABLE (savar
) = 1;
3882 DECL_ATTRIBUTES (savar
)
3883 = tree_cons (get_identifier ("omp simd array"), NULL
,
3884 tree_cons (get_identifier ("omp simd inscan "
3886 DECL_ATTRIBUTES (savar
)));
3887 gimple_add_tmp_var (savar
);
3888 ctx
->cb
.decl_map
->put (iavar
, savar
);
3889 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
3890 sctx
->idx
, NULL_TREE
, NULL_TREE
);
3891 TREE_THIS_NOTRAP (*rvar2
) = 1;
3894 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
3895 NULL_TREE
, NULL_TREE
);
3896 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
3897 NULL_TREE
, NULL_TREE
);
3898 TREE_THIS_NOTRAP (ivar
) = 1;
3899 TREE_THIS_NOTRAP (lvar
) = 1;
3901 if (DECL_P (new_var
))
3903 SET_DECL_VALUE_EXPR (new_var
, lvar
);
3904 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
3909 /* Helper function of lower_rec_input_clauses. For a reference
3910 in simd reduction, add an underlying variable it will reference. */
3913 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
3915 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
3916 if (TREE_CONSTANT (z
))
3918 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
3919 get_name (new_vard
));
3920 gimple_add_tmp_var (z
);
3921 TREE_ADDRESSABLE (z
) = 1;
3922 z
= build_fold_addr_expr_loc (loc
, z
);
3923 gimplify_assign (new_vard
, z
, ilist
);
3927 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
3928 code to emit (type) (tskred_temp[idx]). */
3931 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
3934 unsigned HOST_WIDE_INT sz
3935 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
3936 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
3937 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
3939 tree v
= create_tmp_var (pointer_sized_int_node
);
3940 gimple
*g
= gimple_build_assign (v
, r
);
3941 gimple_seq_add_stmt (ilist
, g
);
3942 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
3944 v
= create_tmp_var (type
);
3945 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
3946 gimple_seq_add_stmt (ilist
, g
);
3951 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3952 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3953 private variables. Initialization statements go in ILIST, while calls
3954 to destructors go in DLIST. */
3957 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
3958 omp_context
*ctx
, struct omp_for_data
*fd
)
3960 tree c
, copyin_seq
, x
, ptr
;
3961 bool copyin_by_ref
= false;
3962 bool lastprivate_firstprivate
= false;
3963 bool reduction_omp_orig_ref
= false;
3965 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3966 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
);
3967 omplow_simd_context sctx
= omplow_simd_context ();
3968 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
3969 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
3970 gimple_seq llist
[4] = { };
3971 tree nonconst_simd_if
= NULL_TREE
;
3974 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
3976 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3977 with data sharing clauses referencing variable sized vars. That
3978 is unnecessarily hard to support and very unlikely to result in
3979 vectorized code anyway. */
3981 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3982 switch (OMP_CLAUSE_CODE (c
))
3984 case OMP_CLAUSE_LINEAR
:
3985 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
3988 case OMP_CLAUSE_PRIVATE
:
3989 case OMP_CLAUSE_FIRSTPRIVATE
:
3990 case OMP_CLAUSE_LASTPRIVATE
:
3991 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
3993 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
3995 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
3996 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4000 case OMP_CLAUSE_REDUCTION
:
4001 case OMP_CLAUSE_IN_REDUCTION
:
4002 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4003 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4005 else if (omp_is_reference (OMP_CLAUSE_DECL (c
)))
4007 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4008 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4013 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4015 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4016 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4018 case OMP_CLAUSE_SIMDLEN
:
4019 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4022 case OMP_CLAUSE__CONDTEMP_
:
4023 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4031 /* Add a placeholder for simduid. */
4032 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4033 sctx
.simt_eargs
.safe_push (NULL_TREE
);
4035 unsigned task_reduction_cnt
= 0;
4036 unsigned task_reduction_cntorig
= 0;
4037 unsigned task_reduction_cnt_full
= 0;
4038 unsigned task_reduction_cntorig_full
= 0;
4039 unsigned task_reduction_other_cnt
= 0;
4040 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
4041 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
4042 /* Do all the fixed sized types in the first pass, and the variable sized
4043 types in the second pass. This makes sure that the scalar arguments to
4044 the variable sized types are processed before we use them in the
4045 variable sized operations. For task reductions we use 4 passes, in the
4046 first two we ignore them, in the third one gather arguments for
4047 GOMP_task_reduction_remap call and in the last pass actually handle
4048 the task reductions. */
4049 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
4052 if (pass
== 2 && task_reduction_cnt
)
4055 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
4056 + task_reduction_cntorig
);
4057 tskred_avar
= create_tmp_var_raw (tskred_atype
);
4058 gimple_add_tmp_var (tskred_avar
);
4059 TREE_ADDRESSABLE (tskred_avar
) = 1;
4060 task_reduction_cnt_full
= task_reduction_cnt
;
4061 task_reduction_cntorig_full
= task_reduction_cntorig
;
4063 else if (pass
== 3 && task_reduction_cnt
)
4065 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
4067 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
4068 size_int (task_reduction_cntorig
),
4069 build_fold_addr_expr (tskred_avar
));
4070 gimple_seq_add_stmt (ilist
, g
);
4072 if (pass
== 3 && task_reduction_other_cnt
)
4074 /* For reduction clauses, build
4075 tskred_base = (void *) tskred_temp[2]
4076 + omp_get_thread_num () * tskred_temp[1]
4077 or if tskred_temp[1] is known to be constant, that constant
4078 directly. This is the start of the private reduction copy block
4079 for the current thread. */
4080 tree v
= create_tmp_var (integer_type_node
);
4081 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
4082 gimple
*g
= gimple_build_call (x
, 0);
4083 gimple_call_set_lhs (g
, v
);
4084 gimple_seq_add_stmt (ilist
, g
);
4085 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
4086 tskred_temp
= OMP_CLAUSE_DECL (c
);
4087 if (is_taskreg_ctx (ctx
))
4088 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
4089 tree v2
= create_tmp_var (sizetype
);
4090 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
4091 gimple_seq_add_stmt (ilist
, g
);
4092 if (ctx
->task_reductions
[0])
4093 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
4095 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
4096 tree v3
= create_tmp_var (sizetype
);
4097 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
4098 gimple_seq_add_stmt (ilist
, g
);
4099 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
4100 tskred_base
= create_tmp_var (ptr_type_node
);
4101 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
4102 gimple_seq_add_stmt (ilist
, g
);
4104 task_reduction_cnt
= 0;
4105 task_reduction_cntorig
= 0;
4106 task_reduction_other_cnt
= 0;
4107 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4109 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
4112 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
4113 bool task_reduction_p
= false;
4114 bool task_reduction_needs_orig_p
= false;
4115 tree cond
= NULL_TREE
;
4119 case OMP_CLAUSE_PRIVATE
:
4120 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
4123 case OMP_CLAUSE_SHARED
:
4124 /* Ignore shared directives in teams construct inside
4125 of target construct. */
4126 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4127 && !is_host_teams_ctx (ctx
))
4129 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
4131 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
4132 || is_global_var (OMP_CLAUSE_DECL (c
)));
4135 case OMP_CLAUSE_FIRSTPRIVATE
:
4136 case OMP_CLAUSE_COPYIN
:
4138 case OMP_CLAUSE_LINEAR
:
4139 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
4140 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
4141 lastprivate_firstprivate
= true;
4143 case OMP_CLAUSE_REDUCTION
:
4144 case OMP_CLAUSE_IN_REDUCTION
:
4145 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
4147 task_reduction_p
= true;
4148 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
4150 task_reduction_other_cnt
++;
4155 task_reduction_cnt
++;
4156 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4158 var
= OMP_CLAUSE_DECL (c
);
4159 /* If var is a global variable that isn't privatized
4160 in outer contexts, we don't need to look up the
4161 original address, it is always the address of the
4162 global variable itself. */
4164 || omp_is_reference (var
)
4166 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4168 task_reduction_needs_orig_p
= true;
4169 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4170 task_reduction_cntorig
++;
4174 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4175 reduction_omp_orig_ref
= true;
4177 case OMP_CLAUSE__REDUCTEMP_
:
4178 if (!is_taskreg_ctx (ctx
))
4181 case OMP_CLAUSE__LOOPTEMP_
:
4182 /* Handle _looptemp_/_reductemp_ clauses only on
4187 case OMP_CLAUSE_LASTPRIVATE
:
4188 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4190 lastprivate_firstprivate
= true;
4191 if (pass
!= 0 || is_taskloop_ctx (ctx
))
4194 /* Even without corresponding firstprivate, if
4195 decl is Fortran allocatable, it needs outer var
4198 && lang_hooks
.decls
.omp_private_outer_ref
4199 (OMP_CLAUSE_DECL (c
)))
4200 lastprivate_firstprivate
= true;
4202 case OMP_CLAUSE_ALIGNED
:
4205 var
= OMP_CLAUSE_DECL (c
);
4206 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
4207 && !is_global_var (var
))
4209 new_var
= maybe_lookup_decl (var
, ctx
);
4210 if (new_var
== NULL_TREE
)
4211 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4212 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4213 tree alarg
= omp_clause_aligned_alignment (c
);
4214 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4215 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
4216 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4217 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
4218 gimplify_and_add (x
, ilist
);
4220 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
4221 && is_global_var (var
))
4223 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
4224 new_var
= lookup_decl (var
, ctx
);
4225 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4226 t
= build_fold_addr_expr_loc (clause_loc
, t
);
4227 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
4228 tree alarg
= omp_clause_aligned_alignment (c
);
4229 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
4230 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
4231 t
= fold_convert_loc (clause_loc
, ptype
, t
);
4232 x
= create_tmp_var (ptype
);
4233 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
4234 gimplify_and_add (t
, ilist
);
4235 t
= build_simple_mem_ref_loc (clause_loc
, x
);
4236 SET_DECL_VALUE_EXPR (new_var
, t
);
4237 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4240 case OMP_CLAUSE__CONDTEMP_
:
4241 if (is_parallel_ctx (ctx
)
4242 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
4249 if (task_reduction_p
!= (pass
>= 2))
4252 new_var
= var
= OMP_CLAUSE_DECL (c
);
4253 if ((c_kind
== OMP_CLAUSE_REDUCTION
4254 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4255 && TREE_CODE (var
) == MEM_REF
)
4257 var
= TREE_OPERAND (var
, 0);
4258 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
4259 var
= TREE_OPERAND (var
, 0);
4260 if (TREE_CODE (var
) == INDIRECT_REF
4261 || TREE_CODE (var
) == ADDR_EXPR
)
4262 var
= TREE_OPERAND (var
, 0);
4263 if (is_variable_sized (var
))
4265 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
4266 var
= DECL_VALUE_EXPR (var
);
4267 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
4268 var
= TREE_OPERAND (var
, 0);
4269 gcc_assert (DECL_P (var
));
4273 if (c_kind
!= OMP_CLAUSE_COPYIN
)
4274 new_var
= lookup_decl (var
, ctx
);
4276 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
4281 /* C/C++ array section reductions. */
4282 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4283 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4284 && var
!= OMP_CLAUSE_DECL (c
))
4289 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
4290 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
4292 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
4294 tree b
= TREE_OPERAND (orig_var
, 1);
4295 b
= maybe_lookup_decl (b
, ctx
);
4298 b
= TREE_OPERAND (orig_var
, 1);
4299 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
4301 if (integer_zerop (bias
))
4305 bias
= fold_convert_loc (clause_loc
,
4306 TREE_TYPE (b
), bias
);
4307 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4308 TREE_TYPE (b
), b
, bias
);
4310 orig_var
= TREE_OPERAND (orig_var
, 0);
4314 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
4315 if (is_global_var (out
)
4316 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
4317 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
4318 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
4323 bool by_ref
= use_pointer_for_field (var
, NULL
);
4324 x
= build_receiver_ref (var
, by_ref
, ctx
);
4325 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
4326 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
4328 x
= build_fold_addr_expr (x
);
4330 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
4331 x
= build_simple_mem_ref (x
);
4332 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
4334 if (var
== TREE_OPERAND (orig_var
, 0))
4335 x
= build_fold_addr_expr (x
);
4337 bias
= fold_convert (sizetype
, bias
);
4338 x
= fold_convert (ptr_type_node
, x
);
4339 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
4340 TREE_TYPE (x
), x
, bias
);
4341 unsigned cnt
= task_reduction_cnt
- 1;
4342 if (!task_reduction_needs_orig_p
)
4343 cnt
+= (task_reduction_cntorig_full
4344 - task_reduction_cntorig
);
4346 cnt
= task_reduction_cntorig
- 1;
4347 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4348 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4349 gimplify_assign (r
, x
, ilist
);
4353 if (TREE_CODE (orig_var
) == INDIRECT_REF
4354 || TREE_CODE (orig_var
) == ADDR_EXPR
)
4355 orig_var
= TREE_OPERAND (orig_var
, 0);
4356 tree d
= OMP_CLAUSE_DECL (c
);
4357 tree type
= TREE_TYPE (d
);
4358 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
4359 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
4360 const char *name
= get_name (orig_var
);
4363 tree xv
= create_tmp_var (ptr_type_node
);
4364 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4366 unsigned cnt
= task_reduction_cnt
- 1;
4367 if (!task_reduction_needs_orig_p
)
4368 cnt
+= (task_reduction_cntorig_full
4369 - task_reduction_cntorig
);
4371 cnt
= task_reduction_cntorig
- 1;
4372 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4373 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4375 gimple
*g
= gimple_build_assign (xv
, x
);
4376 gimple_seq_add_stmt (ilist
, g
);
4380 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4382 if (ctx
->task_reductions
[1 + idx
])
4383 off
= fold_convert (sizetype
,
4384 ctx
->task_reductions
[1 + idx
]);
4386 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4388 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
4390 gimple_seq_add_stmt (ilist
, g
);
4392 x
= fold_convert (build_pointer_type (boolean_type_node
),
4394 if (TREE_CONSTANT (v
))
4395 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
4396 TYPE_SIZE_UNIT (type
));
4399 tree t
= maybe_lookup_decl (v
, ctx
);
4403 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4404 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
4406 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4408 build_int_cst (TREE_TYPE (v
), 1));
4409 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4411 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4412 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
4414 cond
= create_tmp_var (TREE_TYPE (x
));
4415 gimplify_assign (cond
, x
, ilist
);
4418 else if (TREE_CONSTANT (v
))
4420 x
= create_tmp_var_raw (type
, name
);
4421 gimple_add_tmp_var (x
);
4422 TREE_ADDRESSABLE (x
) = 1;
4423 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4428 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4429 tree t
= maybe_lookup_decl (v
, ctx
);
4433 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
4434 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
4435 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
4437 build_int_cst (TREE_TYPE (v
), 1));
4438 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
4440 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4441 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
4442 x
= build_call_expr_loc (clause_loc
, atmp
, 2, t
, al
);
4445 tree ptype
= build_pointer_type (TREE_TYPE (type
));
4446 x
= fold_convert_loc (clause_loc
, ptype
, x
);
4447 tree y
= create_tmp_var (ptype
, name
);
4448 gimplify_assign (y
, x
, ilist
);
4452 if (!integer_zerop (bias
))
4454 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4456 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
4458 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
4459 pointer_sized_int_node
, yb
, bias
);
4460 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
4461 yb
= create_tmp_var (ptype
, name
);
4462 gimplify_assign (yb
, x
, ilist
);
4466 d
= TREE_OPERAND (d
, 0);
4467 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
4468 d
= TREE_OPERAND (d
, 0);
4469 if (TREE_CODE (d
) == ADDR_EXPR
)
4471 if (orig_var
!= var
)
4473 gcc_assert (is_variable_sized (orig_var
));
4474 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
4476 gimplify_assign (new_var
, x
, ilist
);
4477 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
4478 tree t
= build_fold_indirect_ref (new_var
);
4479 DECL_IGNORED_P (new_var
) = 0;
4480 TREE_THIS_NOTRAP (t
) = 1;
4481 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
4482 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
4486 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
4487 build_int_cst (ptype
, 0));
4488 SET_DECL_VALUE_EXPR (new_var
, x
);
4489 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4494 gcc_assert (orig_var
== var
);
4495 if (TREE_CODE (d
) == INDIRECT_REF
)
4497 x
= create_tmp_var (ptype
, name
);
4498 TREE_ADDRESSABLE (x
) = 1;
4499 gimplify_assign (x
, yb
, ilist
);
4500 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4502 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4503 gimplify_assign (new_var
, x
, ilist
);
4505 /* GOMP_taskgroup_reduction_register memsets the whole
4506 array to zero. If the initializer is zero, we don't
4507 need to initialize it again, just mark it as ever
4508 used unconditionally, i.e. cond = true. */
4510 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
4511 && initializer_zerop (omp_reduction_init (c
,
4514 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
4516 gimple_seq_add_stmt (ilist
, g
);
4519 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
4523 if (!is_parallel_ctx (ctx
))
4525 tree condv
= create_tmp_var (boolean_type_node
);
4526 g
= gimple_build_assign (condv
,
4527 build_simple_mem_ref (cond
));
4528 gimple_seq_add_stmt (ilist
, g
);
4529 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
4530 g
= gimple_build_cond (NE_EXPR
, condv
,
4531 boolean_false_node
, end
, lab1
);
4532 gimple_seq_add_stmt (ilist
, g
);
4533 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
4535 g
= gimple_build_assign (build_simple_mem_ref (cond
),
4537 gimple_seq_add_stmt (ilist
, g
);
4540 tree y1
= create_tmp_var (ptype
);
4541 gimplify_assign (y1
, y
, ilist
);
4542 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
4543 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
4544 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
4545 if (task_reduction_needs_orig_p
)
4547 y3
= create_tmp_var (ptype
);
4549 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4550 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4551 size_int (task_reduction_cnt_full
4552 + task_reduction_cntorig
- 1),
4553 NULL_TREE
, NULL_TREE
);
4556 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4557 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
4560 gimplify_assign (y3
, ref
, ilist
);
4562 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
4566 y2
= create_tmp_var (ptype
);
4567 gimplify_assign (y2
, y
, ilist
);
4569 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4571 tree ref
= build_outer_var_ref (var
, ctx
);
4572 /* For ref build_outer_var_ref already performs this. */
4573 if (TREE_CODE (d
) == INDIRECT_REF
)
4574 gcc_assert (omp_is_reference (var
));
4575 else if (TREE_CODE (d
) == ADDR_EXPR
)
4576 ref
= build_fold_addr_expr (ref
);
4577 else if (omp_is_reference (var
))
4578 ref
= build_fold_addr_expr (ref
);
4579 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
4580 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
4581 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
4583 y3
= create_tmp_var (ptype
);
4584 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
4588 y4
= create_tmp_var (ptype
);
4589 gimplify_assign (y4
, ref
, dlist
);
4593 tree i
= create_tmp_var (TREE_TYPE (v
));
4594 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
4595 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
4596 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
4599 i2
= create_tmp_var (TREE_TYPE (v
));
4600 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
4601 body2
= create_artificial_label (UNKNOWN_LOCATION
);
4602 end2
= create_artificial_label (UNKNOWN_LOCATION
);
4603 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
4605 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4607 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
4608 tree decl_placeholder
4609 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
4610 SET_DECL_VALUE_EXPR (decl_placeholder
,
4611 build_simple_mem_ref (y1
));
4612 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
4613 SET_DECL_VALUE_EXPR (placeholder
,
4614 y3
? build_simple_mem_ref (y3
)
4616 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
4617 x
= lang_hooks
.decls
.omp_clause_default_ctor
4618 (c
, build_simple_mem_ref (y1
),
4619 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
4621 gimplify_and_add (x
, ilist
);
4622 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
4624 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
4625 lower_omp (&tseq
, ctx
);
4626 gimple_seq_add_seq (ilist
, tseq
);
4628 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
4631 SET_DECL_VALUE_EXPR (decl_placeholder
,
4632 build_simple_mem_ref (y2
));
4633 SET_DECL_VALUE_EXPR (placeholder
,
4634 build_simple_mem_ref (y4
));
4635 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
4636 lower_omp (&tseq
, ctx
);
4637 gimple_seq_add_seq (dlist
, tseq
);
4638 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
4640 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
4641 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
4644 x
= lang_hooks
.decls
.omp_clause_dtor
4645 (c
, build_simple_mem_ref (y2
));
4647 gimplify_and_add (x
, dlist
);
4652 x
= omp_reduction_init (c
, TREE_TYPE (type
));
4653 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
4655 /* reduction(-:var) sums up the partial results, so it
4656 acts identically to reduction(+:var). */
4657 if (code
== MINUS_EXPR
)
4660 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
4663 x
= build2 (code
, TREE_TYPE (type
),
4664 build_simple_mem_ref (y4
),
4665 build_simple_mem_ref (y2
));
4666 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
4670 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
4671 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4672 gimple_seq_add_stmt (ilist
, g
);
4675 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
4676 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4677 gimple_seq_add_stmt (ilist
, g
);
4679 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
4680 build_int_cst (TREE_TYPE (i
), 1));
4681 gimple_seq_add_stmt (ilist
, g
);
4682 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
4683 gimple_seq_add_stmt (ilist
, g
);
4684 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
4687 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
4688 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4689 gimple_seq_add_stmt (dlist
, g
);
4692 g
= gimple_build_assign
4693 (y4
, POINTER_PLUS_EXPR
, y4
,
4694 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4695 gimple_seq_add_stmt (dlist
, g
);
4697 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
4698 build_int_cst (TREE_TYPE (i2
), 1));
4699 gimple_seq_add_stmt (dlist
, g
);
4700 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
4701 gimple_seq_add_stmt (dlist
, g
);
4702 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
4708 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
4712 bool by_ref
= use_pointer_for_field (var
, ctx
);
4713 x
= build_receiver_ref (var
, by_ref
, ctx
);
4715 if (!omp_is_reference (var
))
4716 x
= build_fold_addr_expr (x
);
4717 x
= fold_convert (ptr_type_node
, x
);
4718 unsigned cnt
= task_reduction_cnt
- 1;
4719 if (!task_reduction_needs_orig_p
)
4720 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
4722 cnt
= task_reduction_cntorig
- 1;
4723 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4724 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4725 gimplify_assign (r
, x
, ilist
);
4730 tree type
= TREE_TYPE (new_var
);
4731 if (!omp_is_reference (var
))
4732 type
= build_pointer_type (type
);
4733 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4735 unsigned cnt
= task_reduction_cnt
- 1;
4736 if (!task_reduction_needs_orig_p
)
4737 cnt
+= (task_reduction_cntorig_full
4738 - task_reduction_cntorig
);
4740 cnt
= task_reduction_cntorig
- 1;
4741 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
4742 size_int (cnt
), NULL_TREE
, NULL_TREE
);
4746 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
4748 if (ctx
->task_reductions
[1 + idx
])
4749 off
= fold_convert (sizetype
,
4750 ctx
->task_reductions
[1 + idx
]);
4752 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
4754 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
4757 x
= fold_convert (type
, x
);
4759 if (omp_is_reference (var
))
4761 gimplify_assign (new_var
, x
, ilist
);
4763 new_var
= build_simple_mem_ref (new_var
);
4767 t
= create_tmp_var (type
);
4768 gimplify_assign (t
, x
, ilist
);
4769 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
4770 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4772 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
4773 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
4774 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
4775 cond
= create_tmp_var (TREE_TYPE (t
));
4776 gimplify_assign (cond
, t
, ilist
);
4778 else if (is_variable_sized (var
))
4780 /* For variable sized types, we need to allocate the
4781 actual storage here. Call alloca and store the
4782 result in the pointer decl that we created elsewhere. */
4786 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
4791 ptr
= DECL_VALUE_EXPR (new_var
);
4792 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
4793 ptr
= TREE_OPERAND (ptr
, 0);
4794 gcc_assert (DECL_P (ptr
));
4795 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
4797 /* void *tmp = __builtin_alloca */
4798 atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4799 stmt
= gimple_build_call (atmp
, 2, x
,
4800 size_int (DECL_ALIGN (var
)));
4801 tmp
= create_tmp_var_raw (ptr_type_node
);
4802 gimple_add_tmp_var (tmp
);
4803 gimple_call_set_lhs (stmt
, tmp
);
4805 gimple_seq_add_stmt (ilist
, stmt
);
4807 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
4808 gimplify_assign (ptr
, x
, ilist
);
4811 else if (omp_is_reference (var
)
4812 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
4813 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
4815 /* For references that are being privatized for Fortran,
4816 allocate new backing storage for the new pointer
4817 variable. This allows us to avoid changing all the
4818 code that expects a pointer to something that expects
4819 a direct variable. */
4823 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
4824 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
4826 x
= build_receiver_ref (var
, false, ctx
);
4827 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4829 else if (TREE_CONSTANT (x
))
4831 /* For reduction in SIMD loop, defer adding the
4832 initialization of the reference, because if we decide
4833 to use SIMD array for it, the initilization could cause
4834 expansion ICE. Ditto for other privatization clauses. */
4839 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
4841 gimple_add_tmp_var (x
);
4842 TREE_ADDRESSABLE (x
) = 1;
4843 x
= build_fold_addr_expr_loc (clause_loc
, x
);
4849 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
4850 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
4851 tree al
= size_int (TYPE_ALIGN (rtype
));
4852 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
4857 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
4858 gimplify_assign (new_var
, x
, ilist
);
4861 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
4863 else if ((c_kind
== OMP_CLAUSE_REDUCTION
4864 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
4865 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4873 switch (OMP_CLAUSE_CODE (c
))
4875 case OMP_CLAUSE_SHARED
:
4876 /* Ignore shared directives in teams construct inside
4877 target construct. */
4878 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4879 && !is_host_teams_ctx (ctx
))
4881 /* Shared global vars are just accessed directly. */
4882 if (is_global_var (new_var
))
4884 /* For taskloop firstprivate/lastprivate, represented
4885 as firstprivate and shared clause on the task, new_var
4886 is the firstprivate var. */
4887 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
4889 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4890 needs to be delayed until after fixup_child_record_type so
4891 that we get the correct type during the dereference. */
4892 by_ref
= use_pointer_for_field (var
, ctx
);
4893 x
= build_receiver_ref (var
, by_ref
, ctx
);
4894 SET_DECL_VALUE_EXPR (new_var
, x
);
4895 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4897 /* ??? If VAR is not passed by reference, and the variable
4898 hasn't been initialized yet, then we'll get a warning for
4899 the store into the omp_data_s structure. Ideally, we'd be
4900 able to notice this and not store anything at all, but
4901 we're generating code too early. Suppress the warning. */
4903 TREE_NO_WARNING (var
) = 1;
4906 case OMP_CLAUSE__CONDTEMP_
:
4907 if (is_parallel_ctx (ctx
))
4909 x
= build_receiver_ref (var
, false, ctx
);
4910 SET_DECL_VALUE_EXPR (new_var
, x
);
4911 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4913 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
4915 x
= build_zero_cst (TREE_TYPE (var
));
4920 case OMP_CLAUSE_LASTPRIVATE
:
4921 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
4925 case OMP_CLAUSE_PRIVATE
:
4926 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
4927 x
= build_outer_var_ref (var
, ctx
);
4928 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
4930 if (is_task_ctx (ctx
))
4931 x
= build_receiver_ref (var
, false, ctx
);
4933 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
4939 nx
= lang_hooks
.decls
.omp_clause_default_ctor
4940 (c
, unshare_expr (new_var
), x
);
4943 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
4944 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
4945 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4946 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
4947 || omp_is_reference (var
))
4948 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
4951 if (omp_is_reference (var
))
4953 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
4954 tree new_vard
= TREE_OPERAND (new_var
, 0);
4955 gcc_assert (DECL_P (new_vard
));
4956 SET_DECL_VALUE_EXPR (new_vard
,
4957 build_fold_addr_expr (lvar
));
4958 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
4962 x
= lang_hooks
.decls
.omp_clause_default_ctor
4963 (c
, unshare_expr (ivar
), x
);
4964 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
4966 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
4967 unshare_expr (ivar
), x
);
4971 gimplify_and_add (x
, &llist
[0]);
4972 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
4973 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
4978 gcc_assert (TREE_CODE (v
) == MEM_REF
);
4979 v
= TREE_OPERAND (v
, 0);
4980 gcc_assert (DECL_P (v
));
4982 v
= *ctx
->lastprivate_conditional_map
->get (v
);
4983 tree t
= create_tmp_var (TREE_TYPE (v
));
4984 tree z
= build_zero_cst (TREE_TYPE (v
));
4986 = build_outer_var_ref (var
, ctx
,
4987 OMP_CLAUSE_LASTPRIVATE
);
4988 gimple_seq_add_stmt (dlist
,
4989 gimple_build_assign (t
, z
));
4990 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
4991 tree civar
= DECL_VALUE_EXPR (v
);
4992 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
4993 civar
= unshare_expr (civar
);
4994 TREE_OPERAND (civar
, 1) = sctx
.idx
;
4995 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
4996 unshare_expr (civar
));
4997 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
4998 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
4999 orig_v
, unshare_expr (ivar
)));
5000 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
5002 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
5004 gimple_seq tseq
= NULL
;
5005 gimplify_and_add (x
, &tseq
);
5007 lower_omp (&tseq
, ctx
->outer
);
5008 gimple_seq_add_seq (&llist
[1], tseq
);
5010 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5011 && ctx
->for_simd_scan_phase
)
5013 x
= unshare_expr (ivar
);
5015 = build_outer_var_ref (var
, ctx
,
5016 OMP_CLAUSE_LASTPRIVATE
);
5017 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5019 gimplify_and_add (x
, &llist
[0]);
5023 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5025 gimplify_and_add (y
, &llist
[1]);
5029 if (omp_is_reference (var
))
5031 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5032 tree new_vard
= TREE_OPERAND (new_var
, 0);
5033 gcc_assert (DECL_P (new_vard
));
5034 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5035 x
= TYPE_SIZE_UNIT (type
);
5036 if (TREE_CONSTANT (x
))
5038 x
= create_tmp_var_raw (type
, get_name (var
));
5039 gimple_add_tmp_var (x
);
5040 TREE_ADDRESSABLE (x
) = 1;
5041 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5042 x
= fold_convert_loc (clause_loc
,
5043 TREE_TYPE (new_vard
), x
);
5044 gimplify_assign (new_vard
, x
, ilist
);
5049 gimplify_and_add (nx
, ilist
);
5050 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5052 && ctx
->for_simd_scan_phase
)
5054 tree orig_v
= build_outer_var_ref (var
, ctx
,
5055 OMP_CLAUSE_LASTPRIVATE
);
5056 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
5058 gimplify_and_add (x
, ilist
);
5063 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5065 gimplify_and_add (x
, dlist
);
5068 case OMP_CLAUSE_LINEAR
:
5069 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
5070 goto do_firstprivate
;
5071 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5074 x
= build_outer_var_ref (var
, ctx
);
5077 case OMP_CLAUSE_FIRSTPRIVATE
:
5078 if (is_task_ctx (ctx
))
5080 if ((omp_is_reference (var
)
5081 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
5082 || is_variable_sized (var
))
5084 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
5086 || use_pointer_for_field (var
, NULL
))
5088 x
= build_receiver_ref (var
, false, ctx
);
5089 SET_DECL_VALUE_EXPR (new_var
, x
);
5090 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5094 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
5095 && omp_is_reference (var
))
5097 x
= build_outer_var_ref (var
, ctx
);
5098 gcc_assert (TREE_CODE (x
) == MEM_REF
5099 && integer_zerop (TREE_OPERAND (x
, 1)));
5100 x
= TREE_OPERAND (x
, 0);
5101 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5102 (c
, unshare_expr (new_var
), x
);
5103 gimplify_and_add (x
, ilist
);
5107 x
= build_outer_var_ref (var
, ctx
);
5110 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
5111 && gimple_omp_for_combined_into_p (ctx
->stmt
))
5113 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5114 tree stept
= TREE_TYPE (t
);
5115 tree ct
= omp_find_clause (clauses
,
5116 OMP_CLAUSE__LOOPTEMP_
);
5118 tree l
= OMP_CLAUSE_DECL (ct
);
5119 tree n1
= fd
->loop
.n1
;
5120 tree step
= fd
->loop
.step
;
5121 tree itype
= TREE_TYPE (l
);
5122 if (POINTER_TYPE_P (itype
))
5123 itype
= signed_type_for (itype
);
5124 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
5125 if (TYPE_UNSIGNED (itype
)
5126 && fd
->loop
.cond_code
== GT_EXPR
)
5127 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
5128 fold_build1 (NEGATE_EXPR
, itype
, l
),
5129 fold_build1 (NEGATE_EXPR
,
5132 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
5133 t
= fold_build2 (MULT_EXPR
, stept
,
5134 fold_convert (stept
, l
), t
);
5136 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
5138 if (omp_is_reference (var
))
5140 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5141 tree new_vard
= TREE_OPERAND (new_var
, 0);
5142 gcc_assert (DECL_P (new_vard
));
5143 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5144 nx
= TYPE_SIZE_UNIT (type
);
5145 if (TREE_CONSTANT (nx
))
5147 nx
= create_tmp_var_raw (type
,
5149 gimple_add_tmp_var (nx
);
5150 TREE_ADDRESSABLE (nx
) = 1;
5151 nx
= build_fold_addr_expr_loc (clause_loc
,
5153 nx
= fold_convert_loc (clause_loc
,
5154 TREE_TYPE (new_vard
),
5156 gimplify_assign (new_vard
, nx
, ilist
);
5160 x
= lang_hooks
.decls
.omp_clause_linear_ctor
5162 gimplify_and_add (x
, ilist
);
5166 if (POINTER_TYPE_P (TREE_TYPE (x
)))
5167 x
= fold_build2 (POINTER_PLUS_EXPR
,
5168 TREE_TYPE (x
), x
, t
);
5170 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5173 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
5174 || TREE_ADDRESSABLE (new_var
)
5175 || omp_is_reference (var
))
5176 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5179 if (omp_is_reference (var
))
5181 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5182 tree new_vard
= TREE_OPERAND (new_var
, 0);
5183 gcc_assert (DECL_P (new_vard
));
5184 SET_DECL_VALUE_EXPR (new_vard
,
5185 build_fold_addr_expr (lvar
));
5186 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5188 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
5190 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
5191 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
5192 gimplify_and_add (x
, ilist
);
5193 gimple_stmt_iterator gsi
5194 = gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5196 = gimple_build_assign (unshare_expr (lvar
), iv
);
5197 gsi_insert_before_without_update (&gsi
, g
,
5199 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
5200 enum tree_code code
= PLUS_EXPR
;
5201 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
5202 code
= POINTER_PLUS_EXPR
;
5203 g
= gimple_build_assign (iv
, code
, iv
, t
);
5204 gsi_insert_before_without_update (&gsi
, g
,
5208 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5209 (c
, unshare_expr (ivar
), x
);
5210 gimplify_and_add (x
, &llist
[0]);
5211 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5213 gimplify_and_add (x
, &llist
[1]);
5216 if (omp_is_reference (var
))
5218 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5219 tree new_vard
= TREE_OPERAND (new_var
, 0);
5220 gcc_assert (DECL_P (new_vard
));
5221 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
5222 nx
= TYPE_SIZE_UNIT (type
);
5223 if (TREE_CONSTANT (nx
))
5225 nx
= create_tmp_var_raw (type
, get_name (var
));
5226 gimple_add_tmp_var (nx
);
5227 TREE_ADDRESSABLE (nx
) = 1;
5228 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
5229 nx
= fold_convert_loc (clause_loc
,
5230 TREE_TYPE (new_vard
), nx
);
5231 gimplify_assign (new_vard
, nx
, ilist
);
5235 x
= lang_hooks
.decls
.omp_clause_copy_ctor
5236 (c
, unshare_expr (new_var
), x
);
5237 gimplify_and_add (x
, ilist
);
5240 case OMP_CLAUSE__LOOPTEMP_
:
5241 case OMP_CLAUSE__REDUCTEMP_
:
5242 gcc_assert (is_taskreg_ctx (ctx
));
5243 x
= build_outer_var_ref (var
, ctx
);
5244 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5245 gimplify_and_add (x
, ilist
);
5248 case OMP_CLAUSE_COPYIN
:
5249 by_ref
= use_pointer_for_field (var
, NULL
);
5250 x
= build_receiver_ref (var
, by_ref
, ctx
);
5251 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
5252 append_to_statement_list (x
, ©in_seq
);
5253 copyin_by_ref
|= by_ref
;
5256 case OMP_CLAUSE_REDUCTION
:
5257 case OMP_CLAUSE_IN_REDUCTION
:
5258 /* OpenACC reductions are initialized using the
5259 GOACC_REDUCTION internal function. */
5260 if (is_gimple_omp_oacc (ctx
->stmt
))
5262 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5264 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5266 tree ptype
= TREE_TYPE (placeholder
);
5269 x
= error_mark_node
;
5270 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
5271 && !task_reduction_needs_orig_p
)
5273 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5275 tree pptype
= build_pointer_type (ptype
);
5276 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5277 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5278 size_int (task_reduction_cnt_full
5279 + task_reduction_cntorig
- 1),
5280 NULL_TREE
, NULL_TREE
);
5284 = *ctx
->task_reduction_map
->get (c
);
5285 x
= task_reduction_read (ilist
, tskred_temp
,
5286 pptype
, 7 + 3 * idx
);
5288 x
= fold_convert (pptype
, x
);
5289 x
= build_simple_mem_ref (x
);
5294 x
= build_outer_var_ref (var
, ctx
);
5296 if (omp_is_reference (var
)
5297 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
5298 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5300 SET_DECL_VALUE_EXPR (placeholder
, x
);
5301 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5302 tree new_vard
= new_var
;
5303 if (omp_is_reference (var
))
5305 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5306 new_vard
= TREE_OPERAND (new_var
, 0);
5307 gcc_assert (DECL_P (new_vard
));
5309 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5311 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5312 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5315 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5319 if (new_vard
== new_var
)
5321 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
5322 SET_DECL_VALUE_EXPR (new_var
, ivar
);
5326 SET_DECL_VALUE_EXPR (new_vard
,
5327 build_fold_addr_expr (ivar
));
5328 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5330 x
= lang_hooks
.decls
.omp_clause_default_ctor
5331 (c
, unshare_expr (ivar
),
5332 build_outer_var_ref (var
, ctx
));
5333 if (rvarp
&& ctx
->for_simd_scan_phase
)
5336 gimplify_and_add (x
, &llist
[0]);
5337 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5339 gimplify_and_add (x
, &llist
[1]);
5346 gimplify_and_add (x
, &llist
[0]);
5348 tree ivar2
= unshare_expr (lvar
);
5349 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5350 x
= lang_hooks
.decls
.omp_clause_default_ctor
5351 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
5352 gimplify_and_add (x
, &llist
[0]);
5356 x
= lang_hooks
.decls
.omp_clause_default_ctor
5357 (c
, unshare_expr (rvar2
),
5358 build_outer_var_ref (var
, ctx
));
5359 gimplify_and_add (x
, &llist
[0]);
5362 /* For types that need construction, add another
5363 private var which will be default constructed
5364 and optionally initialized with
5365 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5366 loop we want to assign this value instead of
5367 constructing and destructing it in each
5369 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
5370 gimple_add_tmp_var (nv
);
5371 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
5375 x
= lang_hooks
.decls
.omp_clause_default_ctor
5376 (c
, nv
, build_outer_var_ref (var
, ctx
));
5377 gimplify_and_add (x
, ilist
);
5379 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5381 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5382 x
= DECL_VALUE_EXPR (new_vard
);
5384 if (new_vard
!= new_var
)
5385 vexpr
= build_fold_addr_expr (nv
);
5386 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5387 lower_omp (&tseq
, ctx
);
5388 SET_DECL_VALUE_EXPR (new_vard
, x
);
5389 gimple_seq_add_seq (ilist
, tseq
);
5390 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5393 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5395 gimplify_and_add (x
, dlist
);
5398 tree ref
= build_outer_var_ref (var
, ctx
);
5399 x
= unshare_expr (ivar
);
5400 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
5402 gimplify_and_add (x
, &llist
[0]);
5404 ref
= build_outer_var_ref (var
, ctx
);
5405 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
5407 gimplify_and_add (x
, &llist
[3]);
5409 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5410 if (new_vard
== new_var
)
5411 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5413 SET_DECL_VALUE_EXPR (new_vard
,
5414 build_fold_addr_expr (lvar
));
5416 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5418 gimplify_and_add (x
, &llist
[1]);
5420 tree ivar2
= unshare_expr (lvar
);
5421 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
5422 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
5424 gimplify_and_add (x
, &llist
[1]);
5428 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
5430 gimplify_and_add (x
, &llist
[1]);
5435 gimplify_and_add (x
, &llist
[0]);
5436 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5438 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5439 lower_omp (&tseq
, ctx
);
5440 gimple_seq_add_seq (&llist
[0], tseq
);
5442 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5443 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5444 lower_omp (&tseq
, ctx
);
5445 gimple_seq_add_seq (&llist
[1], tseq
);
5446 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5447 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5448 if (new_vard
== new_var
)
5449 SET_DECL_VALUE_EXPR (new_var
, lvar
);
5451 SET_DECL_VALUE_EXPR (new_vard
,
5452 build_fold_addr_expr (lvar
));
5453 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
5455 gimplify_and_add (x
, &llist
[1]);
5458 /* If this is a reference to constant size reduction var
5459 with placeholder, we haven't emitted the initializer
5460 for it because it is undesirable if SIMD arrays are used.
5461 But if they aren't used, we need to emit the deferred
5462 initialization now. */
5463 else if (omp_is_reference (var
) && is_simd
)
5464 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5466 tree lab2
= NULL_TREE
;
5470 if (!is_parallel_ctx (ctx
))
5472 tree condv
= create_tmp_var (boolean_type_node
);
5473 tree m
= build_simple_mem_ref (cond
);
5474 g
= gimple_build_assign (condv
, m
);
5475 gimple_seq_add_stmt (ilist
, g
);
5477 = create_artificial_label (UNKNOWN_LOCATION
);
5478 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5479 g
= gimple_build_cond (NE_EXPR
, condv
,
5482 gimple_seq_add_stmt (ilist
, g
);
5483 gimple_seq_add_stmt (ilist
,
5484 gimple_build_label (lab1
));
5486 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5488 gimple_seq_add_stmt (ilist
, g
);
5490 x
= lang_hooks
.decls
.omp_clause_default_ctor
5491 (c
, unshare_expr (new_var
),
5493 : build_outer_var_ref (var
, ctx
));
5495 gimplify_and_add (x
, ilist
);
5497 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5498 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5500 if (ctx
->for_simd_scan_phase
)
5503 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
5505 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
5506 gimple_add_tmp_var (nv
);
5507 ctx
->cb
.decl_map
->put (new_vard
, nv
);
5508 x
= lang_hooks
.decls
.omp_clause_default_ctor
5509 (c
, nv
, build_outer_var_ref (var
, ctx
));
5511 gimplify_and_add (x
, ilist
);
5512 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5514 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5516 if (new_vard
!= new_var
)
5517 vexpr
= build_fold_addr_expr (nv
);
5518 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
5519 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5520 lower_omp (&tseq
, ctx
);
5521 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
5522 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
5523 gimple_seq_add_seq (ilist
, tseq
);
5525 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5526 if (is_simd
&& ctx
->scan_exclusive
)
5529 = create_tmp_var_raw (TREE_TYPE (new_var
));
5530 gimple_add_tmp_var (nv2
);
5531 ctx
->cb
.decl_map
->put (nv
, nv2
);
5532 x
= lang_hooks
.decls
.omp_clause_default_ctor
5533 (c
, nv2
, build_outer_var_ref (var
, ctx
));
5534 gimplify_and_add (x
, ilist
);
5535 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5537 gimplify_and_add (x
, dlist
);
5539 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
5541 gimplify_and_add (x
, dlist
);
5544 && ctx
->scan_exclusive
5545 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
5547 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
5548 gimple_add_tmp_var (nv2
);
5549 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
5550 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
5552 gimplify_and_add (x
, dlist
);
5554 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5558 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5560 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5561 lower_omp (&tseq
, ctx
);
5562 gimple_seq_add_seq (ilist
, tseq
);
5564 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5567 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5568 lower_omp (&tseq
, ctx
);
5569 gimple_seq_add_seq (dlist
, tseq
);
5570 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5572 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5576 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5583 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
5584 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
5585 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5590 tree lab2
= NULL_TREE
;
5591 /* GOMP_taskgroup_reduction_register memsets the whole
5592 array to zero. If the initializer is zero, we don't
5593 need to initialize it again, just mark it as ever
5594 used unconditionally, i.e. cond = true. */
5595 if (initializer_zerop (x
))
5597 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5599 gimple_seq_add_stmt (ilist
, g
);
5604 if (!cond) { cond = true; new_var = x; } */
5605 if (!is_parallel_ctx (ctx
))
5607 tree condv
= create_tmp_var (boolean_type_node
);
5608 tree m
= build_simple_mem_ref (cond
);
5609 g
= gimple_build_assign (condv
, m
);
5610 gimple_seq_add_stmt (ilist
, g
);
5612 = create_artificial_label (UNKNOWN_LOCATION
);
5613 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
5614 g
= gimple_build_cond (NE_EXPR
, condv
,
5617 gimple_seq_add_stmt (ilist
, g
);
5618 gimple_seq_add_stmt (ilist
,
5619 gimple_build_label (lab1
));
5621 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5623 gimple_seq_add_stmt (ilist
, g
);
5624 gimplify_assign (new_var
, x
, ilist
);
5626 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
5630 /* reduction(-:var) sums up the partial results, so it
5631 acts identically to reduction(+:var). */
5632 if (code
== MINUS_EXPR
)
5635 tree new_vard
= new_var
;
5636 if (is_simd
&& omp_is_reference (var
))
5638 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
5639 new_vard
= TREE_OPERAND (new_var
, 0);
5640 gcc_assert (DECL_P (new_vard
));
5642 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
5644 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5645 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5648 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5652 if (new_vard
!= new_var
)
5654 SET_DECL_VALUE_EXPR (new_vard
,
5655 build_fold_addr_expr (lvar
));
5656 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
5659 tree ref
= build_outer_var_ref (var
, ctx
);
5663 if (ctx
->for_simd_scan_phase
)
5665 gimplify_assign (ivar
, ref
, &llist
[0]);
5666 ref
= build_outer_var_ref (var
, ctx
);
5667 gimplify_assign (ref
, rvar
, &llist
[3]);
5671 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
5676 simt_lane
= create_tmp_var (unsigned_type_node
);
5677 x
= build_call_expr_internal_loc
5678 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
5679 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
5680 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, x
);
5681 gimplify_assign (ivar
, x
, &llist
[2]);
5683 x
= build2 (code
, TREE_TYPE (ref
), ref
, ivar
);
5684 ref
= build_outer_var_ref (var
, ctx
);
5685 gimplify_assign (ref
, x
, &llist
[1]);
5690 if (omp_is_reference (var
) && is_simd
)
5691 handle_simd_reference (clause_loc
, new_vard
, ilist
);
5692 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
5693 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
5695 gimplify_assign (new_var
, x
, ilist
);
5698 tree ref
= build_outer_var_ref (var
, ctx
);
5700 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
5701 ref
= build_outer_var_ref (var
, ctx
);
5702 gimplify_assign (ref
, x
, dlist
);
5715 tree clobber
= build_constructor (TREE_TYPE (tskred_avar
), NULL
);
5716 TREE_THIS_VOLATILE (clobber
) = 1;
5717 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
5720 if (known_eq (sctx
.max_vf
, 1U))
5722 sctx
.is_simt
= false;
5723 if (ctx
->lastprivate_conditional_map
)
5725 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
5727 /* Signal to lower_omp_1 that it should use parent context. */
5728 ctx
->combined_into_simd_safelen1
= true;
5729 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5730 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5731 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5733 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
5734 omp_context
*outer
= ctx
->outer
;
5735 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
5736 outer
= outer
->outer
;
5737 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
5738 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
5739 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
5745 /* When not vectorized, treat lastprivate(conditional:) like
5746 normal lastprivate, as there will be just one simd lane
5747 writing the privatized variable. */
5748 delete ctx
->lastprivate_conditional_map
;
5749 ctx
->lastprivate_conditional_map
= NULL
;
5754 if (nonconst_simd_if
)
5756 if (sctx
.lane
== NULL_TREE
)
5758 sctx
.idx
= create_tmp_var (unsigned_type_node
);
5759 sctx
.lane
= create_tmp_var (unsigned_type_node
);
5761 /* FIXME: For now. */
5762 sctx
.is_simt
= false;
5765 if (sctx
.lane
|| sctx
.is_simt
)
5767 uid
= create_tmp_var (ptr_type_node
, "simduid");
5768 /* Don't want uninit warnings on simduid, it is always uninitialized,
5769 but we use it not for the value, but for the DECL_UID only. */
5770 TREE_NO_WARNING (uid
) = 1;
5771 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
5772 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
5773 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
5774 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
5776 /* Emit calls denoting privatized variables and initializing a pointer to
5777 structure that holds private variables as fields after ompdevlow pass. */
5780 sctx
.simt_eargs
[0] = uid
;
5782 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
5783 gimple_call_set_lhs (g
, uid
);
5784 gimple_seq_add_stmt (ilist
, g
);
5785 sctx
.simt_eargs
.release ();
5787 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
5788 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
5789 gimple_call_set_lhs (g
, simtrec
);
5790 gimple_seq_add_stmt (ilist
, g
);
5794 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
5795 2 + (nonconst_simd_if
!= NULL
),
5796 uid
, integer_zero_node
,
5798 gimple_call_set_lhs (g
, sctx
.lane
);
5799 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx
->stmt
));
5800 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
5801 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
5802 build_int_cst (unsigned_type_node
, 0));
5803 gimple_seq_add_stmt (ilist
, g
);
5806 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
5808 gimple_call_set_lhs (g
, sctx
.lastlane
);
5809 gimple_seq_add_stmt (dlist
, g
);
5810 gimple_seq_add_seq (dlist
, llist
[3]);
5812 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
5815 tree simt_vf
= create_tmp_var (unsigned_type_node
);
5816 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
5817 gimple_call_set_lhs (g
, simt_vf
);
5818 gimple_seq_add_stmt (dlist
, g
);
5820 tree t
= build_int_cst (unsigned_type_node
, 1);
5821 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
5822 gimple_seq_add_stmt (dlist
, g
);
5824 t
= build_int_cst (unsigned_type_node
, 0);
5825 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
5826 gimple_seq_add_stmt (dlist
, g
);
5828 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5829 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
5830 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5831 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
5832 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
5834 gimple_seq_add_seq (dlist
, llist
[2]);
5836 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
5837 gimple_seq_add_stmt (dlist
, g
);
5839 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
5840 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
5841 gimple_seq_add_stmt (dlist
, g
);
5843 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
5845 for (int i
= 0; i
< 2; i
++)
5848 tree vf
= create_tmp_var (unsigned_type_node
);
5849 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
5850 gimple_call_set_lhs (g
, vf
);
5851 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
5852 gimple_seq_add_stmt (seq
, g
);
5853 tree t
= build_int_cst (unsigned_type_node
, 0);
5854 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
5855 gimple_seq_add_stmt (seq
, g
);
5856 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5857 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
5858 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5859 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
5860 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
5861 gimple_seq_add_seq (seq
, llist
[i
]);
5862 t
= build_int_cst (unsigned_type_node
, 1);
5863 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
5864 gimple_seq_add_stmt (seq
, g
);
5865 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
5866 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
5867 gimple_seq_add_stmt (seq
, g
);
5868 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
5873 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
5875 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
5876 gimple_seq_add_stmt (dlist
, g
);
5879 /* The copyin sequence is not to be executed by the main thread, since
5880 that would result in self-copies. Perhaps not visible to scalars,
5881 but it certainly is to C++ operator=. */
5884 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
5886 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
5887 build_int_cst (TREE_TYPE (x
), 0));
5888 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
5889 gimplify_and_add (x
, ilist
);
5892 /* If any copyin variable is passed by reference, we must ensure the
5893 master thread doesn't modify it before it is copied over in all
5894 threads. Similarly for variables in both firstprivate and
5895 lastprivate clauses we need to ensure the lastprivate copying
5896 happens after firstprivate copying in all threads. And similarly
5897 for UDRs if initializer expression refers to omp_orig. */
5898 if (copyin_by_ref
|| lastprivate_firstprivate
5899 || (reduction_omp_orig_ref
5900 && !ctx
->scan_inclusive
5901 && !ctx
->scan_exclusive
))
5903 /* Don't add any barrier for #pragma omp simd or
5904 #pragma omp distribute. */
5905 if (!is_task_ctx (ctx
)
5906 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
5907 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
5908 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
5911 /* If max_vf is non-zero, then we can use only a vectorization factor
5912 up to the max_vf we chose. So stick it into the safelen clause. */
5913 if (maybe_ne (sctx
.max_vf
, 0U))
5915 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
5916 OMP_CLAUSE_SAFELEN
);
5917 poly_uint64 safe_len
;
5919 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
5920 && maybe_gt (safe_len
, sctx
.max_vf
)))
5922 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
5923 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
5925 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
5926 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
5931 /* Create temporary variables for lastprivate(conditional:) implementation
5932 in context CTX with CLAUSES. */
5935 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
5937 tree iter_type
= NULL_TREE
;
5938 tree cond_ptr
= NULL_TREE
;
5939 tree iter_var
= NULL_TREE
;
5940 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
5941 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
);
5942 tree next
= *clauses
;
5943 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5944 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5945 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
5949 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
5951 if (iter_type
== NULL_TREE
)
5953 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
5954 iter_var
= create_tmp_var_raw (iter_type
);
5955 DECL_CONTEXT (iter_var
) = current_function_decl
;
5956 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
5957 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
5958 ctx
->block_vars
= iter_var
;
5960 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
5961 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
5962 OMP_CLAUSE_DECL (c3
) = iter_var
;
5963 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
5965 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
5967 next
= OMP_CLAUSE_CHAIN (cc
);
5968 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
5969 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
5970 ctx
->lastprivate_conditional_map
->put (o
, v
);
5973 if (iter_type
== NULL
)
5975 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
5977 struct omp_for_data fd
;
5978 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
5980 iter_type
= unsigned_type_for (fd
.iter_type
);
5982 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
5983 iter_type
= unsigned_type_node
;
5984 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
5988 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
5989 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
5993 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
5994 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
5995 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
5996 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
5997 ctx
->block_vars
= cond_ptr
;
5998 c2
= build_omp_clause (UNKNOWN_LOCATION
,
5999 OMP_CLAUSE__CONDTEMP_
);
6000 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
6001 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
6004 iter_var
= create_tmp_var_raw (iter_type
);
6005 DECL_CONTEXT (iter_var
) = current_function_decl
;
6006 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
6007 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
6008 ctx
->block_vars
= iter_var
;
6010 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
6011 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
6012 OMP_CLAUSE_DECL (c3
) = iter_var
;
6013 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
6014 OMP_CLAUSE_CHAIN (c2
) = c3
;
6015 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
6017 tree v
= create_tmp_var_raw (iter_type
);
6018 DECL_CONTEXT (v
) = current_function_decl
;
6019 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
6020 DECL_CHAIN (v
) = ctx
->block_vars
;
6021 ctx
->block_vars
= v
;
6022 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6023 ctx
->lastprivate_conditional_map
->put (o
, v
);
6028 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6029 both parallel and workshare constructs. PREDICATE may be NULL if it's
6030 always true. BODY_P is the sequence to insert early initialization
6031 if needed, STMT_LIST is where the non-conditional lastprivate handling
6032 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6036 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
6037 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
6040 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
6041 bool par_clauses
= false;
6042 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
6043 unsigned HOST_WIDE_INT conditional_off
= 0;
6044 gimple_seq post_stmt_list
= NULL
;
6046 /* Early exit if there are no lastprivate or linear clauses. */
6047 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
6048 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
6049 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
6050 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
6052 if (clauses
== NULL
)
6054 /* If this was a workshare clause, see if it had been combined
6055 with its parallel. In that case, look for the clauses on the
6056 parallel statement itself. */
6057 if (is_parallel_ctx (ctx
))
6061 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6064 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6065 OMP_CLAUSE_LASTPRIVATE
);
6066 if (clauses
== NULL
)
6071 bool maybe_simt
= false;
6072 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6073 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
6075 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
6076 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
6078 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
6084 tree label_true
, arm1
, arm2
;
6085 enum tree_code pred_code
= TREE_CODE (predicate
);
6087 label
= create_artificial_label (UNKNOWN_LOCATION
);
6088 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
6089 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
6091 arm1
= TREE_OPERAND (predicate
, 0);
6092 arm2
= TREE_OPERAND (predicate
, 1);
6093 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6094 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6099 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
6100 arm2
= boolean_false_node
;
6101 pred_code
= NE_EXPR
;
6105 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
6106 c
= fold_convert (integer_type_node
, c
);
6107 simtcond
= create_tmp_var (integer_type_node
);
6108 gimplify_assign (simtcond
, c
, stmt_list
);
6109 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
6111 c
= create_tmp_var (integer_type_node
);
6112 gimple_call_set_lhs (g
, c
);
6113 gimple_seq_add_stmt (stmt_list
, g
);
6114 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
6118 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
6119 gimple_seq_add_stmt (stmt_list
, stmt
);
6120 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
6123 tree cond_ptr
= NULL_TREE
;
6124 for (c
= clauses
; c
;)
6127 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6128 gimple_seq
*this_stmt_list
= stmt_list
;
6129 tree lab2
= NULL_TREE
;
6131 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6132 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6133 && ctx
->lastprivate_conditional_map
6134 && !ctx
->combined_into_simd_safelen1
)
6136 gcc_assert (body_p
);
6139 if (cond_ptr
== NULL_TREE
)
6141 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
6142 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
6144 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
6145 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6146 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
6147 gimplify_assign (v
, build_zero_cst (type
), body_p
);
6148 this_stmt_list
= cstmt_list
;
6150 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
6152 mem
= build2 (MEM_REF
, type
, cond_ptr
,
6153 build_int_cst (TREE_TYPE (cond_ptr
),
6155 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
6158 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
6159 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
6160 tree mem2
= copy_node (mem
);
6161 gimple_seq seq
= NULL
;
6162 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
6163 gimple_seq_add_seq (this_stmt_list
, seq
);
6164 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
6165 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6166 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
6167 gimple_seq_add_stmt (this_stmt_list
, g
);
6168 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
6169 gimplify_assign (mem2
, v
, this_stmt_list
);
6172 && ctx
->combined_into_simd_safelen1
6173 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6174 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
6175 && ctx
->lastprivate_conditional_map
)
6176 this_stmt_list
= &post_stmt_list
;
6178 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6179 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6180 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
6182 var
= OMP_CLAUSE_DECL (c
);
6183 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6184 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
6185 && is_taskloop_ctx (ctx
))
6187 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
6188 new_var
= lookup_decl (var
, ctx
->outer
);
6192 new_var
= lookup_decl (var
, ctx
);
6193 /* Avoid uninitialized warnings for lastprivate and
6194 for linear iterators. */
6196 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6197 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
6198 TREE_NO_WARNING (new_var
) = 1;
6201 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
6203 tree val
= DECL_VALUE_EXPR (new_var
);
6204 if (TREE_CODE (val
) == ARRAY_REF
6205 && VAR_P (TREE_OPERAND (val
, 0))
6206 && lookup_attribute ("omp simd array",
6207 DECL_ATTRIBUTES (TREE_OPERAND (val
,
6210 if (lastlane
== NULL
)
6212 lastlane
= create_tmp_var (unsigned_type_node
);
6214 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6216 TREE_OPERAND (val
, 1));
6217 gimple_call_set_lhs (g
, lastlane
);
6218 gimple_seq_add_stmt (this_stmt_list
, g
);
6220 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
6221 TREE_OPERAND (val
, 0), lastlane
,
6222 NULL_TREE
, NULL_TREE
);
6223 TREE_THIS_NOTRAP (new_var
) = 1;
6226 else if (maybe_simt
)
6228 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
6229 ? DECL_VALUE_EXPR (new_var
)
6231 if (simtlast
== NULL
)
6233 simtlast
= create_tmp_var (unsigned_type_node
);
6234 gcall
*g
= gimple_build_call_internal
6235 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
6236 gimple_call_set_lhs (g
, simtlast
);
6237 gimple_seq_add_stmt (this_stmt_list
, g
);
6239 x
= build_call_expr_internal_loc
6240 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
6241 TREE_TYPE (val
), 2, val
, simtlast
);
6242 new_var
= unshare_expr (new_var
);
6243 gimplify_assign (new_var
, x
, this_stmt_list
);
6244 new_var
= unshare_expr (new_var
);
6247 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6248 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
6250 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
6251 gimple_seq_add_seq (this_stmt_list
,
6252 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
6253 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
6255 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6256 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
6258 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
6259 gimple_seq_add_seq (this_stmt_list
,
6260 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
6261 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
6265 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6266 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c
))
6268 gcc_checking_assert (is_taskloop_ctx (ctx
));
6269 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
6271 if (is_global_var (ovar
))
6275 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
6276 if (omp_is_reference (var
))
6277 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6278 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
6279 gimplify_and_add (x
, this_stmt_list
);
6282 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
6286 c
= OMP_CLAUSE_CHAIN (c
);
6287 if (c
== NULL
&& !par_clauses
)
6289 /* If this was a workshare clause, see if it had been combined
6290 with its parallel. In that case, continue looking for the
6291 clauses also on the parallel statement itself. */
6292 if (is_parallel_ctx (ctx
))
6296 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
6299 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
6300 OMP_CLAUSE_LASTPRIVATE
);
6306 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
6307 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
6310 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6311 (which might be a placeholder). INNER is true if this is an inner
6312 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6313 join markers. Generate the before-loop forking sequence in
6314 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6315 general form of these sequences is
6317 GOACC_REDUCTION_SETUP
6319 GOACC_REDUCTION_INIT
6321 GOACC_REDUCTION_FINI
6323 GOACC_REDUCTION_TEARDOWN. */
6326 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
6327 gcall
*fork
, gcall
*join
, gimple_seq
*fork_seq
,
6328 gimple_seq
*join_seq
, omp_context
*ctx
)
6330 gimple_seq before_fork
= NULL
;
6331 gimple_seq after_fork
= NULL
;
6332 gimple_seq before_join
= NULL
;
6333 gimple_seq after_join
= NULL
;
6334 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
6335 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
6336 unsigned offset
= 0;
6338 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6339 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
6341 tree orig
= OMP_CLAUSE_DECL (c
);
6342 tree var
= maybe_lookup_decl (orig
, ctx
);
6343 tree ref_to_res
= NULL_TREE
;
6344 tree incoming
, outgoing
, v1
, v2
, v3
;
6345 bool is_private
= false;
6347 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
6348 if (rcode
== MINUS_EXPR
)
6350 else if (rcode
== TRUTH_ANDIF_EXPR
)
6351 rcode
= BIT_AND_EXPR
;
6352 else if (rcode
== TRUTH_ORIF_EXPR
)
6353 rcode
= BIT_IOR_EXPR
;
6354 tree op
= build_int_cst (unsigned_type_node
, rcode
);
6359 incoming
= outgoing
= var
;
6363 /* See if an outer construct also reduces this variable. */
6364 omp_context
*outer
= ctx
;
6366 while (omp_context
*probe
= outer
->outer
)
6368 enum gimple_code type
= gimple_code (probe
->stmt
);
6373 case GIMPLE_OMP_FOR
:
6374 cls
= gimple_omp_for_clauses (probe
->stmt
);
6377 case GIMPLE_OMP_TARGET
:
6378 if (gimple_omp_target_kind (probe
->stmt
)
6379 != GF_OMP_TARGET_KIND_OACC_PARALLEL
)
6382 cls
= gimple_omp_target_clauses (probe
->stmt
);
6390 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
6391 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
6392 && orig
== OMP_CLAUSE_DECL (cls
))
6394 incoming
= outgoing
= lookup_decl (orig
, probe
);
6395 goto has_outer_reduction
;
6397 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
6398 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
6399 && orig
== OMP_CLAUSE_DECL (cls
))
6407 /* This is the outermost construct with this reduction,
6408 see if there's a mapping for it. */
6409 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
6410 && maybe_lookup_field (orig
, outer
) && !is_private
)
6412 ref_to_res
= build_receiver_ref (orig
, false, outer
);
6413 if (omp_is_reference (orig
))
6414 ref_to_res
= build_simple_mem_ref (ref_to_res
);
6416 tree type
= TREE_TYPE (var
);
6417 if (POINTER_TYPE_P (type
))
6418 type
= TREE_TYPE (type
);
6421 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
6425 /* Try to look at enclosing contexts for reduction var,
6426 use original if no mapping found. */
6428 omp_context
*c
= ctx
->outer
;
6431 t
= maybe_lookup_decl (orig
, c
);
6434 incoming
= outgoing
= (t
? t
: orig
);
6437 has_outer_reduction
:;
6441 ref_to_res
= integer_zero_node
;
6443 if (omp_is_reference (orig
))
6445 tree type
= TREE_TYPE (var
);
6446 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
6450 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
6451 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
6454 v1
= create_tmp_var (type
, id
);
6455 v2
= create_tmp_var (type
, id
);
6456 v3
= create_tmp_var (type
, id
);
6458 gimplify_assign (v1
, var
, fork_seq
);
6459 gimplify_assign (v2
, var
, fork_seq
);
6460 gimplify_assign (v3
, var
, fork_seq
);
6462 var
= build_simple_mem_ref (var
);
6463 v1
= build_simple_mem_ref (v1
);
6464 v2
= build_simple_mem_ref (v2
);
6465 v3
= build_simple_mem_ref (v3
);
6466 outgoing
= build_simple_mem_ref (outgoing
);
6468 if (!TREE_CONSTANT (incoming
))
6469 incoming
= build_simple_mem_ref (incoming
);
6474 /* Determine position in reduction buffer, which may be used
6475 by target. The parser has ensured that this is not a
6476 variable-sized type. */
6477 fixed_size_mode mode
6478 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
6479 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
6480 offset
= (offset
+ align
- 1) & ~(align
- 1);
6481 tree off
= build_int_cst (sizetype
, offset
);
6482 offset
+= GET_MODE_SIZE (mode
);
6486 init_code
= build_int_cst (integer_type_node
,
6487 IFN_GOACC_REDUCTION_INIT
);
6488 fini_code
= build_int_cst (integer_type_node
,
6489 IFN_GOACC_REDUCTION_FINI
);
6490 setup_code
= build_int_cst (integer_type_node
,
6491 IFN_GOACC_REDUCTION_SETUP
);
6492 teardown_code
= build_int_cst (integer_type_node
,
6493 IFN_GOACC_REDUCTION_TEARDOWN
);
6497 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6498 TREE_TYPE (var
), 6, setup_code
,
6499 unshare_expr (ref_to_res
),
6500 incoming
, level
, op
, off
);
6502 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6503 TREE_TYPE (var
), 6, init_code
,
6504 unshare_expr (ref_to_res
),
6505 v1
, level
, op
, off
);
6507 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6508 TREE_TYPE (var
), 6, fini_code
,
6509 unshare_expr (ref_to_res
),
6510 v2
, level
, op
, off
);
6512 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
6513 TREE_TYPE (var
), 6, teardown_code
,
6514 ref_to_res
, v3
, level
, op
, off
);
6516 gimplify_assign (v1
, setup_call
, &before_fork
);
6517 gimplify_assign (v2
, init_call
, &after_fork
);
6518 gimplify_assign (v3
, fini_call
, &before_join
);
6519 gimplify_assign (outgoing
, teardown_call
, &after_join
);
6522 /* Now stitch things together. */
6523 gimple_seq_add_seq (fork_seq
, before_fork
);
6525 gimple_seq_add_stmt (fork_seq
, fork
);
6526 gimple_seq_add_seq (fork_seq
, after_fork
);
6528 gimple_seq_add_seq (join_seq
, before_join
);
6530 gimple_seq_add_stmt (join_seq
, join
);
6531 gimple_seq_add_seq (join_seq
, after_join
);
6534 /* Generate code to implement the REDUCTION clauses, append it
6535 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6536 that should be emitted also inside of the critical section,
6537 in that case clear *CLIST afterwards, otherwise leave it as is
6538 and let the caller emit it itself. */
6541 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
6542 gimple_seq
*clist
, omp_context
*ctx
)
6544 gimple_seq sub_seq
= NULL
;
6549 /* OpenACC loop reductions are handled elsewhere. */
6550 if (is_gimple_omp_oacc (ctx
->stmt
))
6553 /* SIMD reductions are handled in lower_rec_input_clauses. */
6554 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
6555 && gimple_omp_for_kind (ctx
->stmt
) & GF_OMP_FOR_SIMD
)
6558 /* inscan reductions are handled elsewhere. */
6559 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
6562 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6563 update in that case, otherwise use a lock. */
6564 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
6565 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6566 && !OMP_CLAUSE_REDUCTION_TASK (c
))
6568 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
6569 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6571 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6581 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6583 tree var
, ref
, new_var
, orig_var
;
6584 enum tree_code code
;
6585 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6587 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
6588 || OMP_CLAUSE_REDUCTION_TASK (c
))
6591 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
6592 orig_var
= var
= OMP_CLAUSE_DECL (c
);
6593 if (TREE_CODE (var
) == MEM_REF
)
6595 var
= TREE_OPERAND (var
, 0);
6596 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
6597 var
= TREE_OPERAND (var
, 0);
6598 if (TREE_CODE (var
) == ADDR_EXPR
)
6599 var
= TREE_OPERAND (var
, 0);
6602 /* If this is a pointer or referenced based array
6603 section, the var could be private in the outer
6604 context e.g. on orphaned loop construct. Pretend this
6605 is private variable's outer reference. */
6606 ccode
= OMP_CLAUSE_PRIVATE
;
6607 if (TREE_CODE (var
) == INDIRECT_REF
)
6608 var
= TREE_OPERAND (var
, 0);
6611 if (is_variable_sized (var
))
6613 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
6614 var
= DECL_VALUE_EXPR (var
);
6615 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
6616 var
= TREE_OPERAND (var
, 0);
6617 gcc_assert (DECL_P (var
));
6620 new_var
= lookup_decl (var
, ctx
);
6621 if (var
== OMP_CLAUSE_DECL (c
) && omp_is_reference (var
))
6622 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6623 ref
= build_outer_var_ref (var
, ctx
, ccode
);
6624 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6626 /* reduction(-:var) sums up the partial results, so it acts
6627 identically to reduction(+:var). */
6628 if (code
== MINUS_EXPR
)
6633 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
6635 addr
= save_expr (addr
);
6636 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
6637 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (ref
), ref
, new_var
);
6638 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
6639 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
6640 gimplify_and_add (x
, stmt_seqp
);
6643 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
6645 tree d
= OMP_CLAUSE_DECL (c
);
6646 tree type
= TREE_TYPE (d
);
6647 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
6648 tree i
= create_tmp_var (TREE_TYPE (v
));
6649 tree ptype
= build_pointer_type (TREE_TYPE (type
));
6650 tree bias
= TREE_OPERAND (d
, 1);
6651 d
= TREE_OPERAND (d
, 0);
6652 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
6654 tree b
= TREE_OPERAND (d
, 1);
6655 b
= maybe_lookup_decl (b
, ctx
);
6658 b
= TREE_OPERAND (d
, 1);
6659 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
6661 if (integer_zerop (bias
))
6665 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
6666 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
6667 TREE_TYPE (b
), b
, bias
);
6669 d
= TREE_OPERAND (d
, 0);
6671 /* For ref build_outer_var_ref already performs this, so
6672 only new_var needs a dereference. */
6673 if (TREE_CODE (d
) == INDIRECT_REF
)
6675 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6676 gcc_assert (omp_is_reference (var
) && var
== orig_var
);
6678 else if (TREE_CODE (d
) == ADDR_EXPR
)
6680 if (orig_var
== var
)
6682 new_var
= build_fold_addr_expr (new_var
);
6683 ref
= build_fold_addr_expr (ref
);
6688 gcc_assert (orig_var
== var
);
6689 if (omp_is_reference (var
))
6690 ref
= build_fold_addr_expr (ref
);
6694 tree t
= maybe_lookup_decl (v
, ctx
);
6698 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
6699 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
6701 if (!integer_zerop (bias
))
6703 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
6704 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
6705 TREE_TYPE (new_var
), new_var
,
6706 unshare_expr (bias
));
6707 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
6708 TREE_TYPE (ref
), ref
, bias
);
6710 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
6711 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
6712 tree m
= create_tmp_var (ptype
);
6713 gimplify_assign (m
, new_var
, stmt_seqp
);
6715 m
= create_tmp_var (ptype
);
6716 gimplify_assign (m
, ref
, stmt_seqp
);
6718 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
6719 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6720 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6721 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
6722 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6723 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
6724 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6726 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6727 tree decl_placeholder
6728 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
6729 SET_DECL_VALUE_EXPR (placeholder
, out
);
6730 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6731 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
6732 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
6733 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
6734 gimple_seq_add_seq (&sub_seq
,
6735 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
6736 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6737 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
6738 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
6742 x
= build2 (code
, TREE_TYPE (out
), out
, priv
);
6743 out
= unshare_expr (out
);
6744 gimplify_assign (out
, x
, &sub_seq
);
6746 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
6747 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
6748 gimple_seq_add_stmt (&sub_seq
, g
);
6749 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
6750 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
6751 gimple_seq_add_stmt (&sub_seq
, g
);
6752 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
6753 build_int_cst (TREE_TYPE (i
), 1));
6754 gimple_seq_add_stmt (&sub_seq
, g
);
6755 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
6756 gimple_seq_add_stmt (&sub_seq
, g
);
6757 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
6759 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6761 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6763 if (omp_is_reference (var
)
6764 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
6766 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
6767 SET_DECL_VALUE_EXPR (placeholder
, ref
);
6768 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6769 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
6770 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
6771 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6772 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
6776 x
= build2 (code
, TREE_TYPE (ref
), ref
, new_var
);
6777 ref
= build_outer_var_ref (var
, ctx
);
6778 gimplify_assign (ref
, x
, &sub_seq
);
6782 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
6784 gimple_seq_add_stmt (stmt_seqp
, stmt
);
6786 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
6790 gimple_seq_add_seq (stmt_seqp
, *clist
);
6794 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
6796 gimple_seq_add_stmt (stmt_seqp
, stmt
);
6800 /* Generate code to implement the COPYPRIVATE clauses. */
6803 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
6808 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6810 tree var
, new_var
, ref
, x
;
6812 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6814 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
6817 var
= OMP_CLAUSE_DECL (c
);
6818 by_ref
= use_pointer_for_field (var
, NULL
);
6820 ref
= build_sender_ref (var
, ctx
);
6821 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
6824 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
6825 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
6827 gimplify_assign (ref
, x
, slist
);
6829 ref
= build_receiver_ref (var
, false, ctx
);
6832 ref
= fold_convert_loc (clause_loc
,
6833 build_pointer_type (TREE_TYPE (new_var
)),
6835 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
6837 if (omp_is_reference (var
))
6839 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
6840 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
6841 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
6843 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
6844 gimplify_and_add (x
, rlist
);
6849 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6850 and REDUCTION from the sender (aka parent) side. */
6853 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
6857 int ignored_looptemp
= 0;
6858 bool is_taskloop
= false;
6860 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
6861 by GOMP_taskloop. */
6862 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
6864 ignored_looptemp
= 2;
6868 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6870 tree val
, ref
, x
, var
;
6871 bool by_ref
, do_in
= false, do_out
= false;
6872 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
6874 switch (OMP_CLAUSE_CODE (c
))
6876 case OMP_CLAUSE_PRIVATE
:
6877 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
6880 case OMP_CLAUSE_FIRSTPRIVATE
:
6881 case OMP_CLAUSE_COPYIN
:
6882 case OMP_CLAUSE_LASTPRIVATE
:
6883 case OMP_CLAUSE_IN_REDUCTION
:
6884 case OMP_CLAUSE__REDUCTEMP_
:
6886 case OMP_CLAUSE_REDUCTION
:
6887 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
6890 case OMP_CLAUSE_SHARED
:
6891 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
6894 case OMP_CLAUSE__LOOPTEMP_
:
6895 if (ignored_looptemp
)
6905 val
= OMP_CLAUSE_DECL (c
);
6906 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6907 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
6908 && TREE_CODE (val
) == MEM_REF
)
6910 val
= TREE_OPERAND (val
, 0);
6911 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
6912 val
= TREE_OPERAND (val
, 0);
6913 if (TREE_CODE (val
) == INDIRECT_REF
6914 || TREE_CODE (val
) == ADDR_EXPR
)
6915 val
= TREE_OPERAND (val
, 0);
6916 if (is_variable_sized (val
))
6920 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
6921 outer taskloop region. */
6922 omp_context
*ctx_for_o
= ctx
;
6924 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
6925 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
6926 ctx_for_o
= ctx
->outer
;
6928 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
6930 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
6931 && is_global_var (var
)
6932 && (val
== OMP_CLAUSE_DECL (c
)
6933 || !is_task_ctx (ctx
)
6934 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
6935 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
6936 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
6937 != POINTER_TYPE
)))))
6940 t
= omp_member_access_dummy_var (var
);
6943 var
= DECL_VALUE_EXPR (var
);
6944 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
6946 var
= unshare_and_remap (var
, t
, o
);
6948 var
= unshare_expr (var
);
6951 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
6953 /* Handle taskloop firstprivate/lastprivate, where the
6954 lastprivate on GIMPLE_OMP_TASK is represented as
6955 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
6956 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
6957 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
6958 if (use_pointer_for_field (val
, ctx
))
6959 var
= build_fold_addr_expr (var
);
6960 gimplify_assign (x
, var
, ilist
);
6961 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
6965 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
6966 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
6967 || val
== OMP_CLAUSE_DECL (c
))
6968 && is_variable_sized (val
))
6970 by_ref
= use_pointer_for_field (val
, NULL
);
6972 switch (OMP_CLAUSE_CODE (c
))
6974 case OMP_CLAUSE_FIRSTPRIVATE
:
6975 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
6977 && is_task_ctx (ctx
))
6978 TREE_NO_WARNING (var
) = 1;
6982 case OMP_CLAUSE_PRIVATE
:
6983 case OMP_CLAUSE_COPYIN
:
6984 case OMP_CLAUSE__LOOPTEMP_
:
6985 case OMP_CLAUSE__REDUCTEMP_
:
6989 case OMP_CLAUSE_LASTPRIVATE
:
6990 if (by_ref
|| omp_is_reference (val
))
6992 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
6999 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
7004 case OMP_CLAUSE_REDUCTION
:
7005 case OMP_CLAUSE_IN_REDUCTION
:
7007 if (val
== OMP_CLAUSE_DECL (c
))
7009 if (is_task_ctx (ctx
))
7010 by_ref
= use_pointer_for_field (val
, ctx
);
7012 do_out
= !(by_ref
|| omp_is_reference (val
));
7015 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
7024 ref
= build_sender_ref (val
, ctx
);
7025 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
7026 gimplify_assign (ref
, x
, ilist
);
7027 if (is_task_ctx (ctx
))
7028 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
7033 ref
= build_sender_ref (val
, ctx
);
7034 gimplify_assign (var
, ref
, olist
);
7039 /* Generate code to implement SHARED from the sender (aka parent)
7040 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7041 list things that got automatically shared. */
7044 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
7046 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
7048 if (ctx
->record_type
== NULL
)
7051 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
7052 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
7054 ovar
= DECL_ABSTRACT_ORIGIN (f
);
7055 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
7058 nvar
= maybe_lookup_decl (ovar
, ctx
);
7059 if (!nvar
|| !DECL_HAS_VALUE_EXPR_P (nvar
))
7062 /* If CTX is a nested parallel directive. Find the immediately
7063 enclosing parallel or workshare construct that contains a
7064 mapping for OVAR. */
7065 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
7067 t
= omp_member_access_dummy_var (var
);
7070 var
= DECL_VALUE_EXPR (var
);
7071 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
7073 var
= unshare_and_remap (var
, t
, o
);
7075 var
= unshare_expr (var
);
7078 if (use_pointer_for_field (ovar
, ctx
))
7080 x
= build_sender_ref (ovar
, ctx
);
7081 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
7082 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
7084 gcc_assert (is_parallel_ctx (ctx
)
7085 && DECL_ARTIFICIAL (ovar
));
7086 /* _condtemp_ clause. */
7087 var
= build_constructor (TREE_TYPE (x
), NULL
);
7090 var
= build_fold_addr_expr (var
);
7091 gimplify_assign (x
, var
, ilist
);
7095 x
= build_sender_ref (ovar
, ctx
);
7096 gimplify_assign (x
, var
, ilist
);
7098 if (!TREE_READONLY (var
)
7099 /* We don't need to receive a new reference to a result
7100 or parm decl. In fact we may not store to it as we will
7101 invalidate any pending RSO and generate wrong gimple
7103 && !((TREE_CODE (var
) == RESULT_DECL
7104 || TREE_CODE (var
) == PARM_DECL
)
7105 && DECL_BY_REFERENCE (var
)))
7107 x
= build_sender_ref (ovar
, ctx
);
7108 gimplify_assign (var
, x
, olist
);
7114 /* Emit an OpenACC head marker call, encapulating the partitioning and
7115 other information that must be processed by the target compiler.
7116 Return the maximum number of dimensions the associated loop might
7117 be partitioned over. */
7120 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
7121 gimple_seq
*seq
, omp_context
*ctx
)
7123 unsigned levels
= 0;
7125 tree gang_static
= NULL_TREE
;
7126 auto_vec
<tree
, 5> args
;
7128 args
.quick_push (build_int_cst
7129 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
7130 args
.quick_push (ddvar
);
7131 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7133 switch (OMP_CLAUSE_CODE (c
))
7135 case OMP_CLAUSE_GANG
:
7136 tag
|= OLF_DIM_GANG
;
7137 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
7138 /* static:* is represented by -1, and we can ignore it, as
7139 scheduling is always static. */
7140 if (gang_static
&& integer_minus_onep (gang_static
))
7141 gang_static
= NULL_TREE
;
7145 case OMP_CLAUSE_WORKER
:
7146 tag
|= OLF_DIM_WORKER
;
7150 case OMP_CLAUSE_VECTOR
:
7151 tag
|= OLF_DIM_VECTOR
;
7155 case OMP_CLAUSE_SEQ
:
7159 case OMP_CLAUSE_AUTO
:
7163 case OMP_CLAUSE_INDEPENDENT
:
7164 tag
|= OLF_INDEPENDENT
;
7167 case OMP_CLAUSE_TILE
:
7178 if (DECL_P (gang_static
))
7179 gang_static
= build_outer_var_ref (gang_static
, ctx
);
7180 tag
|= OLF_GANG_STATIC
;
7183 /* In a parallel region, loops are implicitly INDEPENDENT. */
7184 omp_context
*tgt
= enclosing_target_ctx (ctx
);
7185 if (!tgt
|| is_oacc_parallel (tgt
))
7186 tag
|= OLF_INDEPENDENT
;
7189 /* Tiling could use all 3 levels. */
7193 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7194 Ensure at least one level, or 2 for possible auto
7196 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
7197 << OLF_DIM_BASE
) | OLF_SEQ
));
7199 if (levels
< 1u + maybe_auto
)
7200 levels
= 1u + maybe_auto
;
7203 args
.quick_push (build_int_cst (integer_type_node
, levels
));
7204 args
.quick_push (build_int_cst (integer_type_node
, tag
));
7206 args
.quick_push (gang_static
);
7208 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
7209 gimple_set_location (call
, loc
);
7210 gimple_set_lhs (call
, ddvar
);
7211 gimple_seq_add_stmt (seq
, call
);
7216 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7217 partitioning level of the enclosed region. */
7220 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
7221 tree tofollow
, gimple_seq
*seq
)
7223 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
7224 : IFN_UNIQUE_OACC_TAIL_MARK
);
7225 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
7226 int nargs
= 2 + (tofollow
!= NULL_TREE
);
7227 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
7228 marker
, ddvar
, tofollow
);
7229 gimple_set_location (call
, loc
);
7230 gimple_set_lhs (call
, ddvar
);
7231 gimple_seq_add_stmt (seq
, call
);
7234 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7235 the loop clauses, from which we extract reductions. Initialize
7239 lower_oacc_head_tail (location_t loc
, tree clauses
,
7240 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
7243 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
7244 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
7246 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
7247 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
7248 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
7251 for (unsigned done
= 1; count
; count
--, done
++)
7253 gimple_seq fork_seq
= NULL
;
7254 gimple_seq join_seq
= NULL
;
7256 tree place
= build_int_cst (integer_type_node
, -1);
7257 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7258 fork_kind
, ddvar
, place
);
7259 gimple_set_location (fork
, loc
);
7260 gimple_set_lhs (fork
, ddvar
);
7262 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
7263 join_kind
, ddvar
, place
);
7264 gimple_set_location (join
, loc
);
7265 gimple_set_lhs (join
, ddvar
);
7267 /* Mark the beginning of this level sequence. */
7269 lower_oacc_loop_marker (loc
, ddvar
, true,
7270 build_int_cst (integer_type_node
, count
),
7272 lower_oacc_loop_marker (loc
, ddvar
, false,
7273 build_int_cst (integer_type_node
, done
),
7276 lower_oacc_reductions (loc
, clauses
, place
, inner
,
7277 fork
, join
, &fork_seq
, &join_seq
, ctx
);
7279 /* Append this level to head. */
7280 gimple_seq_add_seq (head
, fork_seq
);
7281 /* Prepend it to tail. */
7282 gimple_seq_add_seq (&join_seq
, *tail
);
7288 /* Mark the end of the sequence. */
7289 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
7290 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
7293 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7294 catch handler and return it. This prevents programs from violating the
7295 structured block semantics with throws. */
7298 maybe_catch_exception (gimple_seq body
)
7303 if (!flag_exceptions
)
7306 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
7307 decl
= lang_hooks
.eh_protect_cleanup_actions ();
7309 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
7311 g
= gimple_build_eh_must_not_throw (decl
);
7312 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
7315 return gimple_seq_alloc_with_stmt (g
);
7319 /* Routines to lower OMP directives into OMP-GIMPLE. */
7321 /* If ctx is a worksharing context inside of a cancellable parallel
7322 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7323 and conditional branch to parallel's cancel_label to handle
7324 cancellation in the implicit barrier. */
7327 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
7330 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
7331 if (gimple_omp_return_nowait_p (omp_return
))
7333 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
7334 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
7335 && outer
->cancellable
)
7337 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
7338 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
7339 tree lhs
= create_tmp_var (c_bool_type
);
7340 gimple_omp_return_set_lhs (omp_return
, lhs
);
7341 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
7342 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
7343 fold_convert (c_bool_type
,
7344 boolean_false_node
),
7345 outer
->cancel_label
, fallthru_label
);
7346 gimple_seq_add_stmt (body
, g
);
7347 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
7349 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
7353 /* Find the first task_reduction or reduction clause or return NULL
7354 if there are none. */
7357 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
7358 enum omp_clause_code ccode
)
7362 clauses
= omp_find_clause (clauses
, ccode
);
7363 if (clauses
== NULL_TREE
)
7365 if (ccode
!= OMP_CLAUSE_REDUCTION
7366 || code
== OMP_TASKLOOP
7367 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
7369 clauses
= OMP_CLAUSE_CHAIN (clauses
);
7373 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
7374 gimple_seq
*, gimple_seq
*);
7376 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7377 CTX is the enclosing OMP context for the current statement. */
7380 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7382 tree block
, control
;
7383 gimple_stmt_iterator tgsi
;
7384 gomp_sections
*stmt
;
7386 gbind
*new_stmt
, *bind
;
7387 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
7389 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
7391 push_gimplify_context ();
7397 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
7398 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
7399 tree rtmp
= NULL_TREE
;
7402 tree type
= build_pointer_type (pointer_sized_int_node
);
7403 tree temp
= create_tmp_var (type
);
7404 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
7405 OMP_CLAUSE_DECL (c
) = temp
;
7406 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
7407 gimple_omp_sections_set_clauses (stmt
, c
);
7408 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
7409 gimple_omp_sections_clauses (stmt
),
7410 &ilist
, &tred_dlist
);
7412 rtmp
= make_ssa_name (type
);
7413 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
7416 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
7417 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
7419 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
7420 &ilist
, &dlist
, ctx
, NULL
);
7422 control
= create_tmp_var (unsigned_type_node
, ".section");
7423 gimple_omp_sections_set_control (stmt
, control
);
7425 new_body
= gimple_omp_body (stmt
);
7426 gimple_omp_set_body (stmt
, NULL
);
7427 tgsi
= gsi_start (new_body
);
7428 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
7433 sec_start
= gsi_stmt (tgsi
);
7434 sctx
= maybe_lookup_ctx (sec_start
);
7437 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
7438 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
7439 GSI_CONTINUE_LINKING
);
7440 gimple_omp_set_body (sec_start
, NULL
);
7442 if (gsi_one_before_end_p (tgsi
))
7444 gimple_seq l
= NULL
;
7445 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
7446 &ilist
, &l
, &clist
, ctx
);
7447 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
7448 gimple_omp_section_set_last (sec_start
);
7451 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
7452 GSI_CONTINUE_LINKING
);
7455 block
= make_node (BLOCK
);
7456 bind
= gimple_build_bind (NULL
, new_body
, block
);
7459 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
7463 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
7464 gcall
*g
= gimple_build_call (fndecl
, 0);
7465 gimple_seq_add_stmt (&olist
, g
);
7466 gimple_seq_add_seq (&olist
, clist
);
7467 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
7468 g
= gimple_build_call (fndecl
, 0);
7469 gimple_seq_add_stmt (&olist
, g
);
7472 block
= make_node (BLOCK
);
7473 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
7474 gsi_replace (gsi_p
, new_stmt
, true);
7476 pop_gimplify_context (new_stmt
);
7477 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
7478 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
7479 if (BLOCK_VARS (block
))
7480 TREE_USED (block
) = 1;
7483 gimple_seq_add_seq (&new_body
, ilist
);
7484 gimple_seq_add_stmt (&new_body
, stmt
);
7485 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
7486 gimple_seq_add_stmt (&new_body
, bind
);
7488 t
= gimple_build_omp_continue (control
, control
);
7489 gimple_seq_add_stmt (&new_body
, t
);
7491 gimple_seq_add_seq (&new_body
, olist
);
7492 if (ctx
->cancellable
)
7493 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
7494 gimple_seq_add_seq (&new_body
, dlist
);
7496 new_body
= maybe_catch_exception (new_body
);
7498 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
7499 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7500 t
= gimple_build_omp_return (nowait
);
7501 gimple_seq_add_stmt (&new_body
, t
);
7502 gimple_seq_add_seq (&new_body
, tred_dlist
);
7503 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
7506 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
7508 gimple_bind_set_body (new_stmt
, new_body
);
7512 /* A subroutine of lower_omp_single. Expand the simple form of
7513 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7515 if (GOMP_single_start ())
7517 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7519 FIXME. It may be better to delay expanding the logic of this until
7520 pass_expand_omp. The expanded logic may make the job more difficult
7521 to a synchronization analysis pass. */
7524 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
7526 location_t loc
= gimple_location (single_stmt
);
7527 tree tlabel
= create_artificial_label (loc
);
7528 tree flabel
= create_artificial_label (loc
);
7529 gimple
*call
, *cond
;
7532 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
7533 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
7534 call
= gimple_build_call (decl
, 0);
7535 gimple_call_set_lhs (call
, lhs
);
7536 gimple_seq_add_stmt (pre_p
, call
);
7538 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
7539 fold_convert_loc (loc
, TREE_TYPE (lhs
),
7542 gimple_seq_add_stmt (pre_p
, cond
);
7543 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
7544 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7545 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
7549 /* A subroutine of lower_omp_single. Expand the simple form of
7550 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7552 #pragma omp single copyprivate (a, b, c)
7554 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7557 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7563 GOMP_single_copy_end (©out);
7574 FIXME. It may be better to delay expanding the logic of this until
7575 pass_expand_omp. The expanded logic may make the job more difficult
7576 to a synchronization analysis pass. */
7579 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
7582 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
7583 gimple_seq copyin_seq
;
7584 location_t loc
= gimple_location (single_stmt
);
7586 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
7588 ptr_type
= build_pointer_type (ctx
->record_type
);
7589 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
7591 l0
= create_artificial_label (loc
);
7592 l1
= create_artificial_label (loc
);
7593 l2
= create_artificial_label (loc
);
7595 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
7596 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
7597 t
= fold_convert_loc (loc
, ptr_type
, t
);
7598 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
7600 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
7601 build_int_cst (ptr_type
, 0));
7602 t
= build3 (COND_EXPR
, void_type_node
, t
,
7603 build_and_jump (&l0
), build_and_jump (&l1
));
7604 gimplify_and_add (t
, pre_p
);
7606 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
7608 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
7611 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
7614 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
7615 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
7616 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
7617 gimplify_and_add (t
, pre_p
);
7619 t
= build_and_jump (&l2
);
7620 gimplify_and_add (t
, pre_p
);
7622 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
7624 gimple_seq_add_seq (pre_p
, copyin_seq
);
7626 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
7630 /* Expand code for an OpenMP single directive. */
7633 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7636 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
7638 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
7640 push_gimplify_context ();
7642 block
= make_node (BLOCK
);
7643 bind
= gimple_build_bind (NULL
, NULL
, block
);
7644 gsi_replace (gsi_p
, bind
, true);
7647 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
7648 &bind_body
, &dlist
, ctx
, NULL
);
7649 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
7651 gimple_seq_add_stmt (&bind_body
, single_stmt
);
7653 if (ctx
->record_type
)
7654 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
7656 lower_omp_single_simple (single_stmt
, &bind_body
);
7658 gimple_omp_set_body (single_stmt
, NULL
);
7660 gimple_seq_add_seq (&bind_body
, dlist
);
7662 bind_body
= maybe_catch_exception (bind_body
);
7664 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
7665 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
7666 gimple
*g
= gimple_build_omp_return (nowait
);
7667 gimple_seq_add_stmt (&bind_body_tail
, g
);
7668 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
7669 if (ctx
->record_type
)
7671 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
7672 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
7673 TREE_THIS_VOLATILE (clobber
) = 1;
7674 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
7675 clobber
), GSI_SAME_STMT
);
7677 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
7678 gimple_bind_set_body (bind
, bind_body
);
7680 pop_gimplify_context (bind
);
7682 gimple_bind_append_vars (bind
, ctx
->block_vars
);
7683 BLOCK_VARS (block
) = ctx
->block_vars
;
7684 if (BLOCK_VARS (block
))
7685 TREE_USED (block
) = 1;
7689 /* Expand code for an OpenMP master directive. */
7692 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
7694 tree block
, lab
= NULL
, x
, bfn_decl
;
7695 gimple
*stmt
= gsi_stmt (*gsi_p
);
7697 location_t loc
= gimple_location (stmt
);
7700 push_gimplify_context ();
7702 block
= make_node (BLOCK
);
7703 bind
= gimple_build_bind (NULL
, NULL
, block
);
7704 gsi_replace (gsi_p
, bind
, true);
7705 gimple_bind_add_stmt (bind
, stmt
);
7707 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
7708 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
7709 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, integer_zero_node
);
7710 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
7712 gimplify_and_add (x
, &tseq
);
7713 gimple_bind_add_seq (bind
, tseq
);
7715 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
7716 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
7717 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
7718 gimple_omp_set_body (stmt
, NULL
);
7720 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
7722 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
7724 pop_gimplify_context (bind
);
7726 gimple_bind_append_vars (bind
, ctx
->block_vars
);
7727 BLOCK_VARS (block
) = ctx
->block_vars
;
7730 /* Helper function for lower_omp_task_reductions. For a specific PASS
7731 find out the current clause it should be processed, or return false
7732 if all have been processed already. */
7735 omp_task_reduction_iterate (int pass
, enum tree_code code
,
7736 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
7737 tree
*type
, tree
*next
)
7739 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
7741 if (ccode
== OMP_CLAUSE_REDUCTION
7742 && code
!= OMP_TASKLOOP
7743 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
7745 *decl
= OMP_CLAUSE_DECL (*c
);
7746 *type
= TREE_TYPE (*decl
);
7747 if (TREE_CODE (*decl
) == MEM_REF
)
7754 if (omp_is_reference (*decl
))
7755 *type
= TREE_TYPE (*type
);
7756 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
7759 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
7768 /* Lower task_reduction and reduction clauses (the latter unless CODE is
7769 OMP_TASKGROUP only with task modifier). Register mapping of those in
7770 START sequence and reducing them and unregister them in the END sequence. */
7773 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
7774 gimple_seq
*start
, gimple_seq
*end
)
7776 enum omp_clause_code ccode
7777 = (code
== OMP_TASKGROUP
7778 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
7779 tree cancellable
= NULL_TREE
;
7780 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
7781 if (clauses
== NULL_TREE
)
7783 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7785 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
7786 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
7787 && outer
->cancellable
)
7789 cancellable
= error_mark_node
;
7792 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
)
7795 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
7796 tree
*last
= &TYPE_FIELDS (record_type
);
7800 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
7802 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
7805 DECL_CHAIN (field
) = ifield
;
7806 last
= &DECL_CHAIN (ifield
);
7807 DECL_CONTEXT (field
) = record_type
;
7808 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
7809 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
7810 DECL_CONTEXT (ifield
) = record_type
;
7811 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
7812 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
7814 for (int pass
= 0; pass
< 2; pass
++)
7816 tree decl
, type
, next
;
7817 for (tree c
= clauses
;
7818 omp_task_reduction_iterate (pass
, code
, ccode
,
7819 &c
, &decl
, &type
, &next
); c
= next
)
7822 tree new_type
= type
;
7824 new_type
= remap_type (type
, &ctx
->outer
->cb
);
7826 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
7827 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
7829 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
7831 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
7832 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
7833 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
7836 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
7837 DECL_CONTEXT (field
) = record_type
;
7838 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
7839 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
7841 last
= &DECL_CHAIN (field
);
7843 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
7845 DECL_CONTEXT (bfield
) = record_type
;
7846 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
7847 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
7849 last
= &DECL_CHAIN (bfield
);
7853 layout_type (record_type
);
7855 /* Build up an array which registers with the runtime all the reductions
7856 and deregisters them at the end. Format documented in libgomp/task.c. */
7857 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
7858 tree avar
= create_tmp_var_raw (atype
);
7859 gimple_add_tmp_var (avar
);
7860 TREE_ADDRESSABLE (avar
) = 1;
7861 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
7862 NULL_TREE
, NULL_TREE
);
7863 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
7864 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7865 gimple_seq seq
= NULL
;
7866 tree sz
= fold_convert (pointer_sized_int_node
,
7867 TYPE_SIZE_UNIT (record_type
));
7869 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
7870 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
7871 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
7872 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
7873 ctx
->task_reductions
.create (1 + cnt
);
7874 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
7875 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
7877 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
7878 gimple_seq_add_seq (start
, seq
);
7879 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
7880 NULL_TREE
, NULL_TREE
);
7881 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
7882 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
7883 NULL_TREE
, NULL_TREE
);
7884 t
= build_int_cst (pointer_sized_int_node
,
7885 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
7886 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7887 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
7888 NULL_TREE
, NULL_TREE
);
7889 t
= build_int_cst (pointer_sized_int_node
, -1);
7890 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7891 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
7892 NULL_TREE
, NULL_TREE
);
7893 t
= build_int_cst (pointer_sized_int_node
, 0);
7894 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
7896 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
7897 and for each task reduction checks a bool right after the private variable
7898 within that thread's chunk; if the bool is clear, it hasn't been
7899 initialized and thus isn't going to be reduced nor destructed, otherwise
7900 reduce and destruct it. */
7901 tree idx
= create_tmp_var (size_type_node
);
7902 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
7903 tree num_thr_sz
= create_tmp_var (size_type_node
);
7904 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
7905 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
7906 tree lab3
= NULL_TREE
;
7908 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
7910 /* For worksharing constructs, only perform it in the master thread,
7911 with the exception of cancelled implicit barriers - then only handle
7912 the current thread. */
7913 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
7914 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
7915 tree thr_num
= create_tmp_var (integer_type_node
);
7916 g
= gimple_build_call (t
, 0);
7917 gimple_call_set_lhs (g
, thr_num
);
7918 gimple_seq_add_stmt (end
, g
);
7922 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
7923 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
7924 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
7925 if (code
== OMP_FOR
)
7926 c
= gimple_omp_for_clauses (ctx
->stmt
);
7927 else /* if (code == OMP_SECTIONS) */
7928 c
= gimple_omp_sections_clauses (ctx
->stmt
);
7929 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
7931 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
7933 gimple_seq_add_stmt (end
, g
);
7934 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
7935 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
7936 gimple_seq_add_stmt (end
, g
);
7937 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
7938 build_one_cst (TREE_TYPE (idx
)));
7939 gimple_seq_add_stmt (end
, g
);
7940 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
7941 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
7943 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
7944 gimple_seq_add_stmt (end
, g
);
7945 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
7947 if (code
!= OMP_PARALLEL
)
7949 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
7950 tree num_thr
= create_tmp_var (integer_type_node
);
7951 g
= gimple_build_call (t
, 0);
7952 gimple_call_set_lhs (g
, num_thr
);
7953 gimple_seq_add_stmt (end
, g
);
7954 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
7955 gimple_seq_add_stmt (end
, g
);
7957 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
7961 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7962 OMP_CLAUSE__REDUCTEMP_
);
7963 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
7964 t
= fold_convert (size_type_node
, t
);
7965 gimplify_assign (num_thr_sz
, t
, end
);
7967 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
7968 NULL_TREE
, NULL_TREE
);
7969 tree data
= create_tmp_var (pointer_sized_int_node
);
7970 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
7971 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
7973 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
7974 ptr
= create_tmp_var (build_pointer_type (record_type
));
7976 ptr
= create_tmp_var (ptr_type_node
);
7977 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
7979 tree field
= TYPE_FIELDS (record_type
);
7982 field
= DECL_CHAIN (DECL_CHAIN (field
));
7983 for (int pass
= 0; pass
< 2; pass
++)
7985 tree decl
, type
, next
;
7986 for (tree c
= clauses
;
7987 omp_task_reduction_iterate (pass
, code
, ccode
,
7988 &c
, &decl
, &type
, &next
); c
= next
)
7990 tree var
= decl
, ref
;
7991 if (TREE_CODE (decl
) == MEM_REF
)
7993 var
= TREE_OPERAND (var
, 0);
7994 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7995 var
= TREE_OPERAND (var
, 0);
7997 if (TREE_CODE (var
) == ADDR_EXPR
)
7998 var
= TREE_OPERAND (var
, 0);
7999 else if (TREE_CODE (var
) == INDIRECT_REF
)
8000 var
= TREE_OPERAND (var
, 0);
8001 tree orig_var
= var
;
8002 if (is_variable_sized (var
))
8004 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
8005 var
= DECL_VALUE_EXPR (var
);
8006 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
8007 var
= TREE_OPERAND (var
, 0);
8008 gcc_assert (DECL_P (var
));
8010 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8011 if (orig_var
!= var
)
8012 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
8013 else if (TREE_CODE (v
) == ADDR_EXPR
)
8014 t
= build_fold_addr_expr (t
);
8015 else if (TREE_CODE (v
) == INDIRECT_REF
)
8016 t
= build_fold_indirect_ref (t
);
8017 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
8019 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
8020 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
8021 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
8023 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
8024 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
8025 fold_convert (size_type_node
,
8026 TREE_OPERAND (decl
, 1)));
8030 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
8031 if (!omp_is_reference (decl
))
8032 t
= build_fold_addr_expr (t
);
8034 t
= fold_convert (pointer_sized_int_node
, t
);
8036 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8037 gimple_seq_add_seq (start
, seq
);
8038 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8039 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8040 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8041 t
= unshare_expr (byte_position (field
));
8042 t
= fold_convert (pointer_sized_int_node
, t
);
8043 ctx
->task_reduction_map
->put (c
, cnt
);
8044 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
8047 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
8048 gimple_seq_add_seq (start
, seq
);
8049 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8050 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
8051 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
8053 tree bfield
= DECL_CHAIN (field
);
8055 if (code
== OMP_PARALLEL
|| code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8056 /* In parallel or worksharing all threads unconditionally
8057 initialize all their task reduction private variables. */
8058 cond
= boolean_true_node
;
8059 else if (TREE_TYPE (ptr
) == ptr_type_node
)
8061 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8062 unshare_expr (byte_position (bfield
)));
8064 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
8065 gimple_seq_add_seq (end
, seq
);
8066 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
8067 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
8068 build_int_cst (pbool
, 0));
8071 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
8072 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
8073 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
8074 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
8075 tree condv
= create_tmp_var (boolean_type_node
);
8076 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
8077 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
8079 gimple_seq_add_stmt (end
, g
);
8080 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
8081 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
8083 /* If this reduction doesn't need destruction and parallel
8084 has been cancelled, there is nothing to do for this
8085 reduction, so jump around the merge operation. */
8086 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8087 g
= gimple_build_cond (NE_EXPR
, cancellable
,
8088 build_zero_cst (TREE_TYPE (cancellable
)),
8090 gimple_seq_add_stmt (end
, g
);
8091 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8095 if (TREE_TYPE (ptr
) == ptr_type_node
)
8097 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
8098 unshare_expr (byte_position (field
)));
8100 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
8101 gimple_seq_add_seq (end
, seq
);
8102 tree pbool
= build_pointer_type (TREE_TYPE (field
));
8103 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
8104 build_int_cst (pbool
, 0));
8107 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
8108 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
8110 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
8111 if (TREE_CODE (decl
) != MEM_REF
&& omp_is_reference (decl
))
8112 ref
= build_simple_mem_ref (ref
);
8113 /* reduction(-:var) sums up the partial results, so it acts
8114 identically to reduction(+:var). */
8115 if (rcode
== MINUS_EXPR
)
8117 if (TREE_CODE (decl
) == MEM_REF
)
8119 tree type
= TREE_TYPE (new_var
);
8120 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8121 tree i
= create_tmp_var (TREE_TYPE (v
));
8122 tree ptype
= build_pointer_type (TREE_TYPE (type
));
8125 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
8126 tree vv
= create_tmp_var (TREE_TYPE (v
));
8127 gimplify_assign (vv
, v
, start
);
8130 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
8131 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
8132 new_var
= build_fold_addr_expr (new_var
);
8133 new_var
= fold_convert (ptype
, new_var
);
8134 ref
= fold_convert (ptype
, ref
);
8135 tree m
= create_tmp_var (ptype
);
8136 gimplify_assign (m
, new_var
, end
);
8138 m
= create_tmp_var (ptype
);
8139 gimplify_assign (m
, ref
, end
);
8141 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
8142 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
8143 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
8144 gimple_seq_add_stmt (end
, gimple_build_label (body
));
8145 tree priv
= build_simple_mem_ref (new_var
);
8146 tree out
= build_simple_mem_ref (ref
);
8147 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8149 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8150 tree decl_placeholder
8151 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
8152 tree lab6
= NULL_TREE
;
8155 /* If this reduction needs destruction and parallel
8156 has been cancelled, jump around the merge operation
8157 to the destruction. */
8158 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8159 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8160 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8161 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8163 gimple_seq_add_stmt (end
, g
);
8164 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8166 SET_DECL_VALUE_EXPR (placeholder
, out
);
8167 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8168 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
8169 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
8170 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8171 gimple_seq_add_seq (end
,
8172 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8173 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8174 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8176 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8177 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
8180 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8181 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
8184 gimple_seq tseq
= NULL
;
8185 gimplify_stmt (&x
, &tseq
);
8186 gimple_seq_add_seq (end
, tseq
);
8191 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
8192 out
= unshare_expr (out
);
8193 gimplify_assign (out
, x
, end
);
8196 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
8197 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8198 gimple_seq_add_stmt (end
, g
);
8199 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
8200 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8201 gimple_seq_add_stmt (end
, g
);
8202 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
8203 build_int_cst (TREE_TYPE (i
), 1));
8204 gimple_seq_add_stmt (end
, g
);
8205 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
8206 gimple_seq_add_stmt (end
, g
);
8207 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
8209 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8211 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8212 tree oldv
= NULL_TREE
;
8213 tree lab6
= NULL_TREE
;
8216 /* If this reduction needs destruction and parallel
8217 has been cancelled, jump around the merge operation
8218 to the destruction. */
8219 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
8220 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
8221 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
8222 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
8224 gimple_seq_add_stmt (end
, g
);
8225 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
8227 if (omp_is_reference (decl
)
8228 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
8230 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8231 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
8232 tree refv
= create_tmp_var (TREE_TYPE (ref
));
8233 gimplify_assign (refv
, ref
, end
);
8234 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
8235 SET_DECL_VALUE_EXPR (placeholder
, ref
);
8236 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8237 tree d
= maybe_lookup_decl (decl
, ctx
);
8239 if (DECL_HAS_VALUE_EXPR_P (d
))
8240 oldv
= DECL_VALUE_EXPR (d
);
8241 if (omp_is_reference (var
))
8243 tree v
= fold_convert (TREE_TYPE (d
),
8244 build_fold_addr_expr (new_var
));
8245 SET_DECL_VALUE_EXPR (d
, v
);
8248 SET_DECL_VALUE_EXPR (d
, new_var
);
8249 DECL_HAS_VALUE_EXPR_P (d
) = 1;
8250 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
8252 SET_DECL_VALUE_EXPR (d
, oldv
);
8255 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
8256 DECL_HAS_VALUE_EXPR_P (d
) = 0;
8258 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8259 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8260 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
8261 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
8263 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
8264 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
8267 gimple_seq tseq
= NULL
;
8268 gimplify_stmt (&x
, &tseq
);
8269 gimple_seq_add_seq (end
, tseq
);
8274 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
8275 ref
= unshare_expr (ref
);
8276 gimplify_assign (ref
, x
, end
);
8278 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
8280 field
= DECL_CHAIN (bfield
);
8284 if (code
== OMP_TASKGROUP
)
8286 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
8287 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8288 gimple_seq_add_stmt (start
, g
);
8293 if (code
== OMP_FOR
)
8294 c
= gimple_omp_for_clauses (ctx
->stmt
);
8295 else if (code
== OMP_SECTIONS
)
8296 c
= gimple_omp_sections_clauses (ctx
->stmt
);
8298 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
8299 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
8300 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
8301 build_fold_addr_expr (avar
));
8302 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
8305 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
8306 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
8308 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
8309 gimple_seq_add_stmt (end
, g
);
8310 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
8311 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
8313 enum built_in_function bfn
8314 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
8315 t
= builtin_decl_explicit (bfn
);
8316 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
8320 arg
= create_tmp_var (c_bool_type
);
8321 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
8325 arg
= build_int_cst (c_bool_type
, 0);
8326 g
= gimple_build_call (t
, 1, arg
);
8330 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
8331 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
8333 gimple_seq_add_stmt (end
, g
);
8334 t
= build_constructor (atype
, NULL
);
8335 TREE_THIS_VOLATILE (t
) = 1;
8336 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
/* Expand code for an OpenMP taskgroup directive.  Replaces the
   GIMPLE_OMP_TASKGROUP statement at *GSI_P with a GIMPLE_BIND that
   brackets the lowered body with GOMP_taskgroup_start and the
   task-reduction registration/teardown code.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  /* Sequence that receives the task-reduction teardown code; it is
     appended after the OMP return so reductions are finalized when the
     taskgroup region exits.  */
  gimple_seq dseq = NULL;
  tree block = make_node (BLOCK);

  /* Wrap the taskgroup statement in a new GIMPLE_BIND which takes its
     place in the statement stream.  */
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  push_gimplify_context ();

  /* Runtime call marking the start of the taskgroup region.  */
  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
                         0);
  gimple_bind_add_stmt (bind, x);

  /* Emit registration code for any task_reduction clauses into the bind
     body, and the matching deregistration/merge code into DSEQ.  */
  lower_omp_task_reductions (ctx, OMP_TASKGROUP,
                             gimple_omp_taskgroup_clauses (stmt),
                             gimple_bind_body_ptr (bind), &dseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
  gimple_bind_add_seq (bind, dseq);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.
   First merges depend(sink:...) clauses from adjacent ordered constructs
   into ORD_STMT, then folds all sink vectors into a single canonical one.
   NOTE(review): only valid when the enclosing construct is a
   GIMPLE_OMP_FOR; bails out early otherwise.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
                           omp_context *ctx)
{
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
         #pragma omp ordered depend(sink:...) constructs
         into one #pragma omp ordered depend(sink:...), so that
         we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          if (is_gimple_debug (stmt)
              || gimple_code (stmt) == GIMPLE_NOP)
            {
              gsi_next (&gsi);
              continue;
            }
          if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
            break;
          gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
          c = gimple_omp_ordered_clauses (ord_stmt2);
          if (c == NULL_TREE
              || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
              || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
            break;
          /* Splice the neighbor's clause chain onto the end of ours and
             delete the now-redundant ordered statement.  */
          while (*list_p)
            list_p = &OMP_CLAUSE_CHAIN (*list_p);
          *list_p = c;
          gsi_remove (&gsi, true);
        }
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
  */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
        goto next_ordered_clause;

      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
           vec && TREE_CODE (vec) == TREE_LIST;
           vec = TREE_CHAIN (vec), ++i)
        {
          gcc_assert (i < len);

          /* omp_extract_for_data has canonicalized the condition.  */
          gcc_assert (fd.loops[i].cond_code == LT_EXPR
                      || fd.loops[i].cond_code == GT_EXPR);
          bool forward = fd.loops[i].cond_code == LT_EXPR;
          bool maybe_lexically_later = true;

          /* While the committee makes up its mind, bail if we have any
             non-constant steps.  */
          if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
            goto lower_omp_ordered_ret;

          tree itype = TREE_TYPE (TREE_VALUE (vec));
          if (POINTER_TYPE_P (itype))
            itype = sizetype;
          wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
                                            TYPE_PRECISION (itype),
                                            TYPE_SIGN (itype));

          /* Ignore invalid offsets that are not multiples of the step.  */
          if (!wi::multiple_of_p (wi::abs (offset),
                                  wi::abs (wi::to_wide (fd.loops[i].step)),
                                  UNSIGNED))
            {
              warning_at (OMP_CLAUSE_LOCATION (c), 0,
                          "ignoring sink clause with offset that is not "
                          "a multiple of the loop step");
              remove = true;
              goto next_ordered_clause;
            }

          /* Calculate the first dimension.  The first dimension of
             the folded dependency vector is the GCD of the first
             elements, while ignoring any first elements whose offset
             is 0.  */
          if (i == 0)
            {
              /* Ignore dependence vectors whose first dimension is 0.  */
              if (offset == 0)
                {
                  remove = true;
                  goto next_ordered_clause;
                }
              else
                {
                  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
                    {
                      error_at (OMP_CLAUSE_LOCATION (c),
                                "first offset must be in opposite direction "
                                "of loop iterations");
                      goto lower_omp_ordered_ret;
                    }
                  /* Store the magnitude; NEG_OFFSET_P remembers the sign
                     so it can be restored when building the folded
                     clause below.  */
                  if (forward)
                    offset = -offset;
                  neg_offset_p = forward;
                  /* Initialize the first time around.  */
                  if (folded_dep == NULL_TREE)
                    {
                      folded_dep = c;
                      folded_deps[0] = offset;
                    }
                  else
                    folded_deps[0] = wi::gcd (folded_deps[0],
                                              offset, UNSIGNED);
                }
            }
          else
            {
              /* Calculate minimum for the remaining dimensions.  */
              folded_deps[len + i - 1] = offset;
              if (folded_dep == c)
                folded_deps[i] = offset;
              else if (maybe_lexically_later
                       && !wi::eq_p (folded_deps[i], offset))
                {
                  if (forward ^ wi::gts_p (folded_deps[i], offset))
                    {
                      unsigned int j;
                      folded_dep = c;
                      for (j = 1; j <= i; j++)
                        folded_deps[j] = folded_deps[len + j - 1];
                    }
                  else
                    maybe_lexically_later = false;
                }
            }
        }
      gcc_assert (i == len);

      /* This clause has been folded into FOLDED_DEP; drop it.  */
      remove = true;

    next_ordered_clause:
      if (remove)
        *list_p = OMP_CLAUSE_CHAIN (c);
      else
        list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      if (neg_offset_p)
        folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
        itype = sizetype;

      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
        = wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
/* Expand code for an OpenMP ordered directive.  Replaces the statement
   at *GSI_P with a GIMPLE_BIND wrapping the lowered body in the
   appropriate start/end runtime or internal-function calls.  For SIMT
   offloading, additionally builds a lane-serialization loop.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
                               OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
                                  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
                       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This is needs to be moved to the expansion to verify various
         conditions only testable on cfg with dominators computed, and also
         all the depend clauses to be merged still might need to be available
         for the runtime checks.  */
      if (0)
        lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      /* SIMD ordered regions use an internal function recognized by the
         vectorizer rather than a libgomp call.  */
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
                                      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
                           0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      /* Build a loop that runs the body once per SIMT lane: COUNTER
         starts at the lane number, the body executes when
         GOMP_SIMT_ORDERED_PRED allows it, and the loop repeats while
         any lane still has a nonnegative counter (GOMP_SIMT_VOTE_ANY).  */
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
                                    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
                           0);
  gimple_bind_add_stmt (bind, x);
  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
8735 /* Expand code for an OpenMP scan directive and the structured block
8736 before the scan directive. */
8739 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8741 gimple
*stmt
= gsi_stmt (*gsi_p
);
8743 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
8744 tree lane
= NULL_TREE
;
8745 gimple_seq before
= NULL
;
8746 omp_context
*octx
= ctx
->outer
;
8748 if (octx
->scan_exclusive
&& !has_clauses
)
8750 gimple_stmt_iterator gsi2
= *gsi_p
;
8752 gimple
*stmt2
= gsi_stmt (gsi2
);
8753 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
8754 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
8755 the one with exclusive clause(s), comes first. */
8757 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
8758 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
8760 gsi_remove (gsi_p
, false);
8761 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
8762 ctx
= maybe_lookup_ctx (stmt2
);
8764 lower_omp_scan (gsi_p
, ctx
);
8769 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
8770 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
8771 && (gimple_omp_for_kind (octx
->stmt
) & GF_OMP_FOR_SIMD
));
8772 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
8773 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
8774 && !gimple_omp_for_combined_p (octx
->stmt
));
8775 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
8776 if (is_for_simd
&& octx
->for_simd_scan_phase
)
8779 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
8780 OMP_CLAUSE__SIMDUID_
))
8782 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
8783 lane
= create_tmp_var (unsigned_type_node
);
8784 tree t
= build_int_cst (integer_type_node
,
8786 : octx
->scan_inclusive
? 2 : 3);
8788 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
8789 gimple_call_set_lhs (g
, lane
);
8790 gimple_seq_add_stmt (&before
, g
);
8793 if (is_simd
|| is_for
)
8795 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
8796 c
; c
= OMP_CLAUSE_CHAIN (c
))
8797 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
8798 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
8800 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8801 tree var
= OMP_CLAUSE_DECL (c
);
8802 tree new_var
= lookup_decl (var
, octx
);
8804 tree var2
= NULL_TREE
;
8805 tree var3
= NULL_TREE
;
8806 tree var4
= NULL_TREE
;
8807 tree lane0
= NULL_TREE
;
8808 tree new_vard
= new_var
;
8809 if (omp_is_reference (var
))
8811 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
8814 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
8816 val
= DECL_VALUE_EXPR (new_vard
);
8817 if (new_vard
!= new_var
)
8819 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
8820 val
= TREE_OPERAND (val
, 0);
8822 if (TREE_CODE (val
) == ARRAY_REF
8823 && VAR_P (TREE_OPERAND (val
, 0)))
8825 tree v
= TREE_OPERAND (val
, 0);
8826 if (lookup_attribute ("omp simd array",
8827 DECL_ATTRIBUTES (v
)))
8829 val
= unshare_expr (val
);
8830 lane0
= TREE_OPERAND (val
, 1);
8831 TREE_OPERAND (val
, 1) = lane
;
8832 var2
= lookup_decl (v
, octx
);
8833 if (octx
->scan_exclusive
)
8834 var4
= lookup_decl (var2
, octx
);
8836 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8837 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
8840 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
8841 var2
, lane
, NULL_TREE
, NULL_TREE
);
8842 TREE_THIS_NOTRAP (var2
) = 1;
8843 if (octx
->scan_exclusive
)
8845 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
8846 var4
, lane
, NULL_TREE
,
8848 TREE_THIS_NOTRAP (var4
) = 1;
8859 var2
= build_outer_var_ref (var
, octx
);
8860 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8862 var3
= maybe_lookup_decl (new_vard
, octx
);
8863 if (var3
== new_vard
|| var3
== NULL_TREE
)
8865 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
8867 var4
= maybe_lookup_decl (var3
, octx
);
8868 if (var4
== var3
|| var4
== NULL_TREE
)
8870 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
8881 && octx
->scan_exclusive
8883 && var4
== NULL_TREE
)
8884 var4
= create_tmp_var (TREE_TYPE (val
));
8886 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8888 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
8893 /* If we've added a separate identity element
8894 variable, copy it over into val. */
8895 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
8897 gimplify_and_add (x
, &before
);
8899 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
8901 /* Otherwise, assign to it the identity element. */
8902 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
8904 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
8905 tree ref
= build_outer_var_ref (var
, octx
);
8906 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
8907 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
8910 if (new_vard
!= new_var
)
8911 val
= build_fold_addr_expr_loc (clause_loc
, val
);
8912 SET_DECL_VALUE_EXPR (new_vard
, val
);
8914 SET_DECL_VALUE_EXPR (placeholder
, ref
);
8915 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8916 lower_omp (&tseq
, octx
);
8918 SET_DECL_VALUE_EXPR (new_vard
, x
);
8919 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
8920 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
8921 gimple_seq_add_seq (&before
, tseq
);
8923 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
8929 if (octx
->scan_exclusive
)
8931 tree v4
= unshare_expr (var4
);
8932 tree v2
= unshare_expr (var2
);
8933 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
8934 gimplify_and_add (x
, &before
);
8936 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
8937 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
8938 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
8940 if (x
&& new_vard
!= new_var
)
8941 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
8943 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
8944 SET_DECL_VALUE_EXPR (placeholder
, var2
);
8945 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
8946 lower_omp (&tseq
, octx
);
8947 gimple_seq_add_seq (&before
, tseq
);
8948 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8950 SET_DECL_VALUE_EXPR (new_vard
, x
);
8951 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
8952 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
8953 if (octx
->scan_inclusive
)
8955 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
8957 gimplify_and_add (x
, &before
);
8959 else if (lane0
== NULL_TREE
)
8961 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
8963 gimplify_and_add (x
, &before
);
8971 /* input phase. Set val to initializer before
8973 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
8974 gimplify_assign (val
, x
, &before
);
8979 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
8980 if (code
== MINUS_EXPR
)
8983 tree x
= build2 (code
, TREE_TYPE (var2
),
8984 unshare_expr (var2
), unshare_expr (val
));
8985 if (octx
->scan_inclusive
)
8987 gimplify_assign (unshare_expr (var2
), x
, &before
);
8988 gimplify_assign (val
, var2
, &before
);
8992 gimplify_assign (unshare_expr (var4
),
8993 unshare_expr (var2
), &before
);
8994 gimplify_assign (var2
, x
, &before
);
8995 if (lane0
== NULL_TREE
)
8996 gimplify_assign (val
, var4
, &before
);
9000 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
9002 tree vexpr
= unshare_expr (var4
);
9003 TREE_OPERAND (vexpr
, 1) = lane0
;
9004 if (new_vard
!= new_var
)
9005 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
9006 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
9010 if (is_simd
&& !is_for_simd
)
9012 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
9013 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
9014 gsi_replace (gsi_p
, gimple_build_nop (), true);
9017 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
9020 gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (stmt
));
9021 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
9026 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9027 substitution of a couple of function calls. But in the NAMED case,
9028 requires that languages coordinate a symbol name. It is therefore
9029 best put here in common code. */
9031 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
/* Lower a GIMPLE_OMP_CRITICAL statement at *GSI_P in context CTX:
   replace the directive with a GIMPLE_BIND that calls the libgomp
   critical-section entry builtin, the lowered body, then the exit builtin.
   NOTE(review): this extraction has lost some original source lines
   (declarations, braces, a NULL-check on the hash_map lookup); the code
   below is preserved byte-for-byte — do not treat it as compilable.  */
9034 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9037 tree name
, lock
, unlock
;
9038 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
9040 location_t loc
= gimple_location (stmt
);
/* NAME is non-NULL for '#pragma omp critical (name)'; named sections
   get a dedicated, program-wide mutex symbol.  */
9043 name
= gimple_omp_critical_name (stmt
);
/* Lazily create the per-translation cache mapping critical-section
   names to their mutex VAR_DECLs (GC-allocated, see GTY decl above).  */
9048 if (!critical_name_mutexes
)
9049 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
9051 tree
*n
= critical_name_mutexes
->get (name
);
/* Cache miss (guard lost in extraction — presumably 'if (n == NULL)'):
   build a public, common, artificial pointer-sized mutex symbol named
   ".gomp_critical_user_<name>" so all TUs share one mutex per name.  */
9056 decl
= create_tmp_var_raw (ptr_type_node
);
9058 new_str
= ACONCAT ((".gomp_critical_user_",
9059 IDENTIFIER_POINTER (name
), NULL
));
9060 DECL_NAME (decl
) = get_identifier (new_str
);
9061 TREE_PUBLIC (decl
) = 1;
9062 TREE_STATIC (decl
) = 1;
9063 DECL_COMMON (decl
) = 1;
9064 DECL_ARTIFICIAL (decl
) = 1;
9065 DECL_IGNORED_P (decl
) = 1;
9067 varpool_node::finalize_decl (decl
);
9069 critical_name_mutexes
->put (name
, decl
);
9074 /* If '#pragma omp critical' is inside offloaded region or
9075 inside function marked as offloadable, the symbol must be
9076 marked as offloadable too. */
9078 if (cgraph_node::get (current_function_decl
)->offloadable
)
9079 varpool_node::get_create (decl
)->offloadable
= 1;
/* Also mark the mutex offloadable when any enclosing context is an
   offloaded region.  */
9081 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
9082 if (is_gimple_omp_offloaded (octx
->stmt
))
9084 varpool_node::get_create (decl
)->offloadable
= 1;
/* Named case: lock/unlock take the address of the per-name mutex.  */
9088 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
9089 lock
= build_call_expr_loc (loc
, lock
, 1,
9090 build_fold_addr_expr_loc (loc
, decl
));
9092 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
9093 unlock
= build_call_expr_loc (loc
, unlock
, 1,
9094 build_fold_addr_expr_loc (loc
, decl
));
/* Unnamed case: all anonymous critical sections share one runtime
   mutex, so the start/end builtins take no arguments.  */
9098 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
9099 lock
= build_call_expr_loc (loc
, lock
, 0);
9101 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
9102 unlock
= build_call_expr_loc (loc
, unlock
, 0);
/* Wrap everything in a new GIMPLE_BIND replacing the directive:
   lock call, lowered body (exception-protected so unlock still runs),
   unlock call, then the OMP return marker.  */
9105 push_gimplify_context ();
9107 block
= make_node (BLOCK
);
9108 bind
= gimple_build_bind (NULL
, NULL
, block
);
9109 gsi_replace (gsi_p
, bind
, true);
9110 gimple_bind_add_stmt (bind
, stmt
);
9112 tbody
= gimple_bind_body (bind
);
9113 gimplify_and_add (lock
, &tbody
);
9114 gimple_bind_set_body (bind
, tbody
);
9116 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9117 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9118 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9119 gimple_omp_set_body (stmt
, NULL
);
9121 tbody
= gimple_bind_body (bind
);
9122 gimplify_and_add (unlock
, &tbody
);
9123 gimple_bind_set_body (bind
, tbody
);
9125 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
/* Collect gimplification temporaries into the bind's variable list.  */
9127 pop_gimplify_context (bind
);
9128 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9129 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
9132 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9133 for a lastprivate clause. Given a loop control predicate of (V
9134 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9135 is appended to *DLIST, iterator initialization is appended to
9136 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9137 to be emitted in a critical section. */
/* Emit the gating predicate for lastprivate clauses of the loop described
   by FD (see the comment above): the clauses must run only in the thread
   that executed the final iteration, so we gate on !(V cond N2) after the
   loop, i.e. on the inverted loop condition.  Lowered clause code goes to
   *DLIST, iterator initialization to *BODY_P, and conditional-lastprivate
   code that needs a critical section to *CLIST.
   NOTE(review): some original source lines were lost in extraction
   (braces, local decls such as the loop index, a trailing call argument);
   the code below is preserved byte-for-byte.  */
9140 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
9141 gimple_seq
*dlist
, gimple_seq
*clist
,
9142 struct omp_context
*ctx
)
9144 tree clauses
, cond
, vinit
;
9145 enum tree_code cond_code
;
/* Invert the loop's comparison: the lastprivate copy-out fires when the
   iteration variable has run past the bound.  */
9148 cond_code
= fd
->loop
.cond_code
;
9149 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
9151 /* When possible, use a strict equality expression. This can let VRP
9152 type optimizations deduce the value and remove a copy. */
9153 if (tree_fits_shwi_p (fd
->loop
.step
))
9155 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
9156 if (step
== 1 || step
== -1)
9157 cond_code
= EQ_EXPR
;
/* HSA grid loops compute the predicate specially.  */
9160 if (gimple_omp_for_kind (fd
->for_stmt
) == GF_OMP_FOR_KIND_GRID_LOOP
9161 || gimple_omp_for_grid_phony (fd
->for_stmt
))
9162 cond
= omp_grid_lastprivate_predicate (fd
);
9165 tree n2
= fd
->loop
.n2
;
/* For collapsed loops combined into an outer construct, N2 may be a
   _looptemp_ computed by the outer construct; dig it out so the
   predicate compares against the real overall bound.  */
9166 if (fd
->collapse
> 1
9167 && TREE_CODE (n2
) != INTEGER_CST
9168 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
9170 struct omp_context
*taskreg_ctx
= NULL
;
9171 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
9173 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
9174 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
9175 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
/* Outer gfor itself combined into a parallel: the temps live on the
   enclosing parallel's clause list.  */
9177 if (gimple_omp_for_combined_into_p (gfor
))
9179 gcc_assert (ctx
->outer
->outer
9180 && is_parallel_ctx (ctx
->outer
->outer
));
9181 taskreg_ctx
= ctx
->outer
->outer
;
/* Otherwise re-extract the outer for's own data to get its N2.  */
9185 struct omp_for_data outer_fd
;
9186 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
9187 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
9190 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
9191 taskreg_ctx
= ctx
->outer
->outer
;
9193 else if (is_taskreg_ctx (ctx
->outer
))
9194 taskreg_ctx
= ctx
->outer
;
/* Walk past the per-dimension _looptemp_ clauses to the one holding
   the overall bound the combined construct computed.  */
9198 tree taskreg_clauses
9199 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
9200 tree innerc
= omp_find_clause (taskreg_clauses
,
9201 OMP_CLAUSE__LOOPTEMP_
);
9202 gcc_assert (innerc
);
9203 for (i
= 0; i
< fd
->collapse
; i
++)
9205 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
9206 OMP_CLAUSE__LOOPTEMP_
);
9207 gcc_assert (innerc
);
9209 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
9210 OMP_CLAUSE__LOOPTEMP_
);
9212 n2
= fold_convert (TREE_TYPE (n2
),
9213 lookup_decl (OMP_CLAUSE_DECL (innerc
),
/* The lastprivate gate itself: !(V cond N2) expressed as (V cond' N2).  */
9217 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
9220 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
9222 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
/* Only emit the iterator pre-initialization when some lastprivate code
   was actually generated.  */
9223 if (!gimple_seq_empty_p (stmts
))
9225 gimple_seq_add_seq (&stmts
, *dlist
);
9228 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9229 vinit
= fd
->loop
.n1
;
9230 if (cond_code
== EQ_EXPR
9231 && tree_fits_shwi_p (fd
->loop
.n2
)
9232 && ! integer_zerop (fd
->loop
.n2
))
9233 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
9235 vinit
= unshare_expr (vinit
);
9237 /* Initialize the iterator variable, so that threads that don't execute
9238 any iterations don't execute the lastprivate clauses by accident. */
9239 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
9243 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
/* walk_gimple_seq callback locating a '#pragma omp scan' statement.
   On finding GIMPLE_OMP_SCAN it stores the iterator into WI->info
   (a gimple_stmt_iterator *) and returns non-NULL to stop the walk.
   NOTE(review): the return-type line, braces and default case were lost
   in extraction; the code below is preserved byte-for-byte.  */
9246 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
9247 struct walk_stmt_info
*wi
)
9249 gimple
*stmt
= gsi_stmt (*gsi_p
);
/* By default, do not descend into operands of STMT.  */
9251 *handled_ops_p
= true;
9252 switch (gimple_code (stmt
))
/* A simd loop combined into this construct may contain the scan;
   clear *handled_ops_p so the walker descends into it.  */
9256 case GIMPLE_OMP_FOR
:
9257 if ((gimple_omp_for_kind (stmt
) & GF_OMP_FOR_SIMD
)
9258 && gimple_omp_for_combined_into_p (stmt
))
9259 *handled_ops_p
= false;
/* Found it: record the iterator for the caller and terminate the walk
   by returning a non-NULL tree.  */
9262 case GIMPLE_OMP_SCAN
:
9263 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
9264 return integer_zero_node
;
9271 /* Helper function for lower_omp_for, add transformations for a worksharing
9272 loop with scan directives inside of it.
9273 For worksharing loop not combined with simd, transform:
9274 #pragma omp for reduction(inscan,+:r) private(i)
9275 for (i = 0; i < n; i = i + 1)
9280 #pragma omp scan inclusive(r)
9286 into two worksharing loops + code to merge results:
9288 num_threads = omp_get_num_threads ();
9289 thread_num = omp_get_thread_num ();
9290 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9295 // For UDRs this is UDR init, or if ctors are needed, copy from
9296 // var3 that has been constructed to contain the neutral element.
9300 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9301 // a shared array with num_threads elements and rprivb to a local array
9302 // number of elements equal to the number of (contiguous) iterations the
9303 // current thread will perform. controlb and controlp variables are
9304 // temporaries to handle deallocation of rprivb at the end of second
9306 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9307 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9308 for (i = 0; i < n; i = i + 1)
9311 // For UDRs this is UDR init or copy from var3.
9313 // This is the input phase from user code.
9317 // For UDRs this is UDR merge.
9319 // Rather than handing it over to the user, save to local thread's
9321 rprivb[ivar] = var2;
9322 // For exclusive scan, the above two statements are swapped.
9326 // And remember the final value from this thread's into the shared
9328 rpriva[(sizetype) thread_num] = var2;
9329 // If more than one thread, compute using Work-Efficient prefix sum
9330 // the inclusive parallel scan of the rpriva array.
9331 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9336 num_threadsu = (unsigned int) num_threads;
9337 thread_numup1 = (unsigned int) thread_num + 1;
9340 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9344 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9349 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9350 mul = REALPART_EXPR <cplx>;
9351 ovf = IMAGPART_EXPR <cplx>;
9352 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9355 andvm1 = andv + 4294967295;
9357 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9359 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9360 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9361 rpriva[l] = rpriva[l - k] + rpriva[l];
9363 if (down == 0) goto <D.2121>; else goto <D.2122>;
9371 if (k != 0) goto <D.2108>; else goto <D.2103>;
9373 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9375 // For UDRs this is UDR init or copy from var3.
9379 var2 = rpriva[thread_num - 1];
9382 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9383 reduction(inscan,+:r) private(i)
9384 for (i = 0; i < n; i = i + 1)
9387 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9388 r = var2 + rprivb[ivar];
9391 // This is the scan phase from user code.
9393 // Plus a bump of the iterator.
9399 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
9400 struct omp_for_data
*fd
, omp_context
*ctx
)
9402 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
9403 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
9405 gimple_seq body
= gimple_omp_body (stmt
);
9406 gimple_stmt_iterator input1_gsi
= gsi_none ();
9407 struct walk_stmt_info wi
;
9408 memset (&wi
, 0, sizeof (wi
));
9410 wi
.info
= (void *) &input1_gsi
;
9411 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
9412 gcc_assert (!gsi_end_p (input1_gsi
));
9414 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
9415 gimple_stmt_iterator gsi
= input1_gsi
;
9417 gimple_stmt_iterator scan1_gsi
= gsi
;
9418 gimple
*scan_stmt1
= gsi_stmt (gsi
);
9419 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
9421 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
9422 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
9423 gimple_omp_set_body (input_stmt1
, NULL
);
9424 gimple_omp_set_body (scan_stmt1
, NULL
);
9425 gimple_omp_set_body (stmt
, NULL
);
9427 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
9428 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
9429 gimple_omp_set_body (stmt
, body
);
9430 gimple_omp_set_body (input_stmt1
, input_body
);
9432 gimple_stmt_iterator input2_gsi
= gsi_none ();
9433 memset (&wi
, 0, sizeof (wi
));
9435 wi
.info
= (void *) &input2_gsi
;
9436 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
9437 gcc_assert (!gsi_end_p (input2_gsi
));
9439 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
9442 gimple_stmt_iterator scan2_gsi
= gsi
;
9443 gimple
*scan_stmt2
= gsi_stmt (gsi
);
9444 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
9445 gimple_omp_set_body (scan_stmt2
, scan_body
);
9447 gimple_stmt_iterator input3_gsi
= gsi_none ();
9448 gimple_stmt_iterator scan3_gsi
= gsi_none ();
9449 gimple_stmt_iterator input4_gsi
= gsi_none ();
9450 gimple_stmt_iterator scan4_gsi
= gsi_none ();
9451 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
9452 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
9453 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
9456 memset (&wi
, 0, sizeof (wi
));
9458 wi
.info
= (void *) &input3_gsi
;
9459 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
9460 gcc_assert (!gsi_end_p (input3_gsi
));
9462 input_stmt3
= gsi_stmt (input3_gsi
);
9466 scan_stmt3
= gsi_stmt (gsi
);
9467 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
9469 memset (&wi
, 0, sizeof (wi
));
9471 wi
.info
= (void *) &input4_gsi
;
9472 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
9473 gcc_assert (!gsi_end_p (input4_gsi
));
9475 input_stmt4
= gsi_stmt (input4_gsi
);
9479 scan_stmt4
= gsi_stmt (gsi
);
9480 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
9482 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
9483 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
9486 tree num_threads
= create_tmp_var (integer_type_node
);
9487 tree thread_num
= create_tmp_var (integer_type_node
);
9488 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
9489 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9490 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
9491 gimple_call_set_lhs (g
, num_threads
);
9492 gimple_seq_add_stmt (body_p
, g
);
9493 g
= gimple_build_call (threadnum_decl
, 0);
9494 gimple_call_set_lhs (g
, thread_num
);
9495 gimple_seq_add_stmt (body_p
, g
);
9497 tree ivar
= create_tmp_var (sizetype
);
9498 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
9499 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
9500 tree k
= create_tmp_var (unsigned_type_node
);
9501 tree l
= create_tmp_var (unsigned_type_node
);
9503 gimple_seq clist
= NULL
, mdlist
= NULL
;
9504 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
9505 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
9506 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
9507 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
9508 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9509 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9510 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
9512 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
9513 tree var
= OMP_CLAUSE_DECL (c
);
9514 tree new_var
= lookup_decl (var
, ctx
);
9515 tree var3
= NULL_TREE
;
9516 tree new_vard
= new_var
;
9517 if (omp_is_reference (var
))
9518 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
9519 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9521 var3
= maybe_lookup_decl (new_vard
, ctx
);
9522 if (var3
== new_vard
)
9526 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
9527 tree rpriva
= create_tmp_var (ptype
);
9528 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
9529 OMP_CLAUSE_DECL (nc
) = rpriva
;
9531 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9533 tree rprivb
= create_tmp_var (ptype
);
9534 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
9535 OMP_CLAUSE_DECL (nc
) = rprivb
;
9536 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
9538 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9540 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
9541 if (new_vard
!= new_var
)
9542 TREE_ADDRESSABLE (var2
) = 1;
9543 gimple_add_tmp_var (var2
);
9545 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
9546 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9547 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9548 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9549 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9551 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
9552 thread_num
, integer_minus_one_node
);
9553 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
9554 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9555 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9556 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9557 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9559 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
9560 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9561 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9562 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9563 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9565 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
9566 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
9567 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
9568 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9569 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
9570 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9572 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
9573 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
9574 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
9575 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
9577 tree var4
= is_for_simd
? new_var
: var2
;
9578 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
9581 var5
= lookup_decl (var
, input_simd_ctx
);
9582 var6
= lookup_decl (var
, scan_simd_ctx
);
9583 if (new_vard
!= new_var
)
9585 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
9586 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
9589 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9591 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9594 x
= lang_hooks
.decls
.omp_clause_default_ctor
9595 (c
, var2
, build_outer_var_ref (var
, ctx
));
9597 gimplify_and_add (x
, &clist
);
9599 x
= build_outer_var_ref (var
, ctx
);
9600 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
9602 gimplify_and_add (x
, &thr01_list
);
9604 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
9605 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
9608 x
= unshare_expr (var4
);
9609 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
9610 gimplify_and_add (x
, &thrn1_list
);
9611 x
= unshare_expr (var4
);
9612 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
9613 gimplify_and_add (x
, &thr02_list
);
9615 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
9617 /* Otherwise, assign to it the identity element. */
9618 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9619 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9622 if (new_vard
!= new_var
)
9623 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9624 SET_DECL_VALUE_EXPR (new_vard
, val
);
9625 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
9627 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
9628 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9629 lower_omp (&tseq
, ctx
);
9630 gimple_seq_add_seq (&thrn1_list
, tseq
);
9631 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
9632 lower_omp (&tseq
, ctx
);
9633 gimple_seq_add_seq (&thr02_list
, tseq
);
9634 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9635 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9636 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9638 SET_DECL_VALUE_EXPR (new_vard
, y
);
9641 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
9642 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
9646 x
= unshare_expr (var4
);
9647 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
9648 gimplify_and_add (x
, &thrn2_list
);
9652 x
= unshare_expr (rprivb_ref
);
9653 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
9654 gimplify_and_add (x
, &scan1_list
);
9658 if (ctx
->scan_exclusive
)
9660 x
= unshare_expr (rprivb_ref
);
9661 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
9662 gimplify_and_add (x
, &scan1_list
);
9665 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9666 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9667 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9668 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9669 lower_omp (&tseq
, ctx
);
9670 gimple_seq_add_seq (&scan1_list
, tseq
);
9672 if (ctx
->scan_inclusive
)
9674 x
= unshare_expr (rprivb_ref
);
9675 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
9676 gimplify_and_add (x
, &scan1_list
);
9680 x
= unshare_expr (rpriva_ref
);
9681 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
9682 unshare_expr (var4
));
9683 gimplify_and_add (x
, &mdlist
);
9685 x
= unshare_expr (is_for_simd
? var6
: new_var
);
9686 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
9687 gimplify_and_add (x
, &input2_list
);
9690 if (new_vard
!= new_var
)
9691 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9693 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9694 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9695 SET_DECL_VALUE_EXPR (new_vard
, val
);
9696 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
9699 SET_DECL_VALUE_EXPR (placeholder
, var6
);
9700 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9703 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9704 lower_omp (&tseq
, ctx
);
9706 SET_DECL_VALUE_EXPR (new_vard
, y
);
9709 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
9710 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
9714 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
9715 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9716 lower_omp (&tseq
, ctx
);
9718 gimple_seq_add_seq (&input2_list
, tseq
);
9720 x
= build_outer_var_ref (var
, ctx
);
9721 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
9722 gimplify_and_add (x
, &last_list
);
9724 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
9725 gimplify_and_add (x
, &reduc_list
);
9726 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
9727 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
9729 if (new_vard
!= new_var
)
9730 val
= build_fold_addr_expr_loc (clause_loc
, val
);
9731 SET_DECL_VALUE_EXPR (new_vard
, val
);
9732 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
9733 SET_DECL_VALUE_EXPR (placeholder
, var2
);
9734 lower_omp (&tseq
, ctx
);
9735 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9736 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
9737 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
9739 SET_DECL_VALUE_EXPR (new_vard
, y
);
9742 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
9743 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
9745 gimple_seq_add_seq (&reduc_list
, tseq
);
9746 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
9747 gimplify_and_add (x
, &reduc_list
);
9749 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
9751 gimplify_and_add (x
, dlist
);
9755 x
= build_outer_var_ref (var
, ctx
);
9756 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
9758 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
9759 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
9761 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
9763 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
9765 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
9766 if (code
== MINUS_EXPR
)
9770 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
9773 if (ctx
->scan_exclusive
)
9774 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
9776 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
9777 gimplify_assign (var2
, x
, &scan1_list
);
9778 if (ctx
->scan_inclusive
)
9779 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
9783 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
9786 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
9787 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
9789 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
9792 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
9793 unshare_expr (rprival_ref
));
9794 gimplify_assign (rprival_ref
, x
, &reduc_list
);
9798 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
9799 gimple_seq_add_stmt (&scan1_list
, g
);
9800 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
9801 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
9802 ? scan_stmt4
: scan_stmt2
), g
);
9804 tree controlb
= create_tmp_var (boolean_type_node
);
9805 tree controlp
= create_tmp_var (ptr_type_node
);
9806 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
9807 OMP_CLAUSE_DECL (nc
) = controlb
;
9808 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
9810 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9811 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
9812 OMP_CLAUSE_DECL (nc
) = controlp
;
9813 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
9815 cp1
= &OMP_CLAUSE_CHAIN (nc
);
9816 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
9817 OMP_CLAUSE_DECL (nc
) = controlb
;
9818 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
9820 cp2
= &OMP_CLAUSE_CHAIN (nc
);
9821 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
9822 OMP_CLAUSE_DECL (nc
) = controlp
;
9823 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
9825 cp2
= &OMP_CLAUSE_CHAIN (nc
);
9827 *cp1
= gimple_omp_for_clauses (stmt
);
9828 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
9829 *cp2
= gimple_omp_for_clauses (new_stmt
);
9830 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
9834 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
9835 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
9837 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
9839 gsi_remove (&input3_gsi
, true);
9840 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
9842 gsi_remove (&scan3_gsi
, true);
9843 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
9845 gsi_remove (&input4_gsi
, true);
9846 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
9848 gsi_remove (&scan4_gsi
, true);
9852 gimple_omp_set_body (scan_stmt1
, scan1_list
);
9853 gimple_omp_set_body (input_stmt2
, input2_list
);
9856 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
9858 gsi_remove (&input1_gsi
, true);
9859 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
9861 gsi_remove (&scan1_gsi
, true);
9862 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
9864 gsi_remove (&input2_gsi
, true);
9865 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
9867 gsi_remove (&scan2_gsi
, true);
9869 gimple_seq_add_seq (body_p
, clist
);
9871 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
9872 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
9873 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9874 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
9875 gimple_seq_add_stmt (body_p
, g
);
9876 g
= gimple_build_label (lab1
);
9877 gimple_seq_add_stmt (body_p
, g
);
9878 gimple_seq_add_seq (body_p
, thr01_list
);
9879 g
= gimple_build_goto (lab3
);
9880 gimple_seq_add_stmt (body_p
, g
);
9881 g
= gimple_build_label (lab2
);
9882 gimple_seq_add_stmt (body_p
, g
);
9883 gimple_seq_add_seq (body_p
, thrn1_list
);
9884 g
= gimple_build_label (lab3
);
9885 gimple_seq_add_stmt (body_p
, g
);
9887 g
= gimple_build_assign (ivar
, size_zero_node
);
9888 gimple_seq_add_stmt (body_p
, g
);
9890 gimple_seq_add_stmt (body_p
, stmt
);
9891 gimple_seq_add_seq (body_p
, body
);
9892 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
9895 g
= gimple_build_omp_return (true);
9896 gimple_seq_add_stmt (body_p
, g
);
9897 gimple_seq_add_seq (body_p
, mdlist
);
9899 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
9900 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
9901 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
9902 gimple_seq_add_stmt (body_p
, g
);
9903 g
= gimple_build_label (lab1
);
9904 gimple_seq_add_stmt (body_p
, g
);
9906 g
= omp_build_barrier (NULL
);
9907 gimple_seq_add_stmt (body_p
, g
);
9909 tree down
= create_tmp_var (unsigned_type_node
);
9910 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
9911 gimple_seq_add_stmt (body_p
, g
);
9913 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
9914 gimple_seq_add_stmt (body_p
, g
);
9916 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
9917 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
9918 gimple_seq_add_stmt (body_p
, g
);
9920 tree thread_numu
= create_tmp_var (unsigned_type_node
);
9921 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
9922 gimple_seq_add_stmt (body_p
, g
);
9924 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
9925 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
9926 build_int_cst (unsigned_type_node
, 1));
9927 gimple_seq_add_stmt (body_p
, g
);
9929 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9930 g
= gimple_build_label (lab3
);
9931 gimple_seq_add_stmt (body_p
, g
);
9933 tree twok
= create_tmp_var (unsigned_type_node
);
9934 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
9935 gimple_seq_add_stmt (body_p
, g
);
9937 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9938 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9939 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9940 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
9941 gimple_seq_add_stmt (body_p
, g
);
9942 g
= gimple_build_label (lab4
);
9943 gimple_seq_add_stmt (body_p
, g
);
9944 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
9945 gimple_seq_add_stmt (body_p
, g
);
9946 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
9947 gimple_seq_add_stmt (body_p
, g
);
9949 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
9950 gimple_seq_add_stmt (body_p
, g
);
9951 g
= gimple_build_label (lab6
);
9952 gimple_seq_add_stmt (body_p
, g
);
9954 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
9955 gimple_seq_add_stmt (body_p
, g
);
9957 g
= gimple_build_label (lab5
);
9958 gimple_seq_add_stmt (body_p
, g
);
9960 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
9961 gimple_seq_add_stmt (body_p
, g
);
9963 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
9964 DECL_GIMPLE_REG_P (cplx
) = 1;
9965 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
9966 gimple_call_set_lhs (g
, cplx
);
9967 gimple_seq_add_stmt (body_p
, g
);
9968 tree mul
= create_tmp_var (unsigned_type_node
);
9969 g
= gimple_build_assign (mul
, REALPART_EXPR
,
9970 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
9971 gimple_seq_add_stmt (body_p
, g
);
9972 tree ovf
= create_tmp_var (unsigned_type_node
);
9973 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
9974 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
9975 gimple_seq_add_stmt (body_p
, g
);
9977 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
9978 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
9979 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
9981 gimple_seq_add_stmt (body_p
, g
);
9982 g
= gimple_build_label (lab7
);
9983 gimple_seq_add_stmt (body_p
, g
);
9985 tree andv
= create_tmp_var (unsigned_type_node
);
9986 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
9987 gimple_seq_add_stmt (body_p
, g
);
9988 tree andvm1
= create_tmp_var (unsigned_type_node
);
9989 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
9990 build_minus_one_cst (unsigned_type_node
));
9991 gimple_seq_add_stmt (body_p
, g
);
9993 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
9994 gimple_seq_add_stmt (body_p
, g
);
9996 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
9997 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
9998 gimple_seq_add_stmt (body_p
, g
);
9999 g
= gimple_build_label (lab9
);
10000 gimple_seq_add_stmt (body_p
, g
);
10001 gimple_seq_add_seq (body_p
, reduc_list
);
10002 g
= gimple_build_label (lab8
);
10003 gimple_seq_add_stmt (body_p
, g
);
10005 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
10006 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
10007 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
10008 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
10010 gimple_seq_add_stmt (body_p
, g
);
10011 g
= gimple_build_label (lab10
);
10012 gimple_seq_add_stmt (body_p
, g
);
10013 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
10014 gimple_seq_add_stmt (body_p
, g
);
10015 g
= gimple_build_goto (lab12
);
10016 gimple_seq_add_stmt (body_p
, g
);
10017 g
= gimple_build_label (lab11
);
10018 gimple_seq_add_stmt (body_p
, g
);
10019 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
10020 gimple_seq_add_stmt (body_p
, g
);
10021 g
= gimple_build_label (lab12
);
10022 gimple_seq_add_stmt (body_p
, g
);
10024 g
= omp_build_barrier (NULL
);
10025 gimple_seq_add_stmt (body_p
, g
);
10027 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
10029 gimple_seq_add_stmt (body_p
, g
);
10031 g
= gimple_build_label (lab2
);
10032 gimple_seq_add_stmt (body_p
, g
);
10034 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10035 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10036 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
10037 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
10038 gimple_seq_add_stmt (body_p
, g
);
10039 g
= gimple_build_label (lab1
);
10040 gimple_seq_add_stmt (body_p
, g
);
10041 gimple_seq_add_seq (body_p
, thr02_list
);
10042 g
= gimple_build_goto (lab3
);
10043 gimple_seq_add_stmt (body_p
, g
);
10044 g
= gimple_build_label (lab2
);
10045 gimple_seq_add_stmt (body_p
, g
);
10046 gimple_seq_add_seq (body_p
, thrn2_list
);
10047 g
= gimple_build_label (lab3
);
10048 gimple_seq_add_stmt (body_p
, g
);
10050 g
= gimple_build_assign (ivar
, size_zero_node
);
10051 gimple_seq_add_stmt (body_p
, g
);
10052 gimple_seq_add_stmt (body_p
, new_stmt
);
10053 gimple_seq_add_seq (body_p
, new_body
);
10055 gimple_seq new_dlist
= NULL
;
10056 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
10057 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
10058 tree num_threadsm1
= create_tmp_var (integer_type_node
);
10059 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
10060 integer_minus_one_node
);
10061 gimple_seq_add_stmt (&new_dlist
, g
);
10062 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
10063 gimple_seq_add_stmt (&new_dlist
, g
);
10064 g
= gimple_build_label (lab1
);
10065 gimple_seq_add_stmt (&new_dlist
, g
);
10066 gimple_seq_add_seq (&new_dlist
, last_list
);
10067 g
= gimple_build_label (lab2
);
10068 gimple_seq_add_stmt (&new_dlist
, g
);
10069 gimple_seq_add_seq (&new_dlist
, *dlist
);
10070 *dlist
= new_dlist
;
10073 /* Lower code for an OMP loop directive. */
10076 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10078 tree
*rhs_p
, block
;
10079 struct omp_for_data fd
, *fdp
= NULL
;
10080 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
10082 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
10083 gimple_seq cnt_list
= NULL
, clist
= NULL
;
10084 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
10087 push_gimplify_context ();
10089 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
10091 block
= make_node (BLOCK
);
10092 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
10093 /* Replace at gsi right away, so that 'stmt' is no member
10094 of a sequence anymore as we're going to add to a different
10096 gsi_replace (gsi_p
, new_stmt
, true);
10098 /* Move declaration of temporaries in the loop body before we make
10100 omp_for_body
= gimple_omp_body (stmt
);
10101 if (!gimple_seq_empty_p (omp_for_body
)
10102 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
10105 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
10106 tree vars
= gimple_bind_vars (inner_bind
);
10107 gimple_bind_append_vars (new_stmt
, vars
);
10108 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10109 keep them on the inner_bind and it's block. */
10110 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
10111 if (gimple_bind_block (inner_bind
))
10112 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
10115 if (gimple_omp_for_combined_into_p (stmt
))
10117 omp_extract_for_data (stmt
, &fd
, NULL
);
10120 /* We need two temporaries with fd.loop.v type (istart/iend)
10121 and then (fd.collapse - 1) temporaries with the same
10122 type for count2 ... countN-1 vars if not constant. */
10124 tree type
= fd
.iter_type
;
10125 if (fd
.collapse
> 1
10126 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
10127 count
+= fd
.collapse
- 1;
10129 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
10130 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
10131 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
10133 tree clauses
= *pc
;
10136 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
10137 OMP_CLAUSE__LOOPTEMP_
);
10138 if (ctx
->simt_stmt
)
10139 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
10140 OMP_CLAUSE__LOOPTEMP_
);
10141 for (i
= 0; i
< count
; i
++)
10146 gcc_assert (outerc
);
10147 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
10148 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
10149 OMP_CLAUSE__LOOPTEMP_
);
10153 /* If there are 2 adjacent SIMD stmts, one with _simt_
10154 clause, another without, make sure they have the same
10155 decls in _looptemp_ clauses, because the outer stmt
10156 they are combined into will look up just one inner_stmt. */
10157 if (ctx
->simt_stmt
)
10158 temp
= OMP_CLAUSE_DECL (simtc
);
10160 temp
= create_tmp_var (type
);
10161 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
10163 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
10164 OMP_CLAUSE_DECL (*pc
) = temp
;
10165 pc
= &OMP_CLAUSE_CHAIN (*pc
);
10166 if (ctx
->simt_stmt
)
10167 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
10168 OMP_CLAUSE__LOOPTEMP_
);
10173 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10177 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
10178 OMP_CLAUSE_REDUCTION
);
10179 tree rtmp
= NULL_TREE
;
10182 tree type
= build_pointer_type (pointer_sized_int_node
);
10183 tree temp
= create_tmp_var (type
);
10184 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
10185 OMP_CLAUSE_DECL (c
) = temp
;
10186 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
10187 gimple_omp_for_set_clauses (stmt
, c
);
10188 lower_omp_task_reductions (ctx
, OMP_FOR
,
10189 gimple_omp_for_clauses (stmt
),
10190 &tred_ilist
, &tred_dlist
);
10192 rtmp
= make_ssa_name (type
);
10193 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
10196 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
10199 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
10201 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
10202 gimple_omp_for_pre_body (stmt
));
10204 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10206 /* Lower the header expressions. At this point, we can assume that
10207 the header is of the form:
10209 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10211 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10212 using the .omp_data_s mapping, if needed. */
10213 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
10215 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
10216 if (!is_gimple_min_invariant (*rhs_p
))
10217 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10218 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10219 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10221 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
10222 if (!is_gimple_min_invariant (*rhs_p
))
10223 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10224 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
10225 recompute_tree_invariant_for_addr_expr (*rhs_p
);
10227 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
10228 if (!is_gimple_min_invariant (*rhs_p
))
10229 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
10232 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
10234 gimple_seq_add_seq (&body
, cnt_list
);
10236 /* Once lowered, extract the bounds and clauses. */
10237 omp_extract_for_data (stmt
, &fd
, NULL
);
10239 if (is_gimple_omp_oacc (ctx
->stmt
)
10240 && !ctx_in_oacc_kernels_region (ctx
))
10241 lower_oacc_head_tail (gimple_location (stmt
),
10242 gimple_omp_for_clauses (stmt
),
10243 &oacc_head
, &oacc_tail
, ctx
);
10245 /* Add OpenACC partitioning and reduction markers just before the loop. */
10247 gimple_seq_add_seq (&body
, oacc_head
);
10249 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
10251 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10252 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10253 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10254 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10256 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
10257 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
10258 OMP_CLAUSE_LINEAR_STEP (c
)
10259 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
10263 bool phony_loop
= (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_GRID_LOOP
10264 && gimple_omp_for_grid_phony (stmt
));
10265 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
10266 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
10268 gcc_assert (!phony_loop
);
10269 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
10274 gimple_seq_add_stmt (&body
, stmt
);
10275 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
10279 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
10282 /* After the loop, add exit clauses. */
10283 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
10287 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
10288 gcall
*g
= gimple_build_call (fndecl
, 0);
10289 gimple_seq_add_stmt (&body
, g
);
10290 gimple_seq_add_seq (&body
, clist
);
10291 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
10292 g
= gimple_build_call (fndecl
, 0);
10293 gimple_seq_add_stmt (&body
, g
);
10296 if (ctx
->cancellable
)
10297 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
10299 gimple_seq_add_seq (&body
, dlist
);
10303 gimple_seq_add_seq (&tred_ilist
, body
);
10307 body
= maybe_catch_exception (body
);
10311 /* Region exit marker goes at the end of the loop body. */
10312 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
10313 gimple_seq_add_stmt (&body
, g
);
10315 gimple_seq_add_seq (&body
, tred_dlist
);
10317 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
10320 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
10323 /* Add OpenACC joining and reduction markers just after the loop. */
10325 gimple_seq_add_seq (&body
, oacc_tail
);
10327 pop_gimplify_context (new_stmt
);
10329 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
10330 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
10331 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
10332 if (BLOCK_VARS (block
))
10333 TREE_USED (block
) = 1;
10335 gimple_bind_set_body (new_stmt
, body
);
10336 gimple_omp_set_body (stmt
, NULL
);
10337 gimple_omp_for_set_pre_body (stmt
, NULL
);
10340 /* Callback for walk_stmts. Check if the current statement only contains
10341 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10344 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
10345 bool *handled_ops_p
,
10346 struct walk_stmt_info
*wi
)
10348 int *info
= (int *) wi
->info
;
10349 gimple
*stmt
= gsi_stmt (*gsi_p
);
10351 *handled_ops_p
= true;
10352 switch (gimple_code (stmt
))
10358 case GIMPLE_OMP_FOR
:
10359 case GIMPLE_OMP_SECTIONS
:
10360 *info
= *info
== 0 ? 1 : -1;
10369 struct omp_taskcopy_context
10371 /* This field must be at the beginning, as we do "inheritance": Some
10372 callback functions for tree-inline.c (e.g., omp_copy_decl)
10373 receive a copy_body_data pointer that is up-casted to an
10374 omp_context pointer. */
10380 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
10382 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
10384 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
10385 return create_tmp_var (TREE_TYPE (var
));
10391 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
10393 tree name
, new_fields
= NULL
, type
, f
;
10395 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
10396 name
= DECL_NAME (TYPE_NAME (orig_type
));
10397 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
10398 TYPE_DECL
, name
, type
);
10399 TYPE_NAME (type
) = name
;
10401 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
10403 tree new_f
= copy_node (f
);
10404 DECL_CONTEXT (new_f
) = type
;
10405 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
10406 TREE_CHAIN (new_f
) = new_fields
;
10407 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10408 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
10409 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
10411 new_fields
= new_f
;
10412 tcctx
->cb
.decl_map
->put (f
, new_f
);
10414 TYPE_FIELDS (type
) = nreverse (new_fields
);
10415 layout_type (type
);
10419 /* Create task copyfn. */
10422 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
10424 struct function
*child_cfun
;
10425 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
10426 tree record_type
, srecord_type
, bind
, list
;
10427 bool record_needs_remap
= false, srecord_needs_remap
= false;
10429 struct omp_taskcopy_context tcctx
;
10430 location_t loc
= gimple_location (task_stmt
);
10431 size_t looptempno
= 0;
10433 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
10434 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
10435 gcc_assert (child_cfun
->cfg
== NULL
);
10436 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
10438 /* Reset DECL_CONTEXT on function arguments. */
10439 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
10440 DECL_CONTEXT (t
) = child_fn
;
10442 /* Populate the function. */
10443 push_gimplify_context ();
10444 push_cfun (child_cfun
);
10446 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
10447 TREE_SIDE_EFFECTS (bind
) = 1;
10449 DECL_SAVED_TREE (child_fn
) = bind
;
10450 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
10452 /* Remap src and dst argument types if needed. */
10453 record_type
= ctx
->record_type
;
10454 srecord_type
= ctx
->srecord_type
;
10455 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
10456 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10458 record_needs_remap
= true;
10461 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
10462 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
10464 srecord_needs_remap
= true;
10468 if (record_needs_remap
|| srecord_needs_remap
)
10470 memset (&tcctx
, '\0', sizeof (tcctx
));
10471 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
10472 tcctx
.cb
.dst_fn
= child_fn
;
10473 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
10474 gcc_checking_assert (tcctx
.cb
.src_node
);
10475 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
10476 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
10477 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
10478 tcctx
.cb
.eh_lp_nr
= 0;
10479 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
10480 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
10483 if (record_needs_remap
)
10484 record_type
= task_copyfn_remap_type (&tcctx
, record_type
);
10485 if (srecord_needs_remap
)
10486 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
10489 tcctx
.cb
.decl_map
= NULL
;
10491 arg
= DECL_ARGUMENTS (child_fn
);
10492 TREE_TYPE (arg
) = build_pointer_type (record_type
);
10493 sarg
= DECL_CHAIN (arg
);
10494 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
10496 /* First pass: initialize temporaries used in record_type and srecord_type
10497 sizes and field offsets. */
10498 if (tcctx
.cb
.decl_map
)
10499 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10500 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10504 decl
= OMP_CLAUSE_DECL (c
);
10505 p
= tcctx
.cb
.decl_map
->get (decl
);
10508 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10509 sf
= (tree
) n
->value
;
10510 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10511 src
= build_simple_mem_ref_loc (loc
, sarg
);
10512 src
= omp_build_component_ref (src
, sf
);
10513 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
10514 append_to_statement_list (t
, &list
);
10517 /* Second pass: copy shared var pointers and copy construct non-VLA
10518 firstprivate vars. */
10519 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10520 switch (OMP_CLAUSE_CODE (c
))
10522 splay_tree_key key
;
10523 case OMP_CLAUSE_SHARED
:
10524 decl
= OMP_CLAUSE_DECL (c
);
10525 key
= (splay_tree_key
) decl
;
10526 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
10527 key
= (splay_tree_key
) &DECL_UID (decl
);
10528 n
= splay_tree_lookup (ctx
->field_map
, key
);
10531 f
= (tree
) n
->value
;
10532 if (tcctx
.cb
.decl_map
)
10533 f
= *tcctx
.cb
.decl_map
->get (f
);
10534 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10535 sf
= (tree
) n
->value
;
10536 if (tcctx
.cb
.decl_map
)
10537 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10538 src
= build_simple_mem_ref_loc (loc
, sarg
);
10539 src
= omp_build_component_ref (src
, sf
);
10540 dst
= build_simple_mem_ref_loc (loc
, arg
);
10541 dst
= omp_build_component_ref (dst
, f
);
10542 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10543 append_to_statement_list (t
, &list
);
10545 case OMP_CLAUSE_REDUCTION
:
10546 case OMP_CLAUSE_IN_REDUCTION
:
10547 decl
= OMP_CLAUSE_DECL (c
);
10548 if (TREE_CODE (decl
) == MEM_REF
)
10550 decl
= TREE_OPERAND (decl
, 0);
10551 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
10552 decl
= TREE_OPERAND (decl
, 0);
10553 if (TREE_CODE (decl
) == INDIRECT_REF
10554 || TREE_CODE (decl
) == ADDR_EXPR
)
10555 decl
= TREE_OPERAND (decl
, 0);
10557 key
= (splay_tree_key
) decl
;
10558 n
= splay_tree_lookup (ctx
->field_map
, key
);
10561 f
= (tree
) n
->value
;
10562 if (tcctx
.cb
.decl_map
)
10563 f
= *tcctx
.cb
.decl_map
->get (f
);
10564 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
10565 sf
= (tree
) n
->value
;
10566 if (tcctx
.cb
.decl_map
)
10567 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10568 src
= build_simple_mem_ref_loc (loc
, sarg
);
10569 src
= omp_build_component_ref (src
, sf
);
10570 if (decl
!= OMP_CLAUSE_DECL (c
)
10571 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
10572 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
10573 src
= build_simple_mem_ref_loc (loc
, src
);
10574 dst
= build_simple_mem_ref_loc (loc
, arg
);
10575 dst
= omp_build_component_ref (dst
, f
);
10576 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10577 append_to_statement_list (t
, &list
);
10579 case OMP_CLAUSE__LOOPTEMP_
:
10580 /* Fields for first two _looptemp_ clauses are initialized by
10581 GOMP_taskloop*, the rest are handled like firstprivate. */
10582 if (looptempno
< 2)
10588 case OMP_CLAUSE__REDUCTEMP_
:
10589 case OMP_CLAUSE_FIRSTPRIVATE
:
10590 decl
= OMP_CLAUSE_DECL (c
);
10591 if (is_variable_sized (decl
))
10593 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10596 f
= (tree
) n
->value
;
10597 if (tcctx
.cb
.decl_map
)
10598 f
= *tcctx
.cb
.decl_map
->get (f
);
10599 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10602 sf
= (tree
) n
->value
;
10603 if (tcctx
.cb
.decl_map
)
10604 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10605 src
= build_simple_mem_ref_loc (loc
, sarg
);
10606 src
= omp_build_component_ref (src
, sf
);
10607 if (use_pointer_for_field (decl
, NULL
) || omp_is_reference (decl
))
10608 src
= build_simple_mem_ref_loc (loc
, src
);
10612 dst
= build_simple_mem_ref_loc (loc
, arg
);
10613 dst
= omp_build_component_ref (dst
, f
);
10614 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
10615 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10617 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
10618 append_to_statement_list (t
, &list
);
10620 case OMP_CLAUSE_PRIVATE
:
10621 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
10623 decl
= OMP_CLAUSE_DECL (c
);
10624 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10625 f
= (tree
) n
->value
;
10626 if (tcctx
.cb
.decl_map
)
10627 f
= *tcctx
.cb
.decl_map
->get (f
);
10628 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
10631 sf
= (tree
) n
->value
;
10632 if (tcctx
.cb
.decl_map
)
10633 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10634 src
= build_simple_mem_ref_loc (loc
, sarg
);
10635 src
= omp_build_component_ref (src
, sf
);
10636 if (use_pointer_for_field (decl
, NULL
))
10637 src
= build_simple_mem_ref_loc (loc
, src
);
10641 dst
= build_simple_mem_ref_loc (loc
, arg
);
10642 dst
= omp_build_component_ref (dst
, f
);
10643 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
10644 append_to_statement_list (t
, &list
);
10650 /* Last pass: handle VLA firstprivates. */
10651 if (tcctx
.cb
.decl_map
)
10652 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10653 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10657 decl
= OMP_CLAUSE_DECL (c
);
10658 if (!is_variable_sized (decl
))
10660 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
10663 f
= (tree
) n
->value
;
10664 f
= *tcctx
.cb
.decl_map
->get (f
);
10665 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
10666 ind
= DECL_VALUE_EXPR (decl
);
10667 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
10668 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
10669 n
= splay_tree_lookup (ctx
->sfield_map
,
10670 (splay_tree_key
) TREE_OPERAND (ind
, 0));
10671 sf
= (tree
) n
->value
;
10672 sf
= *tcctx
.cb
.decl_map
->get (sf
);
10673 src
= build_simple_mem_ref_loc (loc
, sarg
);
10674 src
= omp_build_component_ref (src
, sf
);
10675 src
= build_simple_mem_ref_loc (loc
, src
);
10676 dst
= build_simple_mem_ref_loc (loc
, arg
);
10677 dst
= omp_build_component_ref (dst
, f
);
10678 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
10679 append_to_statement_list (t
, &list
);
10680 n
= splay_tree_lookup (ctx
->field_map
,
10681 (splay_tree_key
) TREE_OPERAND (ind
, 0));
10682 df
= (tree
) n
->value
;
10683 df
= *tcctx
.cb
.decl_map
->get (df
);
10684 ptr
= build_simple_mem_ref_loc (loc
, arg
);
10685 ptr
= omp_build_component_ref (ptr
, df
);
10686 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
10687 build_fold_addr_expr_loc (loc
, dst
));
10688 append_to_statement_list (t
, &list
);
10691 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
10692 append_to_statement_list (t
, &list
);
10694 if (tcctx
.cb
.decl_map
)
10695 delete tcctx
.cb
.decl_map
;
10696 pop_gimplify_context (NULL
);
10697 BIND_EXPR_BODY (bind
) = list
;
10702 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
10706 size_t cnt
[4] = { 0, 0, 0, 0 }, idx
= 2, i
;
10708 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
10709 gcc_assert (clauses
);
10710 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10711 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
10712 switch (OMP_CLAUSE_DEPEND_KIND (c
))
10714 case OMP_CLAUSE_DEPEND_LAST
:
10715 /* Lowering already done at gimplification. */
10717 case OMP_CLAUSE_DEPEND_IN
:
10720 case OMP_CLAUSE_DEPEND_OUT
:
10721 case OMP_CLAUSE_DEPEND_INOUT
:
10724 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
10727 case OMP_CLAUSE_DEPEND_DEPOBJ
:
10730 case OMP_CLAUSE_DEPEND_SOURCE
:
10731 case OMP_CLAUSE_DEPEND_SINK
:
10734 gcc_unreachable ();
10736 if (cnt
[1] || cnt
[3])
10738 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3];
10739 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
);
10740 tree array
= create_tmp_var (type
);
10741 TREE_ADDRESSABLE (array
) = 1;
10742 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
10746 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
10747 gimple_seq_add_stmt (iseq
, g
);
10748 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
10751 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
10752 gimple_seq_add_stmt (iseq
, g
);
10753 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
10755 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
10756 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
10757 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
10758 gimple_seq_add_stmt (iseq
, g
);
10760 for (i
= 0; i
< 4; i
++)
10764 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10765 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
10769 switch (OMP_CLAUSE_DEPEND_KIND (c
))
10771 case OMP_CLAUSE_DEPEND_IN
:
10775 case OMP_CLAUSE_DEPEND_OUT
:
10776 case OMP_CLAUSE_DEPEND_INOUT
:
10780 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
10784 case OMP_CLAUSE_DEPEND_DEPOBJ
:
10789 gcc_unreachable ();
10791 tree t
= OMP_CLAUSE_DECL (c
);
10792 t
= fold_convert (ptr_type_node
, t
);
10793 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
10794 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
10795 NULL_TREE
, NULL_TREE
);
10796 g
= gimple_build_assign (r
, t
);
10797 gimple_seq_add_stmt (iseq
, g
);
10800 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
10801 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
10802 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
10803 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
10805 tree clobber
= build_constructor (type
, NULL
);
10806 TREE_THIS_VOLATILE (clobber
) = 1;
10807 g
= gimple_build_assign (array
, clobber
);
10808 gimple_seq_add_stmt (oseq
, g
);
10811 /* Lower the OpenMP parallel or task directive in the current statement
10812 in GSI_P. CTX holds context information for the directive. */
10815 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10819 gimple
*stmt
= gsi_stmt (*gsi_p
);
10820 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
10821 gimple_seq par_body
;
10822 location_t loc
= gimple_location (stmt
);
10824 clauses
= gimple_omp_taskreg_clauses (stmt
);
10825 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
10826 && gimple_omp_task_taskwait_p (stmt
))
10834 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
10835 par_body
= gimple_bind_body (par_bind
);
10837 child_fn
= ctx
->cb
.dst_fn
;
10838 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
10839 && !gimple_omp_parallel_combined_p (stmt
))
10841 struct walk_stmt_info wi
;
10844 memset (&wi
, 0, sizeof (wi
));
10846 wi
.val_only
= true;
10847 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
10849 gimple_omp_parallel_set_combined_p (stmt
, true);
10851 gimple_seq dep_ilist
= NULL
;
10852 gimple_seq dep_olist
= NULL
;
10853 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
10854 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
10856 push_gimplify_context ();
10857 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
10858 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
10859 &dep_ilist
, &dep_olist
);
10862 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
10863 && gimple_omp_task_taskwait_p (stmt
))
10867 gsi_replace (gsi_p
, dep_bind
, true);
10868 gimple_bind_add_seq (dep_bind
, dep_ilist
);
10869 gimple_bind_add_stmt (dep_bind
, stmt
);
10870 gimple_bind_add_seq (dep_bind
, dep_olist
);
10871 pop_gimplify_context (dep_bind
);
10876 if (ctx
->srecord_type
)
10877 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
10879 gimple_seq tskred_ilist
= NULL
;
10880 gimple_seq tskred_olist
= NULL
;
10881 if ((is_task_ctx (ctx
)
10882 && gimple_omp_task_taskloop_p (ctx
->stmt
)
10883 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
10884 OMP_CLAUSE_REDUCTION
))
10885 || (is_parallel_ctx (ctx
)
10886 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
10887 OMP_CLAUSE__REDUCTEMP_
)))
10889 if (dep_bind
== NULL
)
10891 push_gimplify_context ();
10892 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
10894 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
10896 gimple_omp_taskreg_clauses (ctx
->stmt
),
10897 &tskred_ilist
, &tskred_olist
);
10900 push_gimplify_context ();
10902 gimple_seq par_olist
= NULL
;
10903 gimple_seq par_ilist
= NULL
;
10904 gimple_seq par_rlist
= NULL
;
10905 bool phony_construct
= gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
10906 && gimple_omp_parallel_grid_phony (as_a
<gomp_parallel
*> (stmt
));
10907 if (phony_construct
&& ctx
->record_type
)
10909 gcc_checking_assert (!ctx
->receiver_decl
);
10910 ctx
->receiver_decl
= create_tmp_var
10911 (build_reference_type (ctx
->record_type
), ".omp_rec");
10913 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
10914 lower_omp (&par_body
, ctx
);
10915 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
)
10916 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
10918 /* Declare all the variables created by mapping and the variables
10919 declared in the scope of the parallel body. */
10920 record_vars_into (ctx
->block_vars
, child_fn
);
10921 maybe_remove_omp_member_access_dummy_vars (par_bind
);
10922 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
10924 if (ctx
->record_type
)
10927 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
10928 : ctx
->record_type
, ".omp_data_o");
10929 DECL_NAMELESS (ctx
->sender_decl
) = 1;
10930 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
10931 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
10934 gimple_seq olist
= NULL
;
10935 gimple_seq ilist
= NULL
;
10936 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
10937 lower_send_shared_vars (&ilist
, &olist
, ctx
);
10939 if (ctx
->record_type
)
10941 tree clobber
= build_constructor (TREE_TYPE (ctx
->sender_decl
), NULL
);
10942 TREE_THIS_VOLATILE (clobber
) = 1;
10943 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
10947 /* Once all the expansions are done, sequence all the different
10948 fragments inside gimple_omp_body. */
10950 gimple_seq new_body
= NULL
;
10952 if (ctx
->record_type
)
10954 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
10955 /* fixup_child_record_type might have changed receiver_decl's type. */
10956 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
10957 gimple_seq_add_stmt (&new_body
,
10958 gimple_build_assign (ctx
->receiver_decl
, t
));
10961 gimple_seq_add_seq (&new_body
, par_ilist
);
10962 gimple_seq_add_seq (&new_body
, par_body
);
10963 gimple_seq_add_seq (&new_body
, par_rlist
);
10964 if (ctx
->cancellable
)
10965 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
10966 gimple_seq_add_seq (&new_body
, par_olist
);
10967 new_body
= maybe_catch_exception (new_body
);
10968 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
10969 gimple_seq_add_stmt (&new_body
,
10970 gimple_build_omp_continue (integer_zero_node
,
10971 integer_zero_node
));
10972 if (!phony_construct
)
10974 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
10975 gimple_omp_set_body (stmt
, new_body
);
10978 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
10979 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
10981 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
10982 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
10983 gimple_bind_add_seq (bind
, ilist
);
10984 if (!phony_construct
)
10985 gimple_bind_add_stmt (bind
, stmt
);
10987 gimple_bind_add_seq (bind
, new_body
);
10988 gimple_bind_add_seq (bind
, olist
);
10990 pop_gimplify_context (NULL
);
10994 gimple_bind_add_seq (dep_bind
, dep_ilist
);
10995 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
10996 gimple_bind_add_stmt (dep_bind
, bind
);
10997 gimple_bind_add_seq (dep_bind
, tskred_olist
);
10998 gimple_bind_add_seq (dep_bind
, dep_olist
);
10999 pop_gimplify_context (dep_bind
);
11003 /* Lower the GIMPLE_OMP_TARGET in the current statement
11004 in GSI_P. CTX holds context information for the directive. */
11007 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11010 tree child_fn
, t
, c
;
11011 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
11012 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
11013 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
11014 location_t loc
= gimple_location (stmt
);
11015 bool offloaded
, data_region
;
11016 unsigned int map_cnt
= 0;
11018 offloaded
= is_gimple_omp_offloaded (stmt
);
11019 switch (gimple_omp_target_kind (stmt
))
11021 case GF_OMP_TARGET_KIND_REGION
:
11022 case GF_OMP_TARGET_KIND_UPDATE
:
11023 case GF_OMP_TARGET_KIND_ENTER_DATA
:
11024 case GF_OMP_TARGET_KIND_EXIT_DATA
:
11025 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
11026 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
11027 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
11028 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
:
11029 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
11030 data_region
= false;
11032 case GF_OMP_TARGET_KIND_DATA
:
11033 case GF_OMP_TARGET_KIND_OACC_DATA
:
11034 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
11035 data_region
= true;
11038 gcc_unreachable ();
11041 clauses
= gimple_omp_target_clauses (stmt
);
11043 gimple_seq dep_ilist
= NULL
;
11044 gimple_seq dep_olist
= NULL
;
11045 if (omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
11047 push_gimplify_context ();
11048 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
11049 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
11050 &dep_ilist
, &dep_olist
);
11057 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
11058 tgt_body
= gimple_bind_body (tgt_bind
);
11060 else if (data_region
)
11061 tgt_body
= gimple_omp_body (stmt
);
11062 child_fn
= ctx
->cb
.dst_fn
;
11064 push_gimplify_context ();
11067 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11068 switch (OMP_CLAUSE_CODE (c
))
11074 case OMP_CLAUSE_MAP
:
11076 /* First check what we're prepared to handle in the following. */
11077 switch (OMP_CLAUSE_MAP_KIND (c
))
11079 case GOMP_MAP_ALLOC
:
11081 case GOMP_MAP_FROM
:
11082 case GOMP_MAP_TOFROM
:
11083 case GOMP_MAP_POINTER
:
11084 case GOMP_MAP_TO_PSET
:
11085 case GOMP_MAP_DELETE
:
11086 case GOMP_MAP_RELEASE
:
11087 case GOMP_MAP_ALWAYS_TO
:
11088 case GOMP_MAP_ALWAYS_FROM
:
11089 case GOMP_MAP_ALWAYS_TOFROM
:
11090 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
11091 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
11092 case GOMP_MAP_STRUCT
:
11093 case GOMP_MAP_ALWAYS_POINTER
:
11095 case GOMP_MAP_FORCE_ALLOC
:
11096 case GOMP_MAP_FORCE_TO
:
11097 case GOMP_MAP_FORCE_FROM
:
11098 case GOMP_MAP_FORCE_TOFROM
:
11099 case GOMP_MAP_FORCE_PRESENT
:
11100 case GOMP_MAP_FORCE_DEVICEPTR
:
11101 case GOMP_MAP_DEVICE_RESIDENT
:
11102 case GOMP_MAP_LINK
:
11103 gcc_assert (is_gimple_omp_oacc (stmt
));
11106 gcc_unreachable ();
11110 case OMP_CLAUSE_TO
:
11111 case OMP_CLAUSE_FROM
:
11113 var
= OMP_CLAUSE_DECL (c
);
11116 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
11117 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11118 && (OMP_CLAUSE_MAP_KIND (c
)
11119 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
11124 if (DECL_SIZE (var
)
11125 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
11127 tree var2
= DECL_VALUE_EXPR (var
);
11128 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
11129 var2
= TREE_OPERAND (var2
, 0);
11130 gcc_assert (DECL_P (var2
));
11135 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11136 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11137 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
11139 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11141 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
11142 && varpool_node::get_create (var
)->offloadable
)
11145 tree type
= build_pointer_type (TREE_TYPE (var
));
11146 tree new_var
= lookup_decl (var
, ctx
);
11147 x
= create_tmp_var_raw (type
, get_name (new_var
));
11148 gimple_add_tmp_var (x
);
11149 x
= build_simple_mem_ref (x
);
11150 SET_DECL_VALUE_EXPR (new_var
, x
);
11151 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11156 if (!maybe_lookup_field (var
, ctx
))
11159 /* Don't remap oacc parallel reduction variables, because the
11160 intermediate result must be local to each gang. */
11161 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11162 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
11164 x
= build_receiver_ref (var
, true, ctx
);
11165 tree new_var
= lookup_decl (var
, ctx
);
11167 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11168 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11169 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11170 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11171 x
= build_simple_mem_ref (x
);
11172 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11174 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11175 if (omp_is_reference (new_var
)
11176 && TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
)
11178 /* Create a local object to hold the instance
11180 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
11181 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
11182 tree inst
= create_tmp_var (type
, id
);
11183 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
11184 x
= build_fold_addr_expr (inst
);
11186 gimplify_assign (new_var
, x
, &fplist
);
11188 else if (DECL_P (new_var
))
11190 SET_DECL_VALUE_EXPR (new_var
, x
);
11191 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11194 gcc_unreachable ();
11199 case OMP_CLAUSE_FIRSTPRIVATE
:
11200 if (is_oacc_parallel (ctx
))
11201 goto oacc_firstprivate
;
11203 var
= OMP_CLAUSE_DECL (c
);
11204 if (!omp_is_reference (var
)
11205 && !is_gimple_reg_type (TREE_TYPE (var
)))
11207 tree new_var
= lookup_decl (var
, ctx
);
11208 if (is_variable_sized (var
))
11210 tree pvar
= DECL_VALUE_EXPR (var
);
11211 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11212 pvar
= TREE_OPERAND (pvar
, 0);
11213 gcc_assert (DECL_P (pvar
));
11214 tree new_pvar
= lookup_decl (pvar
, ctx
);
11215 x
= build_fold_indirect_ref (new_pvar
);
11216 TREE_THIS_NOTRAP (x
) = 1;
11219 x
= build_receiver_ref (var
, true, ctx
);
11220 SET_DECL_VALUE_EXPR (new_var
, x
);
11221 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11225 case OMP_CLAUSE_PRIVATE
:
11226 if (is_gimple_omp_oacc (ctx
->stmt
))
11228 var
= OMP_CLAUSE_DECL (c
);
11229 if (is_variable_sized (var
))
11231 tree new_var
= lookup_decl (var
, ctx
);
11232 tree pvar
= DECL_VALUE_EXPR (var
);
11233 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11234 pvar
= TREE_OPERAND (pvar
, 0);
11235 gcc_assert (DECL_P (pvar
));
11236 tree new_pvar
= lookup_decl (pvar
, ctx
);
11237 x
= build_fold_indirect_ref (new_pvar
);
11238 TREE_THIS_NOTRAP (x
) = 1;
11239 SET_DECL_VALUE_EXPR (new_var
, x
);
11240 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11244 case OMP_CLAUSE_USE_DEVICE_PTR
:
11245 case OMP_CLAUSE_IS_DEVICE_PTR
:
11246 var
= OMP_CLAUSE_DECL (c
);
11248 if (is_variable_sized (var
))
11250 tree new_var
= lookup_decl (var
, ctx
);
11251 tree pvar
= DECL_VALUE_EXPR (var
);
11252 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11253 pvar
= TREE_OPERAND (pvar
, 0);
11254 gcc_assert (DECL_P (pvar
));
11255 tree new_pvar
= lookup_decl (pvar
, ctx
);
11256 x
= build_fold_indirect_ref (new_pvar
);
11257 TREE_THIS_NOTRAP (x
) = 1;
11258 SET_DECL_VALUE_EXPR (new_var
, x
);
11259 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11261 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11263 tree new_var
= lookup_decl (var
, ctx
);
11264 tree type
= build_pointer_type (TREE_TYPE (var
));
11265 x
= create_tmp_var_raw (type
, get_name (new_var
));
11266 gimple_add_tmp_var (x
);
11267 x
= build_simple_mem_ref (x
);
11268 SET_DECL_VALUE_EXPR (new_var
, x
);
11269 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11273 tree new_var
= lookup_decl (var
, ctx
);
11274 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
11275 gimple_add_tmp_var (x
);
11276 SET_DECL_VALUE_EXPR (new_var
, x
);
11277 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
11284 target_nesting_level
++;
11285 lower_omp (&tgt_body
, ctx
);
11286 target_nesting_level
--;
11288 else if (data_region
)
11289 lower_omp (&tgt_body
, ctx
);
11293 /* Declare all the variables created by mapping and the variables
11294 declared in the scope of the target body. */
11295 record_vars_into (ctx
->block_vars
, child_fn
);
11296 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
11297 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
11302 if (ctx
->record_type
)
11305 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
11306 DECL_NAMELESS (ctx
->sender_decl
) = 1;
11307 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
11308 t
= make_tree_vec (3);
11309 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
11310 TREE_VEC_ELT (t
, 1)
11311 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
11312 ".omp_data_sizes");
11313 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
11314 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
11315 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
11316 tree tkind_type
= short_unsigned_type_node
;
11317 int talign_shift
= 8;
11318 TREE_VEC_ELT (t
, 2)
11319 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
11320 ".omp_data_kinds");
11321 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
11322 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
11323 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
11324 gimple_omp_target_set_data_arg (stmt
, t
);
11326 vec
<constructor_elt
, va_gc
> *vsize
;
11327 vec
<constructor_elt
, va_gc
> *vkind
;
11328 vec_alloc (vsize
, map_cnt
);
11329 vec_alloc (vkind
, map_cnt
);
11330 unsigned int map_idx
= 0;
11332 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11333 switch (OMP_CLAUSE_CODE (c
))
11335 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
11336 unsigned int talign
;
11341 case OMP_CLAUSE_MAP
:
11342 case OMP_CLAUSE_TO
:
11343 case OMP_CLAUSE_FROM
:
11344 oacc_firstprivate_map
:
11346 ovar
= OMP_CLAUSE_DECL (c
);
11347 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11348 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11349 || (OMP_CLAUSE_MAP_KIND (c
)
11350 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
11352 if (!DECL_P (ovar
))
11354 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11355 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
11357 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c
))
11358 == get_base_address (ovar
));
11359 nc
= OMP_CLAUSE_CHAIN (c
);
11360 ovar
= OMP_CLAUSE_DECL (nc
);
11364 tree x
= build_sender_ref (ovar
, ctx
);
11366 = build_fold_addr_expr_with_type (ovar
, ptr_type_node
);
11367 gimplify_assign (x
, v
, &ilist
);
11373 if (DECL_SIZE (ovar
)
11374 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
11376 tree ovar2
= DECL_VALUE_EXPR (ovar
);
11377 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
11378 ovar2
= TREE_OPERAND (ovar2
, 0);
11379 gcc_assert (DECL_P (ovar2
));
11382 if (!maybe_lookup_field (ovar
, ctx
))
11386 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
11387 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
11388 talign
= DECL_ALIGN_UNIT (ovar
);
11391 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11392 x
= build_sender_ref (ovar
, ctx
);
11394 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11395 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11396 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
11397 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
11399 gcc_assert (offloaded
);
11401 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
11402 mark_addressable (avar
);
11403 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
11404 talign
= DECL_ALIGN_UNIT (avar
);
11405 avar
= build_fold_addr_expr (avar
);
11406 gimplify_assign (x
, avar
, &ilist
);
11408 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11410 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
11411 if (!omp_is_reference (var
))
11413 if (is_gimple_reg (var
)
11414 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11415 TREE_NO_WARNING (var
) = 1;
11416 var
= build_fold_addr_expr (var
);
11419 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11420 gimplify_assign (x
, var
, &ilist
);
11422 else if (is_gimple_reg (var
))
11424 gcc_assert (offloaded
);
11425 tree avar
= create_tmp_var (TREE_TYPE (var
));
11426 mark_addressable (avar
);
11427 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
11428 if (GOMP_MAP_COPY_TO_P (map_kind
)
11429 || map_kind
== GOMP_MAP_POINTER
11430 || map_kind
== GOMP_MAP_TO_PSET
11431 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11433 /* If we need to initialize a temporary
11434 with VAR because it is not addressable, and
11435 the variable hasn't been initialized yet, then
11436 we'll get a warning for the store to avar.
11437 Don't warn in that case, the mapping might
11439 TREE_NO_WARNING (var
) = 1;
11440 gimplify_assign (avar
, var
, &ilist
);
11442 avar
= build_fold_addr_expr (avar
);
11443 gimplify_assign (x
, avar
, &ilist
);
11444 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
11445 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
11446 && !TYPE_READONLY (TREE_TYPE (var
)))
11448 x
= unshare_expr (x
);
11449 x
= build_simple_mem_ref (x
);
11450 gimplify_assign (var
, x
, &olist
);
11455 var
= build_fold_addr_expr (var
);
11456 gimplify_assign (x
, var
, &ilist
);
11460 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11462 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11463 s
= TREE_TYPE (ovar
);
11464 if (TREE_CODE (s
) == REFERENCE_TYPE
)
11466 s
= TYPE_SIZE_UNIT (s
);
11469 s
= OMP_CLAUSE_SIZE (c
);
11470 if (s
== NULL_TREE
)
11471 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11472 s
= fold_convert (size_type_node
, s
);
11473 purpose
= size_int (map_idx
++);
11474 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11475 if (TREE_CODE (s
) != INTEGER_CST
)
11476 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11478 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
11479 switch (OMP_CLAUSE_CODE (c
))
11481 case OMP_CLAUSE_MAP
:
11482 tkind
= OMP_CLAUSE_MAP_KIND (c
);
11483 tkind_zero
= tkind
;
11484 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
11487 case GOMP_MAP_ALLOC
:
11489 case GOMP_MAP_FROM
:
11490 case GOMP_MAP_TOFROM
:
11491 case GOMP_MAP_ALWAYS_TO
:
11492 case GOMP_MAP_ALWAYS_FROM
:
11493 case GOMP_MAP_ALWAYS_TOFROM
:
11494 case GOMP_MAP_RELEASE
:
11495 case GOMP_MAP_FORCE_TO
:
11496 case GOMP_MAP_FORCE_FROM
:
11497 case GOMP_MAP_FORCE_TOFROM
:
11498 case GOMP_MAP_FORCE_PRESENT
:
11499 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
11501 case GOMP_MAP_DELETE
:
11502 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
11506 if (tkind_zero
!= tkind
)
11508 if (integer_zerop (s
))
11509 tkind
= tkind_zero
;
11510 else if (integer_nonzerop (s
))
11511 tkind_zero
= tkind
;
11514 case OMP_CLAUSE_FIRSTPRIVATE
:
11515 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
11516 tkind
= GOMP_MAP_TO
;
11517 tkind_zero
= tkind
;
11519 case OMP_CLAUSE_TO
:
11520 tkind
= GOMP_MAP_TO
;
11521 tkind_zero
= tkind
;
11523 case OMP_CLAUSE_FROM
:
11524 tkind
= GOMP_MAP_FROM
;
11525 tkind_zero
= tkind
;
11528 gcc_unreachable ();
11530 gcc_checking_assert (tkind
11531 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11532 gcc_checking_assert (tkind_zero
11533 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11534 talign
= ceil_log2 (talign
);
11535 tkind
|= talign
<< talign_shift
;
11536 tkind_zero
|= talign
<< talign_shift
;
11537 gcc_checking_assert (tkind
11538 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11539 gcc_checking_assert (tkind_zero
11540 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11541 if (tkind
== tkind_zero
)
11542 x
= build_int_cstu (tkind_type
, tkind
);
11545 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
11546 x
= build3 (COND_EXPR
, tkind_type
,
11547 fold_build2 (EQ_EXPR
, boolean_type_node
,
11548 unshare_expr (s
), size_zero_node
),
11549 build_int_cstu (tkind_type
, tkind_zero
),
11550 build_int_cstu (tkind_type
, tkind
));
11552 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
11557 case OMP_CLAUSE_FIRSTPRIVATE
:
11558 if (is_oacc_parallel (ctx
))
11559 goto oacc_firstprivate_map
;
11560 ovar
= OMP_CLAUSE_DECL (c
);
11561 if (omp_is_reference (ovar
))
11562 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11564 talign
= DECL_ALIGN_UNIT (ovar
);
11565 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11566 x
= build_sender_ref (ovar
, ctx
);
11567 tkind
= GOMP_MAP_FIRSTPRIVATE
;
11568 type
= TREE_TYPE (ovar
);
11569 if (omp_is_reference (ovar
))
11570 type
= TREE_TYPE (type
);
11571 if ((INTEGRAL_TYPE_P (type
)
11572 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
11573 || TREE_CODE (type
) == POINTER_TYPE
)
11575 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
11577 if (omp_is_reference (var
))
11578 t
= build_simple_mem_ref (var
);
11579 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11580 TREE_NO_WARNING (var
) = 1;
11581 if (TREE_CODE (type
) != POINTER_TYPE
)
11582 t
= fold_convert (pointer_sized_int_node
, t
);
11583 t
= fold_convert (TREE_TYPE (x
), t
);
11584 gimplify_assign (x
, t
, &ilist
);
11586 else if (omp_is_reference (var
))
11587 gimplify_assign (x
, var
, &ilist
);
11588 else if (is_gimple_reg (var
))
11590 tree avar
= create_tmp_var (TREE_TYPE (var
));
11591 mark_addressable (avar
);
11592 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11593 TREE_NO_WARNING (var
) = 1;
11594 gimplify_assign (avar
, var
, &ilist
);
11595 avar
= build_fold_addr_expr (avar
);
11596 gimplify_assign (x
, avar
, &ilist
);
11600 var
= build_fold_addr_expr (var
);
11601 gimplify_assign (x
, var
, &ilist
);
11603 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
11605 else if (omp_is_reference (ovar
))
11606 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
11608 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
11609 s
= fold_convert (size_type_node
, s
);
11610 purpose
= size_int (map_idx
++);
11611 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11612 if (TREE_CODE (s
) != INTEGER_CST
)
11613 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
11615 gcc_checking_assert (tkind
11616 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11617 talign
= ceil_log2 (talign
);
11618 tkind
|= talign
<< talign_shift
;
11619 gcc_checking_assert (tkind
11620 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11621 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
11622 build_int_cstu (tkind_type
, tkind
));
11625 case OMP_CLAUSE_USE_DEVICE_PTR
:
11626 case OMP_CLAUSE_IS_DEVICE_PTR
:
11627 ovar
= OMP_CLAUSE_DECL (c
);
11628 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
11629 x
= build_sender_ref (ovar
, ctx
);
11630 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
11631 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
11633 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
11634 type
= TREE_TYPE (ovar
);
11635 if (TREE_CODE (type
) == ARRAY_TYPE
)
11636 var
= build_fold_addr_expr (var
);
11639 if (omp_is_reference (ovar
))
11641 type
= TREE_TYPE (type
);
11642 if (TREE_CODE (type
) != ARRAY_TYPE
)
11643 var
= build_simple_mem_ref (var
);
11644 var
= fold_convert (TREE_TYPE (x
), var
);
11647 gimplify_assign (x
, var
, &ilist
);
11649 purpose
= size_int (map_idx
++);
11650 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
11651 gcc_checking_assert (tkind
11652 < (HOST_WIDE_INT_C (1U) << talign_shift
));
11653 gcc_checking_assert (tkind
11654 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
11655 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
11656 build_int_cstu (tkind_type
, tkind
));
11660 gcc_assert (map_idx
== map_cnt
);
11662 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
11663 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
11664 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
11665 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
11666 for (int i
= 1; i
<= 2; i
++)
11667 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
11669 gimple_seq initlist
= NULL
;
11670 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
11671 TREE_VEC_ELT (t
, i
)),
11672 &initlist
, true, NULL_TREE
);
11673 gimple_seq_add_seq (&ilist
, initlist
);
11675 tree clobber
= build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, i
)),
11677 TREE_THIS_VOLATILE (clobber
) = 1;
11678 gimple_seq_add_stmt (&olist
,
11679 gimple_build_assign (TREE_VEC_ELT (t
, i
),
11683 tree clobber
= build_constructor (ctx
->record_type
, NULL
);
11684 TREE_THIS_VOLATILE (clobber
) = 1;
11685 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
11689 /* Once all the expansions are done, sequence all the different
11690 fragments inside gimple_omp_body. */
11695 && ctx
->record_type
)
11697 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
11698 /* fixup_child_record_type might have changed receiver_decl's type. */
11699 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
11700 gimple_seq_add_stmt (&new_body
,
11701 gimple_build_assign (ctx
->receiver_decl
, t
));
11703 gimple_seq_add_seq (&new_body
, fplist
);
11705 if (offloaded
|| data_region
)
11707 tree prev
= NULL_TREE
;
11708 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11709 switch (OMP_CLAUSE_CODE (c
))
11714 case OMP_CLAUSE_FIRSTPRIVATE
:
11715 if (is_gimple_omp_oacc (ctx
->stmt
))
11717 var
= OMP_CLAUSE_DECL (c
);
11718 if (omp_is_reference (var
)
11719 || is_gimple_reg_type (TREE_TYPE (var
)))
11721 tree new_var
= lookup_decl (var
, ctx
);
11723 type
= TREE_TYPE (var
);
11724 if (omp_is_reference (var
))
11725 type
= TREE_TYPE (type
);
11726 if ((INTEGRAL_TYPE_P (type
)
11727 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
11728 || TREE_CODE (type
) == POINTER_TYPE
)
11730 x
= build_receiver_ref (var
, false, ctx
);
11731 if (TREE_CODE (type
) != POINTER_TYPE
)
11732 x
= fold_convert (pointer_sized_int_node
, x
);
11733 x
= fold_convert (type
, x
);
11734 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
11736 if (omp_is_reference (var
))
11738 tree v
= create_tmp_var_raw (type
, get_name (var
));
11739 gimple_add_tmp_var (v
);
11740 TREE_ADDRESSABLE (v
) = 1;
11741 gimple_seq_add_stmt (&new_body
,
11742 gimple_build_assign (v
, x
));
11743 x
= build_fold_addr_expr (v
);
11745 gimple_seq_add_stmt (&new_body
,
11746 gimple_build_assign (new_var
, x
));
11750 x
= build_receiver_ref (var
, !omp_is_reference (var
), ctx
);
11751 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
11753 gimple_seq_add_stmt (&new_body
,
11754 gimple_build_assign (new_var
, x
));
11757 else if (is_variable_sized (var
))
11759 tree pvar
= DECL_VALUE_EXPR (var
);
11760 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11761 pvar
= TREE_OPERAND (pvar
, 0);
11762 gcc_assert (DECL_P (pvar
));
11763 tree new_var
= lookup_decl (pvar
, ctx
);
11764 x
= build_receiver_ref (var
, false, ctx
);
11765 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11766 gimple_seq_add_stmt (&new_body
,
11767 gimple_build_assign (new_var
, x
));
11770 case OMP_CLAUSE_PRIVATE
:
11771 if (is_gimple_omp_oacc (ctx
->stmt
))
11773 var
= OMP_CLAUSE_DECL (c
);
11774 if (omp_is_reference (var
))
11776 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
11777 tree new_var
= lookup_decl (var
, ctx
);
11778 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
11779 if (TREE_CONSTANT (x
))
11781 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
11783 gimple_add_tmp_var (x
);
11784 TREE_ADDRESSABLE (x
) = 1;
11785 x
= build_fold_addr_expr_loc (clause_loc
, x
);
11790 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
11791 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11792 gimple_seq_add_stmt (&new_body
,
11793 gimple_build_assign (new_var
, x
));
11796 case OMP_CLAUSE_USE_DEVICE_PTR
:
11797 case OMP_CLAUSE_IS_DEVICE_PTR
:
11798 var
= OMP_CLAUSE_DECL (c
);
11799 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
11800 x
= build_sender_ref (var
, ctx
);
11802 x
= build_receiver_ref (var
, false, ctx
);
11803 if (is_variable_sized (var
))
11805 tree pvar
= DECL_VALUE_EXPR (var
);
11806 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11807 pvar
= TREE_OPERAND (pvar
, 0);
11808 gcc_assert (DECL_P (pvar
));
11809 tree new_var
= lookup_decl (pvar
, ctx
);
11810 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11811 gimple_seq_add_stmt (&new_body
,
11812 gimple_build_assign (new_var
, x
));
11814 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
11816 tree new_var
= lookup_decl (var
, ctx
);
11817 new_var
= DECL_VALUE_EXPR (new_var
);
11818 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
11819 new_var
= TREE_OPERAND (new_var
, 0);
11820 gcc_assert (DECL_P (new_var
));
11821 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11822 gimple_seq_add_stmt (&new_body
,
11823 gimple_build_assign (new_var
, x
));
11827 tree type
= TREE_TYPE (var
);
11828 tree new_var
= lookup_decl (var
, ctx
);
11829 if (omp_is_reference (var
))
11831 type
= TREE_TYPE (type
);
11832 if (TREE_CODE (type
) != ARRAY_TYPE
)
11834 tree v
= create_tmp_var_raw (type
, get_name (var
));
11835 gimple_add_tmp_var (v
);
11836 TREE_ADDRESSABLE (v
) = 1;
11837 x
= fold_convert (type
, x
);
11838 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
11840 gimple_seq_add_stmt (&new_body
,
11841 gimple_build_assign (v
, x
));
11842 x
= build_fold_addr_expr (v
);
11845 new_var
= DECL_VALUE_EXPR (new_var
);
11846 x
= fold_convert (TREE_TYPE (new_var
), x
);
11847 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11848 gimple_seq_add_stmt (&new_body
,
11849 gimple_build_assign (new_var
, x
));
11853 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
11854 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
11855 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
11856 or references to VLAs. */
11857 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11858 switch (OMP_CLAUSE_CODE (c
))
11863 case OMP_CLAUSE_MAP
:
11864 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11865 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
11867 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
11868 poly_int64 offset
= 0;
11870 var
= OMP_CLAUSE_DECL (c
);
11872 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
11873 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
11875 && varpool_node::get_create (var
)->offloadable
)
11877 if (TREE_CODE (var
) == INDIRECT_REF
11878 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
11879 var
= TREE_OPERAND (var
, 0);
11880 if (TREE_CODE (var
) == COMPONENT_REF
)
11882 var
= get_addr_base_and_unit_offset (var
, &offset
);
11883 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
11885 else if (DECL_SIZE (var
)
11886 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
11888 tree var2
= DECL_VALUE_EXPR (var
);
11889 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
11890 var2
= TREE_OPERAND (var2
, 0);
11891 gcc_assert (DECL_P (var2
));
11894 tree new_var
= lookup_decl (var
, ctx
), x
;
11895 tree type
= TREE_TYPE (new_var
);
11897 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
11898 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
11901 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
11903 new_var
= build2 (MEM_REF
, type
,
11904 build_fold_addr_expr (new_var
),
11905 build_int_cst (build_pointer_type (type
),
11908 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
11910 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
11911 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
11912 new_var
= build2 (MEM_REF
, type
,
11913 build_fold_addr_expr (new_var
),
11914 build_int_cst (build_pointer_type (type
),
11918 is_ref
= omp_is_reference (var
);
11919 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
11921 bool ref_to_array
= false;
11924 type
= TREE_TYPE (type
);
11925 if (TREE_CODE (type
) == ARRAY_TYPE
)
11927 type
= build_pointer_type (type
);
11928 ref_to_array
= true;
11931 else if (TREE_CODE (type
) == ARRAY_TYPE
)
11933 tree decl2
= DECL_VALUE_EXPR (new_var
);
11934 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
11935 decl2
= TREE_OPERAND (decl2
, 0);
11936 gcc_assert (DECL_P (decl2
));
11938 type
= TREE_TYPE (new_var
);
11940 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
11941 x
= fold_convert_loc (clause_loc
, type
, x
);
11942 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
11944 tree bias
= OMP_CLAUSE_SIZE (c
);
11946 bias
= lookup_decl (bias
, ctx
);
11947 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
11948 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
11950 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
11951 TREE_TYPE (x
), x
, bias
);
11954 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
11955 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11956 if (is_ref
&& !ref_to_array
)
11958 tree t
= create_tmp_var_raw (type
, get_name (var
));
11959 gimple_add_tmp_var (t
);
11960 TREE_ADDRESSABLE (t
) = 1;
11961 gimple_seq_add_stmt (&new_body
,
11962 gimple_build_assign (t
, x
));
11963 x
= build_fold_addr_expr_loc (clause_loc
, t
);
11965 gimple_seq_add_stmt (&new_body
,
11966 gimple_build_assign (new_var
, x
));
11969 else if (OMP_CLAUSE_CHAIN (c
)
11970 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
11972 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
11973 == GOMP_MAP_FIRSTPRIVATE_POINTER
11974 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
11975 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
11978 case OMP_CLAUSE_PRIVATE
:
11979 var
= OMP_CLAUSE_DECL (c
);
11980 if (is_variable_sized (var
))
11982 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
11983 tree new_var
= lookup_decl (var
, ctx
);
11984 tree pvar
= DECL_VALUE_EXPR (var
);
11985 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
11986 pvar
= TREE_OPERAND (pvar
, 0);
11987 gcc_assert (DECL_P (pvar
));
11988 tree new_pvar
= lookup_decl (pvar
, ctx
);
11989 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
11990 tree al
= size_int (DECL_ALIGN (var
));
11991 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
11992 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
11993 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
11994 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
11995 gimple_seq_add_stmt (&new_body
,
11996 gimple_build_assign (new_pvar
, x
));
11998 else if (omp_is_reference (var
) && !is_gimple_omp_oacc (ctx
->stmt
))
12000 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
12001 tree new_var
= lookup_decl (var
, ctx
);
12002 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
12003 if (TREE_CONSTANT (x
))
12008 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
12009 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
12010 tree al
= size_int (TYPE_ALIGN (rtype
));
12011 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
12014 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
12015 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
12016 gimple_seq_add_stmt (&new_body
,
12017 gimple_build_assign (new_var
, x
));
12022 gimple_seq fork_seq
= NULL
;
12023 gimple_seq join_seq
= NULL
;
12025 if (is_oacc_parallel (ctx
))
12027 /* If there are reductions on the offloaded region itself, treat
12028 them as a dummy GANG loop. */
12029 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
12031 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
12032 false, NULL
, NULL
, &fork_seq
, &join_seq
, ctx
);
12035 gimple_seq_add_seq (&new_body
, fork_seq
);
12036 gimple_seq_add_seq (&new_body
, tgt_body
);
12037 gimple_seq_add_seq (&new_body
, join_seq
);
12040 new_body
= maybe_catch_exception (new_body
);
12042 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12043 gimple_omp_set_body (stmt
, new_body
);
12046 bind
= gimple_build_bind (NULL
, NULL
,
12047 tgt_bind
? gimple_bind_block (tgt_bind
)
12049 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12050 gimple_bind_add_seq (bind
, ilist
);
12051 gimple_bind_add_stmt (bind
, stmt
);
12052 gimple_bind_add_seq (bind
, olist
);
12054 pop_gimplify_context (NULL
);
12058 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12059 gimple_bind_add_stmt (dep_bind
, bind
);
12060 gimple_bind_add_seq (dep_bind
, dep_olist
);
12061 pop_gimplify_context (dep_bind
);
12065 /* Expand code for an OpenMP teams directive. */
/* NOTE(review): this excerpt is extraction-damaged -- statements are split
   across lines and some original lines (return type, braces, else-arms,
   trailing arguments) are missing, so the code below is not compilable
   as-is and the notes are best-effort.
   lower_omp_teams lowers the GIMPLE_OMP_TEAMS statement at *GSI_P in
   context CTX: it wraps the construct in a fresh GIMPLE_BIND, gimplifies
   the NUM_TEAMS and THREAD_LIMIT clause expressions (defaulting each to 0
   when the clause is absent), lowers data-sharing/reduction clauses and
   the body, and -- unless the teams statement is a grid-phony placeholder
   -- emits a call to the GOMP_teams runtime builtin plus an OMP return.  */
12068 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12070 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
12071 push_gimplify_context ();
/* Replace the teams statement with a new bind; the statement itself is
   re-added into the bind's body below.  */
12073 tree block
= make_node (BLOCK
);
12074 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
12075 gsi_replace (gsi_p
, bind
, true);
12076 gimple_seq bind_body
= NULL
;
12077 gimple_seq dlist
= NULL
;
12078 gimple_seq olist
= NULL
;
/* num_teams: clause expression converted to unsigned, or 0 if absent.  */
12080 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
12081 OMP_CLAUSE_NUM_TEAMS
);
12082 if (num_teams
== NULL_TREE
)
12083 num_teams
= build_int_cst (unsigned_type_node
, 0);
12086 num_teams
= OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams
);
12087 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
12088 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
/* thread_limit: same treatment as num_teams.  */
12090 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
12091 OMP_CLAUSE_THREAD_LIMIT
);
12092 if (thread_limit
== NULL_TREE
)
12093 thread_limit
= build_int_cst (unsigned_type_node
, 0);
12096 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
12097 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
12098 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
/* Lower data-sharing clauses, the construct body, and reductions.  */
12102 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
12103 &bind_body
, &dlist
, ctx
, NULL
);
12104 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
12105 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
,
/* For a real (non-grid-phony) teams construct, emit the statement followed
   by a call to the GOMP_teams runtime entry point with the two gimplified
   operands.  */
12107 if (!gimple_omp_teams_grid_phony (teams_stmt
))
12109 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
12110 location_t loc
= gimple_location (teams_stmt
);
12111 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS
);
12112 gimple
*call
= gimple_build_call (decl
, 2, num_teams
, thread_limit
);
12113 gimple_set_location (call
, loc
);
12114 gimple_seq_add_stmt (&bind_body
, call
);
/* Move the lowered body into the bind, then the reduction (olist) and
   destructor/copy-out (dlist) sequences.  */
12117 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
12118 gimple_omp_set_body (teams_stmt
, NULL
);
12119 gimple_seq_add_seq (&bind_body
, olist
);
12120 gimple_seq_add_seq (&bind_body
, dlist
);
12121 if (!gimple_omp_teams_grid_phony (teams_stmt
))
12122 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
12123 gimple_bind_set_body (bind
, bind_body
);
12125 pop_gimplify_context (bind
);
/* Attach the context's variables to the new bind/block.  */
12127 gimple_bind_append_vars (bind
, ctx
->block_vars
);
12128 BLOCK_VARS (block
) = ctx
->block_vars
;
12129 if (BLOCK_VARS (block
))
12130 TREE_USED (block
) = 1;
12133 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
/* NOTE(review): extraction-damaged excerpt (missing return type/braces).
   Lowers the body of the grid-body construct at *GSI_P in CTX, then
   appends a non-wait OMP return statement to that body.  */
12136 lower_omp_grid_body (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12138 gimple
*stmt
= gsi_stmt (*gsi_p
);
12139 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
12140 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt
),
12141 gimple_build_omp_return (false));
12145 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12146 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12147 of OMP context, but with task_shared_vars set. */
/* NOTE(review): extraction-damaged excerpt; the return statements and some
   parameter text are missing.  Visible logic: flag VAR_P decls with a
   DECL_VALUE_EXPR (when DATA is NULL) or decls recorded in the
   task_shared_vars bitmap; recompute TREE_CONSTANT on ADDR_EXPRs whose
   operand may have been privatized; prune the walk at types/decls.  */
12150 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
12155 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12156 if (VAR_P (t
) && data
== NULL
&& DECL_HAS_VALUE_EXPR_P (t
))
12159 if (task_shared_vars
12161 && bitmap_bit_p (task_shared_vars
, DECL_UID (t
)))
12164 /* If a global variable has been privatized, TREE_CONSTANT on
12165 ADDR_EXPR might be wrong. */
12166 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
12167 recompute_tree_invariant_for_addr_expr (t
);
/* Do not descend into types or declarations.  */
12169 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
12173 /* Data to be communicated between lower_omp_regimplify_operands and
12174 lower_omp_regimplify_operands_p. */
/* NOTE(review): extraction-damaged excerpt -- the struct's members are not
   all visible here; usage below shows at least a `ctx` (omp_context *) and
   a `decls` vector pointer.  */
12176 struct lower_omp_regimplify_operands_data
12182 /* Helper function for lower_omp_regimplify_operands. Find
12183 omp_member_access_dummy_var vars and adjust temporarily their
12184 DECL_VALUE_EXPRs if needed. */
/* Walk callback: for each tree that is an omp_member_access_dummy_var,
   save its current DECL_VALUE_EXPR (and the var itself) on ldata->decls
   so the caller can restore it later, then install a remapped
   DECL_VALUE_EXPR built via unshare_and_remap.  */
12187 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
12190 tree t
= omp_member_access_dummy_var (*tp
);
12193 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
12194 lower_omp_regimplify_operands_data
*ldata
12195 = (lower_omp_regimplify_operands_data
*) wi
->info
;
12196 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
/* Push the old value expr and the var so the caller can undo this.  */
12199 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
12200 ldata
->decls
->safe_push (*tp
);
12201 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
12202 SET_DECL_VALUE_EXPR (*tp
, v
);
12205 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
12209 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12210 of omp_member_access_dummy_var vars during regimplification. */
/* NOTE(review): extraction-damaged excerpt (missing braces / some lines).
   Visible flow: collect and temporarily replace DECL_VALUE_EXPRs via
   walk_gimple_op + lower_omp_regimplify_operands_p, regimplify STMT's
   operands, then pop (var, old value-expr) pairs off `decls` and restore
   them.  Note the pops come off in reverse push order.  */
12213 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
12214 gimple_stmt_iterator
*gsi_p
)
12216 auto_vec
<tree
, 10> decls
;
12219 struct walk_stmt_info wi
;
12220 memset (&wi
, '\0', sizeof (wi
));
12221 struct lower_omp_regimplify_operands_data data
;
12223 data
.decls
= &decls
;
12225 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
12227 gimple_regimplify_operands (stmt
, gsi_p
);
/* Restore the saved DECL_VALUE_EXPRs.  */
12228 while (!decls
.is_empty ())
12230 tree t
= decls
.pop ();
12231 tree v
= decls
.pop ();
12232 SET_DECL_VALUE_EXPR (t
, v
);
/* NOTE(review): this excerpt is extraction-damaged -- statements are split
   across lines and several original lines (braces, `break's, some case
   labels, declarations and arguments) are missing, so the code below is
   not compilable as-is and the commentary is best-effort.
   lower_omp_1 lowers the single statement at *GSI_P within OMP context
   CTX (NULL when outside any OMP construct, with task_shared_vars
   possibly set): it dispatches on gimple_code to per-construct lowering
   routines (lower_omp_taskreg, lower_omp_for, lower_omp_sections,
   lower_omp_target, lower_omp_teams, ...), recurses into compound
   statements (bind, try, catch, eh-filter, transaction), rewrites GOMP
   barrier/cancel builtin calls inside cancellable regions, handles
   conditional-lastprivate bookkeeping on assignments, and regimplifies
   operands that lower_omp_regimplify_p flags.  */
12237 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12239 gimple
*stmt
= gsi_stmt (*gsi_p
);
12240 struct walk_stmt_info wi
;
12243 if (gimple_has_location (stmt
))
12244 input_location
= gimple_location (stmt
);
12246 if (task_shared_vars
)
12247 memset (&wi
, '\0', sizeof (wi
));
12249 /* If we have issued syntax errors, avoid doing any heavy lifting.
12250 Just replace the OMP directives with a NOP to avoid
12251 confusing RTL expansion. */
12252 if (seen_error () && is_gimple_omp (stmt
))
12254 gsi_replace (gsi_p
, gimple_build_nop (), true);
/* Dispatch on the statement kind.  */
12258 switch (gimple_code (stmt
))
/* GIMPLE_COND: regimplify either side of the condition if needed.  */
12262 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
12263 if ((ctx
|| task_shared_vars
)
12264 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
12265 lower_omp_regimplify_p
,
12266 ctx
? NULL
: &wi
, NULL
)
12267 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
12268 lower_omp_regimplify_p
,
12269 ctx
? NULL
: &wi
, NULL
)))
12270 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
/* Compound statements: recurse into each contained sequence.  */
12274 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
12276 case GIMPLE_EH_FILTER
:
12277 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
12280 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
12281 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
12283 case GIMPLE_TRANSACTION
:
12284 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
12288 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
12289 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
/* OMP constructs: look up the scan-phase context and lower.  For
   cancellable parallel/task/for/sections regions, create the label
   that cancellation checks will branch to.  */
12291 case GIMPLE_OMP_PARALLEL
:
12292 case GIMPLE_OMP_TASK
:
12293 ctx
= maybe_lookup_ctx (stmt
);
12295 if (ctx
->cancellable
)
12296 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12297 lower_omp_taskreg (gsi_p
, ctx
);
12299 case GIMPLE_OMP_FOR
:
12300 ctx
= maybe_lookup_ctx (stmt
);
12302 if (ctx
->cancellable
)
12303 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12304 lower_omp_for (gsi_p
, ctx
);
12306 case GIMPLE_OMP_SECTIONS
:
12307 ctx
= maybe_lookup_ctx (stmt
);
12309 if (ctx
->cancellable
)
12310 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
12311 lower_omp_sections (gsi_p
, ctx
);
12313 case GIMPLE_OMP_SINGLE
:
12314 ctx
= maybe_lookup_ctx (stmt
);
12316 lower_omp_single (gsi_p
, ctx
);
12318 case GIMPLE_OMP_MASTER
:
12319 ctx
= maybe_lookup_ctx (stmt
);
12321 lower_omp_master (gsi_p
, ctx
);
12323 case GIMPLE_OMP_TASKGROUP
:
12324 ctx
= maybe_lookup_ctx (stmt
);
12326 lower_omp_taskgroup (gsi_p
, ctx
);
12328 case GIMPLE_OMP_ORDERED
:
12329 ctx
= maybe_lookup_ctx (stmt
);
12331 lower_omp_ordered (gsi_p
, ctx
);
12333 case GIMPLE_OMP_SCAN
:
12334 ctx
= maybe_lookup_ctx (stmt
);
12336 lower_omp_scan (gsi_p
, ctx
);
12338 case GIMPLE_OMP_CRITICAL
:
12339 ctx
= maybe_lookup_ctx (stmt
);
12341 lower_omp_critical (gsi_p
, ctx
);
12343 case GIMPLE_OMP_ATOMIC_LOAD
:
12344 if ((ctx
|| task_shared_vars
)
12345 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12346 as_a
<gomp_atomic_load
*> (stmt
)),
12347 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
12348 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
12350 case GIMPLE_OMP_TARGET
:
12351 ctx
= maybe_lookup_ctx (stmt
);
12353 lower_omp_target (gsi_p
, ctx
);
12355 case GIMPLE_OMP_TEAMS
:
12356 ctx
= maybe_lookup_ctx (stmt
);
/* Host teams are lowered like a task region; device teams get the
   dedicated teams lowering.  */
12358 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
12359 lower_omp_taskreg (gsi_p
, ctx
);
12361 lower_omp_teams (gsi_p
, ctx
);
12363 case GIMPLE_OMP_GRID_BODY
:
12364 ctx
= maybe_lookup_ctx (stmt
);
12366 lower_omp_grid_body (gsi_p
, ctx
);
/* GIMPLE_CALL (case label missing from excerpt): rewrite GOMP
   barrier/cancel/cancellation-point builtins inside cancellable
   regions to their *_CANCEL variants, capture the result in a
   temporary, and branch to the region's cancel label when it is
   true, otherwise fall through.  */
12370 call_stmt
= as_a
<gcall
*> (stmt
);
12371 fndecl
= gimple_call_fndecl (call_stmt
);
12373 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
12374 switch (DECL_FUNCTION_CODE (fndecl
))
12376 case BUILT_IN_GOMP_BARRIER
:
12380 case BUILT_IN_GOMP_CANCEL
:
12381 case BUILT_IN_GOMP_CANCELLATION_POINT
:
/* A SECTION's cancellation applies to the enclosing SECTIONS.  */
12384 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
12385 cctx
= cctx
->outer
;
12386 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
12387 if (!cctx
->cancellable
)
/* In a non-cancellable region a cancellation point is a no-op.  */
12389 if (DECL_FUNCTION_CODE (fndecl
)
12390 == BUILT_IN_GOMP_CANCELLATION_POINT
)
12392 stmt
= gimple_build_nop ();
12393 gsi_replace (gsi_p
, stmt
, false);
12397 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
12399 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
12400 gimple_call_set_fndecl (call_stmt
, fndecl
);
12401 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
12404 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
12405 gimple_call_set_lhs (call_stmt
, lhs
);
12406 tree fallthru_label
;
12407 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
12409 g
= gimple_build_label (fallthru_label
);
12410 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
12411 g
= gimple_build_cond (NE_EXPR
, lhs
,
12412 fold_convert (TREE_TYPE (lhs
),
12413 boolean_false_node
),
12414 cctx
->cancel_label
, fallthru_label
);
12415 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
/* GIMPLE_ASSIGN: walk enclosing contexts looking for conditional
   lastprivate bookkeeping; when the assignment's base LHS is in a
   context's lastprivate_conditional_map, record the iteration by
   assigning the _condtemp_ clause decl to the mapped temporary.  */
12422 case GIMPLE_ASSIGN
:
12423 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
12425 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
12426 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
12427 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
12428 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
12429 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
12430 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
12431 && (gimple_omp_target_kind (up
->stmt
)
12432 == GF_OMP_TARGET_KIND_DATA
)))
12434 else if (!up
->lastprivate_conditional_map
)
12436 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
/* Strip one level for MEM_REFs through REFERENCE_TYPE decls.  */
12437 if (TREE_CODE (lhs
) == MEM_REF
12438 && DECL_P (TREE_OPERAND (lhs
, 0))
12439 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
12440 0))) == REFERENCE_TYPE
)
12441 lhs
= TREE_OPERAND (lhs
, 0);
12443 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
12446 if (up
->combined_into_simd_safelen1
)
12449 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
12452 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
12453 clauses
= gimple_omp_for_clauses (up
->stmt
);
12455 clauses
= gimple_omp_sections_clauses (up
->stmt
);
12456 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
)
12457 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
12458 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
12459 OMP_CLAUSE__CONDTEMP_
);
12460 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
12461 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
12462 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
/* Default/fall-through: regimplify any operand that needs it.  */
12469 if ((ctx
|| task_shared_vars
)
12470 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
12473 /* Just remove clobbers, this should happen only if we have
12474 "privatized" local addressable variables in SIMD regions,
12475 the clobber isn't needed in that case and gimplifying address
12476 of the ARRAY_REF into a pointer and creating MEM_REF based
12477 clobber would create worse code than we get with the clobber
12479 if (gimple_clobber_p (stmt
))
12481 gsi_replace (gsi_p
, gimple_build_nop (), true);
12484 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
/* NOTE(review): extraction-damaged excerpt (missing return type, braces and
   the body of the second loop).  lower_omp walks every statement in *BODY,
   lowering each via lower_omp_1 under context CTX, then -- inside
   offloading or taskreg regions -- makes a second pass (visible loop header
   only; its body, presumably a fold_stmt call, is missing from this
   excerpt -- TODO confirm against upstream) before restoring
   input_location.  */
12491 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
12493 location_t saved_location
= input_location
;
12494 gimple_stmt_iterator gsi
;
12495 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
12496 lower_omp_1 (&gsi
, ctx
);
12497 /* During gimplification, we haven't folded statements inside offloading
12498 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12499 if (target_nesting_level
|| taskreg_nesting_level
)
12500 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
12502 input_location
= saved_location
;
12505 /* Main entry point. */
/* NOTE(review): extraction-damaged excerpt (local declarations, braces and
   early-return lines are missing).  Pass driver: bail out quickly when no
   OpenACC/OpenMP/OpenMP-SIMD flag is set; otherwise build the context
   splay tree, optionally gridify targets for HSA, scan the function body
   for OMP statements, finish the taskreg scans, lower everything when any
   context was created, and finally tear down pass-global state.  */
12507 static unsigned int
12508 execute_lower_omp (void)
12514 /* This pass always runs, to provide PROP_gimple_lomp.
12515 But often, there is nothing to do. */
12516 if (flag_openacc
== 0 && flag_openmp
== 0
12517 && flag_openmp_simd
== 0)
12520 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
12521 delete_omp_context
);
12523 body
= gimple_body (current_function_decl
);
12525 if (hsa_gen_requested_p ())
12526 omp_grid_gridify_all_targets (&body
);
/* Phase 1: scan for OMP statements and data-sharing clauses.  */
12528 scan_omp (&body
, NULL
);
12529 gcc_assert (taskreg_nesting_level
== 0);
12530 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
12531 finish_taskreg_scan (ctx
);
12532 taskreg_contexts
.release ();
/* Phase 2: lower, but only if the scan created any context.  */
12534 if (all_contexts
->root
)
12536 if (task_shared_vars
)
12537 push_gimplify_context ();
12538 lower_omp (&body
, NULL
);
12539 if (task_shared_vars
)
12540 pop_gimplify_context (NULL
);
/* Tear down pass-global state.  */
12545 splay_tree_delete (all_contexts
);
12546 all_contexts
= NULL
;
12548 BITMAP_FREE (task_shared_vars
);
12550 /* If current function is a method, remove artificial dummy VAR_DECL created
12551 for non-static data member privatization, they aren't needed for
12552 debuginfo nor anything else, have been already replaced everywhere in the
12553 IL and cause problems with LTO. */
12554 if (DECL_ARGUMENTS (current_function_decl
)
12555 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
12556 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
12558 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
/* Pass registration for "omplower": metadata, the gimple_opt_pass wrapper
   that invokes execute_lower_omp, and the factory function.  The pass
   requires any GIMPLE and provides PROP_gimple_lomp and
   PROP_gimple_lomp_dev.  (NOTE(review): extraction-damaged excerpt --
   braces, access specifiers and the surrounding anonymous-namespace
   opening are missing.)  */
12564 const pass_data pass_data_lower_omp
=
12566 GIMPLE_PASS
, /* type */
12567 "omplower", /* name */
12568 OPTGROUP_OMP
, /* optinfo_flags */
12569 TV_NONE
, /* tv_id */
12570 PROP_gimple_any
, /* properties_required */
12571 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
12572 0, /* properties_destroyed */
12573 0, /* todo_flags_start */
12574 0, /* todo_flags_finish */
12577 class pass_lower_omp
: public gimple_opt_pass
12580 pass_lower_omp (gcc::context
*ctxt
)
12581 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
12584 /* opt_pass methods: */
12585 virtual unsigned int execute (function
*) { return execute_lower_omp (); }
12587 }; // class pass_lower_omp
12589 } // anon namespace
/* Factory used by the pass manager to instantiate the pass.  */
12592 make_pass_lower_omp (gcc::context
*ctxt
)
12594 return new pass_lower_omp (ctxt
);
12597 /* The following is a utility to diagnose structured block violations.
12598 It is not part of the "omplower" pass, as that's invoked too late. It
12599 should be invoked by the respective front ends after gimplification. */
/* Map from label decl (splay_tree_key) to the innermost enclosing OMP
   construct recorded by diagnose_sb_1.  */
12601 static splay_tree all_labels
;
12603 /* Check for mismatched contexts and generate an error if needed. Return
12604 true if an error is detected. */
/* NOTE(review): extraction-damaged excerpt -- return statements, the code
   choosing KIND ("OpenACC" vs "OpenMP"), and several braces are missing,
   so the exit/enter decision logic below is only partially visible.  On a
   mismatch between the branch's OMP context and the label's OMP context it
   emits an "invalid exit from/entry to/branch to/from ... structured
   block" error and replaces the offending statement with a NOP.  */
12607 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
12608 gimple
*branch_ctx
, gimple
*label_ctx
)
12610 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
12611 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
/* Same context on both sides: no violation.  */
12613 if (label_ctx
== branch_ctx
)
12616 const char* kind
= NULL
;
12620 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
12621 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
12623 gcc_checking_assert (kind
== NULL
);
12629 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
12633 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
12634 so we could traverse it and issue a correct "exit" or "enter" error
12635 message upon a structured block violation.
12637 We built the context by building a list with tree_cons'ing, but there is
12638 no easy counterpart in gimple tuples. It seems like far too much work
12639 for issuing exit/enter error messages. If someone really misses the
12640 distinct error message... patches welcome. */
12643 /* Try to avoid confusing the user by producing an error message
12644 with correct "exit" or "enter" verbiage. We prefer "exit"
12645 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
12646 if (branch_ctx
== NULL
)
12652 if (TREE_VALUE (label_ctx
) == branch_ctx
)
12657 label_ctx
= TREE_CHAIN (label_ctx
);
12662 error ("invalid exit from %s structured block", kind
);
12664 error ("invalid entry to %s structured block", kind
);
12667 /* If it's obvious we have an invalid entry, be specific about the error. */
12668 if (branch_ctx
== NULL
)
12669 error ("invalid entry to %s structured block", kind
);
12672 /* Otherwise, be vague and lazy, but efficient. */
12673 error ("invalid branch to/from %s structured block", kind
);
/* Neutralize the bad branch so later passes don't trip over it.  */
12676 gsi_replace (gsi_p
, gimple_build_nop (), false);
12680 /* Pass 1: Create a minimal tree of structured blocks, and record
12681 where each label is found. */
/* NOTE(review): extraction-damaged excerpt (return type, braces, `break's
   and the GIMPLE_LABEL case label are missing).  walk_gimple_seq callback:
   wi->info carries the innermost enclosing OMP construct; on entering an
   OMP construct it becomes the new context for walking the body, and each
   GIMPLE_LABEL is recorded in all_labels mapped to the current context.  */
12684 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
12685 struct walk_stmt_info
*wi
)
12687 gimple
*context
= (gimple
*) wi
->info
;
12688 gimple
*inner_context
;
12689 gimple
*stmt
= gsi_stmt (*gsi_p
);
12691 *handled_ops_p
= true;
12693 switch (gimple_code (stmt
))
12697 case GIMPLE_OMP_PARALLEL
:
12698 case GIMPLE_OMP_TASK
:
12699 case GIMPLE_OMP_SECTIONS
:
12700 case GIMPLE_OMP_SINGLE
:
12701 case GIMPLE_OMP_SECTION
:
12702 case GIMPLE_OMP_MASTER
:
12703 case GIMPLE_OMP_ORDERED
:
12704 case GIMPLE_OMP_SCAN
:
12705 case GIMPLE_OMP_CRITICAL
:
12706 case GIMPLE_OMP_TARGET
:
12707 case GIMPLE_OMP_TEAMS
:
12708 case GIMPLE_OMP_TASKGROUP
:
12709 /* The minimal context here is just the current OMP construct. */
12710 inner_context
= stmt
;
12711 wi
->info
= inner_context
;
12712 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
12713 wi
->info
= context
;
/* OMP_FOR additionally has a pre-body sequence to walk.  */
12716 case GIMPLE_OMP_FOR
:
12717 inner_context
= stmt
;
12718 wi
->info
= inner_context
;
12719 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12721 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
12722 diagnose_sb_1
, NULL
, wi
);
12723 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
12724 wi
->info
= context
;
/* GIMPLE_LABEL (case label missing from excerpt): remember which OMP
   context this label lives in.  */
12728 splay_tree_insert (all_labels
,
12729 (splay_tree_key
) gimple_label_label (
12730 as_a
<glabel
*> (stmt
)),
12731 (splay_tree_value
) context
);
12741 /* Pass 2: Check each branch and see if its context differs from that of
12742 the destination label's context. */
/* NOTE(review): extraction-damaged excerpt (return type, braces, `break's,
   some case labels and declarations are missing).  walk_gimple_seq_mod
   callback: wi->info again tracks the innermost OMP construct; for each
   branching statement (cond, goto, switch, return) the target label's
   recorded context from all_labels is compared against the current one
   via diagnose_sb_0, which reports violations.  */
12745 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
12746 struct walk_stmt_info
*wi
)
12748 gimple
*context
= (gimple
*) wi
->info
;
12750 gimple
*stmt
= gsi_stmt (*gsi_p
);
12752 *handled_ops_p
= true;
12754 switch (gimple_code (stmt
))
12758 case GIMPLE_OMP_PARALLEL
:
12759 case GIMPLE_OMP_TASK
:
12760 case GIMPLE_OMP_SECTIONS
:
12761 case GIMPLE_OMP_SINGLE
:
12762 case GIMPLE_OMP_SECTION
:
12763 case GIMPLE_OMP_MASTER
:
12764 case GIMPLE_OMP_ORDERED
:
12765 case GIMPLE_OMP_SCAN
:
12766 case GIMPLE_OMP_CRITICAL
:
12767 case GIMPLE_OMP_TARGET
:
12768 case GIMPLE_OMP_TEAMS
:
12769 case GIMPLE_OMP_TASKGROUP
:
/* Recurse into the construct body with this statement as context.  */
12771 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
12772 wi
->info
= context
;
12775 case GIMPLE_OMP_FOR
:
12777 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12779 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
12780 diagnose_sb_2
, NULL
, wi
);
12781 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
12782 wi
->info
= context
;
/* GIMPLE_COND: check both the true and the false destination.  */
12787 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
12788 tree lab
= gimple_cond_true_label (cond_stmt
);
12791 n
= splay_tree_lookup (all_labels
,
12792 (splay_tree_key
) lab
);
12793 diagnose_sb_0 (gsi_p
, context
,
12794 n
? (gimple
*) n
->value
: NULL
);
12796 lab
= gimple_cond_false_label (cond_stmt
);
12799 n
= splay_tree_lookup (all_labels
,
12800 (splay_tree_key
) lab
);
12801 diagnose_sb_0 (gsi_p
, context
,
12802 n
? (gimple
*) n
->value
: NULL
);
/* GIMPLE_GOTO (case label missing from excerpt): computed gotos with a
   non-LABEL_DECL destination are skipped.  */
12809 tree lab
= gimple_goto_dest (stmt
);
12810 if (TREE_CODE (lab
) != LABEL_DECL
)
12813 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
12814 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
/* GIMPLE_SWITCH: check every case destination, stopping at the first
   reported violation.  */
12818 case GIMPLE_SWITCH
:
12820 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
12822 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
12824 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
12825 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
12826 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
/* A return from inside any OMP construct is always invalid.  */
12832 case GIMPLE_RETURN
:
12833 diagnose_sb_0 (gsi_p
, context
, NULL
);
/* Driver for the structured-block diagnostics: pass 1 (diagnose_sb_1)
   records each label's OMP context into the all_labels splay tree, pass 2
   (diagnose_sb_2, via the mutating walker since it may replace statements)
   checks every branch against it.  The possibly-modified body is written
   back and all_labels is freed.  (NOTE(review): extraction-damaged excerpt
   -- braces and the final return are missing.)  */
12843 static unsigned int
12844 diagnose_omp_structured_block_errors (void)
12846 struct walk_stmt_info wi
;
12847 gimple_seq body
= gimple_body (current_function_decl
);
12849 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
12851 memset (&wi
, 0, sizeof (wi
));
12852 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
12854 memset (&wi
, 0, sizeof (wi
));
12855 wi
.want_locations
= true;
12856 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
12858 gimple_set_body (current_function_decl
, body
);
12860 splay_tree_delete (all_labels
);
/* Pass registration for "*diagnose_omp_blocks": gated on any of the
   OpenACC/OpenMP/OpenMP-SIMD flags, it runs the structured-block
   diagnostics defined above.  (NOTE(review): extraction-damaged excerpt --
   braces, access specifiers and the anonymous-namespace opening are
   missing.)  */
12868 const pass_data pass_data_diagnose_omp_blocks
=
12870 GIMPLE_PASS
, /* type */
12871 "*diagnose_omp_blocks", /* name */
12872 OPTGROUP_OMP
, /* optinfo_flags */
12873 TV_NONE
, /* tv_id */
12874 PROP_gimple_any
, /* properties_required */
12875 0, /* properties_provided */
12876 0, /* properties_destroyed */
12877 0, /* todo_flags_start */
12878 0, /* todo_flags_finish */
12881 class pass_diagnose_omp_blocks
: public gimple_opt_pass
12884 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
12885 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
12888 /* opt_pass methods: */
12889 virtual bool gate (function
*)
12891 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
12893 virtual unsigned int execute (function
*)
12895 return diagnose_omp_structured_block_errors ();
12898 }; // class pass_diagnose_omp_blocks
12900 } // anon namespace
/* Factory used by the pass manager to instantiate the pass.  */
12903 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
12905 return new pass_diagnose_omp_blocks (ctxt
);
/* Garbage-collector roots generated for this file.  */
12909 #include "gt-omp-low.h"