/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
        v = TREE_OPERAND (v, 0);
        continue;
      case PARM_DECL:
        if (DECL_CONTEXT (v) == current_function_decl
            && DECL_ARTIFICIAL (v)
            && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
          return v;
        return NULL_TREE;
      default:
        return NULL_TREE;
      }
}

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
         && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form
   allows for the variable form to not have been entered, otherwise we
   assert that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (TREE_ADDRESSABLE (decl))
        return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
         for these.  */
      if (TREE_READONLY (decl)
          || ((TREE_CODE (decl) == RESULT_DECL
               || TREE_CODE (decl) == PARM_DECL)
              && DECL_BY_REFERENCE (decl)))
        return false;

      /* Disallow copy-in/out in nested parallel if
         decl is shared in outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
              break;

          if (up)
            {
              tree c;

              for (c = gimple_omp_taskreg_clauses (up->stmt);
                   c; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                    && OMP_CLAUSE_DECL (c) == decl)
                  break;

              if (c)
                goto maybe_mark_addressable_and_ret;
            }
        }

      /* For tasks avoid using copy-in/out.  As tasks can be
         deferred or executed in a different thread, when GOMP_task
         returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
        {
          tree outer;
        maybe_mark_addressable_and_ret:
          outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
                     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
            && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
           || (code == OMP_CLAUSE_PRIVATE
               && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
         even private vars in its linear etc. clauses.
         Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
         to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
        x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
        x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
        x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
        = splay_tree_lookup (ctx->outer->field_map,
                             (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
        {
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
            x = var;
          else
            x = lookup_decl (var, ctx->outer);
        }
      else
        {
          tree field = (tree) n->value;
          /* If the receiver record type was remapped in the child function,
             remap the field into the new record type.  */
          x = maybe_lookup_field (field, ctx->outer);
          if (x != NULL)
            field = x;

          x = build_simple_mem_ref (ctx->outer->receiver_decl);
          x = omp_build_component_ref (x, field);
          if (use_pointer_for_field (var, ctx->outer))
            x = build_simple_mem_ref (x);
        }
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
        {
          outer = outer->outer;
          gcc_assert (outer
                      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
        }
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
        {
          x = DECL_VALUE_EXPR (var);
          tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
          if (o != t)
            x = unshare_and_remap (x, t, o);
          else
            x = unshare_expr (x);
        }
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
                   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
              || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
        type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
                      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (DECL_SOURCE_LOCATION (var),
                               FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
            {
              sfield = build_decl (DECL_SOURCE_LOCATION (t),
                                   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
        return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
                         TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          DECL_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
                      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          decl = OMP_CLAUSE_DECL (c);
          /* Ignore shared directives in teams construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
              if (is_global_var (odecl))
                break;
              insert_decl_map (&ctx->cb, decl, odecl);
              break;
            }
          gcc_assert (is_taskreg_ctx (ctx));
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              use_pointer_for_field (decl, ctx);
              break;
            }
          by_ref = use_pointer_for_field (decl, NULL);
          if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || omp_is_reference (decl))
            {
              by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;

        case OMP_CLAUSE_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
              && TREE_CODE (decl) == MEM_REF)
            {
              tree t = TREE_OPERAND (decl, 0);
              if (TREE_CODE (t) == POINTER_PLUS_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == INDIRECT_REF
                  || TREE_CODE (t) == ADDR_EXPR)
                t = TREE_OPERAND (t, 0);
              install_var_local (t, ctx);
              if (is_taskreg_ctx (ctx)
                  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
                  && !is_variable_sized (t))
                {
                  by_ref = use_pointer_for_field (t, ctx);
                  install_var_field (t, by_ref, 3, ctx);
                }
              break;
            }
          goto do_private;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
               || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
              && is_gimple_omp_offloaded (ctx->stmt))
            {
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                install_var_field (decl, !omp_is_reference (decl), 3, ctx);
              else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                install_var_field (decl, true, 3, ctx);
              else
                install_var_field (decl, false, 3, ctx);
            }
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                install_var_field (decl, false, 1, ctx);
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || omp_is_reference (decl)))
                {
                  install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_USE_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_field (decl, true, 3, ctx);
          else
            install_var_field (decl, false, 3, ctx);
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              install_var_local (decl2, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          goto do_private;

        case OMP_CLAUSE__LOOPTEMP_:
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          install_var_field (decl, false, 3, ctx);
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;

        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_MAP:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
          decl = OMP_CLAUSE_DECL (c);
          /* Global variables with "omp declare target" attribute
             don't need to be copied, the receiver side will use them
             directly.  However, global variables with "omp declare target link"
             attribute need to be copied.  */
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable
              && !lookup_attribute ("omp declare target link",
                                    DECL_ATTRIBUTES (decl)))
            break;
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
            {
              /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
                 not offloaded; there is nothing to map for those.  */
              if (!is_gimple_omp_offloaded (ctx->stmt)
                  && !POINTER_TYPE_P (TREE_TYPE (decl))
                  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
                break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                  || (OMP_CLAUSE_MAP_KIND (c)
                      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
            {
              if (TREE_CODE (decl) == COMPONENT_REF
                  || (TREE_CODE (decl) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
                      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
                          == REFERENCE_TYPE)))
                break;
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (DECL_P (decl))
            {
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_field (decl2, true, 3, ctx);
                  install_var_local (decl2, ctx);
                  install_var_local (decl, ctx);
                }
              else
                {
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                    install_var_field (decl, true, 7, ctx);
                  else
                    install_var_field (decl, true, 3, ctx,
                                       base_pointers_restrict);
                  if (is_gimple_omp_offloaded (ctx->stmt)
                      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
                    install_var_local (decl, ctx);
                }
            }
          else
            {
              tree base = get_base_address (decl);
              tree nc = OMP_CLAUSE_CHAIN (c);
              if (DECL_P (base)
                  && nc != NULL_TREE
                  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_DECL (nc) == base
                  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
                  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
                {
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
                }
              else
                {
                  if (ctx->outer)
                    {
                      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
                      decl = OMP_CLAUSE_DECL (c);
                    }
                  gcc_assert (!splay_tree_lookup (ctx->field_map,
                                                  (splay_tree_key) decl));
                  tree field
                    = build_decl (OMP_CLAUSE_LOCATION (c),
                                  FIELD_DECL, NULL_TREE, ptr_type_node);
                  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
                  insert_field_into_struct (ctx->record_type, field);
                  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
                                     (splay_tree_value) field);
                }
            }
          break;

        case OMP_CLAUSE__GRIDDIM_:
          if (ctx->outer)
            {
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
            }
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_DEFAULT:
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (decl)
              && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_LINEAR:
        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (is_variable_sized (decl))
            {
              if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
                   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
                  && is_gimple_omp_offloaded (ctx->stmt))
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                  fixup_remapped_decl (decl2, ctx, false);
                }
              install_var_local (decl, ctx);
            }
          fixup_remapped_decl (decl, ctx,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                               && OMP_CLAUSE_PRIVATE_DEBUG (c));
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
              && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) != MEM_REF)
            {
              if (is_variable_sized (decl))
                install_var_local (decl, ctx);
              fixup_remapped_decl (decl, ctx, false);
            }
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_SHARED:
          /* Ignore shared directives in teams construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
                                                                 ctx->outer)))
                break;
              bool by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 11, ctx);
              break;
            }
          fixup_remapped_decl (decl, ctx, false);
          break;

        case OMP_CLAUSE_MAP:
          if (!is_gimple_omp_offloaded (ctx->stmt))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable)
            break;
          if (DECL_P (decl))
            {
              if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
                  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
                  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
                {
                  tree new_decl = lookup_decl (decl, ctx);
                  TREE_TYPE (new_decl)
                    = remap_type (TREE_TYPE (decl), &ctx->cb);
                }
              else if (DECL_SIZE (decl)
                       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  fixup_remapped_decl (decl2, ctx, false);
                  fixup_remapped_decl (decl, ctx, true);
                }
              else
                fixup_remapped_decl (decl, ctx, false);
            }
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_ALIGNED:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__GRIDDIM_:
        case OMP_CLAUSE__SIMT_:
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  gcc_checking_assert (!scan_array_reductions
                       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
            && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
          }
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
                              task_copy ? "_omp_cpyfn" : "_omp_fn");
}

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

1556static void
a68ab351 1557create_omp_child_function (omp_context *ctx, bool task_copy)
953ff289
DN
1558{
1559 tree decl, type, name, t;
1560
5e9d6aa4 1561 name = create_omp_child_function_name (task_copy);
a68ab351
JJ
1562 if (task_copy)
1563 type = build_function_type_list (void_type_node, ptr_type_node,
1564 ptr_type_node, NULL_TREE);
1565 else
1566 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
953ff289 1567
9a771876 1568 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
953ff289 1569
41dbbb37
TS
1570 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1571 || !task_copy);
a68ab351
JJ
1572 if (!task_copy)
1573 ctx->cb.dst_fn = decl;
1574 else
726a989a 1575 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
953ff289
DN
1576
1577 TREE_STATIC (decl) = 1;
1578 TREE_USED (decl) = 1;
1579 DECL_ARTIFICIAL (decl) = 1;
1580 DECL_IGNORED_P (decl) = 0;
1581 TREE_PUBLIC (decl) = 0;
1582 DECL_UNINLINABLE (decl) = 1;
1583 DECL_EXTERNAL (decl) = 0;
1584 DECL_CONTEXT (decl) = NULL_TREE;
50674e96 1585 DECL_INITIAL (decl) = make_node (BLOCK);
01771d43 1586 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
5c38262d 1587 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
f1542d9a
JJ
1588 /* Remove omp declare simd attribute from the new attributes. */
1589 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1590 {
1591 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1592 a = a2;
1593 a = TREE_CHAIN (a);
1594 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1595 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1596 *p = TREE_CHAIN (*p);
1597 else
1598 {
1599 tree chain = TREE_CHAIN (*p);
1600 *p = copy_node (*p);
1601 p = &TREE_CHAIN (*p);
1602 *p = chain;
1603 }
1604 }
5c38262d
JJ
1605 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1606 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1607 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1608 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1609 DECL_FUNCTION_VERSIONED (decl)
1610 = DECL_FUNCTION_VERSIONED (current_function_decl);
1611
9669b00b 1612 if (omp_maybe_offloaded_ctx (ctx))
acf0174b 1613 {
9669b00b
AM
1614 cgraph_node::get_create (decl)->offloadable = 1;
1615 if (ENABLE_OFFLOADING)
1616 g->have_offload = true;
acf0174b 1617 }
953ff289 1618
d7823208
BS
1619 if (cgraph_node::get_create (decl)->offloadable
1620 && !lookup_attribute ("omp declare target",
1621 DECL_ATTRIBUTES (current_function_decl)))
9669b00b
AM
1622 {
1623 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1624 ? "omp target entrypoint"
1625 : "omp declare target");
1626 DECL_ATTRIBUTES (decl)
1627 = tree_cons (get_identifier (target_attr),
1628 NULL_TREE, DECL_ATTRIBUTES (decl));
1629 }
d7823208 1630
c2255bc4
AH
1631 t = build_decl (DECL_SOURCE_LOCATION (decl),
1632 RESULT_DECL, NULL_TREE, void_type_node);
953ff289
DN
1633 DECL_ARTIFICIAL (t) = 1;
1634 DECL_IGNORED_P (t) = 1;
07485407 1635 DECL_CONTEXT (t) = decl;
953ff289
DN
1636 DECL_RESULT (decl) = t;
1637
9a771876
JJ
1638 tree data_name = get_identifier (".omp_data_i");
1639 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1640 ptr_type_node);
953ff289 1641 DECL_ARTIFICIAL (t) = 1;
cd3f04c8 1642 DECL_NAMELESS (t) = 1;
953ff289 1643 DECL_ARG_TYPE (t) = ptr_type_node;
50674e96 1644 DECL_CONTEXT (t) = current_function_decl;
953ff289 1645 TREE_USED (t) = 1;
d9a6bd32 1646 TREE_READONLY (t) = 1;
953ff289 1647 DECL_ARGUMENTS (decl) = t;
a68ab351
JJ
1648 if (!task_copy)
1649 ctx->receiver_decl = t;
1650 else
1651 {
c2255bc4
AH
1652 t = build_decl (DECL_SOURCE_LOCATION (decl),
1653 PARM_DECL, get_identifier (".omp_data_o"),
a68ab351
JJ
1654 ptr_type_node);
1655 DECL_ARTIFICIAL (t) = 1;
cd3f04c8 1656 DECL_NAMELESS (t) = 1;
a68ab351
JJ
1657 DECL_ARG_TYPE (t) = ptr_type_node;
1658 DECL_CONTEXT (t) = current_function_decl;
1659 TREE_USED (t) = 1;
628c189e 1660 TREE_ADDRESSABLE (t) = 1;
910ad8de 1661 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
a68ab351
JJ
1662 DECL_ARGUMENTS (decl) = t;
1663 }
953ff289 1664
b8698a0f 1665 /* Allocate memory for the function structure. The call to
50674e96 1666 allocate_struct_function clobbers CFUN, so we need to restore
953ff289 1667 it afterward. */
db2960f4 1668 push_struct_function (decl);
726a989a 1669 cfun->function_end_locus = gimple_location (ctx->stmt);
381cdae4 1670 init_tree_ssa (cfun);
db2960f4 1671 pop_cfun ();
953ff289
DN
1672}
1673
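/* Illustrative sketch only (the names below are invented, and the body is
   produced later by the expansion pass, not here): for a directive such as

     #pragma omp parallel shared(x)
       x++;

   the DECL built above eventually becomes an outlined function roughly of
   the form (pseudo-C)

     static void foo._omp_fn.0 (void *.omp_data_i)
     {
       struct .omp_data_s *recv = (struct .omp_data_s *) .omp_data_i;
       *recv->x = *recv->x + 1;
     }

   with the single ".omp_data_i" argument recorded as ctx->receiver_decl;
   a task copy function instead receives both ".omp_data_o" and
   ".omp_data_i" arguments.  */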
acf0174b
JJ
 1674/* Callback for walk_gimple_seq. Check if a combined parallel
 1675 contains an OMP_FOR for which gimple_omp_for_combined_into_p is true. */
1676
629b3d75
MJ
1677tree
1678omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1679 bool *handled_ops_p,
1680 struct walk_stmt_info *wi)
acf0174b 1681{
355fe088 1682 gimple *stmt = gsi_stmt (*gsi_p);
acf0174b
JJ
1683
1684 *handled_ops_p = true;
1685 switch (gimple_code (stmt))
1686 {
1687 WALK_SUBSTMTS;
1688
1689 case GIMPLE_OMP_FOR:
1690 if (gimple_omp_for_combined_into_p (stmt)
d9a6bd32
JJ
1691 && gimple_omp_for_kind (stmt)
1692 == *(const enum gf_mask *) (wi->info))
acf0174b
JJ
1693 {
1694 wi->info = stmt;
1695 return integer_zero_node;
1696 }
1697 break;
1698 default:
1699 break;
1700 }
1701 return NULL;
1702}
1703
d9a6bd32
JJ
1704/* Add _LOOPTEMP_ clauses on OpenMP parallel or task. */
1705
1706static void
1707add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1708 omp_context *outer_ctx)
1709{
1710 struct walk_stmt_info wi;
1711
1712 memset (&wi, 0, sizeof (wi));
1713 wi.val_only = true;
1714 wi.info = (void *) &msk;
629b3d75 1715 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
d9a6bd32
JJ
1716 if (wi.info != (void *) &msk)
1717 {
1718 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1719 struct omp_for_data fd;
629b3d75 1720 omp_extract_for_data (for_stmt, &fd, NULL);
d9a6bd32
JJ
1721 /* We need two temporaries with fd.loop.v type (istart/iend)
1722 and then (fd.collapse - 1) temporaries with the same
1723 type for count2 ... countN-1 vars if not constant. */
1724 size_t count = 2, i;
1725 tree type = fd.iter_type;
1726 if (fd.collapse > 1
1727 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1728 {
1729 count += fd.collapse - 1;
e01d41e5 1730 /* If there are lastprivate clauses on the inner
d9a6bd32
JJ
 1731 GIMPLE_OMP_FOR, add one more temporary for the total number
1732 of iterations (product of count1 ... countN-1). */
629b3d75 1733 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
e01d41e5
JJ
1734 OMP_CLAUSE_LASTPRIVATE))
1735 count++;
1736 else if (msk == GF_OMP_FOR_KIND_FOR
629b3d75 1737 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
e01d41e5 1738 OMP_CLAUSE_LASTPRIVATE))
d9a6bd32
JJ
1739 count++;
1740 }
1741 for (i = 0; i < count; i++)
1742 {
1743 tree temp = create_tmp_var (type);
1744 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1745 insert_decl_map (&outer_ctx->cb, temp, temp);
1746 OMP_CLAUSE_DECL (c) = temp;
1747 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1748 gimple_omp_taskreg_set_clauses (stmt, c);
1749 }
1750 }
1751}
1752
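/* A hypothetical example of when the code above adds temporaries: for

     #pragma omp parallel for collapse(2) lastprivate(k)
     for (int i = 0; i < n; i++)
       for (int j = 0; j < m; j++)
         k = i * m + j;

   with non-constant N and M, _LOOPTEMP_ clauses are created for the
   istart/iend pair, for the non-constant count2, and (because of the
   lastprivate clause) one more for the total iteration count.  */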
953ff289
DN
1753/* Scan an OpenMP parallel directive. */
1754
1755static void
726a989a 1756scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
953ff289
DN
1757{
1758 omp_context *ctx;
1759 tree name;
538dd0b7 1760 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
953ff289
DN
1761
1762 /* Ignore parallel directives with empty bodies, unless there
1763 are copyin clauses. */
1764 if (optimize > 0
726a989a 1765 && empty_body_p (gimple_omp_body (stmt))
629b3d75 1766 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
726a989a 1767 OMP_CLAUSE_COPYIN) == NULL)
953ff289 1768 {
726a989a 1769 gsi_replace (gsi, gimple_build_nop (), false);
953ff289
DN
1770 return;
1771 }
1772
acf0174b 1773 if (gimple_omp_parallel_combined_p (stmt))
d9a6bd32 1774 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
acf0174b 1775
726a989a 1776 ctx = new_omp_context (stmt, outer_ctx);
5771c391 1777 taskreg_contexts.safe_push (ctx);
a68ab351 1778 if (taskreg_nesting_level > 1)
50674e96 1779 ctx->is_nested = true;
953ff289 1780 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
953ff289 1781 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
953ff289 1782 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
1783 name = build_decl (gimple_location (stmt),
1784 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
1785 DECL_ARTIFICIAL (name) = 1;
1786 DECL_NAMELESS (name) = 1;
953ff289 1787 TYPE_NAME (ctx->record_type) = name;
f7484978 1788 TYPE_ARTIFICIAL (ctx->record_type) = 1;
b2b40051
MJ
1789 if (!gimple_omp_parallel_grid_phony (stmt))
1790 {
1791 create_omp_child_function (ctx, false);
1792 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1793 }
953ff289 1794
726a989a 1795 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
26127932 1796 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
1797
1798 if (TYPE_FIELDS (ctx->record_type) == NULL)
1799 ctx->record_type = ctx->receiver_decl = NULL;
953ff289
DN
1800}
1801
a68ab351
JJ
1802/* Scan an OpenMP task directive. */
1803
1804static void
726a989a 1805scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
a68ab351
JJ
1806{
1807 omp_context *ctx;
726a989a 1808 tree name, t;
538dd0b7 1809 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
a68ab351 1810
fbc698e0
JJ
 1811 /* Ignore task directives with empty bodies, unless they have a depend
1812 clause. */
a68ab351 1813 if (optimize > 0
fbc698e0
JJ
1814 && empty_body_p (gimple_omp_body (stmt))
1815 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
a68ab351 1816 {
726a989a 1817 gsi_replace (gsi, gimple_build_nop (), false);
a68ab351
JJ
1818 return;
1819 }
1820
d9a6bd32
JJ
1821 if (gimple_omp_task_taskloop_p (stmt))
1822 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1823
726a989a 1824 ctx = new_omp_context (stmt, outer_ctx);
5771c391 1825 taskreg_contexts.safe_push (ctx);
a68ab351
JJ
1826 if (taskreg_nesting_level > 1)
1827 ctx->is_nested = true;
1828 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
a68ab351
JJ
1829 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1830 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
1831 name = build_decl (gimple_location (stmt),
1832 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
1833 DECL_ARTIFICIAL (name) = 1;
1834 DECL_NAMELESS (name) = 1;
a68ab351 1835 TYPE_NAME (ctx->record_type) = name;
f7484978 1836 TYPE_ARTIFICIAL (ctx->record_type) = 1;
a68ab351 1837 create_omp_child_function (ctx, false);
726a989a 1838 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
a68ab351 1839
726a989a 1840 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
a68ab351
JJ
1841
1842 if (ctx->srecord_type)
1843 {
1844 name = create_tmp_var_name (".omp_data_a");
c2255bc4
AH
1845 name = build_decl (gimple_location (stmt),
1846 TYPE_DECL, name, ctx->srecord_type);
cd3f04c8
JJ
1847 DECL_ARTIFICIAL (name) = 1;
1848 DECL_NAMELESS (name) = 1;
a68ab351 1849 TYPE_NAME (ctx->srecord_type) = name;
f7484978 1850 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
a68ab351
JJ
1851 create_omp_child_function (ctx, true);
1852 }
1853
26127932 1854 scan_omp (gimple_omp_body_ptr (stmt), ctx);
a68ab351
JJ
1855
1856 if (TYPE_FIELDS (ctx->record_type) == NULL)
1857 {
1858 ctx->record_type = ctx->receiver_decl = NULL;
726a989a
RB
1859 t = build_int_cst (long_integer_type_node, 0);
1860 gimple_omp_task_set_arg_size (stmt, t);
1861 t = build_int_cst (long_integer_type_node, 1);
1862 gimple_omp_task_set_arg_align (stmt, t);
a68ab351 1863 }
5771c391
JJ
1864}
1865
655e5265
JJ
1866/* Helper function for finish_taskreg_scan, called through walk_tree.
 1867 If maybe_lookup_decl_in_outer_ctx returns a different tree for some
 1868 variable, replace it in the expression. */
1869
1870static tree
1871finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1872{
1873 if (VAR_P (*tp))
1874 {
1875 omp_context *ctx = (omp_context *) data;
1876 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1877 if (t != *tp)
1878 {
1879 if (DECL_HAS_VALUE_EXPR_P (t))
1880 t = unshare_expr (DECL_VALUE_EXPR (t));
1881 *tp = t;
1882 }
1883 *walk_subtrees = 0;
1884 }
1885 else if (IS_TYPE_OR_DECL_P (*tp))
1886 *walk_subtrees = 0;
1887 return NULL_TREE;
1888}
5771c391
JJ
1889
1890/* If any decls have been made addressable during scan_omp,
 1891 adjust their fields if needed, and lay out the record types
1892 of parallel/task constructs. */
1893
1894static void
1895finish_taskreg_scan (omp_context *ctx)
1896{
1897 if (ctx->record_type == NULL_TREE)
1898 return;
1899
 1900 /* If any task_shared_vars were needed, check all
 1901 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
 1902 statements to see whether use_pointer_for_field has changed
 1903 because of that. If it did, update the field types now. */
1904 if (task_shared_vars)
1905 {
1906 tree c;
1907
1908 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1909 c; c = OMP_CLAUSE_CHAIN (c))
d9a6bd32
JJ
1910 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1911 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5771c391
JJ
1912 {
1913 tree decl = OMP_CLAUSE_DECL (c);
1914
1915 /* Global variables don't need to be copied,
1916 the receiver side will use them directly. */
1917 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1918 continue;
1919 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1920 || !use_pointer_for_field (decl, ctx))
1921 continue;
1922 tree field = lookup_field (decl, ctx);
1923 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1924 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1925 continue;
1926 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1927 TREE_THIS_VOLATILE (field) = 0;
1928 DECL_USER_ALIGN (field) = 0;
fe37c7af 1929 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
5771c391 1930 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
fe37c7af 1931 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
5771c391
JJ
1932 if (ctx->srecord_type)
1933 {
1934 tree sfield = lookup_sfield (decl, ctx);
1935 TREE_TYPE (sfield) = TREE_TYPE (field);
1936 TREE_THIS_VOLATILE (sfield) = 0;
1937 DECL_USER_ALIGN (sfield) = 0;
fe37c7af 1938 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
5771c391 1939 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
fe37c7af 1940 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
5771c391
JJ
1941 }
1942 }
1943 }
1944
1945 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1946 {
1947 layout_type (ctx->record_type);
1948 fixup_child_record_type (ctx);
1949 }
a68ab351
JJ
1950 else
1951 {
5771c391 1952 location_t loc = gimple_location (ctx->stmt);
a68ab351
JJ
1953 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1954 /* Move VLA fields to the end. */
1955 p = &TYPE_FIELDS (ctx->record_type);
1956 while (*p)
1957 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1958 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1959 {
1960 *q = *p;
1961 *p = TREE_CHAIN (*p);
1962 TREE_CHAIN (*q) = NULL_TREE;
1963 q = &TREE_CHAIN (*q);
1964 }
1965 else
910ad8de 1966 p = &DECL_CHAIN (*p);
a68ab351 1967 *p = vla_fields;
d9a6bd32
JJ
1968 if (gimple_omp_task_taskloop_p (ctx->stmt))
1969 {
1970 /* Move fields corresponding to first and second _looptemp_
 1971 clauses first. These are filled by GOMP_taskloop
1972 and thus need to be in specific positions. */
1973 tree c1 = gimple_omp_task_clauses (ctx->stmt);
629b3d75
MJ
1974 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1975 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
d9a6bd32
JJ
1976 OMP_CLAUSE__LOOPTEMP_);
1977 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1978 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1979 p = &TYPE_FIELDS (ctx->record_type);
1980 while (*p)
1981 if (*p == f1 || *p == f2)
1982 *p = DECL_CHAIN (*p);
1983 else
1984 p = &DECL_CHAIN (*p);
1985 DECL_CHAIN (f1) = f2;
1986 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
1987 TYPE_FIELDS (ctx->record_type) = f1;
1988 if (ctx->srecord_type)
1989 {
1990 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
1991 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
1992 p = &TYPE_FIELDS (ctx->srecord_type);
1993 while (*p)
1994 if (*p == f1 || *p == f2)
1995 *p = DECL_CHAIN (*p);
1996 else
1997 p = &DECL_CHAIN (*p);
1998 DECL_CHAIN (f1) = f2;
1999 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2000 TYPE_FIELDS (ctx->srecord_type) = f1;
2001 }
2002 }
a68ab351
JJ
2003 layout_type (ctx->record_type);
2004 fixup_child_record_type (ctx);
2005 if (ctx->srecord_type)
2006 layout_type (ctx->srecord_type);
5771c391
JJ
2007 tree t = fold_convert_loc (loc, long_integer_type_node,
2008 TYPE_SIZE_UNIT (ctx->record_type));
655e5265
JJ
2009 if (TREE_CODE (t) != INTEGER_CST)
2010 {
2011 t = unshare_expr (t);
2012 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2013 }
5771c391 2014 gimple_omp_task_set_arg_size (ctx->stmt, t);
726a989a 2015 t = build_int_cst (long_integer_type_node,
a68ab351 2016 TYPE_ALIGN_UNIT (ctx->record_type));
5771c391 2017 gimple_omp_task_set_arg_align (ctx->stmt, t);
a68ab351
JJ
2018 }
2019}
2020
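/* Summarizing the taskloop case above (a sketch, not taken from the
   sources): the final .omp_data_s record is assumed to start with the two
   _looptemp_ fields, which GOMP_taskloop fills in with the start and end
   of the iteration range assigned to each task, followed by the remaining
   captured variables, with variable-sized fields moved to the end.  */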
e4834818 2021/* Find the enclosing offload context. */
953ff289 2022
41dbbb37
TS
2023static omp_context *
2024enclosing_target_ctx (omp_context *ctx)
2025{
e4834818
NS
2026 for (; ctx; ctx = ctx->outer)
2027 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2028 break;
2029
41dbbb37
TS
2030 return ctx;
2031}
2032
e4834818
NS
 2033/* Return true if CTX is part of an oacc kernels region. */
2034
41dbbb37 2035static bool
e4834818 2036ctx_in_oacc_kernels_region (omp_context *ctx)
41dbbb37 2037{
e4834818
NS
2038 for (;ctx != NULL; ctx = ctx->outer)
2039 {
2040 gimple *stmt = ctx->stmt;
2041 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2042 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2043 return true;
2044 }
2045
2046 return false;
2047}
2048
 2049/* Check the parallelism clauses inside a kernels region.
2050 Until kernels handling moves to use the same loop indirection
2051 scheme as parallel, we need to do this checking early. */
2052
2053static unsigned
2054check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2055{
2056 bool checking = true;
2057 unsigned outer_mask = 0;
2058 unsigned this_mask = 0;
2059 bool has_seq = false, has_auto = false;
2060
2061 if (ctx->outer)
2062 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2063 if (!stmt)
2064 {
2065 checking = false;
2066 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2067 return outer_mask;
2068 stmt = as_a <gomp_for *> (ctx->stmt);
2069 }
2070
2071 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2072 {
2073 switch (OMP_CLAUSE_CODE (c))
2074 {
2075 case OMP_CLAUSE_GANG:
2076 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2077 break;
2078 case OMP_CLAUSE_WORKER:
2079 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2080 break;
2081 case OMP_CLAUSE_VECTOR:
2082 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2083 break;
2084 case OMP_CLAUSE_SEQ:
2085 has_seq = true;
2086 break;
2087 case OMP_CLAUSE_AUTO:
2088 has_auto = true;
2089 break;
2090 default:
2091 break;
2092 }
2093 }
2094
2095 if (checking)
2096 {
2097 if (has_seq && (this_mask || has_auto))
2098 error_at (gimple_location (stmt), "%<seq%> overrides other"
2099 " OpenACC loop specifiers");
2100 else if (has_auto && this_mask)
2101 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2102 " OpenACC loop specifiers");
2103
2104 if (this_mask & outer_mask)
2105 error_at (gimple_location (stmt), "inner loop uses same"
2106 " OpenACC parallelism as containing loop");
2107 }
2108
2109 return outer_mask | this_mask;
41dbbb37
TS
2110}
2111
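/* A hypothetical example of what the check above rejects inside an
   OpenACC kernels region:

     #pragma acc kernels
     {
       #pragma acc loop gang
       for (int i = 0; i < n; i++)
         {
           #pragma acc loop gang
           for (int j = 0; j < m; j++)
             a[i][j] = 0.0;
         }
     }

   The inner loop requests gang parallelism again, so this reports
   "inner loop uses same OpenACC parallelism as containing loop";
   combining "seq" with gang/worker/vector or "auto" on a single loop is
   diagnosed similarly.  */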
2112/* Scan a GIMPLE_OMP_FOR. */
953ff289 2113
6e6cf7b0 2114static omp_context *
538dd0b7 2115scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
953ff289 2116{
50674e96 2117 omp_context *ctx;
726a989a 2118 size_t i;
41dbbb37
TS
2119 tree clauses = gimple_omp_for_clauses (stmt);
2120
50674e96 2121 ctx = new_omp_context (stmt, outer_ctx);
953ff289 2122
41dbbb37
TS
2123 if (is_gimple_omp_oacc (stmt))
2124 {
e4834818
NS
2125 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2126
2127 if (!tgt || is_oacc_parallel (tgt))
2128 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2129 {
2130 char const *check = NULL;
2131
2132 switch (OMP_CLAUSE_CODE (c))
2133 {
2134 case OMP_CLAUSE_GANG:
2135 check = "gang";
2136 break;
2137
2138 case OMP_CLAUSE_WORKER:
2139 check = "worker";
2140 break;
2141
2142 case OMP_CLAUSE_VECTOR:
2143 check = "vector";
2144 break;
2145
2146 default:
2147 break;
2148 }
2149
2150 if (check && OMP_CLAUSE_OPERAND (c, 0))
2151 error_at (gimple_location (stmt),
2152 "argument not permitted on %qs clause in"
2153 " OpenACC %<parallel%>", check);
2154 }
2155
2156 if (tgt && is_oacc_kernels (tgt))
2157 {
2158 /* Strip out reductions, as they are not handled yet. */
2159 tree *prev_ptr = &clauses;
2160
2161 while (tree probe = *prev_ptr)
41dbbb37 2162 {
e4834818
NS
2163 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2164
2165 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2166 *prev_ptr = *next_ptr;
2167 else
2168 prev_ptr = next_ptr;
41dbbb37 2169 }
e4834818
NS
2170
2171 gimple_omp_for_set_clauses (stmt, clauses);
2172 check_oacc_kernel_gwv (stmt, ctx);
41dbbb37
TS
2173 }
2174 }
2175
2176 scan_sharing_clauses (clauses, ctx);
953ff289 2177
26127932 2178 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
726a989a 2179 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
a68ab351 2180 {
726a989a
RB
2181 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2182 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2183 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2184 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
a68ab351 2185 }
26127932 2186 scan_omp (gimple_omp_body_ptr (stmt), ctx);
6e6cf7b0 2187 return ctx;
953ff289
DN
2188}
2189
6c7509bc
JJ
 2190/* Duplicate #pragma omp simd, making one copy for SIMT and another for SIMD. */
2191
2192static void
2193scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2194 omp_context *outer_ctx)
2195{
2196 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2197 gsi_replace (gsi, bind, false);
2198 gimple_seq seq = NULL;
2199 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2200 tree cond = create_tmp_var_raw (integer_type_node);
2201 DECL_CONTEXT (cond) = current_function_decl;
2202 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2203 gimple_bind_set_vars (bind, cond);
2204 gimple_call_set_lhs (g, cond);
2205 gimple_seq_add_stmt (&seq, g);
2206 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2207 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2208 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2209 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2210 gimple_seq_add_stmt (&seq, g);
2211 g = gimple_build_label (lab1);
2212 gimple_seq_add_stmt (&seq, g);
2213 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2214 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2215 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2216 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2217 gimple_omp_for_set_clauses (new_stmt, clause);
2218 gimple_seq_add_stmt (&seq, new_stmt);
2219 g = gimple_build_goto (lab3);
2220 gimple_seq_add_stmt (&seq, g);
2221 g = gimple_build_label (lab2);
2222 gimple_seq_add_stmt (&seq, g);
2223 gimple_seq_add_stmt (&seq, stmt);
2224 g = gimple_build_label (lab3);
2225 gimple_seq_add_stmt (&seq, g);
2226 gimple_bind_set_body (bind, seq);
2227 update_stmt (bind);
2228 scan_omp_for (new_stmt, outer_ctx);
6e6cf7b0 2229 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
6c7509bc
JJ
2230}
2231
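/* A rough sketch (pseudo-GIMPLE) of the GIMPLE_BIND built above:

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto simt; else goto simd;
     simt:
       #pragma omp simd _simt_ ...    <- new_stmt, the SIMT variant
       goto done;
     simd:
       #pragma omp simd ...           <- stmt, the original variant
     done:

   The IFN_GOMP_USE_SIMT call is expected to fold to a constant once it is
   known whether the loop ends up on a SIMT device, so only one of the two
   copies survives.  */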
953ff289
DN
2232/* Scan an OpenMP sections directive. */
2233
2234static void
538dd0b7 2235scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
953ff289 2236{
953ff289
DN
2237 omp_context *ctx;
2238
2239 ctx = new_omp_context (stmt, outer_ctx);
726a989a 2240 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
26127932 2241 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2242}
2243
2244/* Scan an OpenMP single directive. */
2245
2246static void
538dd0b7 2247scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
953ff289 2248{
953ff289
DN
2249 omp_context *ctx;
2250 tree name;
2251
2252 ctx = new_omp_context (stmt, outer_ctx);
2253 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2254 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2255 name = create_tmp_var_name (".omp_copy_s");
c2255bc4
AH
2256 name = build_decl (gimple_location (stmt),
2257 TYPE_DECL, name, ctx->record_type);
953ff289
DN
2258 TYPE_NAME (ctx->record_type) = name;
2259
726a989a 2260 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
26127932 2261 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2262
2263 if (TYPE_FIELDS (ctx->record_type) == NULL)
2264 ctx->record_type = NULL;
2265 else
2266 layout_type (ctx->record_type);
2267}
2268
86938de6
TV
2269/* Return true if the CLAUSES of an omp target guarantee that the base pointers
2270 used in the corresponding offloaded function are restrict. */
2271
2272static bool
2273omp_target_base_pointers_restrict_p (tree clauses)
2274{
2275 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2276 used by OpenACC. */
2277 if (flag_openacc == 0)
2278 return false;
2279
2280 /* I. Basic example:
2281
2282 void foo (void)
2283 {
2284 unsigned int a[2], b[2];
2285
2286 #pragma acc kernels \
2287 copyout (a) \
2288 copyout (b)
2289 {
2290 a[0] = 0;
2291 b[0] = 1;
2292 }
2293 }
2294
2295 After gimplification, we have:
2296
2297 #pragma omp target oacc_kernels \
2298 map(force_from:a [len: 8]) \
2299 map(force_from:b [len: 8])
2300 {
2301 a[0] = 0;
2302 b[0] = 1;
2303 }
2304
2305 Because both mappings have the force prefix, we know that they will be
2306 allocated when calling the corresponding offloaded function, which means we
2307 can mark the base pointers for a and b in the offloaded function as
2308 restrict. */
2309
2310 tree c;
2311 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2312 {
2313 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2314 return false;
2315
2316 switch (OMP_CLAUSE_MAP_KIND (c))
2317 {
2318 case GOMP_MAP_FORCE_ALLOC:
2319 case GOMP_MAP_FORCE_TO:
2320 case GOMP_MAP_FORCE_FROM:
2321 case GOMP_MAP_FORCE_TOFROM:
2322 break;
2323 default:
2324 return false;
2325 }
2326 }
2327
2328 return true;
2329}
2330
41dbbb37 2331/* Scan a GIMPLE_OMP_TARGET. */
acf0174b
JJ
2332
2333static void
538dd0b7 2334scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
acf0174b
JJ
2335{
2336 omp_context *ctx;
2337 tree name;
41dbbb37
TS
2338 bool offloaded = is_gimple_omp_offloaded (stmt);
2339 tree clauses = gimple_omp_target_clauses (stmt);
acf0174b
JJ
2340
2341 ctx = new_omp_context (stmt, outer_ctx);
2342 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
acf0174b
JJ
2343 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2344 name = create_tmp_var_name (".omp_data_t");
2345 name = build_decl (gimple_location (stmt),
2346 TYPE_DECL, name, ctx->record_type);
2347 DECL_ARTIFICIAL (name) = 1;
2348 DECL_NAMELESS (name) = 1;
2349 TYPE_NAME (ctx->record_type) = name;
f7484978 2350 TYPE_ARTIFICIAL (ctx->record_type) = 1;
86938de6
TV
2351
2352 bool base_pointers_restrict = false;
41dbbb37 2353 if (offloaded)
acf0174b
JJ
2354 {
2355 create_omp_child_function (ctx, false);
2356 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
86938de6
TV
2357
2358 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2359 if (base_pointers_restrict
2360 && dump_file && (dump_flags & TDF_DETAILS))
2361 fprintf (dump_file,
2362 "Base pointers in offloaded function are restrict\n");
acf0174b
JJ
2363 }
2364
86938de6 2365 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
acf0174b
JJ
2366 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2367
2368 if (TYPE_FIELDS (ctx->record_type) == NULL)
2369 ctx->record_type = ctx->receiver_decl = NULL;
2370 else
2371 {
2372 TYPE_FIELDS (ctx->record_type)
2373 = nreverse (TYPE_FIELDS (ctx->record_type));
b2b29377
MM
2374 if (flag_checking)
2375 {
2376 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2377 for (tree field = TYPE_FIELDS (ctx->record_type);
2378 field;
2379 field = DECL_CHAIN (field))
2380 gcc_assert (DECL_ALIGN (field) == align);
2381 }
acf0174b 2382 layout_type (ctx->record_type);
41dbbb37 2383 if (offloaded)
acf0174b
JJ
2384 fixup_child_record_type (ctx);
2385 }
2386}
2387
2388/* Scan an OpenMP teams directive. */
2389
2390static void
538dd0b7 2391scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
acf0174b
JJ
2392{
2393 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2394 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2395 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2396}
953ff289 2397
41dbbb37 2398/* Check nesting restrictions. */
26127932 2399static bool
355fe088 2400check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
a6fc8e21 2401{
d9a6bd32
JJ
2402 tree c;
2403
b2b40051
MJ
2404 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
 2405 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2406 the original copy of its contents. */
2407 return true;
2408
41dbbb37
TS
2409 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2410 inside an OpenACC CTX. */
2411 if (!(is_gimple_omp (stmt)
640b7e74
TV
2412 && is_gimple_omp_oacc (stmt))
2413 /* Except for atomic codes that we share with OpenMP. */
2414 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2415 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2416 {
629b3d75 2417 if (oacc_get_fn_attrib (cfun->decl) != NULL)
640b7e74
TV
2418 {
2419 error_at (gimple_location (stmt),
2420 "non-OpenACC construct inside of OpenACC routine");
2421 return false;
2422 }
2423 else
2424 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2425 if (is_gimple_omp (octx->stmt)
2426 && is_gimple_omp_oacc (octx->stmt))
2427 {
2428 error_at (gimple_location (stmt),
2429 "non-OpenACC construct inside of OpenACC region");
2430 return false;
2431 }
41dbbb37
TS
2432 }
2433
74bf76ed
JJ
2434 if (ctx != NULL)
2435 {
2436 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 2437 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
74bf76ed 2438 {
d9a6bd32
JJ
2439 c = NULL_TREE;
2440 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2441 {
2442 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
629b3d75 2443 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
d9f4ea18 2444 {
629b3d75 2445 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
d9f4ea18
JJ
2446 && (ctx->outer == NULL
2447 || !gimple_omp_for_combined_into_p (ctx->stmt)
2448 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2449 || (gimple_omp_for_kind (ctx->outer->stmt)
2450 != GF_OMP_FOR_KIND_FOR)
2451 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2452 {
2453 error_at (gimple_location (stmt),
2454 "%<ordered simd threads%> must be closely "
2455 "nested inside of %<for simd%> region");
2456 return false;
2457 }
2458 return true;
2459 }
d9a6bd32 2460 }
74bf76ed 2461 error_at (gimple_location (stmt),
d9a6bd32 2462 "OpenMP constructs other than %<#pragma omp ordered simd%>"
d9f4ea18 2463 " may not be nested inside %<simd%> region");
74bf76ed
JJ
2464 return false;
2465 }
acf0174b
JJ
2466 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2467 {
2468 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
56b1c60e
MJ
2469 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2470 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
acf0174b
JJ
2471 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2472 {
2473 error_at (gimple_location (stmt),
d9f4ea18
JJ
2474 "only %<distribute%> or %<parallel%> regions are "
2475 "allowed to be strictly nested inside %<teams%> "
2476 "region");
acf0174b
JJ
2477 return false;
2478 }
2479 }
74bf76ed 2480 }
726a989a 2481 switch (gimple_code (stmt))
a6fc8e21 2482 {
726a989a 2483 case GIMPLE_OMP_FOR:
0aadce73 2484 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
74bf76ed 2485 return true;
acf0174b
JJ
2486 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2487 {
2488 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2489 {
2490 error_at (gimple_location (stmt),
d9f4ea18
JJ
2491 "%<distribute%> region must be strictly nested "
2492 "inside %<teams%> construct");
acf0174b
JJ
2493 return false;
2494 }
2495 return true;
2496 }
d9a6bd32
JJ
 2497 /* We split taskloop into a task with a nested taskloop in it. */
2498 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2499 return true;
68d58afb
NS
2500 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2501 {
2502 bool ok = false;
01914336 2503
68d58afb
NS
2504 if (ctx)
2505 switch (gimple_code (ctx->stmt))
2506 {
2507 case GIMPLE_OMP_FOR:
2508 ok = (gimple_omp_for_kind (ctx->stmt)
2509 == GF_OMP_FOR_KIND_OACC_LOOP);
2510 break;
2511
2512 case GIMPLE_OMP_TARGET:
2513 switch (gimple_omp_target_kind (ctx->stmt))
2514 {
2515 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2516 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2517 ok = true;
2518 break;
2519
2520 default:
2521 break;
2522 }
2523
2524 default:
2525 break;
2526 }
629b3d75 2527 else if (oacc_get_fn_attrib (current_function_decl))
68d58afb
NS
2528 ok = true;
2529 if (!ok)
2530 {
2531 error_at (gimple_location (stmt),
2532 "OpenACC loop directive must be associated with"
2533 " an OpenACC compute region");
2534 return false;
2535 }
2536 }
acf0174b
JJ
2537 /* FALLTHRU */
2538 case GIMPLE_CALL:
2539 if (is_gimple_call (stmt)
2540 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2541 == BUILT_IN_GOMP_CANCEL
2542 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2543 == BUILT_IN_GOMP_CANCELLATION_POINT))
2544 {
2545 const char *bad = NULL;
2546 const char *kind = NULL;
d9f4ea18
JJ
2547 const char *construct
2548 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2549 == BUILT_IN_GOMP_CANCEL)
2550 ? "#pragma omp cancel"
2551 : "#pragma omp cancellation point";
acf0174b
JJ
2552 if (ctx == NULL)
2553 {
2554 error_at (gimple_location (stmt), "orphaned %qs construct",
d9f4ea18 2555 construct);
acf0174b
JJ
2556 return false;
2557 }
9541ffee 2558 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
9439e9a1 2559 ? tree_to_shwi (gimple_call_arg (stmt, 0))
acf0174b
JJ
2560 : 0)
2561 {
2562 case 1:
2563 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2564 bad = "#pragma omp parallel";
2565 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2566 == BUILT_IN_GOMP_CANCEL
2567 && !integer_zerop (gimple_call_arg (stmt, 1)))
2568 ctx->cancellable = true;
2569 kind = "parallel";
2570 break;
2571 case 2:
2572 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2573 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2574 bad = "#pragma omp for";
2575 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2576 == BUILT_IN_GOMP_CANCEL
2577 && !integer_zerop (gimple_call_arg (stmt, 1)))
2578 {
2579 ctx->cancellable = true;
629b3d75 2580 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
acf0174b
JJ
2581 OMP_CLAUSE_NOWAIT))
2582 warning_at (gimple_location (stmt), 0,
2583 "%<#pragma omp cancel for%> inside "
2584 "%<nowait%> for construct");
629b3d75 2585 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
acf0174b
JJ
2586 OMP_CLAUSE_ORDERED))
2587 warning_at (gimple_location (stmt), 0,
2588 "%<#pragma omp cancel for%> inside "
2589 "%<ordered%> for construct");
2590 }
2591 kind = "for";
2592 break;
2593 case 4:
2594 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2595 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2596 bad = "#pragma omp sections";
2597 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2598 == BUILT_IN_GOMP_CANCEL
2599 && !integer_zerop (gimple_call_arg (stmt, 1)))
2600 {
2601 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2602 {
2603 ctx->cancellable = true;
629b3d75 2604 if (omp_find_clause (gimple_omp_sections_clauses
acf0174b
JJ
2605 (ctx->stmt),
2606 OMP_CLAUSE_NOWAIT))
2607 warning_at (gimple_location (stmt), 0,
2608 "%<#pragma omp cancel sections%> inside "
2609 "%<nowait%> sections construct");
2610 }
2611 else
2612 {
2613 gcc_assert (ctx->outer
2614 && gimple_code (ctx->outer->stmt)
2615 == GIMPLE_OMP_SECTIONS);
2616 ctx->outer->cancellable = true;
629b3d75 2617 if (omp_find_clause (gimple_omp_sections_clauses
acf0174b
JJ
2618 (ctx->outer->stmt),
2619 OMP_CLAUSE_NOWAIT))
2620 warning_at (gimple_location (stmt), 0,
2621 "%<#pragma omp cancel sections%> inside "
2622 "%<nowait%> sections construct");
2623 }
2624 }
2625 kind = "sections";
2626 break;
2627 case 8:
2628 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2629 bad = "#pragma omp task";
2630 else
d9f4ea18
JJ
2631 {
2632 for (omp_context *octx = ctx->outer;
2633 octx; octx = octx->outer)
2634 {
2635 switch (gimple_code (octx->stmt))
2636 {
2637 case GIMPLE_OMP_TASKGROUP:
2638 break;
2639 case GIMPLE_OMP_TARGET:
2640 if (gimple_omp_target_kind (octx->stmt)
2641 != GF_OMP_TARGET_KIND_REGION)
2642 continue;
2643 /* FALLTHRU */
2644 case GIMPLE_OMP_PARALLEL:
2645 case GIMPLE_OMP_TEAMS:
2646 error_at (gimple_location (stmt),
2647 "%<%s taskgroup%> construct not closely "
2648 "nested inside of %<taskgroup%> region",
2649 construct);
2650 return false;
2651 default:
2652 continue;
2653 }
2654 break;
2655 }
2656 ctx->cancellable = true;
2657 }
acf0174b
JJ
2658 kind = "taskgroup";
2659 break;
2660 default:
2661 error_at (gimple_location (stmt), "invalid arguments");
2662 return false;
2663 }
2664 if (bad)
2665 {
2666 error_at (gimple_location (stmt),
2667 "%<%s %s%> construct not closely nested inside of %qs",
d9f4ea18 2668 construct, kind, bad);
acf0174b
JJ
2669 return false;
2670 }
2671 }
74bf76ed 2672 /* FALLTHRU */
726a989a
RB
2673 case GIMPLE_OMP_SECTIONS:
2674 case GIMPLE_OMP_SINGLE:
a6fc8e21 2675 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2676 switch (gimple_code (ctx->stmt))
a6fc8e21 2677 {
726a989a 2678 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2679 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2680 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2681 break;
2682 /* FALLTHRU */
726a989a
RB
2683 case GIMPLE_OMP_SECTIONS:
2684 case GIMPLE_OMP_SINGLE:
2685 case GIMPLE_OMP_ORDERED:
2686 case GIMPLE_OMP_MASTER:
2687 case GIMPLE_OMP_TASK:
acf0174b 2688 case GIMPLE_OMP_CRITICAL:
726a989a 2689 if (is_gimple_call (stmt))
a68ab351 2690 {
acf0174b
JJ
2691 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2692 != BUILT_IN_GOMP_BARRIER)
2693 return true;
26127932
JJ
2694 error_at (gimple_location (stmt),
2695 "barrier region may not be closely nested inside "
d9f4ea18
JJ
2696 "of work-sharing, %<critical%>, %<ordered%>, "
2697 "%<master%>, explicit %<task%> or %<taskloop%> "
2698 "region");
26127932 2699 return false;
a68ab351 2700 }
26127932
JJ
2701 error_at (gimple_location (stmt),
2702 "work-sharing region may not be closely nested inside "
d9f4ea18
JJ
2703 "of work-sharing, %<critical%>, %<ordered%>, "
2704 "%<master%>, explicit %<task%> or %<taskloop%> region");
26127932 2705 return false;
726a989a 2706 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2707 case GIMPLE_OMP_TEAMS:
26127932 2708 return true;
d9f4ea18
JJ
2709 case GIMPLE_OMP_TARGET:
2710 if (gimple_omp_target_kind (ctx->stmt)
2711 == GF_OMP_TARGET_KIND_REGION)
2712 return true;
2713 break;
a6fc8e21
JJ
2714 default:
2715 break;
2716 }
2717 break;
726a989a 2718 case GIMPLE_OMP_MASTER:
a6fc8e21 2719 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2720 switch (gimple_code (ctx->stmt))
a6fc8e21 2721 {
726a989a 2722 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2723 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2724 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2725 break;
2726 /* FALLTHRU */
726a989a
RB
2727 case GIMPLE_OMP_SECTIONS:
2728 case GIMPLE_OMP_SINGLE:
2729 case GIMPLE_OMP_TASK:
26127932 2730 error_at (gimple_location (stmt),
d9f4ea18
JJ
2731 "%<master%> region may not be closely nested inside "
2732 "of work-sharing, explicit %<task%> or %<taskloop%> "
2733 "region");
26127932 2734 return false;
726a989a 2735 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2736 case GIMPLE_OMP_TEAMS:
26127932 2737 return true;
d9f4ea18
JJ
2738 case GIMPLE_OMP_TARGET:
2739 if (gimple_omp_target_kind (ctx->stmt)
2740 == GF_OMP_TARGET_KIND_REGION)
2741 return true;
2742 break;
a6fc8e21
JJ
2743 default:
2744 break;
2745 }
2746 break;
d9a6bd32
JJ
2747 case GIMPLE_OMP_TASK:
2748 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2749 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2750 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2751 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2752 {
2753 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2754 error_at (OMP_CLAUSE_LOCATION (c),
2755 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2756 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2757 return false;
2758 }
2759 break;
726a989a 2760 case GIMPLE_OMP_ORDERED:
d9a6bd32
JJ
2761 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2762 c; c = OMP_CLAUSE_CHAIN (c))
2763 {
2764 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2765 {
2766 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
d9f4ea18 2767 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
d9a6bd32
JJ
2768 continue;
2769 }
2770 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2771 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2772 || kind == OMP_CLAUSE_DEPEND_SINK)
2773 {
2774 tree oclause;
2775 /* Look for containing ordered(N) loop. */
2776 if (ctx == NULL
2777 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2778 || (oclause
629b3d75 2779 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
d9a6bd32
JJ
2780 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2781 {
2782 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2783 "%<ordered%> construct with %<depend%> clause "
2784 "must be closely nested inside an %<ordered%> "
2785 "loop");
d9a6bd32
JJ
2786 return false;
2787 }
2788 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2789 {
2790 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2791 "%<ordered%> construct with %<depend%> clause "
2792 "must be closely nested inside a loop with "
2793 "%<ordered%> clause with a parameter");
d9a6bd32
JJ
2794 return false;
2795 }
2796 }
2797 else
2798 {
2799 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2800 "invalid depend kind in omp %<ordered%> %<depend%>");
2801 return false;
2802 }
2803 }
2804 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
629b3d75 2805 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
d9f4ea18
JJ
2806 {
2807 /* ordered simd must be closely nested inside of simd region,
2808 and simd region must not encounter constructs other than
2809 ordered simd, therefore ordered simd may be either orphaned,
 2810 or ctx->stmt must be simd. The latter case is handled earlier
 2811 in this function. */
2812 if (ctx != NULL)
2813 {
2814 error_at (gimple_location (stmt),
2815 "%<ordered%> %<simd%> must be closely nested inside "
2816 "%<simd%> region");
d9a6bd32
JJ
2817 return false;
2818 }
2819 }
a6fc8e21 2820 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2821 switch (gimple_code (ctx->stmt))
a6fc8e21 2822 {
726a989a
RB
2823 case GIMPLE_OMP_CRITICAL:
2824 case GIMPLE_OMP_TASK:
d9f4ea18
JJ
2825 case GIMPLE_OMP_ORDERED:
2826 ordered_in_taskloop:
26127932 2827 error_at (gimple_location (stmt),
d9f4ea18
JJ
2828 "%<ordered%> region may not be closely nested inside "
2829 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2830 "%<taskloop%> region");
26127932 2831 return false;
726a989a 2832 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2833 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2834 goto ordered_in_taskloop;
629b3d75 2835 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
a6fc8e21 2836 OMP_CLAUSE_ORDERED) == NULL)
26127932
JJ
2837 {
2838 error_at (gimple_location (stmt),
d9f4ea18
JJ
2839 "%<ordered%> region must be closely nested inside "
2840 "a loop region with an %<ordered%> clause");
26127932
JJ
2841 return false;
2842 }
2843 return true;
d9f4ea18
JJ
2844 case GIMPLE_OMP_TARGET:
2845 if (gimple_omp_target_kind (ctx->stmt)
2846 != GF_OMP_TARGET_KIND_REGION)
2847 break;
2848 /* FALLTHRU */
726a989a 2849 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2850 case GIMPLE_OMP_TEAMS:
acf0174b 2851 error_at (gimple_location (stmt),
d9f4ea18
JJ
2852 "%<ordered%> region must be closely nested inside "
2853 "a loop region with an %<ordered%> clause");
acf0174b 2854 return false;
a6fc8e21
JJ
2855 default:
2856 break;
2857 }
2858 break;
726a989a 2859 case GIMPLE_OMP_CRITICAL:
538dd0b7
DM
2860 {
2861 tree this_stmt_name
2862 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2863 for (; ctx != NULL; ctx = ctx->outer)
2864 if (gomp_critical *other_crit
2865 = dyn_cast <gomp_critical *> (ctx->stmt))
2866 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2867 {
2868 error_at (gimple_location (stmt),
d9f4ea18
JJ
2869 "%<critical%> region may not be nested inside "
2870 "a %<critical%> region with the same name");
538dd0b7
DM
2871 return false;
2872 }
2873 }
a6fc8e21 2874 break;
acf0174b
JJ
2875 case GIMPLE_OMP_TEAMS:
2876 if (ctx == NULL
2877 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2878 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2879 {
2880 error_at (gimple_location (stmt),
d9f4ea18
JJ
2881 "%<teams%> construct not closely nested inside of "
2882 "%<target%> construct");
acf0174b
JJ
2883 return false;
2884 }
2885 break;
f014c653 2886 case GIMPLE_OMP_TARGET:
d9a6bd32
JJ
2887 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2888 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2889 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2890 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2891 {
2892 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2893 error_at (OMP_CLAUSE_LOCATION (c),
2894 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2895 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2896 return false;
2897 }
640b7e74 2898 if (is_gimple_omp_offloaded (stmt)
629b3d75 2899 && oacc_get_fn_attrib (cfun->decl) != NULL)
640b7e74
TV
2900 {
2901 error_at (gimple_location (stmt),
2902 "OpenACC region inside of OpenACC routine, nested "
2903 "parallelism not supported yet");
2904 return false;
2905 }
f014c653 2906 for (; ctx != NULL; ctx = ctx->outer)
41dbbb37
TS
2907 {
2908 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2909 {
2910 if (is_gimple_omp (stmt)
2911 && is_gimple_omp_oacc (stmt)
2912 && is_gimple_omp (ctx->stmt))
2913 {
2914 error_at (gimple_location (stmt),
2915 "OpenACC construct inside of non-OpenACC region");
2916 return false;
2917 }
2918 continue;
2919 }
2920
2921 const char *stmt_name, *ctx_stmt_name;
2922 switch (gimple_omp_target_kind (stmt))
2923 {
2924 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2925 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2926 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
d9a6bd32
JJ
2927 case GF_OMP_TARGET_KIND_ENTER_DATA:
2928 stmt_name = "target enter data"; break;
2929 case GF_OMP_TARGET_KIND_EXIT_DATA:
2930 stmt_name = "target exit data"; break;
41dbbb37
TS
2931 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2932 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2933 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2934 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
d9a6bd32
JJ
2935 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2936 stmt_name = "enter/exit data"; break;
37d5ad46
JB
2937 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2938 break;
41dbbb37
TS
2939 default: gcc_unreachable ();
2940 }
2941 switch (gimple_omp_target_kind (ctx->stmt))
2942 {
2943 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2944 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
d9a6bd32
JJ
2945 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2946 ctx_stmt_name = "parallel"; break;
2947 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2948 ctx_stmt_name = "kernels"; break;
41dbbb37 2949 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
37d5ad46
JB
2950 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2951 ctx_stmt_name = "host_data"; break;
41dbbb37
TS
2952 default: gcc_unreachable ();
2953 }
2954
2955 /* OpenACC/OpenMP mismatch? */
2956 if (is_gimple_omp_oacc (stmt)
2957 != is_gimple_omp_oacc (ctx->stmt))
2958 {
2959 error_at (gimple_location (stmt),
d9f4ea18 2960 "%s %qs construct inside of %s %qs region",
41dbbb37
TS
2961 (is_gimple_omp_oacc (stmt)
2962 ? "OpenACC" : "OpenMP"), stmt_name,
2963 (is_gimple_omp_oacc (ctx->stmt)
2964 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2965 return false;
2966 }
2967 if (is_gimple_omp_offloaded (ctx->stmt))
2968 {
2969 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2970 if (is_gimple_omp_oacc (ctx->stmt))
2971 {
2972 error_at (gimple_location (stmt),
d9f4ea18 2973 "%qs construct inside of %qs region",
41dbbb37
TS
2974 stmt_name, ctx_stmt_name);
2975 return false;
2976 }
2977 else
2978 {
41dbbb37 2979 warning_at (gimple_location (stmt), 0,
d9f4ea18 2980 "%qs construct inside of %qs region",
41dbbb37
TS
2981 stmt_name, ctx_stmt_name);
2982 }
2983 }
2984 }
f014c653 2985 break;
a6fc8e21
JJ
2986 default:
2987 break;
2988 }
26127932 2989 return true;
a6fc8e21
JJ
2990}
2991
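/* Two hypothetical nestings that the checks above reject:

     #pragma omp target
     #pragma omp teams
     #pragma omp single
       foo ();

   (only "distribute" or "parallel" regions may be strictly nested inside
   a "teams" region), and

     #pragma omp simd
     for (int i = 0; i < n; i++)
       {
         #pragma omp parallel
         bar ();
       }

   (no OpenMP construct other than "ordered simd" may be nested inside a
   "simd" region).  */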
2992
726a989a
RB
 2993/* Helper function for scan_omp.
2994
2995 Callback for walk_tree or operators in walk_gimple_stmt used to
41dbbb37 2996 scan for OMP directives in TP. */
953ff289
DN
2997
2998static tree
726a989a 2999scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
953ff289 3000{
d3bfe4de
KG
3001 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3002 omp_context *ctx = (omp_context *) wi->info;
953ff289
DN
3003 tree t = *tp;
3004
726a989a
RB
3005 switch (TREE_CODE (t))
3006 {
3007 case VAR_DECL:
3008 case PARM_DECL:
3009 case LABEL_DECL:
3010 case RESULT_DECL:
3011 if (ctx)
b2b40051
MJ
3012 {
3013 tree repl = remap_decl (t, &ctx->cb);
3014 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3015 *tp = repl;
3016 }
726a989a
RB
3017 break;
3018
3019 default:
3020 if (ctx && TYPE_P (t))
3021 *tp = remap_type (t, &ctx->cb);
3022 else if (!DECL_P (t))
a900ae6b
JJ
3023 {
3024 *walk_subtrees = 1;
3025 if (ctx)
70f34814
RG
3026 {
3027 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3028 if (tem != TREE_TYPE (t))
3029 {
3030 if (TREE_CODE (t) == INTEGER_CST)
8e6cdc90 3031 *tp = wide_int_to_tree (tem, wi::to_wide (t));
70f34814
RG
3032 else
3033 TREE_TYPE (t) = tem;
3034 }
3035 }
a900ae6b 3036 }
726a989a
RB
3037 break;
3038 }
3039
3040 return NULL_TREE;
3041}
3042
c02065fc
AH
3043/* Return true if FNDECL is a setjmp or a longjmp. */
3044
3045static bool
3046setjmp_or_longjmp_p (const_tree fndecl)
3047{
3048 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3049 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3050 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3051 return true;
3052
3053 tree declname = DECL_NAME (fndecl);
3054 if (!declname)
3055 return false;
3056 const char *name = IDENTIFIER_POINTER (declname);
3057 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3058}
3059
726a989a
RB
3060
3061/* Helper function for scan_omp.
3062
41dbbb37 3063 Callback for walk_gimple_stmt used to scan for OMP directives in
726a989a
RB
3064 the current statement in GSI. */
3065
3066static tree
3067scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3068 struct walk_stmt_info *wi)
3069{
355fe088 3070 gimple *stmt = gsi_stmt (*gsi);
726a989a
RB
3071 omp_context *ctx = (omp_context *) wi->info;
3072
3073 if (gimple_has_location (stmt))
3074 input_location = gimple_location (stmt);
953ff289 3075
41dbbb37 3076 /* Check the nesting restrictions. */
acf0174b
JJ
3077 bool remove = false;
3078 if (is_gimple_omp (stmt))
3079 remove = !check_omp_nesting_restrictions (stmt, ctx);
3080 else if (is_gimple_call (stmt))
3081 {
3082 tree fndecl = gimple_call_fndecl (stmt);
c02065fc
AH
3083 if (fndecl)
3084 {
3085 if (setjmp_or_longjmp_p (fndecl)
3086 && ctx
3087 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 3088 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
c02065fc
AH
3089 {
3090 remove = true;
3091 error_at (gimple_location (stmt),
3092 "setjmp/longjmp inside simd construct");
3093 }
3094 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3095 switch (DECL_FUNCTION_CODE (fndecl))
3096 {
3097 case BUILT_IN_GOMP_BARRIER:
3098 case BUILT_IN_GOMP_CANCEL:
3099 case BUILT_IN_GOMP_CANCELLATION_POINT:
3100 case BUILT_IN_GOMP_TASKYIELD:
3101 case BUILT_IN_GOMP_TASKWAIT:
3102 case BUILT_IN_GOMP_TASKGROUP_START:
3103 case BUILT_IN_GOMP_TASKGROUP_END:
3104 remove = !check_omp_nesting_restrictions (stmt, ctx);
3105 break;
3106 default:
3107 break;
3108 }
3109 }
acf0174b
JJ
3110 }
3111 if (remove)
3112 {
3113 stmt = gimple_build_nop ();
3114 gsi_replace (gsi, stmt, false);
a68ab351 3115 }
a6fc8e21 3116
726a989a
RB
3117 *handled_ops_p = true;
3118
3119 switch (gimple_code (stmt))
953ff289 3120 {
726a989a 3121 case GIMPLE_OMP_PARALLEL:
a68ab351 3122 taskreg_nesting_level++;
726a989a 3123 scan_omp_parallel (gsi, ctx);
a68ab351
JJ
3124 taskreg_nesting_level--;
3125 break;
3126
726a989a 3127 case GIMPLE_OMP_TASK:
a68ab351 3128 taskreg_nesting_level++;
726a989a 3129 scan_omp_task (gsi, ctx);
a68ab351 3130 taskreg_nesting_level--;
953ff289
DN
3131 break;
3132
726a989a 3133 case GIMPLE_OMP_FOR:
6c7509bc
JJ
3134 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3135 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3136 && omp_maybe_offloaded_ctx (ctx)
3137 && omp_max_simt_vf ())
3138 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3139 else
3140 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
953ff289
DN
3141 break;
3142
726a989a 3143 case GIMPLE_OMP_SECTIONS:
538dd0b7 3144 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
953ff289
DN
3145 break;
3146
726a989a 3147 case GIMPLE_OMP_SINGLE:
538dd0b7 3148 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
953ff289
DN
3149 break;
3150
726a989a
RB
3151 case GIMPLE_OMP_SECTION:
3152 case GIMPLE_OMP_MASTER:
acf0174b 3153 case GIMPLE_OMP_TASKGROUP:
726a989a
RB
3154 case GIMPLE_OMP_ORDERED:
3155 case GIMPLE_OMP_CRITICAL:
b2b40051 3156 case GIMPLE_OMP_GRID_BODY:
726a989a 3157 ctx = new_omp_context (stmt, ctx);
26127932 3158 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
3159 break;
3160
acf0174b 3161 case GIMPLE_OMP_TARGET:
538dd0b7 3162 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
acf0174b
JJ
3163 break;
3164
3165 case GIMPLE_OMP_TEAMS:
538dd0b7 3166 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
acf0174b
JJ
3167 break;
3168
726a989a 3169 case GIMPLE_BIND:
953ff289
DN
3170 {
3171 tree var;
953ff289 3172
726a989a
RB
3173 *handled_ops_p = false;
3174 if (ctx)
538dd0b7
DM
3175 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3176 var ;
3177 var = DECL_CHAIN (var))
726a989a 3178 insert_decl_map (&ctx->cb, var, var);
953ff289
DN
3179 }
3180 break;
953ff289 3181 default:
726a989a 3182 *handled_ops_p = false;
953ff289
DN
3183 break;
3184 }
3185
3186 return NULL_TREE;
3187}
3188
3189
726a989a 3190/* Scan all the statements starting at the current statement. CTX
41dbbb37 3191 contains context information about the OMP directives and
726a989a 3192 clauses found during the scan. */
953ff289
DN
3193
3194static void
26127932 3195scan_omp (gimple_seq *body_p, omp_context *ctx)
953ff289
DN
3196{
3197 location_t saved_location;
3198 struct walk_stmt_info wi;
3199
3200 memset (&wi, 0, sizeof (wi));
953ff289 3201 wi.info = ctx;
953ff289
DN
3202 wi.want_locations = true;
3203
3204 saved_location = input_location;
26127932 3205 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
953ff289
DN
3206 input_location = saved_location;
3207}
3208\f
3209/* Re-gimplification and code generation routines. */
3210
6724f8a6
JJ
3211/* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3212 of BIND if in a method. */
3213
3214static void
3215maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3216{
3217 if (DECL_ARGUMENTS (current_function_decl)
3218 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3219 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3220 == POINTER_TYPE))
3221 {
3222 tree vars = gimple_bind_vars (bind);
3223 for (tree *pvar = &vars; *pvar; )
3224 if (omp_member_access_dummy_var (*pvar))
3225 *pvar = DECL_CHAIN (*pvar);
3226 else
3227 pvar = &DECL_CHAIN (*pvar);
3228 gimple_bind_set_vars (bind, vars);
3229 }
3230}
3231
3232/* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3233 block and its subblocks. */
3234
3235static void
3236remove_member_access_dummy_vars (tree block)
3237{
3238 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3239 if (omp_member_access_dummy_var (*pvar))
3240 *pvar = DECL_CHAIN (*pvar);
3241 else
3242 pvar = &DECL_CHAIN (*pvar);
3243
3244 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3245 remove_member_access_dummy_vars (block);
3246}
3247
953ff289
DN
3248/* If a context was created for STMT when it was scanned, return it. */
3249
3250static omp_context *
355fe088 3251maybe_lookup_ctx (gimple *stmt)
953ff289
DN
3252{
3253 splay_tree_node n;
3254 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3255 return n ? (omp_context *) n->value : NULL;
3256}
3257
50674e96
DN
3258
3259/* Find the mapping for DECL in CTX or the immediately enclosing
3260 context that has a mapping for DECL.
3261
3262 If CTX is a nested parallel directive, we may have to use the decl
3263 mappings created in CTX's parent context. Suppose that we have the
 3264 following parallel nesting (variable UIDs shown for clarity):
3265
3266 iD.1562 = 0;
3267 #omp parallel shared(iD.1562) -> outer parallel
3268 iD.1562 = iD.1562 + 1;
3269
3270 #omp parallel shared (iD.1562) -> inner parallel
3271 iD.1562 = iD.1562 - 1;
3272
3273 Each parallel structure will create a distinct .omp_data_s structure
3274 for copying iD.1562 in/out of the directive:
3275
3276 outer parallel .omp_data_s.1.i -> iD.1562
3277 inner parallel .omp_data_s.2.i -> iD.1562
3278
3279 A shared variable mapping will produce a copy-out operation before
3280 the parallel directive and a copy-in operation after it. So, in
3281 this case we would have:
3282
3283 iD.1562 = 0;
3284 .omp_data_o.1.i = iD.1562;
3285 #omp parallel shared(iD.1562) -> outer parallel
3286 .omp_data_i.1 = &.omp_data_o.1
3287 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3288
3289 .omp_data_o.2.i = iD.1562; -> **
3290 #omp parallel shared(iD.1562) -> inner parallel
3291 .omp_data_i.2 = &.omp_data_o.2
3292 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3293
3294
3295 ** This is a problem. The symbol iD.1562 cannot be referenced
3296 inside the body of the outer parallel region. But since we are
3297 emitting this copy operation while expanding the inner parallel
3298 directive, we need to access the CTX structure of the outer
3299 parallel directive to get the correct mapping:
3300
3301 .omp_data_o.2.i = .omp_data_i.1->i
3302
3303 Since there may be other workshare or parallel directives enclosing
3304 the parallel directive, it may be necessary to walk up the context
3305 parent chain. This is not a problem in general because nested
3306 parallelism happens only rarely. */
3307
3308static tree
3309lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3310{
3311 tree t;
3312 omp_context *up;
3313
50674e96
DN
3314 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3315 t = maybe_lookup_decl (decl, up);
3316
d2dda7fe 3317 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
50674e96 3318
64964499 3319 return t ? t : decl;
50674e96
DN
3320}
3321
3322
8ca5b2a2
JJ
3323/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3324 in outer contexts. */
3325
3326static tree
3327maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3328{
3329 tree t = NULL;
3330 omp_context *up;
3331
d2dda7fe
JJ
3332 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3333 t = maybe_lookup_decl (decl, up);
8ca5b2a2
JJ
3334
3335 return t ? t : decl;
3336}
3337
3338
f2c9f71d 3339/* Construct the initialization value for reduction operation OP. */
953ff289
DN
3340
3341tree
f2c9f71d 3342omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
953ff289 3343{
f2c9f71d 3344 switch (op)
953ff289
DN
3345 {
3346 case PLUS_EXPR:
3347 case MINUS_EXPR:
3348 case BIT_IOR_EXPR:
3349 case BIT_XOR_EXPR:
3350 case TRUTH_OR_EXPR:
3351 case TRUTH_ORIF_EXPR:
3352 case TRUTH_XOR_EXPR:
3353 case NE_EXPR:
e8160c9a 3354 return build_zero_cst (type);
953ff289
DN
3355
3356 case MULT_EXPR:
3357 case TRUTH_AND_EXPR:
3358 case TRUTH_ANDIF_EXPR:
3359 case EQ_EXPR:
db3927fb 3360 return fold_convert_loc (loc, type, integer_one_node);
953ff289
DN
3361
3362 case BIT_AND_EXPR:
db3927fb 3363 return fold_convert_loc (loc, type, integer_minus_one_node);
953ff289
DN
3364
3365 case MAX_EXPR:
3366 if (SCALAR_FLOAT_TYPE_P (type))
3367 {
3368 REAL_VALUE_TYPE max, min;
3d3dbadd 3369 if (HONOR_INFINITIES (type))
953ff289
DN
3370 {
3371 real_inf (&max);
3372 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3373 }
3374 else
3375 real_maxval (&min, 1, TYPE_MODE (type));
3376 return build_real (type, min);
3377 }
3ff2d74e
TV
3378 else if (POINTER_TYPE_P (type))
3379 {
3380 wide_int min
3381 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3382 return wide_int_to_tree (type, min);
3383 }
953ff289
DN
3384 else
3385 {
3386 gcc_assert (INTEGRAL_TYPE_P (type));
3387 return TYPE_MIN_VALUE (type);
3388 }
3389
3390 case MIN_EXPR:
3391 if (SCALAR_FLOAT_TYPE_P (type))
3392 {
3393 REAL_VALUE_TYPE max;
3d3dbadd 3394 if (HONOR_INFINITIES (type))
953ff289
DN
3395 real_inf (&max);
3396 else
3397 real_maxval (&max, 0, TYPE_MODE (type));
3398 return build_real (type, max);
3399 }
3ff2d74e
TV
3400 else if (POINTER_TYPE_P (type))
3401 {
3402 wide_int max
3403 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3404 return wide_int_to_tree (type, max);
3405 }
953ff289
DN
3406 else
3407 {
3408 gcc_assert (INTEGRAL_TYPE_P (type));
3409 return TYPE_MAX_VALUE (type);
3410 }
3411
3412 default:
3413 gcc_unreachable ();
3414 }
3415}
3416
f2c9f71d
TS
3417/* Construct the initialization value for reduction CLAUSE. */
3418
3419tree
3420omp_reduction_init (tree clause, tree type)
3421{
3422 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3423 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3424}
3425
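/* Editorial illustration, not part of the GCC sources: for a clause such as
   reduction(max:x) on a float X, the routine above yields the identity of the
   operation -- -inf when infinities are honored, otherwise the most negative
   finite value -- so every private copy starts out neutral:

     float x = 0.0f;
     #pragma omp parallel for reduction(max:x)
     for (int i = 0; i < n; i++)         // n and a[] are hypothetical
       if (a[i] > x)
         x = a[i];

   Each thread's private X is seeded with that identity value and the partial
   maxima are merged back into the original X afterwards.  */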
acf0174b
JJ
3426/* Return alignment to be assumed for var in CLAUSE, which should be
3427 OMP_CLAUSE_ALIGNED. */
3428
3429static tree
3430omp_clause_aligned_alignment (tree clause)
3431{
3432 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3433 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3434
3435 /* Otherwise return the implementation-defined alignment. */
3436 unsigned int al = 1;
16d22000 3437 opt_scalar_mode mode_iter;
86e36728
RS
3438 auto_vector_sizes sizes;
3439 targetm.vectorize.autovectorize_vector_sizes (&sizes);
3440 poly_uint64 vs = 0;
3441 for (unsigned int i = 0; i < sizes.length (); ++i)
3442 vs = ordered_max (vs, sizes[i]);
acf0174b
JJ
3443 static enum mode_class classes[]
3444 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3445 for (int i = 0; i < 4; i += 2)
16d22000
RS
3446 /* The for loop above dictates that we only walk through scalar classes. */
3447 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
acf0174b 3448 {
16d22000
RS
3449 scalar_mode mode = mode_iter.require ();
3450 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
acf0174b
JJ
3451 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3452 continue;
86e36728
RS
3453 while (maybe_ne (vs, 0U)
3454 && known_lt (GET_MODE_SIZE (vmode), vs)
490d0f6c
RS
3455 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3456 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
01914336 3457
acf0174b
JJ
3458 tree type = lang_hooks.types.type_for_mode (mode, 1);
3459 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3460 continue;
cf098191
RS
3461 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3462 GET_MODE_SIZE (mode));
86e36728 3463 type = build_vector_type (type, nelts);
acf0174b
JJ
3464 if (TYPE_MODE (type) != vmode)
3465 continue;
3466 if (TYPE_ALIGN_UNIT (type) > al)
3467 al = TYPE_ALIGN_UNIT (type);
3468 }
3469 return build_int_cst (integer_type_node, al);
3470}
3471
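/* Editorial illustration, not part of the GCC sources: this fallback is taken
   only when the aligned clause omits an explicit alignment, e.g.

     #pragma omp declare simd aligned(p)         // no ": alignment" given
     void f (double *p);

   With "aligned(p : 32)" the 32 is stored in OMP_CLAUSE_ALIGNED_ALIGNMENT and
   returned directly; otherwise the largest unit alignment among the target's
   preferred vector modes, computed above, is assumed.  */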
6943af07
AM
3472
3473/* This structure is part of the interface between lower_rec_simd_input_clauses
3474 and lower_rec_input_clauses. */
3475
3476struct omplow_simd_context {
9d2f08ab 3477 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
6943af07
AM
3478 tree idx;
3479 tree lane;
0c6b03b5
AM
3480 vec<tree, va_heap> simt_eargs;
3481 gimple_seq simt_dlist;
9d2f08ab 3482 poly_uint64_pod max_vf;
6943af07
AM
3483 bool is_simt;
3484};
3485
74bf76ed
JJ
3486/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3487 privatization. */
3488
3489static bool
6943af07
AM
3490lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3491 omplow_simd_context *sctx, tree &ivar, tree &lvar)
74bf76ed 3492{
9d2f08ab 3493 if (known_eq (sctx->max_vf, 0U))
74bf76ed 3494 {
6943af07 3495 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
9d2f08ab 3496 if (maybe_gt (sctx->max_vf, 1U))
74bf76ed 3497 {
629b3d75 3498 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
74bf76ed 3499 OMP_CLAUSE_SAFELEN);
9d2f08ab
RS
3500 if (c)
3501 {
3502 poly_uint64 safe_len;
3503 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3504 || maybe_lt (safe_len, 1U))
3505 sctx->max_vf = 1;
3506 else
3507 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3508 }
74bf76ed 3509 }
9d2f08ab 3510 if (maybe_gt (sctx->max_vf, 1U))
74bf76ed 3511 {
6943af07
AM
3512 sctx->idx = create_tmp_var (unsigned_type_node);
3513 sctx->lane = create_tmp_var (unsigned_type_node);
74bf76ed
JJ
3514 }
3515 }
9d2f08ab 3516 if (known_eq (sctx->max_vf, 1U))
74bf76ed
JJ
3517 return false;
3518
0c6b03b5
AM
3519 if (sctx->is_simt)
3520 {
3521 if (is_gimple_reg (new_var))
3522 {
3523 ivar = lvar = new_var;
3524 return true;
3525 }
3526 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3527 ivar = lvar = create_tmp_var (type);
3528 TREE_ADDRESSABLE (ivar) = 1;
3529 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3530 NULL, DECL_ATTRIBUTES (ivar));
3531 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3532 tree clobber = build_constructor (type, NULL);
3533 TREE_THIS_VOLATILE (clobber) = 1;
3534 gimple *g = gimple_build_assign (ivar, clobber);
3535 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3536 }
3537 else
3538 {
3539 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3540 tree avar = create_tmp_var_raw (atype);
3541 if (TREE_ADDRESSABLE (new_var))
3542 TREE_ADDRESSABLE (avar) = 1;
3543 DECL_ATTRIBUTES (avar)
3544 = tree_cons (get_identifier ("omp simd array"), NULL,
3545 DECL_ATTRIBUTES (avar));
3546 gimple_add_tmp_var (avar);
3547 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3548 NULL_TREE, NULL_TREE);
3549 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3550 NULL_TREE, NULL_TREE);
3551 }
acf0174b
JJ
3552 if (DECL_P (new_var))
3553 {
3554 SET_DECL_VALUE_EXPR (new_var, lvar);
3555 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3556 }
74bf76ed
JJ
3557 return true;
3558}
3559
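/* Editorial sketch, not part of the GCC sources: on a non-SIMT target the
   helper above privatizes a scalar roughly as

     #pragma omp simd private(t)        ... uses of t ...
       ==>
     T t_arr[max_vf];                   marked with the "omp simd array" attribute
     lane = GOMP_SIMD_LANE (simduid);
     ... uses of t become t_arr[lane], and t_arr[idx] in the init/fini loops ...

   IVAR and LVAR are the two ARRAY_REFs handed back to the caller; the
   vectorizer later narrows max_vf to the real vectorization factor.  */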
decaaec8
JJ
3560/* Helper function of lower_rec_input_clauses. For a reference
3561 in a simd reduction, add an underlying variable for it to reference. */
3562
3563static void
3564handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3565{
3566 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3567 if (TREE_CONSTANT (z))
3568 {
d9a6bd32
JJ
3569 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3570 get_name (new_vard));
decaaec8
JJ
3571 gimple_add_tmp_var (z);
3572 TREE_ADDRESSABLE (z) = 1;
3573 z = build_fold_addr_expr_loc (loc, z);
3574 gimplify_assign (new_vard, z, ilist);
3575 }
3576}
3577
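/* Editorial note, not part of the GCC sources: NEW_VARD is the pointer
   behind a reference-typed reduction variable.  When the pointed-to size is
   constant, the helper above gives it fresh addressable backing storage,
   roughly

     T __backing;                        // hypothetical temporary
     new_vard = &__backing;

   so that later code may dereference NEW_VARD even when no SIMD array ends
   up being used for the reduction.  */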
953ff289
DN
3578/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3579 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3580 private variables. Initialization statements go in ILIST, while calls
3581 to destructors go in DLIST. */
3582
3583static void
726a989a 3584lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
acf0174b 3585 omp_context *ctx, struct omp_for_data *fd)
953ff289 3586{
5039610b 3587 tree c, dtor, copyin_seq, x, ptr;
953ff289 3588 bool copyin_by_ref = false;
8ca5b2a2 3589 bool lastprivate_firstprivate = false;
acf0174b 3590 bool reduction_omp_orig_ref = false;
953ff289 3591 int pass;
74bf76ed 3592 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 3593 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
6943af07 3594 omplow_simd_context sctx = omplow_simd_context ();
0c6b03b5
AM
3595 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3596 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
9669b00b 3597 gimple_seq llist[3] = { };
953ff289 3598
953ff289 3599 copyin_seq = NULL;
6943af07 3600 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
953ff289 3601
74bf76ed
JJ
3602 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3603 with data sharing clauses referencing variable sized vars. That
3604 is unnecessarily hard to support and very unlikely to result in
3605 vectorized code anyway. */
3606 if (is_simd)
3607 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3608 switch (OMP_CLAUSE_CODE (c))
3609 {
da6f124d
JJ
3610 case OMP_CLAUSE_LINEAR:
3611 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6943af07 3612 sctx.max_vf = 1;
da6f124d 3613 /* FALLTHRU */
74bf76ed
JJ
3614 case OMP_CLAUSE_PRIVATE:
3615 case OMP_CLAUSE_FIRSTPRIVATE:
3616 case OMP_CLAUSE_LASTPRIVATE:
74bf76ed 3617 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
6943af07 3618 sctx.max_vf = 1;
74bf76ed 3619 break;
d9a6bd32
JJ
3620 case OMP_CLAUSE_REDUCTION:
3621 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3622 || is_variable_sized (OMP_CLAUSE_DECL (c)))
6943af07 3623 sctx.max_vf = 1;
d9a6bd32 3624 break;
74bf76ed
JJ
3625 default:
3626 continue;
3627 }
3628
0c6b03b5 3629 /* Add a placeholder for simduid. */
9d2f08ab 3630 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
0c6b03b5
AM
3631 sctx.simt_eargs.safe_push (NULL_TREE);
3632
953ff289
DN
3633 /* Do all the fixed sized types in the first pass, and the variable sized
3634 types in the second pass. This makes sure that the scalar arguments to
b8698a0f 3635 the variable sized types are processed before we use them in the
953ff289
DN
3636 variable sized operations. */
3637 for (pass = 0; pass < 2; ++pass)
3638 {
3639 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3640 {
aaf46ef9 3641 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
953ff289
DN
3642 tree var, new_var;
3643 bool by_ref;
db3927fb 3644 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289
DN
3645
3646 switch (c_kind)
3647 {
3648 case OMP_CLAUSE_PRIVATE:
3649 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3650 continue;
3651 break;
3652 case OMP_CLAUSE_SHARED:
acf0174b
JJ
3653 /* Ignore shared directives in teams construct. */
3654 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3655 continue;
8ca5b2a2
JJ
3656 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3657 {
d9a6bd32
JJ
3658 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3659 || is_global_var (OMP_CLAUSE_DECL (c)));
8ca5b2a2
JJ
3660 continue;
3661 }
953ff289 3662 case OMP_CLAUSE_FIRSTPRIVATE:
953ff289 3663 case OMP_CLAUSE_COPYIN:
d9a6bd32 3664 break;
acf0174b 3665 case OMP_CLAUSE_LINEAR:
d9a6bd32
JJ
3666 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3667 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3668 lastprivate_firstprivate = true;
acf0174b 3669 break;
953ff289 3670 case OMP_CLAUSE_REDUCTION:
acf0174b
JJ
3671 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3672 reduction_omp_orig_ref = true;
953ff289 3673 break;
acf0174b 3674 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32 3675 /* Handle _looptemp_ clauses only on parallel/task. */
acf0174b
JJ
3676 if (fd)
3677 continue;
74bf76ed 3678 break;
077b0dfb 3679 case OMP_CLAUSE_LASTPRIVATE:
8ca5b2a2
JJ
3680 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3681 {
3682 lastprivate_firstprivate = true;
d9a6bd32 3683 if (pass != 0 || is_taskloop_ctx (ctx))
8ca5b2a2
JJ
3684 continue;
3685 }
92d28cbb
JJ
3686 /* Even without corresponding firstprivate, if
3687 decl is Fortran allocatable, it needs outer var
3688 reference. */
3689 else if (pass == 0
3690 && lang_hooks.decls.omp_private_outer_ref
3691 (OMP_CLAUSE_DECL (c)))
3692 lastprivate_firstprivate = true;
077b0dfb 3693 break;
acf0174b
JJ
3694 case OMP_CLAUSE_ALIGNED:
3695 if (pass == 0)
3696 continue;
3697 var = OMP_CLAUSE_DECL (c);
3698 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3699 && !is_global_var (var))
3700 {
3701 new_var = maybe_lookup_decl (var, ctx);
3702 if (new_var == NULL_TREE)
3703 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3704 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
37e373c2
JJ
3705 tree alarg = omp_clause_aligned_alignment (c);
3706 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3707 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
acf0174b
JJ
3708 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3709 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3710 gimplify_and_add (x, ilist);
3711 }
3712 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3713 && is_global_var (var))
3714 {
3715 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3716 new_var = lookup_decl (var, ctx);
3717 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3718 t = build_fold_addr_expr_loc (clause_loc, t);
3719 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
37e373c2
JJ
3720 tree alarg = omp_clause_aligned_alignment (c);
3721 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3722 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
acf0174b 3723 t = fold_convert_loc (clause_loc, ptype, t);
b731b390 3724 x = create_tmp_var (ptype);
acf0174b
JJ
3725 t = build2 (MODIFY_EXPR, ptype, x, t);
3726 gimplify_and_add (t, ilist);
3727 t = build_simple_mem_ref_loc (clause_loc, x);
3728 SET_DECL_VALUE_EXPR (new_var, t);
3729 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3730 }
3731 continue;
953ff289
DN
3732 default:
3733 continue;
3734 }
3735
3736 new_var = var = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
3737 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3738 {
3739 var = TREE_OPERAND (var, 0);
e01d41e5
JJ
3740 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3741 var = TREE_OPERAND (var, 0);
d9a6bd32
JJ
3742 if (TREE_CODE (var) == INDIRECT_REF
3743 || TREE_CODE (var) == ADDR_EXPR)
3744 var = TREE_OPERAND (var, 0);
3745 if (is_variable_sized (var))
3746 {
3747 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3748 var = DECL_VALUE_EXPR (var);
3749 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3750 var = TREE_OPERAND (var, 0);
3751 gcc_assert (DECL_P (var));
3752 }
3753 new_var = var;
3754 }
953ff289
DN
3755 if (c_kind != OMP_CLAUSE_COPYIN)
3756 new_var = lookup_decl (var, ctx);
3757
3758 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3759 {
3760 if (pass != 0)
3761 continue;
3762 }
d9a6bd32
JJ
3763 /* C/C++ array section reductions. */
3764 else if (c_kind == OMP_CLAUSE_REDUCTION
3765 && var != OMP_CLAUSE_DECL (c))
953ff289
DN
3766 {
3767 if (pass == 0)
3768 continue;
3769
e01d41e5 3770 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
d9a6bd32 3771 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
e01d41e5
JJ
3772 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3773 {
3774 tree b = TREE_OPERAND (orig_var, 1);
3775 b = maybe_lookup_decl (b, ctx);
3776 if (b == NULL)
3777 {
3778 b = TREE_OPERAND (orig_var, 1);
3779 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3780 }
3781 if (integer_zerop (bias))
3782 bias = b;
3783 else
3784 {
3785 bias = fold_convert_loc (clause_loc,
3786 TREE_TYPE (b), bias);
3787 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3788 TREE_TYPE (b), b, bias);
3789 }
3790 orig_var = TREE_OPERAND (orig_var, 0);
3791 }
d9a6bd32
JJ
3792 if (TREE_CODE (orig_var) == INDIRECT_REF
3793 || TREE_CODE (orig_var) == ADDR_EXPR)
3794 orig_var = TREE_OPERAND (orig_var, 0);
3795 tree d = OMP_CLAUSE_DECL (c);
3796 tree type = TREE_TYPE (d);
3797 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3798 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3799 const char *name = get_name (orig_var);
3800 if (TREE_CONSTANT (v))
a68ab351 3801 {
d9a6bd32
JJ
3802 x = create_tmp_var_raw (type, name);
3803 gimple_add_tmp_var (x);
3804 TREE_ADDRESSABLE (x) = 1;
3805 x = build_fold_addr_expr_loc (clause_loc, x);
3806 }
3807 else
3808 {
3809 tree atmp
3810 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3811 tree t = maybe_lookup_decl (v, ctx);
3812 if (t)
3813 v = t;
3814 else
3815 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3816 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3817 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3818 TREE_TYPE (v), v,
3819 build_int_cst (TREE_TYPE (v), 1));
3820 t = fold_build2_loc (clause_loc, MULT_EXPR,
3821 TREE_TYPE (v), t,
3822 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3823 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3824 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3825 }
3826
3827 tree ptype = build_pointer_type (TREE_TYPE (type));
3828 x = fold_convert_loc (clause_loc, ptype, x);
3829 tree y = create_tmp_var (ptype, name);
3830 gimplify_assign (y, x, ilist);
3831 x = y;
e01d41e5
JJ
3832 tree yb = y;
3833
3834 if (!integer_zerop (bias))
3835 {
48a78aee
JJ
3836 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3837 bias);
3838 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3839 x);
3840 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3841 pointer_sized_int_node, yb, bias);
3842 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
e01d41e5
JJ
3843 yb = create_tmp_var (ptype, name);
3844 gimplify_assign (yb, x, ilist);
3845 x = yb;
3846 }
3847
3848 d = TREE_OPERAND (d, 0);
3849 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3850 d = TREE_OPERAND (d, 0);
3851 if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32
JJ
3852 {
3853 if (orig_var != var)
3854 {
3855 gcc_assert (is_variable_sized (orig_var));
3856 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3857 x);
3858 gimplify_assign (new_var, x, ilist);
3859 tree new_orig_var = lookup_decl (orig_var, ctx);
3860 tree t = build_fold_indirect_ref (new_var);
3861 DECL_IGNORED_P (new_var) = 0;
3862 TREE_THIS_NOTRAP (t) = 1;
3863 SET_DECL_VALUE_EXPR (new_orig_var, t);
3864 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3865 }
3866 else
3867 {
3868 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3869 build_int_cst (ptype, 0));
3870 SET_DECL_VALUE_EXPR (new_var, x);
3871 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3872 }
3873 }
3874 else
3875 {
3876 gcc_assert (orig_var == var);
e01d41e5 3877 if (TREE_CODE (d) == INDIRECT_REF)
d9a6bd32
JJ
3878 {
3879 x = create_tmp_var (ptype, name);
3880 TREE_ADDRESSABLE (x) = 1;
e01d41e5 3881 gimplify_assign (x, yb, ilist);
d9a6bd32
JJ
3882 x = build_fold_addr_expr_loc (clause_loc, x);
3883 }
3884 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3885 gimplify_assign (new_var, x, ilist);
3886 }
3887 tree y1 = create_tmp_var (ptype, NULL);
3888 gimplify_assign (y1, y, ilist);
3889 tree i2 = NULL_TREE, y2 = NULL_TREE;
3890 tree body2 = NULL_TREE, end2 = NULL_TREE;
3891 tree y3 = NULL_TREE, y4 = NULL_TREE;
3892 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3893 {
3894 y2 = create_tmp_var (ptype, NULL);
3895 gimplify_assign (y2, y, ilist);
3896 tree ref = build_outer_var_ref (var, ctx);
3897 /* For ref build_outer_var_ref already performs this. */
e01d41e5 3898 if (TREE_CODE (d) == INDIRECT_REF)
629b3d75 3899 gcc_assert (omp_is_reference (var));
e01d41e5 3900 else if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32 3901 ref = build_fold_addr_expr (ref);
629b3d75 3902 else if (omp_is_reference (var))
d9a6bd32
JJ
3903 ref = build_fold_addr_expr (ref);
3904 ref = fold_convert_loc (clause_loc, ptype, ref);
3905 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3906 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3907 {
3908 y3 = create_tmp_var (ptype, NULL);
3909 gimplify_assign (y3, unshare_expr (ref), ilist);
3910 }
3911 if (is_simd)
3912 {
3913 y4 = create_tmp_var (ptype, NULL);
3914 gimplify_assign (y4, ref, dlist);
3915 }
3916 }
3917 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3918 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3919 tree body = create_artificial_label (UNKNOWN_LOCATION);
3920 tree end = create_artificial_label (UNKNOWN_LOCATION);
3921 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3922 if (y2)
3923 {
3924 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3925 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3926 body2 = create_artificial_label (UNKNOWN_LOCATION);
3927 end2 = create_artificial_label (UNKNOWN_LOCATION);
3928 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3929 }
3930 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3931 {
3932 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3933 tree decl_placeholder
3934 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3935 SET_DECL_VALUE_EXPR (decl_placeholder,
3936 build_simple_mem_ref (y1));
3937 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3938 SET_DECL_VALUE_EXPR (placeholder,
3939 y3 ? build_simple_mem_ref (y3)
3940 : error_mark_node);
3941 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3942 x = lang_hooks.decls.omp_clause_default_ctor
3943 (c, build_simple_mem_ref (y1),
3944 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3945 if (x)
3946 gimplify_and_add (x, ilist);
3947 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3948 {
3949 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3950 lower_omp (&tseq, ctx);
3951 gimple_seq_add_seq (ilist, tseq);
3952 }
3953 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3954 if (is_simd)
3955 {
3956 SET_DECL_VALUE_EXPR (decl_placeholder,
3957 build_simple_mem_ref (y2));
3958 SET_DECL_VALUE_EXPR (placeholder,
3959 build_simple_mem_ref (y4));
3960 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3961 lower_omp (&tseq, ctx);
3962 gimple_seq_add_seq (dlist, tseq);
3963 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3964 }
3965 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3966 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3967 x = lang_hooks.decls.omp_clause_dtor
3968 (c, build_simple_mem_ref (y2));
3969 if (x)
3970 {
3971 gimple_seq tseq = NULL;
3972 dtor = x;
3973 gimplify_stmt (&dtor, &tseq);
3974 gimple_seq_add_seq (dlist, tseq);
3975 }
3976 }
3977 else
3978 {
3979 x = omp_reduction_init (c, TREE_TYPE (type));
3980 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3981
3982 /* reduction(-:var) sums up the partial results, so it
3983 acts identically to reduction(+:var). */
3984 if (code == MINUS_EXPR)
3985 code = PLUS_EXPR;
3986
3987 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3988 if (is_simd)
3989 {
3990 x = build2 (code, TREE_TYPE (type),
3991 build_simple_mem_ref (y4),
3992 build_simple_mem_ref (y2));
3993 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3994 }
3995 }
3996 gimple *g
3997 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3998 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3999 gimple_seq_add_stmt (ilist, g);
4000 if (y3)
4001 {
4002 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4003 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4004 gimple_seq_add_stmt (ilist, g);
4005 }
4006 g = gimple_build_assign (i, PLUS_EXPR, i,
4007 build_int_cst (TREE_TYPE (i), 1));
4008 gimple_seq_add_stmt (ilist, g);
4009 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4010 gimple_seq_add_stmt (ilist, g);
4011 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4012 if (y2)
4013 {
4014 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4015 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4016 gimple_seq_add_stmt (dlist, g);
4017 if (y4)
4018 {
4019 g = gimple_build_assign
4020 (y4, POINTER_PLUS_EXPR, y4,
4021 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4022 gimple_seq_add_stmt (dlist, g);
4023 }
4024 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4025 build_int_cst (TREE_TYPE (i2), 1));
4026 gimple_seq_add_stmt (dlist, g);
4027 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4028 gimple_seq_add_stmt (dlist, g);
4029 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4030 }
4031 continue;
4032 }
4033 else if (is_variable_sized (var))
4034 {
4035 /* For variable sized types, we need to allocate the
4036 actual storage here. Call alloca and store the
4037 result in the pointer decl that we created elsewhere. */
4038 if (pass == 0)
4039 continue;
4040
4041 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4042 {
4043 gcall *stmt;
4044 tree tmp, atmp;
4045
4046 ptr = DECL_VALUE_EXPR (new_var);
4047 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4048 ptr = TREE_OPERAND (ptr, 0);
a68ab351
JJ
4049 gcc_assert (DECL_P (ptr));
4050 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
726a989a
RB
4051
4052 /* void *tmp = __builtin_alloca */
d9a6bd32
JJ
4053 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4054 stmt = gimple_build_call (atmp, 2, x,
4055 size_int (DECL_ALIGN (var)));
b731b390 4056 tmp = create_tmp_var_raw (ptr_type_node);
726a989a
RB
4057 gimple_add_tmp_var (tmp);
4058 gimple_call_set_lhs (stmt, tmp);
4059
4060 gimple_seq_add_stmt (ilist, stmt);
4061
db3927fb 4062 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
726a989a 4063 gimplify_assign (ptr, x, ilist);
a68ab351 4064 }
953ff289 4065 }
629b3d75 4066 else if (omp_is_reference (var))
953ff289 4067 {
50674e96
DN
4068 /* For references that are being privatized for Fortran,
4069 allocate new backing storage for the new pointer
4070 variable. This allows us to avoid changing all the
4071 code that expects a pointer into code that expects
acf0174b 4072 a direct variable. */
953ff289
DN
4073 if (pass == 0)
4074 continue;
4075
4076 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
a68ab351
JJ
4077 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4078 {
4079 x = build_receiver_ref (var, false, ctx);
db3927fb 4080 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
4081 }
4082 else if (TREE_CONSTANT (x))
953ff289 4083 {
decaaec8
JJ
4084 /* For reduction in SIMD loop, defer adding the
4085 initialization of the reference, because if we decide
4086 to use a SIMD array for it, the initialization could cause
4087 expansion ICE. */
4088 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4ceffa27
JJ
4089 x = NULL_TREE;
4090 else
4091 {
4ceffa27 4092 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
d9a6bd32 4093 get_name (var));
4ceffa27
JJ
4094 gimple_add_tmp_var (x);
4095 TREE_ADDRESSABLE (x) = 1;
4096 x = build_fold_addr_expr_loc (clause_loc, x);
4097 }
953ff289
DN
4098 }
4099 else
4100 {
d9a6bd32
JJ
4101 tree atmp
4102 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4103 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4104 tree al = size_int (TYPE_ALIGN (rtype));
4105 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
953ff289
DN
4106 }
4107
4ceffa27
JJ
4108 if (x)
4109 {
4110 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4111 gimplify_assign (new_var, x, ilist);
4112 }
953ff289 4113
70f34814 4114 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289
DN
4115 }
4116 else if (c_kind == OMP_CLAUSE_REDUCTION
4117 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4118 {
4119 if (pass == 0)
4120 continue;
4121 }
4122 else if (pass != 0)
4123 continue;
4124
aaf46ef9 4125 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
4126 {
4127 case OMP_CLAUSE_SHARED:
acf0174b
JJ
4128 /* Ignore shared directives in teams construct. */
4129 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4130 continue;
8ca5b2a2
JJ
4131 /* Shared global vars are just accessed directly. */
4132 if (is_global_var (new_var))
4133 break;
d9a6bd32
JJ
4134 /* For taskloop firstprivate/lastprivate, represented
4135 as firstprivate and shared clause on the task, new_var
4136 is the firstprivate var. */
4137 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4138 break;
953ff289
DN
4139 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4140 needs to be delayed until after fixup_child_record_type so
4141 that we get the correct type during the dereference. */
7c8f7639 4142 by_ref = use_pointer_for_field (var, ctx);
953ff289
DN
4143 x = build_receiver_ref (var, by_ref, ctx);
4144 SET_DECL_VALUE_EXPR (new_var, x);
4145 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4146
4147 /* ??? If VAR is not passed by reference, and the variable
4148 hasn't been initialized yet, then we'll get a warning for
4149 the store into the omp_data_s structure. Ideally, we'd be
b8698a0f 4150 able to notice this and not store anything at all, but
953ff289
DN
4151 we're generating code too early. Suppress the warning. */
4152 if (!by_ref)
4153 TREE_NO_WARNING (var) = 1;
4154 break;
4155
4156 case OMP_CLAUSE_LASTPRIVATE:
4157 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4158 break;
4159 /* FALLTHRU */
4160
4161 case OMP_CLAUSE_PRIVATE:
a68ab351
JJ
4162 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4163 x = build_outer_var_ref (var, ctx);
4164 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4165 {
4166 if (is_task_ctx (ctx))
4167 x = build_receiver_ref (var, false, ctx);
4168 else
c39dad64 4169 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
a68ab351
JJ
4170 }
4171 else
4172 x = NULL;
74bf76ed 4173 do_private:
acf0174b 4174 tree nx;
d9a6bd32
JJ
4175 nx = lang_hooks.decls.omp_clause_default_ctor
4176 (c, unshare_expr (new_var), x);
74bf76ed
JJ
4177 if (is_simd)
4178 {
4179 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
acf0174b 4180 if ((TREE_ADDRESSABLE (new_var) || nx || y
74bf76ed 4181 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
6943af07
AM
4182 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4183 ivar, lvar))
74bf76ed 4184 {
acf0174b 4185 if (nx)
74bf76ed
JJ
4186 x = lang_hooks.decls.omp_clause_default_ctor
4187 (c, unshare_expr (ivar), x);
acf0174b 4188 if (nx && x)
74bf76ed
JJ
4189 gimplify_and_add (x, &llist[0]);
4190 if (y)
4191 {
4192 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4193 if (y)
4194 {
4195 gimple_seq tseq = NULL;
4196
4197 dtor = y;
4198 gimplify_stmt (&dtor, &tseq);
4199 gimple_seq_add_seq (&llist[1], tseq);
4200 }
4201 }
4202 break;
4203 }
4204 }
acf0174b
JJ
4205 if (nx)
4206 gimplify_and_add (nx, ilist);
953ff289
DN
4207 /* FALLTHRU */
4208
4209 do_dtor:
4210 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4211 if (x)
4212 {
726a989a
RB
4213 gimple_seq tseq = NULL;
4214
953ff289 4215 dtor = x;
726a989a 4216 gimplify_stmt (&dtor, &tseq);
355a7673 4217 gimple_seq_add_seq (dlist, tseq);
953ff289
DN
4218 }
4219 break;
4220
74bf76ed
JJ
4221 case OMP_CLAUSE_LINEAR:
4222 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4223 goto do_firstprivate;
4224 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4225 x = NULL;
4226 else
4227 x = build_outer_var_ref (var, ctx);
4228 goto do_private;
4229
953ff289 4230 case OMP_CLAUSE_FIRSTPRIVATE:
a68ab351
JJ
4231 if (is_task_ctx (ctx))
4232 {
629b3d75 4233 if (omp_is_reference (var) || is_variable_sized (var))
a68ab351
JJ
4234 goto do_dtor;
4235 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4236 ctx))
4237 || use_pointer_for_field (var, NULL))
4238 {
4239 x = build_receiver_ref (var, false, ctx);
4240 SET_DECL_VALUE_EXPR (new_var, x);
4241 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4242 goto do_dtor;
4243 }
4244 }
74bf76ed 4245 do_firstprivate:
953ff289 4246 x = build_outer_var_ref (var, ctx);
74bf76ed
JJ
4247 if (is_simd)
4248 {
acf0174b
JJ
4249 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4250 && gimple_omp_for_combined_into_p (ctx->stmt))
4251 {
da6f124d
JJ
4252 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4253 tree stept = TREE_TYPE (t);
629b3d75 4254 tree ct = omp_find_clause (clauses,
da6f124d
JJ
4255 OMP_CLAUSE__LOOPTEMP_);
4256 gcc_assert (ct);
4257 tree l = OMP_CLAUSE_DECL (ct);
56ad0e38
JJ
4258 tree n1 = fd->loop.n1;
4259 tree step = fd->loop.step;
4260 tree itype = TREE_TYPE (l);
4261 if (POINTER_TYPE_P (itype))
4262 itype = signed_type_for (itype);
4263 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4264 if (TYPE_UNSIGNED (itype)
4265 && fd->loop.cond_code == GT_EXPR)
4266 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4267 fold_build1 (NEGATE_EXPR, itype, l),
4268 fold_build1 (NEGATE_EXPR,
4269 itype, step));
4270 else
4271 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
acf0174b
JJ
4272 t = fold_build2 (MULT_EXPR, stept,
4273 fold_convert (stept, l), t);
da6f124d
JJ
4274
4275 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4276 {
4277 x = lang_hooks.decls.omp_clause_linear_ctor
4278 (c, new_var, x, t);
4279 gimplify_and_add (x, ilist);
4280 goto do_dtor;
4281 }
4282
acf0174b
JJ
4283 if (POINTER_TYPE_P (TREE_TYPE (x)))
4284 x = fold_build2 (POINTER_PLUS_EXPR,
4285 TREE_TYPE (x), x, t);
4286 else
4287 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4288 }
4289
74bf76ed
JJ
4290 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4291 || TREE_ADDRESSABLE (new_var))
6943af07
AM
4292 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4293 ivar, lvar))
74bf76ed
JJ
4294 {
4295 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4296 {
b731b390 4297 tree iv = create_tmp_var (TREE_TYPE (new_var));
74bf76ed
JJ
4298 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4299 gimplify_and_add (x, ilist);
4300 gimple_stmt_iterator gsi
4301 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
538dd0b7 4302 gassign *g
74bf76ed
JJ
4303 = gimple_build_assign (unshare_expr (lvar), iv);
4304 gsi_insert_before_without_update (&gsi, g,
4305 GSI_SAME_STMT);
da6f124d 4306 tree t = OMP_CLAUSE_LINEAR_STEP (c);
74bf76ed
JJ
4307 enum tree_code code = PLUS_EXPR;
4308 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4309 code = POINTER_PLUS_EXPR;
0d0e4a03 4310 g = gimple_build_assign (iv, code, iv, t);
74bf76ed
JJ
4311 gsi_insert_before_without_update (&gsi, g,
4312 GSI_SAME_STMT);
4313 break;
4314 }
4315 x = lang_hooks.decls.omp_clause_copy_ctor
4316 (c, unshare_expr (ivar), x);
4317 gimplify_and_add (x, &llist[0]);
4318 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4319 if (x)
4320 {
4321 gimple_seq tseq = NULL;
4322
4323 dtor = x;
4324 gimplify_stmt (&dtor, &tseq);
4325 gimple_seq_add_seq (&llist[1], tseq);
4326 }
4327 break;
4328 }
4329 }
d9a6bd32
JJ
4330 x = lang_hooks.decls.omp_clause_copy_ctor
4331 (c, unshare_expr (new_var), x);
953ff289
DN
4332 gimplify_and_add (x, ilist);
4333 goto do_dtor;
953ff289 4334
acf0174b 4335 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32 4336 gcc_assert (is_taskreg_ctx (ctx));
acf0174b
JJ
4337 x = build_outer_var_ref (var, ctx);
4338 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4339 gimplify_and_add (x, ilist);
4340 break;
4341
953ff289 4342 case OMP_CLAUSE_COPYIN:
7c8f7639 4343 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
4344 x = build_receiver_ref (var, by_ref, ctx);
4345 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4346 append_to_statement_list (x, &copyin_seq);
4347 copyin_by_ref |= by_ref;
4348 break;
4349
4350 case OMP_CLAUSE_REDUCTION:
e5014671
NS
4351 /* OpenACC reductions are initialized using the
4352 GOACC_REDUCTION internal function. */
4353 if (is_gimple_omp_oacc (ctx->stmt))
4354 break;
953ff289
DN
4355 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4356 {
a68ab351 4357 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
355fe088 4358 gimple *tseq;
a68ab351
JJ
4359 x = build_outer_var_ref (var, ctx);
4360
629b3d75 4361 if (omp_is_reference (var)
acf0174b
JJ
4362 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4363 TREE_TYPE (x)))
db3927fb 4364 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
4365 SET_DECL_VALUE_EXPR (placeholder, x);
4366 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
acf0174b 4367 tree new_vard = new_var;
629b3d75 4368 if (omp_is_reference (var))
acf0174b
JJ
4369 {
4370 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4371 new_vard = TREE_OPERAND (new_var, 0);
4372 gcc_assert (DECL_P (new_vard));
4373 }
74bf76ed 4374 if (is_simd
6943af07
AM
4375 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4376 ivar, lvar))
74bf76ed 4377 {
acf0174b
JJ
4378 if (new_vard == new_var)
4379 {
4380 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4381 SET_DECL_VALUE_EXPR (new_var, ivar);
4382 }
4383 else
4384 {
4385 SET_DECL_VALUE_EXPR (new_vard,
4386 build_fold_addr_expr (ivar));
4387 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4388 }
4389 x = lang_hooks.decls.omp_clause_default_ctor
4390 (c, unshare_expr (ivar),
4391 build_outer_var_ref (var, ctx));
4392 if (x)
4393 gimplify_and_add (x, &llist[0]);
4394 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4395 {
4396 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4397 lower_omp (&tseq, ctx);
4398 gimple_seq_add_seq (&llist[0], tseq);
4399 }
4400 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4401 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4402 lower_omp (&tseq, ctx);
4403 gimple_seq_add_seq (&llist[1], tseq);
4404 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4405 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4406 if (new_vard == new_var)
4407 SET_DECL_VALUE_EXPR (new_var, lvar);
4408 else
4409 SET_DECL_VALUE_EXPR (new_vard,
4410 build_fold_addr_expr (lvar));
4411 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4412 if (x)
4413 {
4414 tseq = NULL;
4415 dtor = x;
4416 gimplify_stmt (&dtor, &tseq);
4417 gimple_seq_add_seq (&llist[1], tseq);
4418 }
4419 break;
4420 }
4ceffa27
JJ
4421 /* If this is a reference to constant size reduction var
4422 with placeholder, we haven't emitted the initializer
4423 for it because it is undesirable if SIMD arrays are used.
4424 But if they aren't used, we need to emit the deferred
4425 initialization now. */
629b3d75 4426 else if (omp_is_reference (var) && is_simd)
decaaec8 4427 handle_simd_reference (clause_loc, new_vard, ilist);
acf0174b 4428 x = lang_hooks.decls.omp_clause_default_ctor
92d28cbb
JJ
4429 (c, unshare_expr (new_var),
4430 build_outer_var_ref (var, ctx));
acf0174b
JJ
4431 if (x)
4432 gimplify_and_add (x, ilist);
4433 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4434 {
4435 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4436 lower_omp (&tseq, ctx);
4437 gimple_seq_add_seq (ilist, tseq);
4438 }
4439 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4440 if (is_simd)
4441 {
4442 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4443 lower_omp (&tseq, ctx);
4444 gimple_seq_add_seq (dlist, tseq);
4445 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4446 }
4447 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4448 goto do_dtor;
4449 }
4450 else
4451 {
4452 x = omp_reduction_init (c, TREE_TYPE (new_var));
4453 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
e9792e1d
JJ
4454 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4455
4456 /* reduction(-:var) sums up the partial results, so it
4457 acts identically to reduction(+:var). */
4458 if (code == MINUS_EXPR)
4459 code = PLUS_EXPR;
4460
decaaec8 4461 tree new_vard = new_var;
629b3d75 4462 if (is_simd && omp_is_reference (var))
decaaec8
JJ
4463 {
4464 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4465 new_vard = TREE_OPERAND (new_var, 0);
4466 gcc_assert (DECL_P (new_vard));
4467 }
acf0174b 4468 if (is_simd
6943af07
AM
4469 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4470 ivar, lvar))
acf0174b 4471 {
acf0174b
JJ
4472 tree ref = build_outer_var_ref (var, ctx);
4473
4474 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4475
6943af07 4476 if (sctx.is_simt)
9669b00b
AM
4477 {
4478 if (!simt_lane)
4479 simt_lane = create_tmp_var (unsigned_type_node);
4480 x = build_call_expr_internal_loc
4481 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4482 TREE_TYPE (ivar), 2, ivar, simt_lane);
4483 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4484 gimplify_assign (ivar, x, &llist[2]);
4485 }
acf0174b 4486 x = build2 (code, TREE_TYPE (ref), ref, ivar);
74bf76ed
JJ
4487 ref = build_outer_var_ref (var, ctx);
4488 gimplify_assign (ref, x, &llist[1]);
decaaec8
JJ
4489
4490 if (new_vard != new_var)
4491 {
4492 SET_DECL_VALUE_EXPR (new_vard,
4493 build_fold_addr_expr (lvar));
4494 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4495 }
74bf76ed
JJ
4496 }
4497 else
4498 {
629b3d75 4499 if (omp_is_reference (var) && is_simd)
decaaec8 4500 handle_simd_reference (clause_loc, new_vard, ilist);
74bf76ed
JJ
4501 gimplify_assign (new_var, x, ilist);
4502 if (is_simd)
e9792e1d
JJ
4503 {
4504 tree ref = build_outer_var_ref (var, ctx);
4505
4506 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4507 ref = build_outer_var_ref (var, ctx);
4508 gimplify_assign (ref, x, dlist);
4509 }
74bf76ed 4510 }
953ff289
DN
4511 }
4512 break;
4513
4514 default:
4515 gcc_unreachable ();
4516 }
4517 }
4518 }
4519
9d2f08ab 4520 if (known_eq (sctx.max_vf, 1U))
0c6b03b5
AM
4521 sctx.is_simt = false;
4522
4523 if (sctx.lane || sctx.is_simt)
74bf76ed 4524 {
0c6b03b5 4525 uid = create_tmp_var (ptr_type_node, "simduid");
8928eff3
JJ
4526 /* We don't want uninit warnings on simduid; it is always uninitialized,
4527 but we use it only for the DECL_UID, not for its value. */
4528 TREE_NO_WARNING (uid) = 1;
0c6b03b5
AM
4529 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4530 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4531 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4532 gimple_omp_for_set_clauses (ctx->stmt, c);
4533 }
4534 /* Emit calls denoting privatized variables and initializing a pointer to
4535 a structure that holds private variables as fields, after the ompdevlow pass. */
4536 if (sctx.is_simt)
4537 {
4538 sctx.simt_eargs[0] = uid;
4539 gimple *g
4540 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4541 gimple_call_set_lhs (g, uid);
4542 gimple_seq_add_stmt (ilist, g);
4543 sctx.simt_eargs.release ();
4544
4545 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4546 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4547 gimple_call_set_lhs (g, simtrec);
4548 gimple_seq_add_stmt (ilist, g);
4549 }
4550 if (sctx.lane)
4551 {
355fe088 4552 gimple *g
74bf76ed 4553 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
6943af07 4554 gimple_call_set_lhs (g, sctx.lane);
74bf76ed
JJ
4555 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4556 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6943af07 4557 g = gimple_build_assign (sctx.lane, INTEGER_CST,
0d0e4a03 4558 build_int_cst (unsigned_type_node, 0));
74bf76ed 4559 gimple_seq_add_stmt (ilist, g);
9669b00b
AM
4560 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4561 if (llist[2])
4562 {
4563 tree simt_vf = create_tmp_var (unsigned_type_node);
4564 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4565 gimple_call_set_lhs (g, simt_vf);
4566 gimple_seq_add_stmt (dlist, g);
4567
4568 tree t = build_int_cst (unsigned_type_node, 1);
4569 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4570 gimple_seq_add_stmt (dlist, g);
4571
4572 t = build_int_cst (unsigned_type_node, 0);
6943af07 4573 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
9669b00b
AM
4574 gimple_seq_add_stmt (dlist, g);
4575
4576 tree body = create_artificial_label (UNKNOWN_LOCATION);
4577 tree header = create_artificial_label (UNKNOWN_LOCATION);
4578 tree end = create_artificial_label (UNKNOWN_LOCATION);
4579 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4580 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4581
4582 gimple_seq_add_seq (dlist, llist[2]);
4583
4584 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4585 gimple_seq_add_stmt (dlist, g);
4586
4587 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4588 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4589 gimple_seq_add_stmt (dlist, g);
4590
4591 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4592 }
74bf76ed
JJ
4593 for (int i = 0; i < 2; i++)
4594 if (llist[i])
4595 {
b731b390 4596 tree vf = create_tmp_var (unsigned_type_node);
74bf76ed
JJ
4597 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4598 gimple_call_set_lhs (g, vf);
4599 gimple_seq *seq = i == 0 ? ilist : dlist;
4600 gimple_seq_add_stmt (seq, g);
4601 tree t = build_int_cst (unsigned_type_node, 0);
6943af07 4602 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
74bf76ed
JJ
4603 gimple_seq_add_stmt (seq, g);
4604 tree body = create_artificial_label (UNKNOWN_LOCATION);
4605 tree header = create_artificial_label (UNKNOWN_LOCATION);
4606 tree end = create_artificial_label (UNKNOWN_LOCATION);
4607 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4608 gimple_seq_add_stmt (seq, gimple_build_label (body));
4609 gimple_seq_add_seq (seq, llist[i]);
4610 t = build_int_cst (unsigned_type_node, 1);
6943af07 4611 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
74bf76ed
JJ
4612 gimple_seq_add_stmt (seq, g);
4613 gimple_seq_add_stmt (seq, gimple_build_label (header));
6943af07 4614 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
74bf76ed
JJ
4615 gimple_seq_add_stmt (seq, g);
4616 gimple_seq_add_stmt (seq, gimple_build_label (end));
4617 }
4618 }
0c6b03b5
AM
4619 if (sctx.is_simt)
4620 {
4621 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4622 gimple *g
4623 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4624 gimple_seq_add_stmt (dlist, g);
4625 }
74bf76ed 4626
953ff289
DN
4627 /* The copyin sequence is not to be executed by the main thread, since
4628 that would result in self-copies. Perhaps not visible to scalars,
4629 but it certainly is to C++ operator=. */
4630 if (copyin_seq)
4631 {
e79983f4
MM
4632 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4633 0);
953ff289
DN
4634 x = build2 (NE_EXPR, boolean_type_node, x,
4635 build_int_cst (TREE_TYPE (x), 0));
4636 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4637 gimplify_and_add (x, ilist);
4638 }
4639
4640 /* If any copyin variable is passed by reference, we must ensure the
4641 master thread doesn't modify it before it is copied over in all
8ca5b2a2
JJ
4642 threads. Similarly for variables in both firstprivate and
4643 lastprivate clauses we need to ensure the lastprivate copying
acf0174b
JJ
4644 happens after firstprivate copying in all threads. And similarly
4645 for UDRs if the initializer expression refers to omp_orig. */
4646 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
74bf76ed
JJ
4647 {
4648 /* Don't add any barrier for #pragma omp simd or
4649 #pragma omp distribute. */
4650 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
e2110f8f 4651 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
629b3d75 4652 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
74bf76ed
JJ
4653 }
4654
4655 /* If max_vf is non-zero, then we can use only a vectorization factor
4656 up to the max_vf we chose. So stick it into the safelen clause. */
9d2f08ab 4657 if (maybe_ne (sctx.max_vf, 0U))
74bf76ed 4658 {
629b3d75 4659 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
74bf76ed 4660 OMP_CLAUSE_SAFELEN);
9d2f08ab 4661 poly_uint64 safe_len;
74bf76ed 4662 if (c == NULL_TREE
9d2f08ab
RS
4663 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4664 && maybe_gt (safe_len, sctx.max_vf)))
74bf76ed
JJ
4665 {
4666 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4667 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6943af07 4668 sctx.max_vf);
74bf76ed
JJ
4669 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4670 gimple_omp_for_set_clauses (ctx->stmt, c);
4671 }
4672 }
953ff289
DN
4673}
4674
50674e96 4675
953ff289
DN
4676/* Generate code to implement the LASTPRIVATE clauses. This is used for
4677 both parallel and workshare constructs. PREDICATE may be NULL if it's
4678 always true. */
4679
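/* Editorial illustration, not part of the GCC sources: for

     #pragma omp for lastprivate(x)
     for (i = 0; i < n; i += 1) x = f (i);

   the sequence built below is guarded by PREDICATE (conceptually "this thread
   ran the sequentially last iteration"), so only that thread copies its
   private copy back:

     if (<last iteration>) x_orig = x_priv;    // illustrative names

   For simd loops the value is instead fetched from the "omp simd array"
   element selected by GOMP_SIMD_LAST_LANE.  */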
4680static void
726a989a 4681lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
acf0174b 4682 omp_context *ctx)
953ff289 4683{
74bf76ed 4684 tree x, c, label = NULL, orig_clauses = clauses;
a68ab351 4685 bool par_clauses = false;
9669b00b 4686 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
953ff289 4687
74bf76ed
JJ
4688 /* Early exit if there are no lastprivate or linear clauses. */
4689 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4690 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4691 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4692 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4693 break;
953ff289
DN
4694 if (clauses == NULL)
4695 {
4696 /* If this was a workshare clause, see if it had been combined
4697 with its parallel. In that case, look for the clauses on the
4698 parallel statement itself. */
4699 if (is_parallel_ctx (ctx))
4700 return;
4701
4702 ctx = ctx->outer;
4703 if (ctx == NULL || !is_parallel_ctx (ctx))
4704 return;
4705
629b3d75 4706 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
953ff289
DN
4707 OMP_CLAUSE_LASTPRIVATE);
4708 if (clauses == NULL)
4709 return;
a68ab351 4710 par_clauses = true;
953ff289
DN
4711 }
4712
9669b00b
AM
4713 bool maybe_simt = false;
4714 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4715 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4716 {
629b3d75
MJ
4717 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4718 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
9669b00b
AM
4719 if (simduid)
4720 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4721 }
4722
726a989a
RB
4723 if (predicate)
4724 {
538dd0b7 4725 gcond *stmt;
726a989a 4726 tree label_true, arm1, arm2;
56b1c60e 4727 enum tree_code pred_code = TREE_CODE (predicate);
726a989a 4728
c2255bc4
AH
4729 label = create_artificial_label (UNKNOWN_LOCATION);
4730 label_true = create_artificial_label (UNKNOWN_LOCATION);
56b1c60e
MJ
4731 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4732 {
4733 arm1 = TREE_OPERAND (predicate, 0);
4734 arm2 = TREE_OPERAND (predicate, 1);
4735 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4736 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4737 }
4738 else
4739 {
4740 arm1 = predicate;
4741 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4742 arm2 = boolean_false_node;
4743 pred_code = NE_EXPR;
4744 }
9669b00b
AM
4745 if (maybe_simt)
4746 {
56b1c60e 4747 c = build2 (pred_code, boolean_type_node, arm1, arm2);
9669b00b
AM
4748 c = fold_convert (integer_type_node, c);
4749 simtcond = create_tmp_var (integer_type_node);
4750 gimplify_assign (simtcond, c, stmt_list);
4751 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4752 1, simtcond);
4753 c = create_tmp_var (integer_type_node);
4754 gimple_call_set_lhs (g, c);
4755 gimple_seq_add_stmt (stmt_list, g);
4756 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4757 label_true, label);
4758 }
4759 else
56b1c60e 4760 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
726a989a
RB
4761 gimple_seq_add_stmt (stmt_list, stmt);
4762 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4763 }
953ff289 4764
a68ab351 4765 for (c = clauses; c ;)
953ff289
DN
4766 {
4767 tree var, new_var;
db3927fb 4768 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 4769
74bf76ed
JJ
4770 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4771 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4772 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
a68ab351
JJ
4773 {
4774 var = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
4775 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4776 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4777 && is_taskloop_ctx (ctx))
4778 {
4779 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4780 new_var = lookup_decl (var, ctx->outer);
4781 }
4782 else
2187f2a2
JJ
4783 {
4784 new_var = lookup_decl (var, ctx);
4785 /* Avoid uninitialized warnings for lastprivate and
4786 for linear iterators. */
4787 if (predicate
4788 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4789 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4790 TREE_NO_WARNING (new_var) = 1;
4791 }
953ff289 4792
2260d19d 4793 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
74bf76ed
JJ
4794 {
4795 tree val = DECL_VALUE_EXPR (new_var);
2260d19d 4796 if (TREE_CODE (val) == ARRAY_REF
74bf76ed
JJ
4797 && VAR_P (TREE_OPERAND (val, 0))
4798 && lookup_attribute ("omp simd array",
4799 DECL_ATTRIBUTES (TREE_OPERAND (val,
4800 0))))
4801 {
4802 if (lastlane == NULL)
4803 {
b731b390 4804 lastlane = create_tmp_var (unsigned_type_node);
538dd0b7 4805 gcall *g
74bf76ed
JJ
4806 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4807 2, simduid,
4808 TREE_OPERAND (val, 1));
4809 gimple_call_set_lhs (g, lastlane);
4810 gimple_seq_add_stmt (stmt_list, g);
4811 }
4812 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4813 TREE_OPERAND (val, 0), lastlane,
4814 NULL_TREE, NULL_TREE);
0c6b03b5 4815 }
2260d19d
AM
4816 }
4817 else if (maybe_simt)
4818 {
4819 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4820 ? DECL_VALUE_EXPR (new_var)
4821 : new_var);
4822 if (simtlast == NULL)
0c6b03b5 4823 {
2260d19d
AM
4824 simtlast = create_tmp_var (unsigned_type_node);
4825 gcall *g = gimple_build_call_internal
4826 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4827 gimple_call_set_lhs (g, simtlast);
4828 gimple_seq_add_stmt (stmt_list, g);
74bf76ed 4829 }
2260d19d
AM
4830 x = build_call_expr_internal_loc
4831 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4832 TREE_TYPE (val), 2, val, simtlast);
4833 new_var = unshare_expr (new_var);
4834 gimplify_assign (new_var, x, stmt_list);
4835 new_var = unshare_expr (new_var);
74bf76ed
JJ
4836 }
4837
4838 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4839 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
726a989a 4840 {
355a7673 4841 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
726a989a
RB
4842 gimple_seq_add_seq (stmt_list,
4843 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
74bf76ed 4844 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
726a989a 4845 }
f7468577
JJ
4846 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4847 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4848 {
4849 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4850 gimple_seq_add_seq (stmt_list,
4851 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4852 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4853 }
953ff289 4854
d9a6bd32
JJ
4855 x = NULL_TREE;
4856 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4857 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4858 {
4859 gcc_checking_assert (is_taskloop_ctx (ctx));
4860 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4861 ctx->outer->outer);
4862 if (is_global_var (ovar))
4863 x = ovar;
4864 }
4865 if (!x)
c39dad64 4866 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
629b3d75 4867 if (omp_is_reference (var))
70f34814 4868 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
a68ab351 4869 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
726a989a 4870 gimplify_and_add (x, stmt_list);
a68ab351
JJ
4871 }
4872 c = OMP_CLAUSE_CHAIN (c);
4873 if (c == NULL && !par_clauses)
4874 {
4875 /* If this was a workshare clause, see if it had been combined
4876 with its parallel. In that case, continue looking for the
4877 clauses also on the parallel statement itself. */
4878 if (is_parallel_ctx (ctx))
4879 break;
4880
4881 ctx = ctx->outer;
4882 if (ctx == NULL || !is_parallel_ctx (ctx))
4883 break;
4884
629b3d75 4885 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
a68ab351
JJ
4886 OMP_CLAUSE_LASTPRIVATE);
4887 par_clauses = true;
4888 }
953ff289
DN
4889 }
4890
726a989a
RB
4891 if (label)
4892 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
953ff289
DN
4893}
4894
e5014671
NS
4895/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4896 (which might be a placeholder). INNER is true if this is an inner
4897 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4898 join markers. Generate the before-loop forking sequence in
4899 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4900 general form of these sequences is
4901
4902 GOACC_REDUCTION_SETUP
4903 GOACC_FORK
4904 GOACC_REDUCTION_INIT
4905 ...
4906 GOACC_REDUCTION_FINI
4907 GOACC_JOIN
4908 GOACC_REDUCTION_TEARDOWN. */
4909
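/* Editorial illustration, not part of the GCC sources: for something like

     #pragma acc parallel loop reduction(+:sum)

   the shape produced below is, schematically,

     v   = GOACC_REDUCTION (SETUP, ref_to_res, sum, ...);
           GOACC_FORK (...);
     v   = GOACC_REDUCTION (INIT, ...);   // identity value, 0 for "+"
     ... loop body accumulating into v ...
     v   = GOACC_REDUCTION (FINI, ...);
           GOACC_JOIN (...);
     sum = GOACC_REDUCTION (TEARDOWN, ...);

   The exact operand lists are filled in by the code that follows; the sketch
   only names the internal functions already listed above.  */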
41dbbb37 4910static void
e5014671
NS
4911lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4912 gcall *fork, gcall *join, gimple_seq *fork_seq,
4913 gimple_seq *join_seq, omp_context *ctx)
41dbbb37 4914{
e5014671
NS
4915 gimple_seq before_fork = NULL;
4916 gimple_seq after_fork = NULL;
4917 gimple_seq before_join = NULL;
4918 gimple_seq after_join = NULL;
4919 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4920 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4921 unsigned offset = 0;
4922
4923 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4924 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4925 {
4926 tree orig = OMP_CLAUSE_DECL (c);
4927 tree var = maybe_lookup_decl (orig, ctx);
4928 tree ref_to_res = NULL_TREE;
c42cfb5c
CP
4929 tree incoming, outgoing, v1, v2, v3;
4930 bool is_private = false;
e5014671
NS
4931
4932 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4933 if (rcode == MINUS_EXPR)
4934 rcode = PLUS_EXPR;
4935 else if (rcode == TRUTH_ANDIF_EXPR)
4936 rcode = BIT_AND_EXPR;
4937 else if (rcode == TRUTH_ORIF_EXPR)
4938 rcode = BIT_IOR_EXPR;
4939 tree op = build_int_cst (unsigned_type_node, rcode);
4940
4941 if (!var)
4942 var = orig;
e5014671
NS
4943
4944 incoming = outgoing = var;
01914336 4945
e5014671
NS
4946 if (!inner)
4947 {
4948 /* See if an outer construct also reduces this variable. */
4949 omp_context *outer = ctx;
41dbbb37 4950
e5014671
NS
4951 while (omp_context *probe = outer->outer)
4952 {
4953 enum gimple_code type = gimple_code (probe->stmt);
4954 tree cls;
41dbbb37 4955
e5014671
NS
4956 switch (type)
4957 {
4958 case GIMPLE_OMP_FOR:
4959 cls = gimple_omp_for_clauses (probe->stmt);
4960 break;
41dbbb37 4961
e5014671
NS
4962 case GIMPLE_OMP_TARGET:
4963 if (gimple_omp_target_kind (probe->stmt)
4964 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4965 goto do_lookup;
41dbbb37 4966
e5014671
NS
4967 cls = gimple_omp_target_clauses (probe->stmt);
4968 break;
41dbbb37 4969
e5014671
NS
4970 default:
4971 goto do_lookup;
4972 }
01914336 4973
e5014671
NS
4974 outer = probe;
4975 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4976 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4977 && orig == OMP_CLAUSE_DECL (cls))
c42cfb5c
CP
4978 {
4979 incoming = outgoing = lookup_decl (orig, probe);
4980 goto has_outer_reduction;
4981 }
4982 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4983 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4984 && orig == OMP_CLAUSE_DECL (cls))
4985 {
4986 is_private = true;
4987 goto do_lookup;
4988 }
e5014671 4989 }
41dbbb37 4990
e5014671
NS
4991 do_lookup:
4992 /* This is the outermost construct with this reduction,
4993 see if there's a mapping for it. */
4994 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
c42cfb5c 4995 && maybe_lookup_field (orig, outer) && !is_private)
e5014671
NS
4996 {
4997 ref_to_res = build_receiver_ref (orig, false, outer);
629b3d75 4998 if (omp_is_reference (orig))
e5014671 4999 ref_to_res = build_simple_mem_ref (ref_to_res);
41dbbb37 5000
c42cfb5c
CP
5001 tree type = TREE_TYPE (var);
5002 if (POINTER_TYPE_P (type))
5003 type = TREE_TYPE (type);
5004
e5014671 5005 outgoing = var;
c42cfb5c 5006 incoming = omp_reduction_init_op (loc, rcode, type);
e5014671
NS
5007 }
5008 else
11c4c4ba
CLT
5009 {
5010 /* Try to look at enclosing contexts for reduction var,
5011 use original if no mapping found. */
5012 tree t = NULL_TREE;
5013 omp_context *c = ctx->outer;
5014 while (c && !t)
5015 {
5016 t = maybe_lookup_decl (orig, c);
5017 c = c->outer;
5018 }
5019 incoming = outgoing = (t ? t : orig);
5020 }
01914336 5021
e5014671
NS
5022 has_outer_reduction:;
5023 }
41dbbb37 5024
e5014671
NS
5025 if (!ref_to_res)
5026 ref_to_res = integer_zero_node;
41dbbb37 5027
01914336 5028 if (omp_is_reference (orig))
c42cfb5c
CP
5029 {
5030 tree type = TREE_TYPE (var);
5031 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5032
5033 if (!inner)
5034 {
5035 tree x = create_tmp_var (TREE_TYPE (type), id);
5036 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5037 }
5038
5039 v1 = create_tmp_var (type, id);
5040 v2 = create_tmp_var (type, id);
5041 v3 = create_tmp_var (type, id);
5042
5043 gimplify_assign (v1, var, fork_seq);
5044 gimplify_assign (v2, var, fork_seq);
5045 gimplify_assign (v3, var, fork_seq);
5046
5047 var = build_simple_mem_ref (var);
5048 v1 = build_simple_mem_ref (v1);
5049 v2 = build_simple_mem_ref (v2);
5050 v3 = build_simple_mem_ref (v3);
5051 outgoing = build_simple_mem_ref (outgoing);
5052
e387fc64 5053 if (!TREE_CONSTANT (incoming))
c42cfb5c
CP
5054 incoming = build_simple_mem_ref (incoming);
5055 }
5056 else
5057 v1 = v2 = v3 = var;
5058
e5014671 5059 /* Determine position in reduction buffer, which may be used
ef1d3b57
RS
5060 by target. The parser has ensured that this is not a
5061 variable-sized type. */
5062 fixed_size_mode mode
5063 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
e5014671
NS
5064 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5065 offset = (offset + align - 1) & ~(align - 1);
5066 tree off = build_int_cst (sizetype, offset);
5067 offset += GET_MODE_SIZE (mode);
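	  /* For instance (illustrative numbers only): with OFFSET == 6 and a
	     4-byte-aligned mode, the round-up above yields 8, so this
	     reduction slot starts at byte 8 and OFFSET then advances by the
	     mode size, ready for the next reduction clause.  */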
41dbbb37 5068
e5014671
NS
5069 if (!init_code)
5070 {
5071 init_code = build_int_cst (integer_type_node,
5072 IFN_GOACC_REDUCTION_INIT);
5073 fini_code = build_int_cst (integer_type_node,
5074 IFN_GOACC_REDUCTION_FINI);
5075 setup_code = build_int_cst (integer_type_node,
5076 IFN_GOACC_REDUCTION_SETUP);
5077 teardown_code = build_int_cst (integer_type_node,
5078 IFN_GOACC_REDUCTION_TEARDOWN);
5079 }
5080
5081 tree setup_call
5082 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5083 TREE_TYPE (var), 6, setup_code,
5084 unshare_expr (ref_to_res),
5085 incoming, level, op, off);
5086 tree init_call
5087 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5088 TREE_TYPE (var), 6, init_code,
5089 unshare_expr (ref_to_res),
c42cfb5c 5090 v1, level, op, off);
e5014671
NS
5091 tree fini_call
5092 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5093 TREE_TYPE (var), 6, fini_code,
5094 unshare_expr (ref_to_res),
c42cfb5c 5095 v2, level, op, off);
e5014671
NS
5096 tree teardown_call
5097 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5098 TREE_TYPE (var), 6, teardown_code,
c42cfb5c 5099 ref_to_res, v3, level, op, off);
e5014671 5100
c42cfb5c
CP
5101 gimplify_assign (v1, setup_call, &before_fork);
5102 gimplify_assign (v2, init_call, &after_fork);
5103 gimplify_assign (v3, fini_call, &before_join);
e5014671
NS
5104 gimplify_assign (outgoing, teardown_call, &after_join);
5105 }
5106
5107 /* Now stitch things together. */
5108 gimple_seq_add_seq (fork_seq, before_fork);
5109 if (fork)
5110 gimple_seq_add_stmt (fork_seq, fork);
5111 gimple_seq_add_seq (fork_seq, after_fork);
5112
5113 gimple_seq_add_seq (join_seq, before_join);
5114 if (join)
5115 gimple_seq_add_stmt (join_seq, join);
5116 gimple_seq_add_seq (join_seq, after_join);
41dbbb37 5117}
50674e96 5118
953ff289
DN
5119/* Generate code to implement the REDUCTION clauses. */
5120
5121static void
726a989a 5122lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
953ff289 5123{
726a989a 5124 gimple_seq sub_seq = NULL;
355fe088 5125 gimple *stmt;
374d0225 5126 tree x, c;
953ff289
DN
5127 int count = 0;
5128
e5014671
NS
5129 /* OpenACC loop reductions are handled elsewhere. */
5130 if (is_gimple_omp_oacc (ctx->stmt))
5131 return;
5132
74bf76ed
JJ
5133 /* SIMD reductions are handled in lower_rec_input_clauses. */
5134 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 5135 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
74bf76ed
JJ
5136 return;
5137
953ff289
DN
5138 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5139 update in that case, otherwise use a lock. */
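  /* E.g. a lone "reduction(+:sum)" on a scalar can be handled with a single
     OMP_ATOMIC update of the shared SUM, whereas several reduction clauses,
     or array-section/UDR ones (COUNT set to -1 below), are merged under the
     GOMP_atomic_start ()/GOMP_atomic_end () pair built at the end of this
     function.  */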
5140 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
aaf46ef9 5141 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
953ff289 5142 {
d9a6bd32
JJ
5143 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5144 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
953ff289 5145 {
acf0174b 5146 /* Never use OMP_ATOMIC for array reductions or UDRs. */
953ff289
DN
5147 count = -1;
5148 break;
5149 }
5150 count++;
5151 }
5152
5153 if (count == 0)
5154 return;
5155
5156 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5157 {
d9a6bd32 5158 tree var, ref, new_var, orig_var;
953ff289 5159 enum tree_code code;
db3927fb 5160 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5161
aaf46ef9 5162 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
953ff289
DN
5163 continue;
5164
c24783c4 5165 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
d9a6bd32
JJ
5166 orig_var = var = OMP_CLAUSE_DECL (c);
5167 if (TREE_CODE (var) == MEM_REF)
5168 {
5169 var = TREE_OPERAND (var, 0);
e01d41e5
JJ
5170 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5171 var = TREE_OPERAND (var, 0);
c24783c4 5172 if (TREE_CODE (var) == ADDR_EXPR)
d9a6bd32 5173 var = TREE_OPERAND (var, 0);
c24783c4
JJ
5174 else
5175 {
 5176 /* If this is a pointer- or reference-based array
 5177 section, the var could be private in the outer
 5178 context, e.g. on an orphaned loop construct.  Pretend
 5179 this is the private variable's outer reference. */
5180 ccode = OMP_CLAUSE_PRIVATE;
5181 if (TREE_CODE (var) == INDIRECT_REF)
5182 var = TREE_OPERAND (var, 0);
5183 }
d9a6bd32
JJ
5184 orig_var = var;
5185 if (is_variable_sized (var))
5186 {
5187 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5188 var = DECL_VALUE_EXPR (var);
5189 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5190 var = TREE_OPERAND (var, 0);
5191 gcc_assert (DECL_P (var));
5192 }
5193 }
953ff289 5194 new_var = lookup_decl (var, ctx);
629b3d75 5195 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
70f34814 5196 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
c24783c4 5197 ref = build_outer_var_ref (var, ctx, ccode);
953ff289 5198 code = OMP_CLAUSE_REDUCTION_CODE (c);
50674e96
DN
5199
5200 /* reduction(-:var) sums up the partial results, so it acts
5201 identically to reduction(+:var). */
953ff289
DN
5202 if (code == MINUS_EXPR)
5203 code = PLUS_EXPR;
5204
e5014671 5205 if (count == 1)
953ff289 5206 {
db3927fb 5207 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
5208
5209 addr = save_expr (addr);
5210 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
db3927fb 5211 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
953ff289 5212 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
726a989a 5213 gimplify_and_add (x, stmt_seqp);
953ff289
DN
5214 return;
5215 }
d9a6bd32
JJ
5216 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5217 {
5218 tree d = OMP_CLAUSE_DECL (c);
5219 tree type = TREE_TYPE (d);
5220 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5221 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5222 tree ptype = build_pointer_type (TREE_TYPE (type));
e01d41e5
JJ
5223 tree bias = TREE_OPERAND (d, 1);
5224 d = TREE_OPERAND (d, 0);
5225 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5226 {
5227 tree b = TREE_OPERAND (d, 1);
5228 b = maybe_lookup_decl (b, ctx);
5229 if (b == NULL)
5230 {
5231 b = TREE_OPERAND (d, 1);
5232 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5233 }
5234 if (integer_zerop (bias))
5235 bias = b;
5236 else
5237 {
5238 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5239 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5240 TREE_TYPE (b), b, bias);
5241 }
5242 d = TREE_OPERAND (d, 0);
5243 }
d9a6bd32
JJ
 5244 /* For ref, build_outer_var_ref already performs this, so
5245 only new_var needs a dereference. */
e01d41e5 5246 if (TREE_CODE (d) == INDIRECT_REF)
d9a6bd32
JJ
5247 {
5248 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
629b3d75 5249 gcc_assert (omp_is_reference (var) && var == orig_var);
d9a6bd32 5250 }
e01d41e5 5251 else if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32
JJ
5252 {
5253 if (orig_var == var)
5254 {
5255 new_var = build_fold_addr_expr (new_var);
5256 ref = build_fold_addr_expr (ref);
5257 }
5258 }
5259 else
5260 {
5261 gcc_assert (orig_var == var);
629b3d75 5262 if (omp_is_reference (var))
d9a6bd32
JJ
5263 ref = build_fold_addr_expr (ref);
5264 }
5265 if (DECL_P (v))
5266 {
5267 tree t = maybe_lookup_decl (v, ctx);
5268 if (t)
5269 v = t;
5270 else
5271 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5272 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5273 }
e01d41e5
JJ
5274 if (!integer_zerop (bias))
5275 {
5276 bias = fold_convert_loc (clause_loc, sizetype, bias);
5277 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5278 TREE_TYPE (new_var), new_var,
5279 unshare_expr (bias));
5280 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5281 TREE_TYPE (ref), ref, bias);
5282 }
d9a6bd32
JJ
5283 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5284 ref = fold_convert_loc (clause_loc, ptype, ref);
5285 tree m = create_tmp_var (ptype, NULL);
5286 gimplify_assign (m, new_var, stmt_seqp);
5287 new_var = m;
5288 m = create_tmp_var (ptype, NULL);
5289 gimplify_assign (m, ref, stmt_seqp);
5290 ref = m;
5291 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5292 tree body = create_artificial_label (UNKNOWN_LOCATION);
5293 tree end = create_artificial_label (UNKNOWN_LOCATION);
5294 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5295 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5296 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5297 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5298 {
5299 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5300 tree decl_placeholder
5301 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5302 SET_DECL_VALUE_EXPR (placeholder, out);
5303 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5304 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5305 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5306 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5307 gimple_seq_add_seq (&sub_seq,
5308 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5309 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5310 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5311 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5312 }
5313 else
5314 {
5315 x = build2 (code, TREE_TYPE (out), out, priv);
5316 out = unshare_expr (out);
5317 gimplify_assign (out, x, &sub_seq);
5318 }
5319 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5320 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5321 gimple_seq_add_stmt (&sub_seq, g);
5322 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5323 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5324 gimple_seq_add_stmt (&sub_seq, g);
5325 g = gimple_build_assign (i, PLUS_EXPR, i,
5326 build_int_cst (TREE_TYPE (i), 1));
5327 gimple_seq_add_stmt (&sub_seq, g);
5328 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5329 gimple_seq_add_stmt (&sub_seq, g);
5330 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5331 }
41dbbb37 5332 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
953ff289
DN
5333 {
5334 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5335
629b3d75 5336 if (omp_is_reference (var)
acf0174b
JJ
5337 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5338 TREE_TYPE (ref)))
db3927fb 5339 ref = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
5340 SET_DECL_VALUE_EXPR (placeholder, ref);
5341 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
355a7673 5342 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
726a989a
RB
5343 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5344 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
953ff289
DN
5345 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5346 }
5347 else
5348 {
5349 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5350 ref = build_outer_var_ref (var, ctx);
726a989a 5351 gimplify_assign (ref, x, &sub_seq);
953ff289
DN
5352 }
5353 }
5354
e79983f4
MM
5355 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5356 0);
726a989a 5357 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289 5358
726a989a 5359 gimple_seq_add_seq (stmt_seqp, sub_seq);
953ff289 5360
e79983f4
MM
5361 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5362 0);
726a989a 5363 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289
DN
5364}
5365
50674e96 5366
953ff289
DN
5367/* Generate code to implement the COPYPRIVATE clauses. */
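/* E.g. for "copyprivate (x)" the executing thread stores X (or its address,
   in the by-reference case) into the copyout record on the SLIST side, and
   the other threads copy it back out of the received pointer on the RLIST
   side; the caller (lower_omp_single_copy) wraps these sequences around the
   GOMP_single_copy_start/end calls.  */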
5368
5369static void
726a989a 5370lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
953ff289
DN
5371 omp_context *ctx)
5372{
5373 tree c;
5374
5375 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5376 {
78db7d92 5377 tree var, new_var, ref, x;
953ff289 5378 bool by_ref;
db3927fb 5379 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5380
aaf46ef9 5381 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
953ff289
DN
5382 continue;
5383
5384 var = OMP_CLAUSE_DECL (c);
7c8f7639 5385 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
5386
5387 ref = build_sender_ref (var, ctx);
78db7d92
JJ
5388 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5389 if (by_ref)
5390 {
5391 x = build_fold_addr_expr_loc (clause_loc, new_var);
5392 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5393 }
726a989a 5394 gimplify_assign (ref, x, slist);
953ff289 5395
78db7d92
JJ
5396 ref = build_receiver_ref (var, false, ctx);
5397 if (by_ref)
5398 {
5399 ref = fold_convert_loc (clause_loc,
5400 build_pointer_type (TREE_TYPE (new_var)),
5401 ref);
5402 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5403 }
629b3d75 5404 if (omp_is_reference (var))
953ff289 5405 {
78db7d92 5406 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
70f34814
RG
5407 ref = build_simple_mem_ref_loc (clause_loc, ref);
5408 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289 5409 }
78db7d92 5410 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
953ff289
DN
5411 gimplify_and_add (x, rlist);
5412 }
5413}
5414
50674e96 5415
953ff289
DN
5416/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5417 and REDUCTION from the sender (aka parent) side. */
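/* E.g. a firstprivate scalar V is sent by assigning V (or &V, in the
   by-reference case) into its field of the sender record on the ILIST side,
   while a plain lastprivate V is copied back out of that field on the OLIST
   side; this is a sketch of the do_in/do_out assignments built below.  */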
5418
5419static void
726a989a
RB
5420lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5421 omp_context *ctx)
953ff289 5422{
d9a6bd32
JJ
5423 tree c, t;
5424 int ignored_looptemp = 0;
5425 bool is_taskloop = false;
5426
5427 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5428 by GOMP_taskloop. */
5429 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5430 {
5431 ignored_looptemp = 2;
5432 is_taskloop = true;
5433 }
953ff289
DN
5434
5435 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5436 {
50674e96 5437 tree val, ref, x, var;
953ff289 5438 bool by_ref, do_in = false, do_out = false;
db3927fb 5439 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5440
aaf46ef9 5441 switch (OMP_CLAUSE_CODE (c))
953ff289 5442 {
a68ab351
JJ
5443 case OMP_CLAUSE_PRIVATE:
5444 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5445 break;
5446 continue;
953ff289
DN
5447 case OMP_CLAUSE_FIRSTPRIVATE:
5448 case OMP_CLAUSE_COPYIN:
5449 case OMP_CLAUSE_LASTPRIVATE:
5450 case OMP_CLAUSE_REDUCTION:
d9a6bd32
JJ
5451 break;
5452 case OMP_CLAUSE_SHARED:
5453 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5454 break;
5455 continue;
acf0174b 5456 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32
JJ
5457 if (ignored_looptemp)
5458 {
5459 ignored_looptemp--;
5460 continue;
5461 }
953ff289
DN
5462 break;
5463 default:
5464 continue;
5465 }
5466
d2dda7fe 5467 val = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
5468 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5469 && TREE_CODE (val) == MEM_REF)
5470 {
5471 val = TREE_OPERAND (val, 0);
e01d41e5
JJ
5472 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5473 val = TREE_OPERAND (val, 0);
d9a6bd32
JJ
5474 if (TREE_CODE (val) == INDIRECT_REF
5475 || TREE_CODE (val) == ADDR_EXPR)
5476 val = TREE_OPERAND (val, 0);
5477 if (is_variable_sized (val))
5478 continue;
5479 }
5480
5481 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5482 outer taskloop region. */
5483 omp_context *ctx_for_o = ctx;
5484 if (is_taskloop
5485 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5486 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5487 ctx_for_o = ctx->outer;
5488
5489 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
50674e96 5490
8ca5b2a2
JJ
5491 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5492 && is_global_var (var))
5493 continue;
d9a6bd32
JJ
5494
5495 t = omp_member_access_dummy_var (var);
5496 if (t)
5497 {
5498 var = DECL_VALUE_EXPR (var);
5499 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5500 if (o != t)
5501 var = unshare_and_remap (var, t, o);
5502 else
5503 var = unshare_expr (var);
5504 }
5505
5506 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5507 {
5508 /* Handle taskloop firstprivate/lastprivate, where the
5509 lastprivate on GIMPLE_OMP_TASK is represented as
5510 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5511 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5512 x = omp_build_component_ref (ctx->sender_decl, f);
5513 if (use_pointer_for_field (val, ctx))
5514 var = build_fold_addr_expr (var);
5515 gimplify_assign (x, var, ilist);
5516 DECL_ABSTRACT_ORIGIN (f) = NULL;
5517 continue;
5518 }
5519
5520 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5521 || val == OMP_CLAUSE_DECL (c))
5522 && is_variable_sized (val))
953ff289 5523 continue;
7c8f7639 5524 by_ref = use_pointer_for_field (val, NULL);
953ff289 5525
aaf46ef9 5526 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
5527 {
5528 case OMP_CLAUSE_FIRSTPRIVATE:
ec35ea45
JJ
5529 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5530 && !by_ref
5531 && is_task_ctx (ctx))
5532 TREE_NO_WARNING (var) = 1;
5533 do_in = true;
5534 break;
5535
5536 case OMP_CLAUSE_PRIVATE:
953ff289 5537 case OMP_CLAUSE_COPYIN:
acf0174b 5538 case OMP_CLAUSE__LOOPTEMP_:
953ff289
DN
5539 do_in = true;
5540 break;
5541
5542 case OMP_CLAUSE_LASTPRIVATE:
629b3d75 5543 if (by_ref || omp_is_reference (val))
953ff289
DN
5544 {
5545 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5546 continue;
5547 do_in = true;
5548 }
5549 else
a68ab351
JJ
5550 {
5551 do_out = true;
5552 if (lang_hooks.decls.omp_private_outer_ref (val))
5553 do_in = true;
5554 }
953ff289
DN
5555 break;
5556
5557 case OMP_CLAUSE_REDUCTION:
5558 do_in = true;
d9a6bd32 5559 if (val == OMP_CLAUSE_DECL (c))
629b3d75 5560 do_out = !(by_ref || omp_is_reference (val));
d9a6bd32
JJ
5561 else
5562 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
953ff289
DN
5563 break;
5564
5565 default:
5566 gcc_unreachable ();
5567 }
5568
5569 if (do_in)
5570 {
5571 ref = build_sender_ref (val, ctx);
db3927fb 5572 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
726a989a 5573 gimplify_assign (ref, x, ilist);
a68ab351
JJ
5574 if (is_task_ctx (ctx))
5575 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
953ff289 5576 }
50674e96 5577
953ff289
DN
5578 if (do_out)
5579 {
5580 ref = build_sender_ref (val, ctx);
726a989a 5581 gimplify_assign (var, ref, olist);
953ff289
DN
5582 }
5583 }
5584}
5585
726a989a
RB
5586/* Generate code to implement SHARED from the sender (aka parent)
5587 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5588 list things that got automatically shared. */
953ff289
DN
5589
5590static void
726a989a 5591lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
953ff289 5592{
d9a6bd32 5593 tree var, ovar, nvar, t, f, x, record_type;
953ff289
DN
5594
5595 if (ctx->record_type == NULL)
5596 return;
50674e96 5597
a68ab351 5598 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
910ad8de 5599 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
953ff289
DN
5600 {
5601 ovar = DECL_ABSTRACT_ORIGIN (f);
d9a6bd32
JJ
5602 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5603 continue;
5604
953ff289
DN
5605 nvar = maybe_lookup_decl (ovar, ctx);
5606 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5607 continue;
5608
50674e96
DN
 5609 /* If CTX is a nested parallel directive, find the immediately
5610 enclosing parallel or workshare construct that contains a
5611 mapping for OVAR. */
d2dda7fe 5612 var = lookup_decl_in_outer_ctx (ovar, ctx);
50674e96 5613
d9a6bd32
JJ
5614 t = omp_member_access_dummy_var (var);
5615 if (t)
5616 {
5617 var = DECL_VALUE_EXPR (var);
5618 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5619 if (o != t)
5620 var = unshare_and_remap (var, t, o);
5621 else
5622 var = unshare_expr (var);
5623 }
5624
7c8f7639 5625 if (use_pointer_for_field (ovar, ctx))
953ff289
DN
5626 {
5627 x = build_sender_ref (ovar, ctx);
50674e96 5628 var = build_fold_addr_expr (var);
726a989a 5629 gimplify_assign (x, var, ilist);
953ff289
DN
5630 }
5631 else
5632 {
5633 x = build_sender_ref (ovar, ctx);
726a989a 5634 gimplify_assign (x, var, ilist);
953ff289 5635
14e5b285
RG
5636 if (!TREE_READONLY (var)
5637 /* We don't need to receive a new reference to a result
5638 or parm decl. In fact we may not store to it as we will
5639 invalidate any pending RSO and generate wrong gimple
5640 during inlining. */
5641 && !((TREE_CODE (var) == RESULT_DECL
5642 || TREE_CODE (var) == PARM_DECL)
5643 && DECL_BY_REFERENCE (var)))
a68ab351
JJ
5644 {
5645 x = build_sender_ref (ovar, ctx);
726a989a 5646 gimplify_assign (var, x, olist);
a68ab351 5647 }
953ff289
DN
5648 }
5649 }
5650}
5651
e4834818
NS
5652/* Emit an OpenACC head marker call, encapsulating the partitioning and
5653 other information that must be processed by the target compiler.
5654 Return the maximum number of dimensions the associated loop might
5655 be partitioned over. */
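/* A sketch of the marker the code below builds (operands in the order they
   are pushed into ARGS):

     ddvar = IFN_UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static]);

   where TAG carries the OLF_* partitioning bits collected from the
   clauses.  */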
5656
5657static unsigned
5658lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5659 gimple_seq *seq, omp_context *ctx)
5660{
5661 unsigned levels = 0;
5662 unsigned tag = 0;
5663 tree gang_static = NULL_TREE;
5664 auto_vec<tree, 5> args;
5665
5666 args.quick_push (build_int_cst
5667 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5668 args.quick_push (ddvar);
5669 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5670 {
5671 switch (OMP_CLAUSE_CODE (c))
5672 {
5673 case OMP_CLAUSE_GANG:
5674 tag |= OLF_DIM_GANG;
5675 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5676 /* static:* is represented by -1, and we can ignore it, as
5677 scheduling is always static. */
5678 if (gang_static && integer_minus_onep (gang_static))
5679 gang_static = NULL_TREE;
5680 levels++;
5681 break;
5682
5683 case OMP_CLAUSE_WORKER:
5684 tag |= OLF_DIM_WORKER;
5685 levels++;
5686 break;
5687
5688 case OMP_CLAUSE_VECTOR:
5689 tag |= OLF_DIM_VECTOR;
5690 levels++;
5691 break;
5692
5693 case OMP_CLAUSE_SEQ:
5694 tag |= OLF_SEQ;
5695 break;
5696
5697 case OMP_CLAUSE_AUTO:
5698 tag |= OLF_AUTO;
5699 break;
5700
5701 case OMP_CLAUSE_INDEPENDENT:
5702 tag |= OLF_INDEPENDENT;
5703 break;
5704
02889d23
CLT
5705 case OMP_CLAUSE_TILE:
5706 tag |= OLF_TILE;
5707 break;
5708
e4834818
NS
5709 default:
5710 continue;
5711 }
5712 }
5713
5714 if (gang_static)
5715 {
5716 if (DECL_P (gang_static))
5717 gang_static = build_outer_var_ref (gang_static, ctx);
5718 tag |= OLF_GANG_STATIC;
5719 }
5720
5721 /* In a parallel region, loops are implicitly INDEPENDENT. */
5722 omp_context *tgt = enclosing_target_ctx (ctx);
5723 if (!tgt || is_oacc_parallel (tgt))
5724 tag |= OLF_INDEPENDENT;
5725
02889d23
CLT
5726 if (tag & OLF_TILE)
5727 /* Tiling could use all 3 levels. */
5728 levels = 3;
5729 else
5730 {
5731 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5732 Ensure at least one level, or 2 for possible auto
 5733 partitioning. */
5734 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5735 << OLF_DIM_BASE) | OLF_SEQ));
5736
5737 if (levels < 1u + maybe_auto)
5738 levels = 1u + maybe_auto;
5739 }
e4834818
NS
5740
5741 args.quick_push (build_int_cst (integer_type_node, levels));
5742 args.quick_push (build_int_cst (integer_type_node, tag));
5743 if (gang_static)
5744 args.quick_push (gang_static);
5745
5746 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5747 gimple_set_location (call, loc);
5748 gimple_set_lhs (call, ddvar);
5749 gimple_seq_add_stmt (seq, call);
5750
5751 return levels;
5752}
5753
5754/* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5755 partitioning level of the enclosed region. */
5756
5757static void
5758lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5759 tree tofollow, gimple_seq *seq)
5760{
5761 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5762 : IFN_UNIQUE_OACC_TAIL_MARK);
5763 tree marker = build_int_cst (integer_type_node, marker_kind);
5764 int nargs = 2 + (tofollow != NULL_TREE);
5765 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5766 marker, ddvar, tofollow);
5767 gimple_set_location (call, loc);
5768 gimple_set_lhs (call, ddvar);
5769 gimple_seq_add_stmt (seq, call);
5770}
5771
5772/* Generate the before and after OpenACC loop sequences. CLAUSES are
5773 the loop clauses, from which we extract reductions. Initialize
5774 HEAD and TAIL. */
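/* As a sketch, for a loop partitioned over gang and worker, HEAD ends up
   holding the gang fork sequence followed by the worker fork sequence, and
   TAIL the worker join sequence followed by the gang join sequence, so each
   fork/join pair nests around the loop body once the caller splices HEAD
   before the body and TAIL after it.  */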
5775
5776static void
5777lower_oacc_head_tail (location_t loc, tree clauses,
5778 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5779{
5780 bool inner = false;
5781 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5782 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5783
5784 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
e4834818
NS
5785 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5786 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5787
4877b5a4 5788 gcc_assert (count);
e4834818
NS
5789 for (unsigned done = 1; count; count--, done++)
5790 {
5791 gimple_seq fork_seq = NULL;
5792 gimple_seq join_seq = NULL;
5793
5794 tree place = build_int_cst (integer_type_node, -1);
5795 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5796 fork_kind, ddvar, place);
5797 gimple_set_location (fork, loc);
5798 gimple_set_lhs (fork, ddvar);
5799
5800 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5801 join_kind, ddvar, place);
5802 gimple_set_location (join, loc);
5803 gimple_set_lhs (join, ddvar);
5804
5805 /* Mark the beginning of this level sequence. */
5806 if (inner)
5807 lower_oacc_loop_marker (loc, ddvar, true,
5808 build_int_cst (integer_type_node, count),
5809 &fork_seq);
5810 lower_oacc_loop_marker (loc, ddvar, false,
5811 build_int_cst (integer_type_node, done),
5812 &join_seq);
5813
e5014671
NS
5814 lower_oacc_reductions (loc, clauses, place, inner,
5815 fork, join, &fork_seq, &join_seq, ctx);
e4834818
NS
5816
5817 /* Append this level to head. */
5818 gimple_seq_add_seq (head, fork_seq);
5819 /* Prepend it to tail. */
5820 gimple_seq_add_seq (&join_seq, *tail);
5821 *tail = join_seq;
5822
5823 inner = true;
5824 }
5825
5826 /* Mark the end of the sequence. */
5827 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5828 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5829}
726a989a 5830
629b3d75
MJ
5831/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5832 catch handler and return it. This prevents programs from violating the
5833 structured block semantics with throws. */
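/* Schematically the result is

     try { BODY } catch { <eh_protect_cleanup_actions () or __builtin_trap ()> }

   with the handler wrapped in GIMPLE_EH_MUST_NOT_THROW.  */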
726a989a 5834
629b3d75
MJ
5835static gimple_seq
5836maybe_catch_exception (gimple_seq body)
726a989a 5837{
629b3d75
MJ
5838 gimple *g;
5839 tree decl;
b2b40051 5840
629b3d75
MJ
5841 if (!flag_exceptions)
5842 return body;
b2b40051 5843
629b3d75
MJ
5844 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5845 decl = lang_hooks.eh_protect_cleanup_actions ();
5846 else
5847 decl = builtin_decl_explicit (BUILT_IN_TRAP);
b2b40051 5848
629b3d75
MJ
5849 g = gimple_build_eh_must_not_throw (decl);
5850 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5851 GIMPLE_TRY_CATCH);
b2b40051 5852
629b3d75 5853 return gimple_seq_alloc_with_stmt (g);
b2b40051
MJ
5854}
5855
629b3d75
MJ
5856\f
5857/* Routines to lower OMP directives into OMP-GIMPLE. */
726a989a 5858
629b3d75
MJ
5859/* If ctx is a worksharing context inside of a cancellable parallel
5860 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5861 and conditional branch to parallel's cancel_label to handle
5862 cancellation in the implicit barrier. */
953ff289
DN
5863
5864static void
629b3d75 5865maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
953ff289 5866{
629b3d75
MJ
5867 gimple *omp_return = gimple_seq_last_stmt (*body);
5868 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5869 if (gimple_omp_return_nowait_p (omp_return))
5870 return;
5871 if (ctx->outer
5872 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5873 && ctx->outer->cancellable)
50674e96 5874 {
629b3d75
MJ
5875 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5876 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5877 tree lhs = create_tmp_var (c_bool_type);
5878 gimple_omp_return_set_lhs (omp_return, lhs);
5879 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5880 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5881 fold_convert (c_bool_type,
5882 boolean_false_node),
5883 ctx->outer->cancel_label, fallthru_label);
5884 gimple_seq_add_stmt (body, g);
5885 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
50674e96 5886 }
629b3d75 5887}
953ff289 5888
629b3d75
MJ
5889/* Lower the OpenMP sections directive in the current statement in GSI_P.
5890 CTX is the enclosing OMP context for the current statement. */
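/* A sketch of the sequence assembled below: the privatization ILIST, the
   GIMPLE_OMP_SECTIONS statement itself, a GIMPLE_OMP_SECTIONS_SWITCH, a bind
   holding the lowered sections, a GIMPLE_OMP_CONTINUE on the ".section"
   control variable, the reduction OLIST and the DLIST from
   lower_rec_input_clauses, and finally the GIMPLE_OMP_RETURN with its
   implicit-barrier cancellation check.  */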
953ff289 5891
629b3d75
MJ
5892static void
5893lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5894{
5895 tree block, control;
5896 gimple_stmt_iterator tgsi;
5897 gomp_sections *stmt;
5898 gimple *t;
5899 gbind *new_stmt, *bind;
5900 gimple_seq ilist, dlist, olist, new_body;
953ff289 5901
629b3d75 5902 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
953ff289 5903
629b3d75 5904 push_gimplify_context ();
acf0174b 5905
629b3d75
MJ
5906 dlist = NULL;
5907 ilist = NULL;
5908 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5909 &ilist, &dlist, ctx, NULL);
953ff289 5910
629b3d75
MJ
5911 new_body = gimple_omp_body (stmt);
5912 gimple_omp_set_body (stmt, NULL);
5913 tgsi = gsi_start (new_body);
5914 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
953ff289 5915 {
629b3d75
MJ
5916 omp_context *sctx;
5917 gimple *sec_start;
50674e96 5918
629b3d75
MJ
5919 sec_start = gsi_stmt (tgsi);
5920 sctx = maybe_lookup_ctx (sec_start);
5921 gcc_assert (sctx);
5922
5923 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5924 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5925 GSI_CONTINUE_LINKING);
5926 gimple_omp_set_body (sec_start, NULL);
5927
5928 if (gsi_one_before_end_p (tgsi))
50674e96 5929 {
629b3d75
MJ
5930 gimple_seq l = NULL;
5931 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5932 &l, ctx);
5933 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5934 gimple_omp_section_set_last (sec_start);
5935 }
917948d3 5936
629b3d75
MJ
5937 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5938 GSI_CONTINUE_LINKING);
5939 }
50674e96 5940
629b3d75
MJ
5941 block = make_node (BLOCK);
5942 bind = gimple_build_bind (NULL, new_body, block);
50674e96 5943
629b3d75
MJ
5944 olist = NULL;
5945 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
50674e96 5946
629b3d75
MJ
5947 block = make_node (BLOCK);
5948 new_stmt = gimple_build_bind (NULL, NULL, block);
5949 gsi_replace (gsi_p, new_stmt, true);
50674e96 5950
629b3d75
MJ
5951 pop_gimplify_context (new_stmt);
5952 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5953 BLOCK_VARS (block) = gimple_bind_vars (bind);
5954 if (BLOCK_VARS (block))
5955 TREE_USED (block) = 1;
50674e96 5956
629b3d75
MJ
5957 new_body = NULL;
5958 gimple_seq_add_seq (&new_body, ilist);
5959 gimple_seq_add_stmt (&new_body, stmt);
5960 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5961 gimple_seq_add_stmt (&new_body, bind);
50674e96 5962
629b3d75
MJ
5963 control = create_tmp_var (unsigned_type_node, ".section");
5964 t = gimple_build_omp_continue (control, control);
5965 gimple_omp_sections_set_control (stmt, control);
5966 gimple_seq_add_stmt (&new_body, t);
50674e96 5967
629b3d75
MJ
5968 gimple_seq_add_seq (&new_body, olist);
5969 if (ctx->cancellable)
5970 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5971 gimple_seq_add_seq (&new_body, dlist);
917948d3 5972
629b3d75 5973 new_body = maybe_catch_exception (new_body);
50674e96 5974
01914336
MJ
5975 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5976 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5977 t = gimple_build_omp_return (nowait);
629b3d75
MJ
5978 gimple_seq_add_stmt (&new_body, t);
5979 maybe_add_implicit_barrier_cancel (ctx, &new_body);
953ff289 5980
629b3d75 5981 gimple_bind_set_body (new_stmt, new_body);
953ff289
DN
5982}
5983
9a771876 5984
629b3d75
MJ
5985/* A subroutine of lower_omp_single. Expand the simple form of
5986 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
9a771876 5987
629b3d75
MJ
5988 if (GOMP_single_start ())
5989 BODY;
5990 [ GOMP_barrier (); ] -> unless 'nowait' is present.
9a771876 5991
629b3d75
MJ
5992 FIXME. It may be better to delay expanding the logic of this until
5993 pass_expand_omp. The expanded logic may make the job more difficult
5994 to a synchronization analysis pass. */
a68ab351
JJ
5995
5996static void
629b3d75 5997lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
a68ab351 5998{
629b3d75
MJ
5999 location_t loc = gimple_location (single_stmt);
6000 tree tlabel = create_artificial_label (loc);
6001 tree flabel = create_artificial_label (loc);
6002 gimple *call, *cond;
6003 tree lhs, decl;
20906c66 6004
629b3d75
MJ
6005 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6006 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6007 call = gimple_build_call (decl, 0);
6008 gimple_call_set_lhs (call, lhs);
6009 gimple_seq_add_stmt (pre_p, call);
a68ab351 6010
629b3d75
MJ
6011 cond = gimple_build_cond (EQ_EXPR, lhs,
6012 fold_convert_loc (loc, TREE_TYPE (lhs),
6013 boolean_true_node),
6014 tlabel, flabel);
6015 gimple_seq_add_stmt (pre_p, cond);
6016 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6017 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6018 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
a68ab351
JJ
6019}
6020
6021
629b3d75
MJ
6022/* A subroutine of lower_omp_single. Expand the simple form of
6023 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
953ff289 6024
629b3d75 6025 #pragma omp single copyprivate (a, b, c)
953ff289 6026
629b3d75 6027 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
953ff289 6028
629b3d75
MJ
6029 {
6030 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6031 {
6032 BODY;
6033 copyout.a = a;
6034 copyout.b = b;
6035 copyout.c = c;
6036 GOMP_single_copy_end (&copyout);
6037 }
6038 else
6039 {
6040 a = copyout_p->a;
6041 b = copyout_p->b;
6042 c = copyout_p->c;
6043 }
6044 GOMP_barrier ();
6045 }
726a989a 6046
629b3d75
MJ
6047 FIXME. It may be better to delay expanding the logic of this until
6048 pass_expand_omp. The expanded logic may make the job more difficult
6049 to a synchronization analysis pass. */
953ff289 6050
629b3d75
MJ
6051static void
6052lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6053 omp_context *ctx)
6054{
6055 tree ptr_type, t, l0, l1, l2, bfn_decl;
6056 gimple_seq copyin_seq;
6057 location_t loc = gimple_location (single_stmt);
953ff289 6058
629b3d75 6059 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
953ff289 6060
629b3d75
MJ
6061 ptr_type = build_pointer_type (ctx->record_type);
6062 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
953ff289 6063
629b3d75
MJ
6064 l0 = create_artificial_label (loc);
6065 l1 = create_artificial_label (loc);
6066 l2 = create_artificial_label (loc);
953ff289 6067
629b3d75
MJ
6068 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6069 t = build_call_expr_loc (loc, bfn_decl, 0);
6070 t = fold_convert_loc (loc, ptr_type, t);
6071 gimplify_assign (ctx->receiver_decl, t, pre_p);
953ff289 6072
629b3d75
MJ
6073 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6074 build_int_cst (ptr_type, 0));
6075 t = build3 (COND_EXPR, void_type_node, t,
6076 build_and_jump (&l0), build_and_jump (&l1));
6077 gimplify_and_add (t, pre_p);
953ff289 6078
629b3d75 6079 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
953ff289 6080
629b3d75 6081 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
953ff289 6082
629b3d75
MJ
6083 copyin_seq = NULL;
6084 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6085 &copyin_seq, ctx);
953ff289 6086
629b3d75
MJ
6087 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6088 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6089 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6090 gimplify_and_add (t, pre_p);
2aee3e57 6091
629b3d75
MJ
6092 t = build_and_jump (&l2);
6093 gimplify_and_add (t, pre_p);
953ff289 6094
629b3d75 6095 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
953ff289 6096
629b3d75 6097 gimple_seq_add_seq (pre_p, copyin_seq);
777f7f9a 6098
629b3d75 6099 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
777f7f9a 6100}
50674e96 6101
629b3d75
MJ
6102
6103/* Expand code for an OpenMP single directive. */
2b4cf991
JJ
6104
6105static void
629b3d75 6106lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
2b4cf991 6107{
629b3d75 6108 tree block;
629b3d75
MJ
6109 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6110 gbind *bind;
6111 gimple_seq bind_body, bind_body_tail = NULL, dlist;
2b4cf991 6112
629b3d75 6113 push_gimplify_context ();
2b4cf991 6114
629b3d75
MJ
6115 block = make_node (BLOCK);
6116 bind = gimple_build_bind (NULL, NULL, block);
6117 gsi_replace (gsi_p, bind, true);
6118 bind_body = NULL;
6119 dlist = NULL;
6120 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6121 &bind_body, &dlist, ctx, NULL);
6122 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
2b4cf991 6123
629b3d75 6124 gimple_seq_add_stmt (&bind_body, single_stmt);
2b4cf991 6125
629b3d75
MJ
6126 if (ctx->record_type)
6127 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6128 else
6129 lower_omp_single_simple (single_stmt, &bind_body);
2b4cf991 6130
629b3d75 6131 gimple_omp_set_body (single_stmt, NULL);
2b4cf991 6132
629b3d75 6133 gimple_seq_add_seq (&bind_body, dlist);
5a0f4dd3 6134
629b3d75 6135 bind_body = maybe_catch_exception (bind_body);
5a0f4dd3 6136
01914336
MJ
6137 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6138 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6139 gimple *g = gimple_build_omp_return (nowait);
6140 gimple_seq_add_stmt (&bind_body_tail, g);
629b3d75
MJ
6141 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6142 if (ctx->record_type)
6143 {
6144 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6145 tree clobber = build_constructor (ctx->record_type, NULL);
6146 TREE_THIS_VOLATILE (clobber) = 1;
6147 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6148 clobber), GSI_SAME_STMT);
6149 }
6150 gimple_seq_add_seq (&bind_body, bind_body_tail);
6151 gimple_bind_set_body (bind, bind_body);
5a0f4dd3 6152
629b3d75 6153 pop_gimplify_context (bind);
5a0f4dd3 6154
629b3d75
MJ
6155 gimple_bind_append_vars (bind, ctx->block_vars);
6156 BLOCK_VARS (block) = ctx->block_vars;
6157 if (BLOCK_VARS (block))
6158 TREE_USED (block) = 1;
5a0f4dd3
JJ
6159}
6160
74bf76ed 6161
629b3d75 6162/* Expand code for an OpenMP master directive. */
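/* A minimal sketch of the expansion built below:

     if (omp_get_thread_num () != 0) goto lab;
     BODY;
   lab:
     GIMPLE_OMP_RETURN  */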
953ff289
DN
6163
6164static void
629b3d75 6165lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 6166{
629b3d75
MJ
6167 tree block, lab = NULL, x, bfn_decl;
6168 gimple *stmt = gsi_stmt (*gsi_p);
6169 gbind *bind;
6170 location_t loc = gimple_location (stmt);
6171 gimple_seq tseq;
50674e96 6172
629b3d75 6173 push_gimplify_context ();
50674e96 6174
629b3d75
MJ
6175 block = make_node (BLOCK);
6176 bind = gimple_build_bind (NULL, NULL, block);
6177 gsi_replace (gsi_p, bind, true);
6178 gimple_bind_add_stmt (bind, stmt);
50674e96 6179
629b3d75
MJ
6180 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6181 x = build_call_expr_loc (loc, bfn_decl, 0);
6182 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6183 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6184 tseq = NULL;
6185 gimplify_and_add (x, &tseq);
6186 gimple_bind_add_seq (bind, tseq);
9a771876 6187
629b3d75
MJ
6188 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6189 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6190 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6191 gimple_omp_set_body (stmt, NULL);
b357f682 6192
629b3d75 6193 gimple_bind_add_stmt (bind, gimple_build_label (lab));
99819c63 6194
629b3d75 6195 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
e01d41e5 6196
629b3d75 6197 pop_gimplify_context (bind);
b8698a0f 6198
629b3d75
MJ
6199 gimple_bind_append_vars (bind, ctx->block_vars);
6200 BLOCK_VARS (block) = ctx->block_vars;
953ff289
DN
6201}
6202
e4834818 6203
629b3d75 6204/* Expand code for an OpenMP taskgroup directive. */
e4834818 6205
629b3d75
MJ
6206static void
6207lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
e4834818 6208{
629b3d75
MJ
6209 gimple *stmt = gsi_stmt (*gsi_p);
6210 gcall *x;
6211 gbind *bind;
6212 tree block = make_node (BLOCK);
e4834818 6213
629b3d75
MJ
6214 bind = gimple_build_bind (NULL, NULL, block);
6215 gsi_replace (gsi_p, bind, true);
6216 gimple_bind_add_stmt (bind, stmt);
e4834818 6217
629b3d75
MJ
6218 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6219 0);
6220 gimple_bind_add_stmt (bind, x);
e4834818 6221
629b3d75
MJ
6222 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6223 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6224 gimple_omp_set_body (stmt, NULL);
e4834818 6225
629b3d75 6226 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
e4834818 6227
629b3d75
MJ
6228 gimple_bind_append_vars (bind, ctx->block_vars);
6229 BLOCK_VARS (block) = ctx->block_vars;
e4834818
NS
6230}
6231
50674e96 6232
629b3d75 6233/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
74bf76ed
JJ
6234
6235static void
629b3d75
MJ
6236lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6237 omp_context *ctx)
74bf76ed 6238{
629b3d75
MJ
6239 struct omp_for_data fd;
6240 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6241 return;
74bf76ed 6242
629b3d75
MJ
6243 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6244 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6245 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6246 if (!fd.ordered)
6247 return;
acf0174b 6248
629b3d75
MJ
6249 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6250 tree c = gimple_omp_ordered_clauses (ord_stmt);
6251 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6252 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
74bf76ed 6253 {
629b3d75
MJ
6254 /* Merge depend clauses from multiple adjacent
6255 #pragma omp ordered depend(sink:...) constructs
6256 into one #pragma omp ordered depend(sink:...), so that
6257 we can optimize them together. */
6258 gimple_stmt_iterator gsi = *gsi_p;
6259 gsi_next (&gsi);
6260 while (!gsi_end_p (gsi))
74bf76ed 6261 {
629b3d75
MJ
6262 gimple *stmt = gsi_stmt (gsi);
6263 if (is_gimple_debug (stmt)
6264 || gimple_code (stmt) == GIMPLE_NOP)
74bf76ed 6265 {
629b3d75
MJ
6266 gsi_next (&gsi);
6267 continue;
74bf76ed 6268 }
629b3d75
MJ
6269 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6270 break;
6271 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6272 c = gimple_omp_ordered_clauses (ord_stmt2);
6273 if (c == NULL_TREE
6274 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6275 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6276 break;
6277 while (*list_p)
6278 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6279 *list_p = c;
6280 gsi_remove (&gsi, true);
74bf76ed
JJ
6281 }
6282 }
74bf76ed 6283
629b3d75
MJ
6284 /* Canonicalize sink dependence clauses into one folded clause if
6285 possible.
74bf76ed 6286
629b3d75
MJ
6287 The basic algorithm is to create a sink vector whose first
6288 element is the GCD of all the first elements, and whose remaining
6289 elements are the minimum of the subsequent columns.
74bf76ed 6290
629b3d75
MJ
6291 We ignore dependence vectors whose first element is zero because
6292 such dependencies are known to be executed by the same thread.
acf0174b 6293
629b3d75
MJ
6294 We take into account the direction of the loop, so a minimum
6295 becomes a maximum if the loop is iterating forwards. We also
6296 ignore sink clauses where the loop direction is unknown, or where
6297 the offsets are clearly invalid because they are not a multiple
6298 of the loop increment.
6299
6300 For example:
6301
6302 #pragma omp for ordered(2)
6303 for (i=0; i < N; ++i)
6304 for (j=0; j < M; ++j)
acf0174b 6305 {
629b3d75
MJ
6306 #pragma omp ordered \
6307 depend(sink:i-8,j-2) \
6308 depend(sink:i,j-1) \ // Completely ignored because i+0.
6309 depend(sink:i-4,j-3) \
6310 depend(sink:i-6,j-4)
6311 #pragma omp ordered depend(source)
acf0174b 6312 }
acf0174b 6313
629b3d75 6314 Folded clause is:
74bf76ed 6315
629b3d75
MJ
6316 depend(sink:-gcd(8,4,6),-min(2,3,4))
6317 -or-
6318 depend(sink:-2,-2)
6319 */
74bf76ed 6320
629b3d75
MJ
6321 /* FIXME: Computing GCD's where the first element is zero is
6322 non-trivial in the presence of collapsed loops. Do this later. */
6323 if (fd.collapse > 1)
6324 return;
74bf76ed 6325
629b3d75 6326 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
c3684b7b
MS
6327
6328 /* wide_int is not a POD so it must be default-constructed. */
6329 for (unsigned i = 0; i != 2 * len - 1; ++i)
6330 new (static_cast<void*>(folded_deps + i)) wide_int ();
6331
629b3d75
MJ
6332 tree folded_dep = NULL_TREE;
6333 /* TRUE if the first dimension's offset is negative. */
6334 bool neg_offset_p = false;
74bf76ed 6335
629b3d75
MJ
6336 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6337 unsigned int i;
6338 while ((c = *list_p) != NULL)
74bf76ed 6339 {
629b3d75 6340 bool remove = false;
74bf76ed 6341
629b3d75
MJ
6342 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6343 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6344 goto next_ordered_clause;
74bf76ed 6345
629b3d75
MJ
6346 tree vec;
6347 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6348 vec && TREE_CODE (vec) == TREE_LIST;
6349 vec = TREE_CHAIN (vec), ++i)
74bf76ed 6350 {
629b3d75 6351 gcc_assert (i < len);
74bf76ed 6352
629b3d75
MJ
6353 /* omp_extract_for_data has canonicalized the condition. */
6354 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6355 || fd.loops[i].cond_code == GT_EXPR);
6356 bool forward = fd.loops[i].cond_code == LT_EXPR;
6357 bool maybe_lexically_later = true;
953ff289 6358
629b3d75
MJ
6359 /* While the committee makes up its mind, bail if we have any
6360 non-constant steps. */
6361 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6362 goto lower_omp_ordered_ret;
953ff289 6363
629b3d75
MJ
6364 tree itype = TREE_TYPE (TREE_VALUE (vec));
6365 if (POINTER_TYPE_P (itype))
6366 itype = sizetype;
8e6cdc90 6367 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
629b3d75
MJ
6368 TYPE_PRECISION (itype),
6369 TYPE_SIGN (itype));
a68ab351 6370
629b3d75 6371 /* Ignore invalid offsets that are not multiples of the step. */
8e6cdc90
RS
6372 if (!wi::multiple_of_p (wi::abs (offset),
6373 wi::abs (wi::to_wide (fd.loops[i].step)),
6374 UNSIGNED))
b4c3a85b 6375 {
629b3d75
MJ
6376 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6377 "ignoring sink clause with offset that is not "
6378 "a multiple of the loop step");
6379 remove = true;
6380 goto next_ordered_clause;
b4c3a85b 6381 }
d9a6bd32 6382
629b3d75
MJ
6383 /* Calculate the first dimension. The first dimension of
6384 the folded dependency vector is the GCD of the first
6385 elements, while ignoring any first elements whose offset
6386 is 0. */
6387 if (i == 0)
b4c3a85b 6388 {
629b3d75
MJ
6389 /* Ignore dependence vectors whose first dimension is 0. */
6390 if (offset == 0)
b4c3a85b 6391 {
629b3d75
MJ
6392 remove = true;
6393 goto next_ordered_clause;
b4c3a85b 6394 }
d9a6bd32 6395 else
629b3d75
MJ
6396 {
6397 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6398 {
6399 error_at (OMP_CLAUSE_LOCATION (c),
6400 "first offset must be in opposite direction "
6401 "of loop iterations");
6402 goto lower_omp_ordered_ret;
6403 }
6404 if (forward)
6405 offset = -offset;
6406 neg_offset_p = forward;
6407 /* Initialize the first time around. */
6408 if (folded_dep == NULL_TREE)
6409 {
6410 folded_dep = c;
6411 folded_deps[0] = offset;
6412 }
6413 else
6414 folded_deps[0] = wi::gcd (folded_deps[0],
6415 offset, UNSIGNED);
6416 }
d9a6bd32 6417 }
629b3d75 6418 /* Calculate minimum for the remaining dimensions. */
d9a6bd32 6419 else
d9a6bd32 6420 {
629b3d75
MJ
6421 folded_deps[len + i - 1] = offset;
6422 if (folded_dep == c)
6423 folded_deps[i] = offset;
6424 else if (maybe_lexically_later
6425 && !wi::eq_p (folded_deps[i], offset))
6426 {
6427 if (forward ^ wi::gts_p (folded_deps[i], offset))
6428 {
6429 unsigned int j;
6430 folded_dep = c;
6431 for (j = 1; j <= i; j++)
6432 folded_deps[j] = folded_deps[len + j - 1];
6433 }
6434 else
6435 maybe_lexically_later = false;
6436 }
d9a6bd32 6437 }
d9a6bd32 6438 }
629b3d75 6439 gcc_assert (i == len);
d9a6bd32 6440
629b3d75
MJ
6441 remove = true;
6442
6443 next_ordered_clause:
6444 if (remove)
6445 *list_p = OMP_CLAUSE_CHAIN (c);
d9a6bd32 6446 else
629b3d75 6447 list_p = &OMP_CLAUSE_CHAIN (c);
d9a6bd32 6448 }
d9a6bd32 6449
629b3d75 6450 if (folded_dep)
d9a6bd32 6451 {
629b3d75
MJ
6452 if (neg_offset_p)
6453 folded_deps[0] = -folded_deps[0];
d9a6bd32 6454
629b3d75
MJ
6455 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6456 if (POINTER_TYPE_P (itype))
6457 itype = sizetype;
6458
6459 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6460 = wide_int_to_tree (itype, folded_deps[0]);
6461 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6462 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
d9a6bd32
JJ
6463 }
6464
629b3d75 6465 lower_omp_ordered_ret:
d9a6bd32 6466
629b3d75
MJ
 6467 /* Ordered without clauses is #pragma omp ordered threads, while we want
6468 a nop instead if we remove all clauses. */
6469 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6470 gsi_replace (gsi_p, gimple_build_nop (), true);
d9a6bd32
JJ
6471}
6472
6473
629b3d75 6474/* Expand code for an OpenMP ordered directive. */
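/* In the plain (non-SIMD, non-depend) case the body is simply wrapped as
   GOMP_ordered_start (); BODY; GOMP_ordered_end ();.  The SIMD variant uses
   the IFN_GOMP_SIMD_ORDERED_START/END internal functions instead, with an
   extra per-lane serialization loop when SIMT execution is possible (a
   sketch of the bind built below).  */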
953ff289 6475
777f7f9a 6476static void
629b3d75 6477lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 6478{
629b3d75
MJ
6479 tree block;
6480 gimple *stmt = gsi_stmt (*gsi_p), *g;
6481 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6482 gcall *x;
6483 gbind *bind;
6484 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6485 OMP_CLAUSE_SIMD);
6486 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6487 loop. */
6488 bool maybe_simt
6489 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6490 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6491 OMP_CLAUSE_THREADS);
d9a6bd32 6492
629b3d75
MJ
6493 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6494 OMP_CLAUSE_DEPEND))
d9a6bd32 6495 {
629b3d75
MJ
 6496 /* FIXME: This needs to be moved to the expansion to verify various
6497 conditions only testable on cfg with dominators computed, and also
6498 all the depend clauses to be merged still might need to be available
6499 for the runtime checks. */
6500 if (0)
6501 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6502 return;
a68ab351 6503 }
d9a6bd32 6504
629b3d75
MJ
6505 push_gimplify_context ();
6506
6507 block = make_node (BLOCK);
6508 bind = gimple_build_bind (NULL, NULL, block);
6509 gsi_replace (gsi_p, bind, true);
6510 gimple_bind_add_stmt (bind, stmt);
d9a6bd32 6511
629b3d75 6512 if (simd)
917948d3 6513 {
629b3d75
MJ
6514 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6515 build_int_cst (NULL_TREE, threads));
6516 cfun->has_simduid_loops = true;
917948d3
ZD
6517 }
6518 else
629b3d75
MJ
6519 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6520 0);
6521 gimple_bind_add_stmt (bind, x);
6522
6523 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6524 if (maybe_simt)
953ff289 6525 {
629b3d75
MJ
6526 counter = create_tmp_var (integer_type_node);
6527 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6528 gimple_call_set_lhs (g, counter);
6529 gimple_bind_add_stmt (bind, g);
d9a6bd32 6530
629b3d75
MJ
6531 body = create_artificial_label (UNKNOWN_LOCATION);
6532 test = create_artificial_label (UNKNOWN_LOCATION);
6533 gimple_bind_add_stmt (bind, gimple_build_label (body));
953ff289 6534
629b3d75
MJ
6535 tree simt_pred = create_tmp_var (integer_type_node);
6536 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6537 gimple_call_set_lhs (g, simt_pred);
6538 gimple_bind_add_stmt (bind, g);
d9a6bd32 6539
629b3d75
MJ
6540 tree t = create_artificial_label (UNKNOWN_LOCATION);
6541 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6542 gimple_bind_add_stmt (bind, g);
74bf76ed 6543
629b3d75 6544 gimple_bind_add_stmt (bind, gimple_build_label (t));
acf0174b 6545 }
629b3d75
MJ
6546 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6547 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6548 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6549 gimple_omp_set_body (stmt, NULL);
acf0174b 6550
629b3d75 6551 if (maybe_simt)
d9a6bd32 6552 {
629b3d75
MJ
6553 gimple_bind_add_stmt (bind, gimple_build_label (test));
6554 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6555 gimple_bind_add_stmt (bind, g);
50674e96 6556
629b3d75
MJ
6557 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6558 tree nonneg = create_tmp_var (integer_type_node);
6559 gimple_seq tseq = NULL;
6560 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6561 gimple_bind_add_seq (bind, tseq);
d9a6bd32 6562
629b3d75
MJ
6563 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6564 gimple_call_set_lhs (g, nonneg);
6565 gimple_bind_add_stmt (bind, g);
d9a6bd32 6566
629b3d75
MJ
6567 tree end = create_artificial_label (UNKNOWN_LOCATION);
6568 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6569 gimple_bind_add_stmt (bind, g);
50674e96 6570
629b3d75 6571 gimple_bind_add_stmt (bind, gimple_build_label (end));
e5c95afe 6572 }
629b3d75
MJ
6573 if (simd)
6574 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6575 build_int_cst (NULL_TREE, threads));
777f7f9a 6576 else
629b3d75
MJ
6577 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6578 0);
6579 gimple_bind_add_stmt (bind, x);
917948d3 6580
629b3d75 6581 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
917948d3 6582
629b3d75 6583 pop_gimplify_context (bind);
917948d3 6584
629b3d75
MJ
6585 gimple_bind_append_vars (bind, ctx->block_vars);
6586 BLOCK_VARS (block) = gimple_bind_vars (bind);
6587}
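/* Illustrative sketch (not emitted verbatim by the code above): in the
   plain, non-SIMD case an ordered region such as

     #pragma omp for ordered
     for (i = 0; i < n; i++)
       {
     #pragma omp ordered
         emit (i);
       }

   has its body bracketed by the libgomp entry points resolved through
   builtin_decl_explicit above, roughly

     GOMP_ordered_start ();
     emit (i);
     GOMP_ordered_end ();

   whereas the SIMD variant uses the IFN_GOMP_SIMD_ORDERED_START/END
   internal functions instead, so the vectorizer can recognize the region.  */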
56102c7f 6588
56102c7f 6589
629b3d75
MJ
6590/* Lower a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
6591   substitution of a couple of function calls.  But in the NAMED case,
6592   it requires that languages coordinate a symbol name.  It is therefore
6593   best put here in common code.  */
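/* For illustration only (a sketch, not the exact GIMPLE produced): a named
   critical section

     #pragma omp critical (update)
       counter++;

   is lowered to calls bracketing the body, using a mutex symbol whose name
   is derived from the user-visible name via the ".gomp_critical_user_"
   mangling below, roughly

     GOMP_critical_name_start (&.gomp_critical_user_update);
     counter++;
     GOMP_critical_name_end (&.gomp_critical_user_update);

   The unnamed form instead uses GOMP_critical_start/GOMP_critical_end and
   needs no shared symbol.  */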
56102c7f 6594
629b3d75 6595static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
56102c7f 6596
629b3d75
MJ
6597static void
6598lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6599{
6600 tree block;
6601 tree name, lock, unlock;
6602 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6603 gbind *bind;
6604 location_t loc = gimple_location (stmt);
6605 gimple_seq tbody;
56102c7f 6606
629b3d75
MJ
6607 name = gimple_omp_critical_name (stmt);
6608 if (name)
6609 {
6610 tree decl;
56102c7f 6611
629b3d75
MJ
6612 if (!critical_name_mutexes)
6613 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
56102c7f 6614
629b3d75
MJ
6615 tree *n = critical_name_mutexes->get (name);
6616 if (n == NULL)
74bf76ed 6617 {
629b3d75 6618 char *new_str;
953ff289 6619
629b3d75 6620 decl = create_tmp_var_raw (ptr_type_node);
953ff289 6621
629b3d75
MJ
6622 new_str = ACONCAT ((".gomp_critical_user_",
6623 IDENTIFIER_POINTER (name), NULL));
6624 DECL_NAME (decl) = get_identifier (new_str);
6625 TREE_PUBLIC (decl) = 1;
6626 TREE_STATIC (decl) = 1;
6627 DECL_COMMON (decl) = 1;
6628 DECL_ARTIFICIAL (decl) = 1;
6629 DECL_IGNORED_P (decl) = 1;
953ff289 6630
629b3d75 6631 varpool_node::finalize_decl (decl);
953ff289 6632
629b3d75
MJ
6633 critical_name_mutexes->put (name, decl);
6634 }
6635 else
6636 decl = *n;
953ff289 6637
629b3d75
MJ
6638	  /* If '#pragma omp critical' is inside an offloaded region or
6639	     inside a function marked as offloadable, the symbol must be
6640	     marked as offloadable too.  */
6641 omp_context *octx;
6642 if (cgraph_node::get (current_function_decl)->offloadable)
6643 varpool_node::get_create (decl)->offloadable = 1;
6644 else
6645 for (octx = ctx->outer; octx; octx = octx->outer)
6646 if (is_gimple_omp_offloaded (octx->stmt))
6647 {
6648 varpool_node::get_create (decl)->offloadable = 1;
6649 break;
6650 }
777f7f9a 6651
629b3d75 6652 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
01914336
MJ
6653 lock = build_call_expr_loc (loc, lock, 1,
6654 build_fold_addr_expr_loc (loc, decl));
777f7f9a 6655
629b3d75
MJ
6656 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6657 unlock = build_call_expr_loc (loc, unlock, 1,
6658 build_fold_addr_expr_loc (loc, decl));
acf0174b 6659 }
acf0174b 6660 else
5a0f4dd3 6661 {
629b3d75
MJ
6662 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6663 lock = build_call_expr_loc (loc, lock, 0);
5a0f4dd3 6664
629b3d75
MJ
6665 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6666 unlock = build_call_expr_loc (loc, unlock, 0);
acf0174b 6667 }
953ff289 6668
629b3d75 6669 push_gimplify_context ();
fb79f500 6670
629b3d75
MJ
6671 block = make_node (BLOCK);
6672 bind = gimple_build_bind (NULL, NULL, block);
6673 gsi_replace (gsi_p, bind, true);
6674 gimple_bind_add_stmt (bind, stmt);
fb79f500 6675
629b3d75
MJ
6676 tbody = gimple_bind_body (bind);
6677 gimplify_and_add (lock, &tbody);
6678 gimple_bind_set_body (bind, tbody);
fb79f500 6679
629b3d75
MJ
6680 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6681 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6682 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6683 gimple_omp_set_body (stmt, NULL);
953ff289 6684
629b3d75
MJ
6685 tbody = gimple_bind_body (bind);
6686 gimplify_and_add (unlock, &tbody);
6687 gimple_bind_set_body (bind, tbody);
953ff289 6688
629b3d75 6689 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
917948d3 6690
629b3d75
MJ
6691 pop_gimplify_context (bind);
6692 gimple_bind_append_vars (bind, ctx->block_vars);
6693 BLOCK_VARS (block) = gimple_bind_vars (bind);
6694}
50674e96 6695
629b3d75
MJ
6696/* A subroutine of lower_omp_for.  Generate code to emit the predicate
6697   for a lastprivate clause.  Given a loop control predicate of (V
6698   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
6699   is appended to *DLIST; the iterator initialization is appended to
6700   *BODY_P.  */
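/* For example (the names below are illustrative, not what this file emits
   verbatim), given

     #pragma omp for lastprivate(x)
     for (i = 0; i < n; i++)
       x = f (i);

   the copy-out code added to *DLIST is guarded roughly as

     if (i >= n)     // i.e. !(i < n); == is used when the step is +-1
       x = x_private;

   and *BODY_P receives an initialization of "i" so that threads which run
   no iteration at all never satisfy the guard with a stale value.  */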
50674e96 6701
629b3d75
MJ
6702static void
6703lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6704 gimple_seq *dlist, struct omp_context *ctx)
6705{
6706 tree clauses, cond, vinit;
6707 enum tree_code cond_code;
6708 gimple_seq stmts;
953ff289 6709
629b3d75
MJ
6710 cond_code = fd->loop.cond_code;
6711 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
acf0174b 6712
629b3d75
MJ
6713  /* When possible, use a strict equality expression.  This can let
6714     VRP-type optimizations deduce the value and remove a copy.  */
6715 if (tree_fits_shwi_p (fd->loop.step))
acf0174b 6716 {
629b3d75
MJ
6717 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6718 if (step == 1 || step == -1)
6719 cond_code = EQ_EXPR;
acf0174b 6720 }
629b3d75
MJ
6721
6722 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6723 || gimple_omp_for_grid_phony (fd->for_stmt))
6724 cond = omp_grid_lastprivate_predicate (fd);
a68ab351 6725 else
acf0174b 6726 {
629b3d75
MJ
6727 tree n2 = fd->loop.n2;
6728 if (fd->collapse > 1
6729 && TREE_CODE (n2) != INTEGER_CST
6730 && gimple_omp_for_combined_into_p (fd->for_stmt))
d9a6bd32 6731 {
629b3d75
MJ
6732 struct omp_context *taskreg_ctx = NULL;
6733 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
d9a6bd32 6734 {
629b3d75
MJ
6735 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6736 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6737 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
d9a6bd32 6738 {
629b3d75
MJ
6739 if (gimple_omp_for_combined_into_p (gfor))
6740 {
6741 gcc_assert (ctx->outer->outer
6742 && is_parallel_ctx (ctx->outer->outer));
6743 taskreg_ctx = ctx->outer->outer;
6744 }
6745 else
6746 {
6747 struct omp_for_data outer_fd;
6748 omp_extract_for_data (gfor, &outer_fd, NULL);
6749 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6750 }
d9a6bd32 6751 }
629b3d75
MJ
6752 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6753 taskreg_ctx = ctx->outer->outer;
6754 }
6755 else if (is_taskreg_ctx (ctx->outer))
6756 taskreg_ctx = ctx->outer;
6757 if (taskreg_ctx)
6758 {
6759 int i;
6760 tree taskreg_clauses
6761 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6762 tree innerc = omp_find_clause (taskreg_clauses,
6763 OMP_CLAUSE__LOOPTEMP_);
6764 gcc_assert (innerc);
6765 for (i = 0; i < fd->collapse; i++)
6766 {
6767 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6768 OMP_CLAUSE__LOOPTEMP_);
6769 gcc_assert (innerc);
6770 }
6771 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6772 OMP_CLAUSE__LOOPTEMP_);
6773 if (innerc)
6774 n2 = fold_convert (TREE_TYPE (n2),
6775 lookup_decl (OMP_CLAUSE_DECL (innerc),
6776 taskreg_ctx));
d9a6bd32 6777 }
acf0174b 6778 }
629b3d75 6779 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
acf0174b 6780 }
50674e96 6781
629b3d75
MJ
6782 clauses = gimple_omp_for_clauses (fd->for_stmt);
6783 stmts = NULL;
6784 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6785 if (!gimple_seq_empty_p (stmts))
acf0174b 6786 {
629b3d75
MJ
6787 gimple_seq_add_seq (&stmts, *dlist);
6788 *dlist = stmts;
6093bc06 6789
629b3d75
MJ
6790 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6791 vinit = fd->loop.n1;
6792 if (cond_code == EQ_EXPR
6793 && tree_fits_shwi_p (fd->loop.n2)
6794 && ! integer_zerop (fd->loop.n2))
6795 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6796 else
6797 vinit = unshare_expr (vinit);
e67d7a1e 6798
629b3d75
MJ
6799 /* Initialize the iterator variable, so that threads that don't execute
6800 any iterations don't execute the lastprivate clauses by accident. */
6801 gimplify_assign (fd->loop.v, vinit, body_p);
acf0174b 6802 }
953ff289
DN
6803}
6804
1b96e9a4 6805
629b3d75 6806/* Lower code for an OMP loop directive. */
50674e96 6807
629b3d75
MJ
6808static void
6809lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6810{
6811 tree *rhs_p, block;
6812 struct omp_for_data fd, *fdp = NULL;
6813 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6814 gbind *new_stmt;
6815 gimple_seq omp_for_body, body, dlist;
6816 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6817 size_t i;
953ff289 6818
629b3d75 6819 push_gimplify_context ();
953ff289 6820
629b3d75 6821 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
953ff289 6822
629b3d75
MJ
6823 block = make_node (BLOCK);
6824 new_stmt = gimple_build_bind (NULL, NULL, block);
6825 /* Replace at gsi right away, so that 'stmt' is no member
6826 of a sequence anymore as we're going to add to a different
6827 one below. */
6828 gsi_replace (gsi_p, new_stmt, true);
953ff289 6829
629b3d75
MJ
6830  /* Move the declarations of temporaries out of the loop body before we
6831     make it go away.  */
6832 omp_for_body = gimple_omp_body (stmt);
6833 if (!gimple_seq_empty_p (omp_for_body)
6834 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
acf0174b 6835 {
629b3d75
MJ
6836 gbind *inner_bind
6837 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6838 tree vars = gimple_bind_vars (inner_bind);
6839 gimple_bind_append_vars (new_stmt, vars);
6840	 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block; don't
6841	    keep them on the inner_bind and its block.  */
6842 gimple_bind_set_vars (inner_bind, NULL_TREE);
6843 if (gimple_bind_block (inner_bind))
6844 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
acf0174b 6845 }
50674e96 6846
629b3d75 6847 if (gimple_omp_for_combined_into_p (stmt))
5a0f4dd3 6848 {
629b3d75
MJ
6849 omp_extract_for_data (stmt, &fd, NULL);
6850 fdp = &fd;
6851
6852 /* We need two temporaries with fd.loop.v type (istart/iend)
6853 and then (fd.collapse - 1) temporaries with the same
6854 type for count2 ... countN-1 vars if not constant. */
6855 size_t count = 2;
6856 tree type = fd.iter_type;
6857 if (fd.collapse > 1
6858 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6859 count += fd.collapse - 1;
6860 bool taskreg_for
6861 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6862 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6863 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6e6cf7b0 6864 tree simtc = NULL;
629b3d75
MJ
6865 tree clauses = *pc;
6866 if (taskreg_for)
6867 outerc
6868 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6869 OMP_CLAUSE__LOOPTEMP_);
6e6cf7b0
JJ
6870 if (ctx->simt_stmt)
6871 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6872 OMP_CLAUSE__LOOPTEMP_);
629b3d75 6873 for (i = 0; i < count; i++)
5a0f4dd3 6874 {
629b3d75
MJ
6875 tree temp;
6876 if (taskreg_for)
6877 {
6878 gcc_assert (outerc);
6879 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6880 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6881 OMP_CLAUSE__LOOPTEMP_);
6882 }
6883 else
5a0f4dd3 6884 {
6e6cf7b0
JJ
6885 /* If there are 2 adjacent SIMD stmts, one with _simt_
6886 clause, another without, make sure they have the same
6887 decls in _looptemp_ clauses, because the outer stmt
6888 they are combined into will look up just one inner_stmt. */
6889 if (ctx->simt_stmt)
6890 temp = OMP_CLAUSE_DECL (simtc);
6891 else
6892 temp = create_tmp_var (type);
629b3d75 6893 insert_decl_map (&ctx->outer->cb, temp, temp);
5a0f4dd3 6894 }
629b3d75
MJ
6895 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6896 OMP_CLAUSE_DECL (*pc) = temp;
6897 pc = &OMP_CLAUSE_CHAIN (*pc);
6e6cf7b0
JJ
6898 if (ctx->simt_stmt)
6899 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6900 OMP_CLAUSE__LOOPTEMP_);
5a0f4dd3 6901 }
629b3d75 6902 *pc = clauses;
5a0f4dd3
JJ
6903 }
6904
629b3d75
MJ
6905 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6906 dlist = NULL;
6907 body = NULL;
6908 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6909 fdp);
6910 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
917948d3 6911
629b3d75 6912 lower_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289 6913
629b3d75
MJ
6914 /* Lower the header expressions. At this point, we can assume that
6915 the header is of the form:
50674e96 6916
629b3d75 6917 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
917948d3 6918
629b3d75
MJ
6919 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6920 using the .omp_data_s mapping, if needed. */
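  /* For instance (names are illustrative), when the bound "n" is a shared
     variable that has been remapped to a read through the receiver struct,
     the final value is no longer gimple-invariant and is replaced by a
     formal temporary here, roughly

       D.1234 = .omp_data_i->n;
       #pragma omp for (i = 0; i < D.1234; i = i + 1)  */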
6921 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6922 {
6923 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6924 if (!is_gimple_min_invariant (*rhs_p))
6925 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
0fe4bc78
JJ
6926 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6927 recompute_tree_invariant_for_addr_expr (*rhs_p);
50674e96 6928
629b3d75
MJ
6929 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6930 if (!is_gimple_min_invariant (*rhs_p))
6931 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
0fe4bc78
JJ
6932 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6933 recompute_tree_invariant_for_addr_expr (*rhs_p);
d9a6bd32 6934
629b3d75
MJ
6935 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6936 if (!is_gimple_min_invariant (*rhs_p))
6937 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6938 }
953ff289 6939
629b3d75
MJ
6940 /* Once lowered, extract the bounds and clauses. */
6941 omp_extract_for_data (stmt, &fd, NULL);
953ff289 6942
629b3d75
MJ
6943 if (is_gimple_omp_oacc (ctx->stmt)
6944 && !ctx_in_oacc_kernels_region (ctx))
6945 lower_oacc_head_tail (gimple_location (stmt),
6946 gimple_omp_for_clauses (stmt),
6947 &oacc_head, &oacc_tail, ctx);
953ff289 6948
01914336 6949 /* Add OpenACC partitioning and reduction markers just before the loop. */
629b3d75
MJ
6950 if (oacc_head)
6951 gimple_seq_add_seq (&body, oacc_head);
01914336 6952
629b3d75 6953 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
acf0174b 6954
629b3d75
MJ
6955 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6956 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
d9a6bd32
JJ
6957 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6958 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6959 {
629b3d75
MJ
6960 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6961 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6962 OMP_CLAUSE_LINEAR_STEP (c)
6963 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6964 ctx);
d9a6bd32 6965 }
acf0174b 6966
629b3d75
MJ
6967 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6968 && gimple_omp_for_grid_phony (stmt));
6969 if (!phony_loop)
6970 gimple_seq_add_stmt (&body, stmt);
6971 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6972
6973 if (!phony_loop)
6974 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6975 fd.loop.v));
917948d3 6976
629b3d75
MJ
6977 /* After the loop, add exit clauses. */
6978 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
b8698a0f 6979
629b3d75
MJ
6980 if (ctx->cancellable)
6981 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
50674e96 6982
629b3d75 6983 gimple_seq_add_seq (&body, dlist);
953ff289 6984
629b3d75 6985 body = maybe_catch_exception (body);
953ff289 6986
629b3d75 6987 if (!phony_loop)
acf0174b 6988 {
629b3d75
MJ
6989 /* Region exit marker goes at the end of the loop body. */
6990 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6991 maybe_add_implicit_barrier_cancel (ctx, &body);
acf0174b 6992 }
953ff289 6993
629b3d75
MJ
6994 /* Add OpenACC joining and reduction markers just after the loop. */
6995 if (oacc_tail)
6996 gimple_seq_add_seq (&body, oacc_tail);
917948d3 6997
629b3d75 6998 pop_gimplify_context (new_stmt);
917948d3 6999
629b3d75 7000 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6724f8a6 7001 maybe_remove_omp_member_access_dummy_vars (new_stmt);
629b3d75
MJ
7002 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
7003 if (BLOCK_VARS (block))
7004 TREE_USED (block) = 1;
917948d3 7005
629b3d75
MJ
7006 gimple_bind_set_body (new_stmt, body);
7007 gimple_omp_set_body (stmt, NULL);
7008 gimple_omp_for_set_pre_body (stmt, NULL);
7009}
17720e84 7010
629b3d75
MJ
7011/* Callback for walk_stmts.  Check whether the walked body contains nothing
7012   but a single GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS; result in WI->INFO.  */
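/* For example, a parallel whose body consists of nothing but

     #pragma omp for
     for (i = 0; i < n; i++) ...

   leaves the count at 1, and lower_omp_taskreg below then marks the
   parallel as combined, which later lets expansion use the combined
   GOMP_parallel_loop_* entry points instead of setting up the parallel and
   the worksharing loop separately.  (Illustrative; the exact entry point
   depends on the schedule.)  */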
917948d3 7013
629b3d75
MJ
7014static tree
7015check_combined_parallel (gimple_stmt_iterator *gsi_p,
7016 bool *handled_ops_p,
7017 struct walk_stmt_info *wi)
7018{
7019 int *info = (int *) wi->info;
7020 gimple *stmt = gsi_stmt (*gsi_p);
917948d3 7021
629b3d75
MJ
7022 *handled_ops_p = true;
7023 switch (gimple_code (stmt))
acf0174b 7024 {
629b3d75 7025 WALK_SUBSTMTS;
8cba6b95 7026
65f4b875
AO
7027 case GIMPLE_DEBUG:
7028 break;
629b3d75
MJ
7029 case GIMPLE_OMP_FOR:
7030 case GIMPLE_OMP_SECTIONS:
7031 *info = *info == 0 ? 1 : -1;
7032 break;
7033 default:
7034 *info = -1;
7035 break;
acf0174b 7036 }
629b3d75 7037 return NULL;
953ff289
DN
7038}
7039
629b3d75
MJ
7040struct omp_taskcopy_context
7041{
7042 /* This field must be at the beginning, as we do "inheritance": Some
7043 callback functions for tree-inline.c (e.g., omp_copy_decl)
7044 receive a copy_body_data pointer that is up-casted to an
7045 omp_context pointer. */
7046 copy_body_data cb;
7047 omp_context *ctx;
7048};
9a771876 7049
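/* Callback for copy_body_data::copy_decl used while building the task copy
   function: a variable that lives in the sender record (sfield_map) gets a
   fresh temporary of the same type in the copyfn; anything else is returned
   unchanged.  */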
629b3d75
MJ
7050static tree
7051task_copyfn_copy_decl (tree var, copy_body_data *cb)
7052{
7053 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
9a771876 7054
629b3d75
MJ
7055 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7056 return create_tmp_var (TREE_TYPE (var));
9a771876 7057
629b3d75
MJ
7058 return var;
7059}
9a771876 7060
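/* Build a copy of ORIG_TYPE for use inside the task copy function,
   remapping the types, sizes and offsets of its fields through TCCTX's
   copy_body_data so that variably modified components refer to the
   copyfn's own temporaries.  */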
629b3d75
MJ
7061static tree
7062task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
9a771876 7063{
629b3d75 7064 tree name, new_fields = NULL, type, f;
9a771876 7065
629b3d75
MJ
7066 type = lang_hooks.types.make_type (RECORD_TYPE);
7067 name = DECL_NAME (TYPE_NAME (orig_type));
7068 name = build_decl (gimple_location (tcctx->ctx->stmt),
7069 TYPE_DECL, name, type);
7070 TYPE_NAME (type) = name;
9a771876 7071
629b3d75 7072 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
9a771876 7073 {
629b3d75
MJ
7074 tree new_f = copy_node (f);
7075 DECL_CONTEXT (new_f) = type;
7076 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7077 TREE_CHAIN (new_f) = new_fields;
7078 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7079 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7080 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7081 &tcctx->cb, NULL);
7082 new_fields = new_f;
7083 tcctx->cb.decl_map->put (f, new_f);
9a771876 7084 }
629b3d75
MJ
7085 TYPE_FIELDS (type) = nreverse (new_fields);
7086 layout_type (type);
7087 return type;
7088}
9a771876 7089
629b3d75 7090/* Create the task copy function: it copies shared variable pointers and
             copy-constructs firstprivate vars into the spawned task's data block.  */
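/* Conceptually (record and function names below are illustrative), for

     #pragma omp task shared(s) firstprivate(a)

   the generated copy function has the shape

     void foo._omp_cpyfn.1 (struct .omp_data_t *dst, struct .omp_data_s *src)
     {
       dst->s = src->s;       // shared: copy the pointer
       dst->a = src->a;       // firstprivate: copy the value, or run the
                              // language's copy constructor
     }

   with the later passes below also redirecting VLA firstprivates so their
   pointer fields point into the task's own block.  */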
9a771876 7091
629b3d75
MJ
7092static void
7093create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7094{
7095 struct function *child_cfun;
7096 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7097 tree record_type, srecord_type, bind, list;
7098 bool record_needs_remap = false, srecord_needs_remap = false;
7099 splay_tree_node n;
7100 struct omp_taskcopy_context tcctx;
7101 location_t loc = gimple_location (task_stmt);
9a771876 7102
629b3d75
MJ
7103 child_fn = gimple_omp_task_copy_fn (task_stmt);
7104 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7105 gcc_assert (child_cfun->cfg == NULL);
7106 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
9a771876 7107
629b3d75
MJ
7108 /* Reset DECL_CONTEXT on function arguments. */
7109 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7110 DECL_CONTEXT (t) = child_fn;
9a771876 7111
629b3d75
MJ
7112 /* Populate the function. */
7113 push_gimplify_context ();
7114 push_cfun (child_cfun);
9a771876 7115
629b3d75
MJ
7116 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7117 TREE_SIDE_EFFECTS (bind) = 1;
7118 list = NULL;
7119 DECL_SAVED_TREE (child_fn) = bind;
7120 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
9a771876 7121
629b3d75
MJ
7122 /* Remap src and dst argument types if needed. */
7123 record_type = ctx->record_type;
7124 srecord_type = ctx->srecord_type;
7125 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7126 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7127 {
7128 record_needs_remap = true;
7129 break;
7130 }
7131 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7132 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7133 {
7134 srecord_needs_remap = true;
7135 break;
7136 }
9a771876 7137
629b3d75 7138 if (record_needs_remap || srecord_needs_remap)
9a771876 7139 {
629b3d75
MJ
7140 memset (&tcctx, '\0', sizeof (tcctx));
7141 tcctx.cb.src_fn = ctx->cb.src_fn;
7142 tcctx.cb.dst_fn = child_fn;
7143 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7144 gcc_checking_assert (tcctx.cb.src_node);
7145 tcctx.cb.dst_node = tcctx.cb.src_node;
7146 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7147 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7148 tcctx.cb.eh_lp_nr = 0;
7149 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7150 tcctx.cb.decl_map = new hash_map<tree, tree>;
7151 tcctx.ctx = ctx;
9a771876 7152
629b3d75
MJ
7153 if (record_needs_remap)
7154 record_type = task_copyfn_remap_type (&tcctx, record_type);
7155 if (srecord_needs_remap)
7156 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
9a771876
JJ
7157 }
7158 else
629b3d75 7159 tcctx.cb.decl_map = NULL;
9a771876 7160
629b3d75
MJ
7161 arg = DECL_ARGUMENTS (child_fn);
7162 TREE_TYPE (arg) = build_pointer_type (record_type);
7163 sarg = DECL_CHAIN (arg);
7164 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
9a771876 7165
629b3d75
MJ
7166 /* First pass: initialize temporaries used in record_type and srecord_type
7167 sizes and field offsets. */
7168 if (tcctx.cb.decl_map)
7169 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7170 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7171 {
7172 tree *p;
9a771876 7173
629b3d75
MJ
7174 decl = OMP_CLAUSE_DECL (c);
7175 p = tcctx.cb.decl_map->get (decl);
7176 if (p == NULL)
7177 continue;
7178 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7179 sf = (tree) n->value;
7180 sf = *tcctx.cb.decl_map->get (sf);
7181 src = build_simple_mem_ref_loc (loc, sarg);
7182 src = omp_build_component_ref (src, sf);
7183 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7184 append_to_statement_list (t, &list);
7185 }
9a771876 7186
629b3d75
MJ
7187  /* Second pass: copy shared var pointers and copy-construct non-VLA
7188     firstprivate vars.  */
7189 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7190 switch (OMP_CLAUSE_CODE (c))
7191 {
7192 splay_tree_key key;
7193 case OMP_CLAUSE_SHARED:
7194 decl = OMP_CLAUSE_DECL (c);
7195 key = (splay_tree_key) decl;
7196 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7197 key = (splay_tree_key) &DECL_UID (decl);
7198 n = splay_tree_lookup (ctx->field_map, key);
7199 if (n == NULL)
7200 break;
7201 f = (tree) n->value;
7202 if (tcctx.cb.decl_map)
7203 f = *tcctx.cb.decl_map->get (f);
7204 n = splay_tree_lookup (ctx->sfield_map, key);
7205 sf = (tree) n->value;
7206 if (tcctx.cb.decl_map)
7207 sf = *tcctx.cb.decl_map->get (sf);
7208 src = build_simple_mem_ref_loc (loc, sarg);
7209 src = omp_build_component_ref (src, sf);
7210 dst = build_simple_mem_ref_loc (loc, arg);
7211 dst = omp_build_component_ref (dst, f);
7212 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7213 append_to_statement_list (t, &list);
7214 break;
7215 case OMP_CLAUSE_FIRSTPRIVATE:
7216 decl = OMP_CLAUSE_DECL (c);
7217 if (is_variable_sized (decl))
7218 break;
7219 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7220 if (n == NULL)
7221 break;
7222 f = (tree) n->value;
7223 if (tcctx.cb.decl_map)
7224 f = *tcctx.cb.decl_map->get (f);
7225 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7226 if (n != NULL)
7227 {
7228 sf = (tree) n->value;
7229 if (tcctx.cb.decl_map)
7230 sf = *tcctx.cb.decl_map->get (sf);
7231 src = build_simple_mem_ref_loc (loc, sarg);
7232 src = omp_build_component_ref (src, sf);
7233 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7234 src = build_simple_mem_ref_loc (loc, src);
7235 }
7236 else
7237 src = decl;
7238 dst = build_simple_mem_ref_loc (loc, arg);
7239 dst = omp_build_component_ref (dst, f);
7240 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7241 append_to_statement_list (t, &list);
7242 break;
7243 case OMP_CLAUSE_PRIVATE:
7244 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7245 break;
7246 decl = OMP_CLAUSE_DECL (c);
7247 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7248 f = (tree) n->value;
7249 if (tcctx.cb.decl_map)
7250 f = *tcctx.cb.decl_map->get (f);
7251 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7252 if (n != NULL)
7253 {
7254 sf = (tree) n->value;
7255 if (tcctx.cb.decl_map)
7256 sf = *tcctx.cb.decl_map->get (sf);
7257 src = build_simple_mem_ref_loc (loc, sarg);
7258 src = omp_build_component_ref (src, sf);
7259 if (use_pointer_for_field (decl, NULL))
7260 src = build_simple_mem_ref_loc (loc, src);
7261 }
7262 else
7263 src = decl;
7264 dst = build_simple_mem_ref_loc (loc, arg);
7265 dst = omp_build_component_ref (dst, f);
7266 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7267 append_to_statement_list (t, &list);
7268 break;
7269 default:
7270 break;
7271 }
74bf76ed 7272
629b3d75
MJ
7273 /* Last pass: handle VLA firstprivates. */
7274 if (tcctx.cb.decl_map)
7275 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7276 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7277 {
7278 tree ind, ptr, df;
74bf76ed 7279
629b3d75
MJ
7280 decl = OMP_CLAUSE_DECL (c);
7281 if (!is_variable_sized (decl))
7282 continue;
7283 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7284 if (n == NULL)
7285 continue;
7286 f = (tree) n->value;
7287 f = *tcctx.cb.decl_map->get (f);
7288 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7289 ind = DECL_VALUE_EXPR (decl);
7290 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7291 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7292 n = splay_tree_lookup (ctx->sfield_map,
7293 (splay_tree_key) TREE_OPERAND (ind, 0));
7294 sf = (tree) n->value;
7295 sf = *tcctx.cb.decl_map->get (sf);
7296 src = build_simple_mem_ref_loc (loc, sarg);
7297 src = omp_build_component_ref (src, sf);
7298 src = build_simple_mem_ref_loc (loc, src);
7299 dst = build_simple_mem_ref_loc (loc, arg);
7300 dst = omp_build_component_ref (dst, f);
7301 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7302 append_to_statement_list (t, &list);
7303 n = splay_tree_lookup (ctx->field_map,
7304 (splay_tree_key) TREE_OPERAND (ind, 0));
7305 df = (tree) n->value;
7306 df = *tcctx.cb.decl_map->get (df);
7307 ptr = build_simple_mem_ref_loc (loc, arg);
7308 ptr = omp_build_component_ref (ptr, df);
7309 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7310 build_fold_addr_expr_loc (loc, dst));
7311 append_to_statement_list (t, &list);
7312 }
74bf76ed 7313
629b3d75
MJ
7314 t = build1 (RETURN_EXPR, void_type_node, NULL);
7315 append_to_statement_list (t, &list);
74bf76ed 7316
629b3d75
MJ
7317 if (tcctx.cb.decl_map)
7318 delete tcctx.cb.decl_map;
7319 pop_gimplify_context (NULL);
7320 BIND_EXPR_BODY (bind) = list;
7321 pop_cfun ();
7322}
74bf76ed
JJ
7323
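/* Lower the OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into the array
   form expected by the runtime: a temporary array whose element 0 holds
   the total number of dependences and element 1 the number of out/inout
   dependences, followed by the addresses of the out/inout dependences and
   then of the in dependences.  Initialization of the array is appended to
   *ISEQ and a clobber of it to *OSEQ, and a new DEPEND clause pointing at
   the array is prepended to *PCLAUSES.  */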
7324static void
629b3d75 7325lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
74bf76ed 7326{
629b3d75
MJ
7327 tree c, clauses;
7328 gimple *g;
7329 size_t n_in = 0, n_out = 0, idx = 2, i;
7330
7331 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7332 gcc_assert (clauses);
7333 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7334 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7335 switch (OMP_CLAUSE_DEPEND_KIND (c))
7336 {
7337 case OMP_CLAUSE_DEPEND_IN:
7338 n_in++;
7339 break;
7340 case OMP_CLAUSE_DEPEND_OUT:
7341 case OMP_CLAUSE_DEPEND_INOUT:
7342 n_out++;
7343 break;
7344 case OMP_CLAUSE_DEPEND_SOURCE:
7345 case OMP_CLAUSE_DEPEND_SINK:
7346 /* FALLTHRU */
7347 default:
7348 gcc_unreachable ();
7349 }
7350 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7351 tree array = create_tmp_var (type);
7352 TREE_ADDRESSABLE (array) = 1;
7353 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7354 NULL_TREE);
7355 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7356 gimple_seq_add_stmt (iseq, g);
7357 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7358 NULL_TREE);
7359 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7360 gimple_seq_add_stmt (iseq, g);
7361 for (i = 0; i < 2; i++)
74bf76ed 7362 {
629b3d75
MJ
7363 if ((i ? n_in : n_out) == 0)
7364 continue;
7365 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7366 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7367 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7368 {
7369 tree t = OMP_CLAUSE_DECL (c);
7370 t = fold_convert (ptr_type_node, t);
7371 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7372 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7373 NULL_TREE, NULL_TREE);
7374 g = gimple_build_assign (r, t);
7375 gimple_seq_add_stmt (iseq, g);
7376 }
74bf76ed 7377 }
629b3d75
MJ
7378 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7379 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7380 OMP_CLAUSE_CHAIN (c) = *pclauses;
7381 *pclauses = c;
7382 tree clobber = build_constructor (type, NULL);
7383 TREE_THIS_VOLATILE (clobber) = 1;
7384 g = gimple_build_assign (array, clobber);
7385 gimple_seq_add_stmt (oseq, g);
7386}
7387
7388/* Lower the OpenMP parallel or task directive in the current statement
7389 in GSI_P. CTX holds context information for the directive. */
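/* As a rough illustration (the names follow what -fdump-tree-omplower
   style dumps show, simplified), for

     #pragma omp parallel shared(n)
       use (n);

   this builds a sender record filled in before the directive and a
   receiver pointer used inside the outlined body, roughly

     .omp_data_o.1.n = n;
     #pragma omp parallel [child fn: foo._omp_fn.0 (.omp_data_o.1)]
       {
         .omp_data_i = &.omp_data_o.1;
         use (.omp_data_i->n);
       }

   (use_pointer_for_field may instead pass the variable's address); the
   actual GOMP_parallel call is only materialized later, in omp-expand.  */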
74bf76ed 7390
629b3d75
MJ
7391static void
7392lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7393{
7394 tree clauses;
7395 tree child_fn, t;
7396 gimple *stmt = gsi_stmt (*gsi_p);
7397 gbind *par_bind, *bind, *dep_bind = NULL;
7398 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7399 location_t loc = gimple_location (stmt);
74bf76ed 7400
629b3d75
MJ
7401 clauses = gimple_omp_taskreg_clauses (stmt);
7402 par_bind
7403 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7404 par_body = gimple_bind_body (par_bind);
7405 child_fn = ctx->cb.dst_fn;
7406 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7407 && !gimple_omp_parallel_combined_p (stmt))
74bf76ed 7408 {
629b3d75
MJ
7409 struct walk_stmt_info wi;
7410 int ws_num = 0;
74bf76ed 7411
629b3d75
MJ
7412 memset (&wi, 0, sizeof (wi));
7413 wi.info = &ws_num;
7414 wi.val_only = true;
7415 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7416 if (ws_num == 1)
7417 gimple_omp_parallel_set_combined_p (stmt, true);
74bf76ed 7418 }
629b3d75
MJ
7419 gimple_seq dep_ilist = NULL;
7420 gimple_seq dep_olist = NULL;
7421 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7422 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
acf0174b 7423 {
629b3d75
MJ
7424 push_gimplify_context ();
7425 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7426 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7427 &dep_ilist, &dep_olist);
9669b00b 7428 }
9669b00b 7429
629b3d75
MJ
7430 if (ctx->srecord_type)
7431 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
9669b00b 7432
629b3d75 7433 push_gimplify_context ();
74bf76ed 7434
629b3d75
MJ
7435 par_olist = NULL;
7436 par_ilist = NULL;
7437 par_rlist = NULL;
7438 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7439 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7440 if (phony_construct && ctx->record_type)
9669b00b 7441 {
629b3d75
MJ
7442 gcc_checking_assert (!ctx->receiver_decl);
7443 ctx->receiver_decl = create_tmp_var
7444 (build_reference_type (ctx->record_type), ".omp_rec");
9669b00b 7445 }
629b3d75
MJ
7446 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7447 lower_omp (&par_body, ctx);
7448 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7449 lower_reduction_clauses (clauses, &par_rlist, ctx);
9669b00b 7450
629b3d75
MJ
7451 /* Declare all the variables created by mapping and the variables
7452 declared in the scope of the parallel body. */
7453 record_vars_into (ctx->block_vars, child_fn);
6724f8a6 7454 maybe_remove_omp_member_access_dummy_vars (par_bind);
629b3d75 7455 record_vars_into (gimple_bind_vars (par_bind), child_fn);
74bf76ed 7456
629b3d75 7457 if (ctx->record_type)
74bf76ed 7458 {
629b3d75
MJ
7459 ctx->sender_decl
7460 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7461 : ctx->record_type, ".omp_data_o");
7462 DECL_NAMELESS (ctx->sender_decl) = 1;
7463 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7464 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
74bf76ed 7465 }
74bf76ed 7466
629b3d75
MJ
7467 olist = NULL;
7468 ilist = NULL;
7469 lower_send_clauses (clauses, &ilist, &olist, ctx);
7470 lower_send_shared_vars (&ilist, &olist, ctx);
9669b00b 7471
629b3d75 7472 if (ctx->record_type)
74bf76ed 7473 {
629b3d75
MJ
7474 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7475 TREE_THIS_VOLATILE (clobber) = 1;
7476 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7477 clobber));
d9a6bd32 7478 }
d9a6bd32 7479
629b3d75
MJ
7480 /* Once all the expansions are done, sequence all the different
7481 fragments inside gimple_omp_body. */
d9a6bd32 7482
629b3d75 7483 new_body = NULL;
d9a6bd32 7484
629b3d75 7485 if (ctx->record_type)
d9a6bd32 7486 {
629b3d75
MJ
7487 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7488 /* fixup_child_record_type might have changed receiver_decl's type. */
7489 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7490 gimple_seq_add_stmt (&new_body,
7491 gimple_build_assign (ctx->receiver_decl, t));
d9a6bd32
JJ
7492 }
7493
629b3d75
MJ
7494 gimple_seq_add_seq (&new_body, par_ilist);
7495 gimple_seq_add_seq (&new_body, par_body);
7496 gimple_seq_add_seq (&new_body, par_rlist);
7497 if (ctx->cancellable)
7498 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7499 gimple_seq_add_seq (&new_body, par_olist);
7500 new_body = maybe_catch_exception (new_body);
7501 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7502 gimple_seq_add_stmt (&new_body,
7503 gimple_build_omp_continue (integer_zero_node,
7504 integer_zero_node));
7505 if (!phony_construct)
d9a6bd32 7506 {
629b3d75
MJ
7507 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7508 gimple_omp_set_body (stmt, new_body);
d9a6bd32
JJ
7509 }
7510
629b3d75
MJ
7511 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7512 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7513 gimple_bind_add_seq (bind, ilist);
7514 if (!phony_construct)
7515 gimple_bind_add_stmt (bind, stmt);
d9a6bd32 7516 else
629b3d75
MJ
7517 gimple_bind_add_seq (bind, new_body);
7518 gimple_bind_add_seq (bind, olist);
d9a6bd32 7519
629b3d75
MJ
7520 pop_gimplify_context (NULL);
7521
7522 if (dep_bind)
d9a6bd32 7523 {
629b3d75
MJ
7524 gimple_bind_add_seq (dep_bind, dep_ilist);
7525 gimple_bind_add_stmt (dep_bind, bind);
7526 gimple_bind_add_seq (dep_bind, dep_olist);
7527 pop_gimplify_context (dep_bind);
d9a6bd32 7528 }
d9a6bd32
JJ
7529}
7530
629b3d75
MJ
7531/* Lower the GIMPLE_OMP_TARGET in the current statement
7532 in GSI_P. CTX holds context information for the directive. */
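/* As an illustration (the names match the ".omp_data_*" temporaries
   created below; the values are simplified), for

     #pragma omp target map(to: a) map(from: b)

   the lowering prepares three parallel descriptors handed to the runtime:

     .omp_data_arr.N   = { &a, &b }
     .omp_data_sizes.N = { sizeof (a), sizeof (b) }
     .omp_data_kinds.N = { GOMP_MAP_TO, GOMP_MAP_FROM }

   with each element's required alignment packed into the high bits of its
   kind (see talign_shift below).  */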
d9a6bd32
JJ
7533
7534static void
629b3d75 7535lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
d9a6bd32 7536{
629b3d75
MJ
7537 tree clauses;
7538 tree child_fn, t, c;
7539 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7540 gbind *tgt_bind, *bind, *dep_bind = NULL;
7541 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7542 location_t loc = gimple_location (stmt);
7543 bool offloaded, data_region;
7544 unsigned int map_cnt = 0;
d9a6bd32 7545
629b3d75
MJ
7546 offloaded = is_gimple_omp_offloaded (stmt);
7547 switch (gimple_omp_target_kind (stmt))
d9a6bd32 7548 {
629b3d75
MJ
7549 case GF_OMP_TARGET_KIND_REGION:
7550 case GF_OMP_TARGET_KIND_UPDATE:
7551 case GF_OMP_TARGET_KIND_ENTER_DATA:
7552 case GF_OMP_TARGET_KIND_EXIT_DATA:
7553 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7554 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7555 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7556 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7557 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7558 data_region = false;
7559 break;
7560 case GF_OMP_TARGET_KIND_DATA:
7561 case GF_OMP_TARGET_KIND_OACC_DATA:
7562 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7563 data_region = true;
7564 break;
7565 default:
7566 gcc_unreachable ();
74bf76ed 7567 }
74bf76ed 7568
629b3d75 7569 clauses = gimple_omp_target_clauses (stmt);
d9a6bd32 7570
629b3d75
MJ
7571 gimple_seq dep_ilist = NULL;
7572 gimple_seq dep_olist = NULL;
7573 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
d9a6bd32 7574 {
629b3d75
MJ
7575 push_gimplify_context ();
7576 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7577 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7578 &dep_ilist, &dep_olist);
d9a6bd32 7579 }
953ff289 7580
629b3d75
MJ
7581 tgt_bind = NULL;
7582 tgt_body = NULL;
7583 if (offloaded)
e4834818 7584 {
629b3d75
MJ
7585 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7586 tgt_body = gimple_bind_body (tgt_bind);
e4834818 7587 }
629b3d75
MJ
7588 else if (data_region)
7589 tgt_body = gimple_omp_body (stmt);
7590 child_fn = ctx->cb.dst_fn;
e4834818 7591
629b3d75
MJ
7592 push_gimplify_context ();
7593 fplist = NULL;
e4834818 7594
629b3d75
MJ
7595 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7596 switch (OMP_CLAUSE_CODE (c))
7597 {
7598 tree var, x;
e4834818 7599
629b3d75
MJ
7600 default:
7601 break;
7602 case OMP_CLAUSE_MAP:
7603#if CHECKING_P
7604 /* First check what we're prepared to handle in the following. */
7605 switch (OMP_CLAUSE_MAP_KIND (c))
7606 {
7607 case GOMP_MAP_ALLOC:
7608 case GOMP_MAP_TO:
7609 case GOMP_MAP_FROM:
7610 case GOMP_MAP_TOFROM:
7611 case GOMP_MAP_POINTER:
7612 case GOMP_MAP_TO_PSET:
7613 case GOMP_MAP_DELETE:
7614 case GOMP_MAP_RELEASE:
7615 case GOMP_MAP_ALWAYS_TO:
7616 case GOMP_MAP_ALWAYS_FROM:
7617 case GOMP_MAP_ALWAYS_TOFROM:
7618 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7619 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7620 case GOMP_MAP_STRUCT:
7621 case GOMP_MAP_ALWAYS_POINTER:
7622 break;
7623 case GOMP_MAP_FORCE_ALLOC:
7624 case GOMP_MAP_FORCE_TO:
7625 case GOMP_MAP_FORCE_FROM:
7626 case GOMP_MAP_FORCE_TOFROM:
7627 case GOMP_MAP_FORCE_PRESENT:
7628 case GOMP_MAP_FORCE_DEVICEPTR:
7629 case GOMP_MAP_DEVICE_RESIDENT:
7630 case GOMP_MAP_LINK:
7631 gcc_assert (is_gimple_omp_oacc (stmt));
7632 break;
7633 default:
7634 gcc_unreachable ();
7635 }
7636#endif
7637 /* FALLTHRU */
7638 case OMP_CLAUSE_TO:
7639 case OMP_CLAUSE_FROM:
7640 oacc_firstprivate:
7641 var = OMP_CLAUSE_DECL (c);
7642 if (!DECL_P (var))
7643 {
7644 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7645 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7646 && (OMP_CLAUSE_MAP_KIND (c)
7647 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7648 map_cnt++;
7649 continue;
7650 }
e4834818 7651
629b3d75
MJ
7652 if (DECL_SIZE (var)
7653 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7654 {
7655 tree var2 = DECL_VALUE_EXPR (var);
7656 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7657 var2 = TREE_OPERAND (var2, 0);
7658 gcc_assert (DECL_P (var2));
7659 var = var2;
7660 }
e4834818 7661
629b3d75
MJ
7662 if (offloaded
7663 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7664 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7665 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7666 {
7667 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7668 {
7669 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7670 && varpool_node::get_create (var)->offloadable)
7671 continue;
e4834818 7672
629b3d75
MJ
7673 tree type = build_pointer_type (TREE_TYPE (var));
7674 tree new_var = lookup_decl (var, ctx);
7675 x = create_tmp_var_raw (type, get_name (new_var));
7676 gimple_add_tmp_var (x);
7677 x = build_simple_mem_ref (x);
7678 SET_DECL_VALUE_EXPR (new_var, x);
7679 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7680 }
7681 continue;
7682 }
e4834818 7683
629b3d75
MJ
7684 if (!maybe_lookup_field (var, ctx))
7685 continue;
e4834818 7686
629b3d75
MJ
7687 /* Don't remap oacc parallel reduction variables, because the
7688 intermediate result must be local to each gang. */
7689 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7690 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7691 {
7692 x = build_receiver_ref (var, true, ctx);
7693 tree new_var = lookup_decl (var, ctx);
e4834818 7694
629b3d75
MJ
7695 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7696 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7697 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7698 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7699 x = build_simple_mem_ref (x);
7700 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7701 {
7702 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
bd1cab35
CLT
7703 if (omp_is_reference (new_var)
7704 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
629b3d75
MJ
7705 {
7706 /* Create a local object to hold the instance
7707 value. */
7708 tree type = TREE_TYPE (TREE_TYPE (new_var));
7709 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7710 tree inst = create_tmp_var (type, id);
7711 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7712 x = build_fold_addr_expr (inst);
7713 }
7714 gimplify_assign (new_var, x, &fplist);
7715 }
7716 else if (DECL_P (new_var))
7717 {
7718 SET_DECL_VALUE_EXPR (new_var, x);
7719 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7720 }
7721 else
7722 gcc_unreachable ();
7723 }
7724 map_cnt++;
7725 break;
e4834818 7726
629b3d75
MJ
7727 case OMP_CLAUSE_FIRSTPRIVATE:
7728 if (is_oacc_parallel (ctx))
7729 goto oacc_firstprivate;
7730 map_cnt++;
7731 var = OMP_CLAUSE_DECL (c);
7732 if (!omp_is_reference (var)
7733 && !is_gimple_reg_type (TREE_TYPE (var)))
7734 {
7735 tree new_var = lookup_decl (var, ctx);
7736 if (is_variable_sized (var))
7737 {
7738 tree pvar = DECL_VALUE_EXPR (var);
7739 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7740 pvar = TREE_OPERAND (pvar, 0);
7741 gcc_assert (DECL_P (pvar));
7742 tree new_pvar = lookup_decl (pvar, ctx);
7743 x = build_fold_indirect_ref (new_pvar);
7744 TREE_THIS_NOTRAP (x) = 1;
7745 }
7746 else
7747 x = build_receiver_ref (var, true, ctx);
7748 SET_DECL_VALUE_EXPR (new_var, x);
7749 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7750 }
7751 break;
e4834818 7752
629b3d75
MJ
7753 case OMP_CLAUSE_PRIVATE:
7754 if (is_gimple_omp_oacc (ctx->stmt))
7755 break;
7756 var = OMP_CLAUSE_DECL (c);
7757 if (is_variable_sized (var))
7758 {
7759 tree new_var = lookup_decl (var, ctx);
7760 tree pvar = DECL_VALUE_EXPR (var);
7761 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7762 pvar = TREE_OPERAND (pvar, 0);
7763 gcc_assert (DECL_P (pvar));
7764 tree new_pvar = lookup_decl (pvar, ctx);
7765 x = build_fold_indirect_ref (new_pvar);
7766 TREE_THIS_NOTRAP (x) = 1;
7767 SET_DECL_VALUE_EXPR (new_var, x);
7768 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7769 }
7770 break;
e4834818 7771
629b3d75
MJ
7772 case OMP_CLAUSE_USE_DEVICE_PTR:
7773 case OMP_CLAUSE_IS_DEVICE_PTR:
7774 var = OMP_CLAUSE_DECL (c);
7775 map_cnt++;
7776 if (is_variable_sized (var))
7777 {
7778 tree new_var = lookup_decl (var, ctx);
7779 tree pvar = DECL_VALUE_EXPR (var);
7780 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7781 pvar = TREE_OPERAND (pvar, 0);
7782 gcc_assert (DECL_P (pvar));
7783 tree new_pvar = lookup_decl (pvar, ctx);
7784 x = build_fold_indirect_ref (new_pvar);
7785 TREE_THIS_NOTRAP (x) = 1;
7786 SET_DECL_VALUE_EXPR (new_var, x);
7787 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7788 }
7789 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7790 {
7791 tree new_var = lookup_decl (var, ctx);
7792 tree type = build_pointer_type (TREE_TYPE (var));
7793 x = create_tmp_var_raw (type, get_name (new_var));
7794 gimple_add_tmp_var (x);
7795 x = build_simple_mem_ref (x);
7796 SET_DECL_VALUE_EXPR (new_var, x);
7797 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7798 }
7799 else
7800 {
7801 tree new_var = lookup_decl (var, ctx);
7802 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7803 gimple_add_tmp_var (x);
7804 SET_DECL_VALUE_EXPR (new_var, x);
7805 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7806 }
7807 break;
7808 }
e4834818 7809
629b3d75 7810 if (offloaded)
e4834818 7811 {
629b3d75
MJ
7812 target_nesting_level++;
7813 lower_omp (&tgt_body, ctx);
7814 target_nesting_level--;
e4834818 7815 }
629b3d75
MJ
7816 else if (data_region)
7817 lower_omp (&tgt_body, ctx);
e4834818 7818
629b3d75 7819 if (offloaded)
e4834818 7820 {
629b3d75
MJ
7821 /* Declare all the variables created by mapping and the variables
7822 declared in the scope of the target body. */
7823 record_vars_into (ctx->block_vars, child_fn);
6724f8a6 7824 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
629b3d75 7825 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
e4834818
NS
7826 }
7827
629b3d75
MJ
7828 olist = NULL;
7829 ilist = NULL;
7830 if (ctx->record_type)
e4834818 7831 {
629b3d75
MJ
7832 ctx->sender_decl
7833 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7834 DECL_NAMELESS (ctx->sender_decl) = 1;
7835 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7836 t = make_tree_vec (3);
7837 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7838 TREE_VEC_ELT (t, 1)
7839 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7840 ".omp_data_sizes");
7841 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7842 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7843 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7844 tree tkind_type = short_unsigned_type_node;
7845 int talign_shift = 8;
7846 TREE_VEC_ELT (t, 2)
7847 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7848 ".omp_data_kinds");
7849 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7850 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7851 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7852 gimple_omp_target_set_data_arg (stmt, t);
953ff289 7853
629b3d75
MJ
7854 vec<constructor_elt, va_gc> *vsize;
7855 vec<constructor_elt, va_gc> *vkind;
7856 vec_alloc (vsize, map_cnt);
7857 vec_alloc (vkind, map_cnt);
7858 unsigned int map_idx = 0;
953ff289 7859
629b3d75
MJ
7860 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7861 switch (OMP_CLAUSE_CODE (c))
953ff289 7862 {
629b3d75
MJ
7863 tree ovar, nc, s, purpose, var, x, type;
7864 unsigned int talign;
953ff289 7865
629b3d75
MJ
7866 default:
7867 break;
953ff289 7868
629b3d75
MJ
7869 case OMP_CLAUSE_MAP:
7870 case OMP_CLAUSE_TO:
7871 case OMP_CLAUSE_FROM:
7872 oacc_firstprivate_map:
7873 nc = c;
7874 ovar = OMP_CLAUSE_DECL (c);
7875 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7876 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7877 || (OMP_CLAUSE_MAP_KIND (c)
7878 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7879 break;
7880 if (!DECL_P (ovar))
c34938a8 7881 {
629b3d75
MJ
7882 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7883 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7884 {
7885 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7886 == get_base_address (ovar));
7887 nc = OMP_CLAUSE_CHAIN (c);
7888 ovar = OMP_CLAUSE_DECL (nc);
7889 }
7890 else
7891 {
7892 tree x = build_sender_ref (ovar, ctx);
7893 tree v
7894 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7895 gimplify_assign (x, v, &ilist);
7896 nc = NULL_TREE;
7897 }
7898 }
7899 else
7900 {
7901 if (DECL_SIZE (ovar)
7902 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7903 {
7904 tree ovar2 = DECL_VALUE_EXPR (ovar);
7905 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7906 ovar2 = TREE_OPERAND (ovar2, 0);
7907 gcc_assert (DECL_P (ovar2));
7908 ovar = ovar2;
7909 }
7910 if (!maybe_lookup_field (ovar, ctx))
7911 continue;
c34938a8 7912 }
777f7f9a 7913
629b3d75
MJ
7914 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7915 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7916 talign = DECL_ALIGN_UNIT (ovar);
7917 if (nc)
7918 {
7919 var = lookup_decl_in_outer_ctx (ovar, ctx);
7920 x = build_sender_ref (ovar, ctx);
777f7f9a 7921
629b3d75
MJ
7922 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7923 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7924 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7925 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7926 {
7927 gcc_assert (offloaded);
7928 tree avar
7929 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7930 mark_addressable (avar);
7931 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7932 talign = DECL_ALIGN_UNIT (avar);
7933 avar = build_fold_addr_expr (avar);
7934 gimplify_assign (x, avar, &ilist);
7935 }
7936 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7937 {
7938 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7939 if (!omp_is_reference (var))
7940 {
7941 if (is_gimple_reg (var)
7942 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7943 TREE_NO_WARNING (var) = 1;
7944 var = build_fold_addr_expr (var);
7945 }
7946 else
7947 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7948 gimplify_assign (x, var, &ilist);
7949 }
7950 else if (is_gimple_reg (var))
7951 {
7952 gcc_assert (offloaded);
7953 tree avar = create_tmp_var (TREE_TYPE (var));
7954 mark_addressable (avar);
7955 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7956 if (GOMP_MAP_COPY_TO_P (map_kind)
7957 || map_kind == GOMP_MAP_POINTER
7958 || map_kind == GOMP_MAP_TO_PSET
7959 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7960 {
7961		  /* If we need to initialize a temporary
7962		     with VAR because it is not addressable, and
7963		     the variable hasn't been initialized yet, then
7964		     we'll get a warning for the store to avar.
7965		     Don't warn in that case; the mapping might
7966		     be implicit.  */
7967 TREE_NO_WARNING (var) = 1;
7968 gimplify_assign (avar, var, &ilist);
7969 }
7970 avar = build_fold_addr_expr (avar);
7971 gimplify_assign (x, avar, &ilist);
7972 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7973 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7974 && !TYPE_READONLY (TREE_TYPE (var)))
7975 {
7976 x = unshare_expr (x);
7977 x = build_simple_mem_ref (x);
7978 gimplify_assign (var, x, &olist);
7979 }
7980 }
7981 else
7982 {
7983 var = build_fold_addr_expr (var);
7984 gimplify_assign (x, var, &ilist);
7985 }
7986 }
7987 s = NULL_TREE;
7988 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7989 {
7990 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7991 s = TREE_TYPE (ovar);
7992 if (TREE_CODE (s) == REFERENCE_TYPE)
7993 s = TREE_TYPE (s);
7994 s = TYPE_SIZE_UNIT (s);
7995 }
7996 else
7997 s = OMP_CLAUSE_SIZE (c);
7998 if (s == NULL_TREE)
7999 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8000 s = fold_convert (size_type_node, s);
8001 purpose = size_int (map_idx++);
8002 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8003 if (TREE_CODE (s) != INTEGER_CST)
8004 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
777f7f9a 8005
629b3d75
MJ
8006 unsigned HOST_WIDE_INT tkind, tkind_zero;
8007 switch (OMP_CLAUSE_CODE (c))
8008 {
8009 case OMP_CLAUSE_MAP:
8010 tkind = OMP_CLAUSE_MAP_KIND (c);
8011 tkind_zero = tkind;
8012 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
8013 switch (tkind)
8014 {
8015 case GOMP_MAP_ALLOC:
8016 case GOMP_MAP_TO:
8017 case GOMP_MAP_FROM:
8018 case GOMP_MAP_TOFROM:
8019 case GOMP_MAP_ALWAYS_TO:
8020 case GOMP_MAP_ALWAYS_FROM:
8021 case GOMP_MAP_ALWAYS_TOFROM:
8022 case GOMP_MAP_RELEASE:
8023 case GOMP_MAP_FORCE_TO:
8024 case GOMP_MAP_FORCE_FROM:
8025 case GOMP_MAP_FORCE_TOFROM:
8026 case GOMP_MAP_FORCE_PRESENT:
8027 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
8028 break;
8029 case GOMP_MAP_DELETE:
8030 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
8031 default:
8032 break;
8033 }
8034 if (tkind_zero != tkind)
8035 {
8036 if (integer_zerop (s))
8037 tkind = tkind_zero;
8038 else if (integer_nonzerop (s))
8039 tkind_zero = tkind;
8040 }
8041 break;
8042 case OMP_CLAUSE_FIRSTPRIVATE:
8043 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8044 tkind = GOMP_MAP_TO;
8045 tkind_zero = tkind;
8046 break;
8047 case OMP_CLAUSE_TO:
8048 tkind = GOMP_MAP_TO;
8049 tkind_zero = tkind;
8050 break;
8051 case OMP_CLAUSE_FROM:
8052 tkind = GOMP_MAP_FROM;
8053 tkind_zero = tkind;
8054 break;
8055 default:
8056 gcc_unreachable ();
8057 }
8058 gcc_checking_assert (tkind
8059 < (HOST_WIDE_INT_C (1U) << talign_shift));
8060 gcc_checking_assert (tkind_zero
8061 < (HOST_WIDE_INT_C (1U) << talign_shift));
8062 talign = ceil_log2 (talign);
8063 tkind |= talign << talign_shift;
8064 tkind_zero |= talign << talign_shift;
8065 gcc_checking_assert (tkind
8066 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8067 gcc_checking_assert (tkind_zero
8068 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8069 if (tkind == tkind_zero)
8070 x = build_int_cstu (tkind_type, tkind);
8071 else
8072 {
8073 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8074 x = build3 (COND_EXPR, tkind_type,
8075 fold_build2 (EQ_EXPR, boolean_type_node,
8076 unshare_expr (s), size_zero_node),
8077 build_int_cstu (tkind_type, tkind_zero),
8078 build_int_cstu (tkind_type, tkind));
8079 }
8080 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8081 if (nc && nc != c)
8082 c = nc;
8083 break;
05409788 8084
629b3d75
MJ
8085 case OMP_CLAUSE_FIRSTPRIVATE:
8086 if (is_oacc_parallel (ctx))
8087 goto oacc_firstprivate_map;
8088 ovar = OMP_CLAUSE_DECL (c);
8089 if (omp_is_reference (ovar))
8090 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8091 else
8092 talign = DECL_ALIGN_UNIT (ovar);
8093 var = lookup_decl_in_outer_ctx (ovar, ctx);
8094 x = build_sender_ref (ovar, ctx);
8095 tkind = GOMP_MAP_FIRSTPRIVATE;
8096 type = TREE_TYPE (ovar);
8097 if (omp_is_reference (ovar))
8098 type = TREE_TYPE (type);
8099 if ((INTEGRAL_TYPE_P (type)
8100 && TYPE_PRECISION (type) <= POINTER_SIZE)
8101 || TREE_CODE (type) == POINTER_TYPE)
8102 {
8103 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8104 tree t = var;
8105 if (omp_is_reference (var))
8106 t = build_simple_mem_ref (var);
8107 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8108 TREE_NO_WARNING (var) = 1;
8109 if (TREE_CODE (type) != POINTER_TYPE)
8110 t = fold_convert (pointer_sized_int_node, t);
8111 t = fold_convert (TREE_TYPE (x), t);
8112 gimplify_assign (x, t, &ilist);
8113 }
8114 else if (omp_is_reference (var))
8115 gimplify_assign (x, var, &ilist);
8116 else if (is_gimple_reg (var))
8117 {
8118 tree avar = create_tmp_var (TREE_TYPE (var));
8119 mark_addressable (avar);
8120 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8121 TREE_NO_WARNING (var) = 1;
8122 gimplify_assign (avar, var, &ilist);
8123 avar = build_fold_addr_expr (avar);
8124 gimplify_assign (x, avar, &ilist);
8125 }
8126 else
8127 {
8128 var = build_fold_addr_expr (var);
8129 gimplify_assign (x, var, &ilist);
8130 }
8131 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8132 s = size_int (0);
8133 else if (omp_is_reference (ovar))
8134 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8135 else
8136 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8137 s = fold_convert (size_type_node, s);
8138 purpose = size_int (map_idx++);
8139 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8140 if (TREE_CODE (s) != INTEGER_CST)
8141 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
05409788 8142
629b3d75
MJ
8143 gcc_checking_assert (tkind
8144 < (HOST_WIDE_INT_C (1U) << talign_shift));
8145 talign = ceil_log2 (talign);
8146 tkind |= talign << talign_shift;
8147 gcc_checking_assert (tkind
8148 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8149 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8150 build_int_cstu (tkind_type, tkind));
8151 break;
05409788 8152
629b3d75
MJ
8153 case OMP_CLAUSE_USE_DEVICE_PTR:
8154 case OMP_CLAUSE_IS_DEVICE_PTR:
8155 ovar = OMP_CLAUSE_DECL (c);
8156 var = lookup_decl_in_outer_ctx (ovar, ctx);
8157 x = build_sender_ref (ovar, ctx);
8158 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8159 tkind = GOMP_MAP_USE_DEVICE_PTR;
8160 else
8161 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8162 type = TREE_TYPE (ovar);
8163 if (TREE_CODE (type) == ARRAY_TYPE)
8164 var = build_fold_addr_expr (var);
8165 else
8166 {
8167 if (omp_is_reference (ovar))
8168 {
8169 type = TREE_TYPE (type);
8170 if (TREE_CODE (type) != ARRAY_TYPE)
8171 var = build_simple_mem_ref (var);
8172 var = fold_convert (TREE_TYPE (x), var);
8173 }
8174 }
8175 gimplify_assign (x, var, &ilist);
8176 s = size_int (0);
8177 purpose = size_int (map_idx++);
8178 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8179 gcc_checking_assert (tkind
8180 < (HOST_WIDE_INT_C (1U) << talign_shift));
8181 gcc_checking_assert (tkind
8182 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8183 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8184 build_int_cstu (tkind_type, tkind));
8185 break;
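	  /* Illustration, not from the original source: for
		 #pragma omp target data map(tofrom: buf[0:n]) use_device_ptr (buf)
	     the clause is recorded above with GOMP_MAP_USE_DEVICE_PTR and a
	     zero size, and the runtime later substitutes the device pointer
	     that corresponds to the host pointer BUF.  An is_device_ptr
	     clause on a target construct is instead sent by value, using
	     GOMP_MAP_FIRSTPRIVATE_INT as for small firstprivate scalars.  */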
8186 }
8187
629b3d75 8188 gcc_assert (map_idx == map_cnt);
8189
8190 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8191 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8192 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8193 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8194 for (int i = 1; i <= 2; i++)
8195 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8196 {
8197 gimple_seq initlist = NULL;
8198 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8199 TREE_VEC_ELT (t, i)),
8200 &initlist, true, NULL_TREE);
8201 gimple_seq_add_seq (&ilist, initlist);
8202
8203 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8204 NULL);
8205 TREE_THIS_VOLATILE (clobber) = 1;
8206 gimple_seq_add_stmt (&olist,
8207 gimple_build_assign (TREE_VEC_ELT (t, i),
8208 clobber));
8209 }
8210
8211 tree clobber = build_constructor (ctx->record_type, NULL);
8212 TREE_THIS_VOLATILE (clobber) = 1;
8213 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8214 clobber));
8215 }
8216
8217 /* Once all the expansions are done, sequence all the different
8218 fragments inside gimple_omp_body. */
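  /* Roughly (an illustrative sketch, not part of the original comment), for
     an offloaded region the body assembled below looks like

	 .omp_data_i = &.omp_data_arr;	    <- receiver setup, offloaded only
	 <fplist>			    <- firstprivate initializations
	 <clause fix-up statements>	    <- the two clause loops below
	 <fork_seq> <tgt_body> <join_seq>   <- dummy gang reductions for OpenACC
	 OMP_RETURN

     while the sender-record clobbers collected in OLIST are appended after
     the target statement itself in the enclosing bind.  */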
05409788 8219
629b3d75 8220 new_body = NULL;
05409788 8221
8222 if (offloaded
8223 && ctx->record_type)
8224    {
8225 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8226 /* fixup_child_record_type might have changed receiver_decl's type. */
8227 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8228 gimple_seq_add_stmt (&new_body,
8229 gimple_build_assign (ctx->receiver_decl, t));
8230    }
629b3d75 8231 gimple_seq_add_seq (&new_body, fplist);
05409788 8232
629b3d75 8233 if (offloaded || data_region)
8234    {
8235 tree prev = NULL_TREE;
8236 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8237 switch (OMP_CLAUSE_CODE (c))
8238	  {
8239 tree var, x;
8240 default:
8241 break;
8242 case OMP_CLAUSE_FIRSTPRIVATE:
8243 if (is_gimple_omp_oacc (ctx->stmt))
8244 break;
8245 var = OMP_CLAUSE_DECL (c);
8246 if (omp_is_reference (var)
8247 || is_gimple_reg_type (TREE_TYPE (var)))
8248	      {
8249 tree new_var = lookup_decl (var, ctx);
8250 tree type;
8251 type = TREE_TYPE (var);
8252 if (omp_is_reference (var))
8253 type = TREE_TYPE (type);
8254 if ((INTEGRAL_TYPE_P (type)
8255 && TYPE_PRECISION (type) <= POINTER_SIZE)
8256 || TREE_CODE (type) == POINTER_TYPE)
8257 {
8258 x = build_receiver_ref (var, false, ctx);
8259 if (TREE_CODE (type) != POINTER_TYPE)
8260 x = fold_convert (pointer_sized_int_node, x);
8261 x = fold_convert (type, x);
8262 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8263 fb_rvalue);
8264 if (omp_is_reference (var))
8265 {
8266 tree v = create_tmp_var_raw (type, get_name (var));
8267 gimple_add_tmp_var (v);
8268 TREE_ADDRESSABLE (v) = 1;
8269 gimple_seq_add_stmt (&new_body,
8270 gimple_build_assign (v, x));
8271 x = build_fold_addr_expr (v);
8272 }
8273 gimple_seq_add_stmt (&new_body,
8274 gimple_build_assign (new_var, x));
8275 }
8276 else
8277 {
8278 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8279 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8280 fb_rvalue);
8281 gimple_seq_add_stmt (&new_body,
8282 gimple_build_assign (new_var, x));
8283 }
8284 }
8285 else if (is_variable_sized (var))
8286 {
8287 tree pvar = DECL_VALUE_EXPR (var);
8288 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8289 pvar = TREE_OPERAND (pvar, 0);
8290 gcc_assert (DECL_P (pvar));
8291 tree new_var = lookup_decl (pvar, ctx);
8292 x = build_receiver_ref (var, false, ctx);
8293 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8294 gimple_seq_add_stmt (&new_body,
8295 gimple_build_assign (new_var, x));
8296 }
8297 break;
8298 case OMP_CLAUSE_PRIVATE:
8299 if (is_gimple_omp_oacc (ctx->stmt))
8300 break;
8301 var = OMP_CLAUSE_DECL (c);
8302 if (omp_is_reference (var))
8303 {
8304 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8305 tree new_var = lookup_decl (var, ctx);
8306 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8307 if (TREE_CONSTANT (x))
8308 {
8309 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8310 get_name (var));
8311 gimple_add_tmp_var (x);
8312 TREE_ADDRESSABLE (x) = 1;
8313 x = build_fold_addr_expr_loc (clause_loc, x);
8314 }
8315 else
8316 break;
8317
8318 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8319 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8320 gimple_seq_add_stmt (&new_body,
8321 gimple_build_assign (new_var, x));
8322 }
8323 break;
8324 case OMP_CLAUSE_USE_DEVICE_PTR:
8325 case OMP_CLAUSE_IS_DEVICE_PTR:
8326 var = OMP_CLAUSE_DECL (c);
8327 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8328 x = build_sender_ref (var, ctx);
8329 else
8330 x = build_receiver_ref (var, false, ctx);
8331 if (is_variable_sized (var))
8332 {
8333 tree pvar = DECL_VALUE_EXPR (var);
8334 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8335 pvar = TREE_OPERAND (pvar, 0);
8336 gcc_assert (DECL_P (pvar));
8337 tree new_var = lookup_decl (pvar, ctx);
8338 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8339 gimple_seq_add_stmt (&new_body,
8340 gimple_build_assign (new_var, x));
8341 }
8342 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8343 {
8344 tree new_var = lookup_decl (var, ctx);
8345 new_var = DECL_VALUE_EXPR (new_var);
8346 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8347 new_var = TREE_OPERAND (new_var, 0);
8348 gcc_assert (DECL_P (new_var));
8349 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8350 gimple_seq_add_stmt (&new_body,
8351 gimple_build_assign (new_var, x));
8352 }
9bd46bc9 8353 else
8354 {
8355 tree type = TREE_TYPE (var);
8356 tree new_var = lookup_decl (var, ctx);
8357 if (omp_is_reference (var))
8358 {
8359 type = TREE_TYPE (type);
8360 if (TREE_CODE (type) != ARRAY_TYPE)
8361 {
8362 tree v = create_tmp_var_raw (type, get_name (var));
8363 gimple_add_tmp_var (v);
8364 TREE_ADDRESSABLE (v) = 1;
8365 x = fold_convert (type, x);
8366 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8367 fb_rvalue);
8368 gimple_seq_add_stmt (&new_body,
8369 gimple_build_assign (v, x));
8370 x = build_fold_addr_expr (v);
8371 }
8372 }
8373 new_var = DECL_VALUE_EXPR (new_var);
8374 x = fold_convert (TREE_TYPE (new_var), x);
8375 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8376 gimple_seq_add_stmt (&new_body,
8377 gimple_build_assign (new_var, x));
8378 }
8379 break;
9bd46bc9 8380 }
8381 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
8382 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
8383 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8384 or references to VLAs. */
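	  /* Illustration, not from the original source: for
		 int *p;
		 #pragma omp target map(tofrom: p[0:n])
	     the clause chain carries the mapping of the array section
	     followed by a GOMP_MAP_FIRSTPRIVATE_POINTER for P itself; the
	     loop below then initializes the private P in the region from the
	     receiver reference of that preceding map, undoing the bias
	     recorded in OMP_CLAUSE_SIZE when the section does not start at
	     the pointer's own value.  */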
8385 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8386 switch (OMP_CLAUSE_CODE (c))
8387 {
8388 tree var;
8389 default:
8390 break;
8391 case OMP_CLAUSE_MAP:
8392 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8393 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8394 {
8395 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
a90c8804 8396 poly_int64 offset = 0;
8397 gcc_assert (prev);
8398 var = OMP_CLAUSE_DECL (c);
8399 if (DECL_P (var)
8400 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8401 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8402 ctx))
8403 && varpool_node::get_create (var)->offloadable)
8404 break;
8405 if (TREE_CODE (var) == INDIRECT_REF
8406 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8407 var = TREE_OPERAND (var, 0);
8408 if (TREE_CODE (var) == COMPONENT_REF)
8409 {
8410 var = get_addr_base_and_unit_offset (var, &offset);
8411 gcc_assert (var != NULL_TREE && DECL_P (var));
8412 }
8413 else if (DECL_SIZE (var)
8414 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8415 {
8416 tree var2 = DECL_VALUE_EXPR (var);
8417 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8418 var2 = TREE_OPERAND (var2, 0);
8419 gcc_assert (DECL_P (var2));
8420 var = var2;
8421 }
8422 tree new_var = lookup_decl (var, ctx), x;
8423 tree type = TREE_TYPE (new_var);
8424 bool is_ref;
8425 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8426 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8427 == COMPONENT_REF))
8428 {
8429 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8430 is_ref = true;
8431 new_var = build2 (MEM_REF, type,
8432 build_fold_addr_expr (new_var),
8433 build_int_cst (build_pointer_type (type),
8434 offset));
8435 }
8436 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8437 {
8438 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8439 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8440 new_var = build2 (MEM_REF, type,
8441 build_fold_addr_expr (new_var),
8442 build_int_cst (build_pointer_type (type),
8443 offset));
8444 }
8445 else
8446 is_ref = omp_is_reference (var);
8447 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8448 is_ref = false;
8449 bool ref_to_array = false;
8450 if (is_ref)
8451 {
8452 type = TREE_TYPE (type);
8453 if (TREE_CODE (type) == ARRAY_TYPE)
8454 {
8455 type = build_pointer_type (type);
8456 ref_to_array = true;
8457 }
8458 }
8459 else if (TREE_CODE (type) == ARRAY_TYPE)
8460 {
8461 tree decl2 = DECL_VALUE_EXPR (new_var);
8462 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8463 decl2 = TREE_OPERAND (decl2, 0);
8464 gcc_assert (DECL_P (decl2));
8465 new_var = decl2;
8466 type = TREE_TYPE (new_var);
8467 }
8468 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8469 x = fold_convert_loc (clause_loc, type, x);
8470 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8471 {
8472 tree bias = OMP_CLAUSE_SIZE (c);
8473 if (DECL_P (bias))
8474 bias = lookup_decl (bias, ctx);
8475 bias = fold_convert_loc (clause_loc, sizetype, bias);
8476 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8477 bias);
8478 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8479 TREE_TYPE (x), x, bias);
8480 }
8481 if (ref_to_array)
8482 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8483 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8484 if (is_ref && !ref_to_array)
8485 {
8486 tree t = create_tmp_var_raw (type, get_name (var));
8487 gimple_add_tmp_var (t);
8488 TREE_ADDRESSABLE (t) = 1;
8489 gimple_seq_add_stmt (&new_body,
8490 gimple_build_assign (t, x));
8491 x = build_fold_addr_expr_loc (clause_loc, t);
8492 }
8493 gimple_seq_add_stmt (&new_body,
8494 gimple_build_assign (new_var, x));
8495 prev = NULL_TREE;
8496 }
8497 else if (OMP_CLAUSE_CHAIN (c)
8498 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8499 == OMP_CLAUSE_MAP
8500 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8501 == GOMP_MAP_FIRSTPRIVATE_POINTER
8502 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8503 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8504 prev = c;
8505 break;
8506 case OMP_CLAUSE_PRIVATE:
8507 var = OMP_CLAUSE_DECL (c);
8508 if (is_variable_sized (var))
8509 {
8510 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8511 tree new_var = lookup_decl (var, ctx);
8512 tree pvar = DECL_VALUE_EXPR (var);
8513 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8514 pvar = TREE_OPERAND (pvar, 0);
8515 gcc_assert (DECL_P (pvar));
8516 tree new_pvar = lookup_decl (pvar, ctx);
8517 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8518 tree al = size_int (DECL_ALIGN (var));
8519 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8520 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8521 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8522 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8523 gimple_seq_add_stmt (&new_body,
8524 gimple_build_assign (new_pvar, x));
8525 }
8526 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8527 {
8528 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8529 tree new_var = lookup_decl (var, ctx);
8530 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8531 if (TREE_CONSTANT (x))
8532 break;
8533 else
8534 {
8535 tree atmp
8536 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8537 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8538 tree al = size_int (TYPE_ALIGN (rtype));
8539 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8540 }
8541
8542 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8543 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8544 gimple_seq_add_stmt (&new_body,
8545 gimple_build_assign (new_var, x));
8546 }
8547 break;
8548 }
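	  /* Illustration, not from the original source: a private VLA such as
		 int v[n];
		 #pragma omp target private (v)
	     has no mappable storage, so the OMP_CLAUSE_PRIVATE handling above
	     allocates it inside the offloaded function with
	     __builtin_alloca_with_align and stores the result in the pointer
	     that underlies the VLA's DECL_VALUE_EXPR.  */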
8549
8550 gimple_seq fork_seq = NULL;
8551 gimple_seq join_seq = NULL;
9bd46bc9 8552
629b3d75 8553 if (is_oacc_parallel (ctx))
8554    {
8555 /* If there are reductions on the offloaded region itself, treat
8556 them as a dummy GANG loop. */
8557 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
9bd46bc9 8558
8559 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8560 false, NULL, NULL, &fork_seq, &join_seq, ctx);
9bd46bc9 8561 }
9bd46bc9 8562
8563 gimple_seq_add_seq (&new_body, fork_seq);
8564 gimple_seq_add_seq (&new_body, tgt_body);
8565 gimple_seq_add_seq (&new_body, join_seq);
9bd46bc9 8566
8567 if (offloaded)
8568 new_body = maybe_catch_exception (new_body);
9bd46bc9 8569
8570 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8571 gimple_omp_set_body (stmt, new_body);
8572 }
8573
629b3d75
MJ
8574 bind = gimple_build_bind (NULL, NULL,
8575 tgt_bind ? gimple_bind_block (tgt_bind)
8576 : NULL_TREE);
8577 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8578 gimple_bind_add_seq (bind, ilist);
8579 gimple_bind_add_stmt (bind, stmt);
8580 gimple_bind_add_seq (bind, olist);
8581
8582 pop_gimplify_context (NULL);
8583
629b3d75 8584 if (dep_bind)
b6adbb9f 8585 {
8586 gimple_bind_add_seq (dep_bind, dep_ilist);
8587 gimple_bind_add_stmt (dep_bind, bind);
8588 gimple_bind_add_seq (dep_bind, dep_olist);
8589 pop_gimplify_context (dep_bind);
b6adbb9f 8590 }
8591}
8592
629b3d75 8593/* Expand code for an OpenMP teams directive. */
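/* Illustration, not from the original source:
	 #pragma omp teams num_teams (4) thread_limit (64)
   is lowered by the function below to roughly
	 __builtin_GOMP_teams (4, 64);
	 <teams body>
	 OMP_RETURN
   with 0 standing in for either argument when the corresponding clause is
   absent, and with the call and return omitted for gridified (grid_phony)
   teams statements.  */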
94829f87 8594
f8393eb0 8595static void
629b3d75 8596lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
94829f87 8597{
629b3d75
MJ
8598 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8599 push_gimplify_context ();
94829f87 8600
629b3d75
MJ
8601 tree block = make_node (BLOCK);
8602 gbind *bind = gimple_build_bind (NULL, NULL, block);
8603 gsi_replace (gsi_p, bind, true);
8604 gimple_seq bind_body = NULL;
8605 gimple_seq dlist = NULL;
8606 gimple_seq olist = NULL;
94829f87 8607
629b3d75
MJ
8608 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8609 OMP_CLAUSE_NUM_TEAMS);
8610 if (num_teams == NULL_TREE)
8611 num_teams = build_int_cst (unsigned_type_node, 0);
8612 else
8613    {
8614 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8615 num_teams = fold_convert (unsigned_type_node, num_teams);
8616 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8617    }
8618 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8619 OMP_CLAUSE_THREAD_LIMIT);
8620 if (thread_limit == NULL_TREE)
8621 thread_limit = build_int_cst (unsigned_type_node, 0);
8622 else
8623    {
8624 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8625 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8626 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8627 fb_rvalue);
8628    }
8629
8630 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8631 &bind_body, &dlist, ctx, NULL);
8632 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8633 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8634 if (!gimple_omp_teams_grid_phony (teams_stmt))
8635    {
8636 gimple_seq_add_stmt (&bind_body, teams_stmt);
8637 location_t loc = gimple_location (teams_stmt);
8638 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8639 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8640 gimple_set_location (call, loc);
8641 gimple_seq_add_stmt (&bind_body, call);
9bd46bc9
NS
8642 }
8643
629b3d75
MJ
8644 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8645 gimple_omp_set_body (teams_stmt, NULL);
8646 gimple_seq_add_seq (&bind_body, olist);
8647 gimple_seq_add_seq (&bind_body, dlist);
8648 if (!gimple_omp_teams_grid_phony (teams_stmt))
8649 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8650 gimple_bind_set_body (bind, bind_body);
9bd46bc9 8651
629b3d75 8652 pop_gimplify_context (bind);
9bd46bc9 8653
629b3d75
MJ
8654 gimple_bind_append_vars (bind, ctx->block_vars);
8655 BLOCK_VARS (block) = ctx->block_vars;
8656 if (BLOCK_VARS (block))
8657 TREE_USED (block) = 1;
9bd46bc9
NS
8658}
8659
629b3d75 8660/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
9bd46bc9 8661
629b3d75
MJ
8662static void
8663lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9bd46bc9 8664{
629b3d75
MJ
8665 gimple *stmt = gsi_stmt (*gsi_p);
8666 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8667 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8668 gimple_build_omp_return (false));
9bd46bc9
NS
8669}
8670
9bd46bc9 8671
629b3d75
MJ
8672/* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8673 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8674 of OMP context, but with task_shared_vars set. */
9bd46bc9 8675
629b3d75
MJ
8676static tree
8677lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8678 void *data)
8679{
629b3d75 8680 tree t = *tp;
9bd46bc9 8681
629b3d75
MJ
8682 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8683 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8684 return t;
9bd46bc9 8685
629b3d75
MJ
8686 if (task_shared_vars
8687 && DECL_P (t)
8688 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8689 return t;
9bd46bc9 8690
629b3d75
MJ
8691 /* If a global variable has been privatized, TREE_CONSTANT on
8692 ADDR_EXPR might be wrong. */
8693 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8694 recompute_tree_invariant_for_addr_expr (t);
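  /* Illustration, not from the original source: if a global G was privatized
     by a data-sharing clause, an ADDR_EXPR like &g that was TREE_CONSTANT
     before lowering may now refer to the local copy, so its invariant and
     constant flags are recomputed here.  */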
9bd46bc9 8695
629b3d75
MJ
8696 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8697 return NULL_TREE;
9bd46bc9
NS
8698}
8699
629b3d75
MJ
8700/* Data to be communicated between lower_omp_regimplify_operands and
8701 lower_omp_regimplify_operands_p. */
9bd46bc9 8702
629b3d75 8703struct lower_omp_regimplify_operands_data
8704{
8705 omp_context *ctx;
8706 vec<tree> *decls;
8707};
9bd46bc9 8708
629b3d75
MJ
8709/* Helper function for lower_omp_regimplify_operands. Find
8710 omp_member_access_dummy_var vars and adjust temporarily their
8711 DECL_VALUE_EXPRs if needed. */
9bd46bc9 8712
629b3d75
MJ
8713static tree
8714lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8715 void *data)
8716{
8717 tree t = omp_member_access_dummy_var (*tp);
8718 if (t)
8719    {
8720 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8721 lower_omp_regimplify_operands_data *ldata
8722 = (lower_omp_regimplify_operands_data *) wi->info;
8723 tree o = maybe_lookup_decl (t, ldata->ctx);
8724 if (o != t)
8725	{
8726 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8727 ldata->decls->safe_push (*tp);
8728 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8729 SET_DECL_VALUE_EXPR (*tp, v);
8730	}
8731    }
8732 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8733 return NULL_TREE;
9bd46bc9
NS
8734}
8735
629b3d75
MJ
8736/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8737 of omp_member_access_dummy_var vars during regimplification. */
9bd46bc9
NS
8738
8739static void
629b3d75
MJ
8740lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8741 gimple_stmt_iterator *gsi_p)
8742{
8743 auto_vec<tree, 10> decls;
8744 if (ctx)
8745 {
8746 struct walk_stmt_info wi;
8747 memset (&wi, '\0', sizeof (wi));
8748 struct lower_omp_regimplify_operands_data data;
8749 data.ctx = ctx;
8750 data.decls = &decls;
8751 wi.info = &data;
8752 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8753 }
8754 gimple_regimplify_operands (stmt, gsi_p);
8755 while (!decls.is_empty ())
8756 {
8757 tree t = decls.pop ();
8758 tree v = decls.pop ();
8759 SET_DECL_VALUE_EXPR (t, v);
8760 }
9bd46bc9
NS
8761}
8762
9bd46bc9 8763static void
629b3d75 8764lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8765{
8766 gimple *stmt = gsi_stmt (*gsi_p);
8767 struct walk_stmt_info wi;
8768 gcall *call_stmt;
9bd46bc9 8769
629b3d75
MJ
8770 if (gimple_has_location (stmt))
8771 input_location = gimple_location (stmt);
9bd46bc9 8772
629b3d75
MJ
8773 if (task_shared_vars)
8774 memset (&wi, '\0', sizeof (wi));
9bd46bc9 8775
629b3d75
MJ
8776 /* If we have issued syntax errors, avoid doing any heavy lifting.
8777 Just replace the OMP directives with a NOP to avoid
8778 confusing RTL expansion. */
8779 if (seen_error () && is_gimple_omp (stmt))
8780    {
8781 gsi_replace (gsi_p, gimple_build_nop (), true);
8782 return;
8783 }
9bd46bc9 8784
629b3d75
MJ
8785 switch (gimple_code (stmt))
8786 {
8787 case GIMPLE_COND:
8788 {
8789 gcond *cond_stmt = as_a <gcond *> (stmt);
8790 if ((ctx || task_shared_vars)
8791 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8792 lower_omp_regimplify_p,
8793 ctx ? NULL : &wi, NULL)
8794 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8795 lower_omp_regimplify_p,
8796 ctx ? NULL : &wi, NULL)))
8797 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8798 }
8799 break;
8800 case GIMPLE_CATCH:
8801 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8802 break;
8803 case GIMPLE_EH_FILTER:
8804 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8805 break;
8806 case GIMPLE_TRY:
8807 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8808 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8809 break;
8810 case GIMPLE_TRANSACTION:
01914336 8811 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
629b3d75
MJ
8812 ctx);
8813 break;
8814 case GIMPLE_BIND:
8815 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
6724f8a6 8816 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
629b3d75
MJ
8817 break;
8818 case GIMPLE_OMP_PARALLEL:
8819 case GIMPLE_OMP_TASK:
8820 ctx = maybe_lookup_ctx (stmt);
8821 gcc_assert (ctx);
8822 if (ctx->cancellable)
8823 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8824 lower_omp_taskreg (gsi_p, ctx);
8825 break;
8826 case GIMPLE_OMP_FOR:
8827 ctx = maybe_lookup_ctx (stmt);
8828 gcc_assert (ctx);
8829 if (ctx->cancellable)
8830 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8831 lower_omp_for (gsi_p, ctx);
8832 break;
8833 case GIMPLE_OMP_SECTIONS:
8834 ctx = maybe_lookup_ctx (stmt);
8835 gcc_assert (ctx);
8836 if (ctx->cancellable)
8837 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8838 lower_omp_sections (gsi_p, ctx);
8839 break;
8840 case GIMPLE_OMP_SINGLE:
8841 ctx = maybe_lookup_ctx (stmt);
8842 gcc_assert (ctx);
8843 lower_omp_single (gsi_p, ctx);
8844 break;
8845 case GIMPLE_OMP_MASTER:
8846 ctx = maybe_lookup_ctx (stmt);
8847 gcc_assert (ctx);
8848 lower_omp_master (gsi_p, ctx);
8849 break;
8850 case GIMPLE_OMP_TASKGROUP:
8851 ctx = maybe_lookup_ctx (stmt);
8852 gcc_assert (ctx);
8853 lower_omp_taskgroup (gsi_p, ctx);
8854 break;
8855 case GIMPLE_OMP_ORDERED:
8856 ctx = maybe_lookup_ctx (stmt);
8857 gcc_assert (ctx);
8858 lower_omp_ordered (gsi_p, ctx);
8859 break;
8860 case GIMPLE_OMP_CRITICAL:
8861 ctx = maybe_lookup_ctx (stmt);
8862 gcc_assert (ctx);
8863 lower_omp_critical (gsi_p, ctx);
8864 break;
8865 case GIMPLE_OMP_ATOMIC_LOAD:
8866 if ((ctx || task_shared_vars)
8867 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8868 as_a <gomp_atomic_load *> (stmt)),
8869 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8870 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8871 break;
8872 case GIMPLE_OMP_TARGET:
8873 ctx = maybe_lookup_ctx (stmt);
8874 gcc_assert (ctx);
8875 lower_omp_target (gsi_p, ctx);
8876 break;
8877 case GIMPLE_OMP_TEAMS:
8878 ctx = maybe_lookup_ctx (stmt);
8879 gcc_assert (ctx);
8880 lower_omp_teams (gsi_p, ctx);
8881 break;
8882 case GIMPLE_OMP_GRID_BODY:
8883 ctx = maybe_lookup_ctx (stmt);
8884 gcc_assert (ctx);
8885 lower_omp_grid_body (gsi_p, ctx);
8886 break;
8887 case GIMPLE_CALL:
8888 tree fndecl;
8889 call_stmt = as_a <gcall *> (stmt);
8890 fndecl = gimple_call_fndecl (call_stmt);
8891 if (fndecl
8892 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8893 switch (DECL_FUNCTION_CODE (fndecl))
8894	  {
8895 case BUILT_IN_GOMP_BARRIER:
8896 if (ctx == NULL)
8897 break;
8898 /* FALLTHRU */
8899 case BUILT_IN_GOMP_CANCEL:
8900 case BUILT_IN_GOMP_CANCELLATION_POINT:
8901 omp_context *cctx;
8902 cctx = ctx;
8903 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8904 cctx = cctx->outer;
8905 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8906 if (!cctx->cancellable)
8907 {
8908 if (DECL_FUNCTION_CODE (fndecl)
8909 == BUILT_IN_GOMP_CANCELLATION_POINT)
8910 {
8911 stmt = gimple_build_nop ();
8912 gsi_replace (gsi_p, stmt, false);
8913 }
8914 break;
8915 }
8916 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8917 {
8918 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8919 gimple_call_set_fndecl (call_stmt, fndecl);
8920 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8921 }
8922 tree lhs;
8923 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8924 gimple_call_set_lhs (call_stmt, lhs);
8925 tree fallthru_label;
8926 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8927 gimple *g;
8928 g = gimple_build_label (fallthru_label);
8929 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8930 g = gimple_build_cond (NE_EXPR, lhs,
8931 fold_convert (TREE_TYPE (lhs),
8932 boolean_false_node),
8933 cctx->cancel_label, fallthru_label);
8934 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8935 break;
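	    /* Illustration, not from the original source: inside a
	       cancellable region,
		   #pragma omp cancellation point for
	       becomes a call with an artificial LHS followed by
		   if (lhs != 0) goto <cancel_label>; else goto <fallthru>;
	       whereas in a region that is not cancellable the cancellation
	       point is simply replaced by a GIMPLE_NOP and a barrier is left
	       as a plain GOMP_barrier.  */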
8936 default:
8937 break;
8938	  }
8939 /* FALLTHRU */
8940 default:
8941 if ((ctx || task_shared_vars)
8942 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8943 ctx ? NULL : &wi))
8944	{
8945 /* Just remove clobbers, this should happen only if we have
8946 "privatized" local addressable variables in SIMD regions,
8947 the clobber isn't needed in that case and gimplifying address
8948 of the ARRAY_REF into a pointer and creating MEM_REF based
8949 clobber would create worse code than we get with the clobber
8950 dropped. */
8951 if (gimple_clobber_p (stmt))
8952	    {
8953 gsi_replace (gsi_p, gimple_build_nop (), true);
8954 break;
8955	    }
629b3d75 8956 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8957	}
629b3d75 8958 break;
8959    }
8960}
8961
9bd46bc9 8962static void
629b3d75 8963lower_omp (gimple_seq *body, omp_context *ctx)
8964{
8965 location_t saved_location = input_location;
8966 gimple_stmt_iterator gsi;
8967 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8968 lower_omp_1 (&gsi, ctx);
8969 /* During gimplification, we haven't folded statements inside offloading
8970 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8971 if (target_nesting_level || taskreg_nesting_level)
8972 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8973 fold_stmt (&gsi);
8974 input_location = saved_location;
9bd46bc9
NS
8975}
8976
629b3d75 8977/* Main entry point. */
9bd46bc9 8978
629b3d75
MJ
8979static unsigned int
8980execute_lower_omp (void)
8981{
8982 gimple_seq body;
8983 int i;
8984 omp_context *ctx;
9bd46bc9 8985
629b3d75
MJ
8986 /* This pass always runs, to provide PROP_gimple_lomp.
8987 But often, there is nothing to do. */
5e9d6aa4 8988 if (flag_openacc == 0 && flag_openmp == 0
629b3d75
MJ
8989 && flag_openmp_simd == 0)
8990 return 0;
9bd46bc9 8991
629b3d75
MJ
8992 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8993 delete_omp_context);
9bd46bc9 8994
629b3d75 8995 body = gimple_body (current_function_decl);
9bd46bc9 8996
629b3d75
MJ
8997 if (hsa_gen_requested_p ())
8998 omp_grid_gridify_all_targets (&body);
8999
9000 scan_omp (&body, NULL);
9001 gcc_assert (taskreg_nesting_level == 0);
9002 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
9003 finish_taskreg_scan (ctx);
9004 taskreg_contexts.release ();
9bd46bc9 9005
629b3d75
MJ
9006 if (all_contexts->root)
9007 {
9008 if (task_shared_vars)
9009 push_gimplify_context ();
9010 lower_omp (&body, NULL);
9011 if (task_shared_vars)
9012 pop_gimplify_context (NULL);
9013 }
9014
9015 if (all_contexts)
9016 {
9017 splay_tree_delete (all_contexts);
9018 all_contexts = NULL;
9019    }
629b3d75 9020 BITMAP_FREE (task_shared_vars);
6724f8a6
JJ
9021
9022 /* If current function is a method, remove artificial dummy VAR_DECL created
9023 for non-static data member privatization, they aren't needed for
9024 debuginfo nor anything else, have been already replaced everywhere in the
9025 IL and cause problems with LTO. */
9026 if (DECL_ARGUMENTS (current_function_decl)
9027 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
9028 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
9029 == POINTER_TYPE))
9030 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
629b3d75 9031 return 0;
9bd46bc9
NS
9032}
9033
629b3d75 9034namespace {
9bd46bc9 9035
629b3d75 9036const pass_data pass_data_lower_omp =
9037{
9038 GIMPLE_PASS, /* type */
9039 "omplower", /* name */
fd2b8c8b 9040 OPTGROUP_OMP, /* optinfo_flags */
629b3d75
MJ
9041 TV_NONE, /* tv_id */
9042 PROP_gimple_any, /* properties_required */
9043 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
9044 0, /* properties_destroyed */
9045 0, /* todo_flags_start */
9046 0, /* todo_flags_finish */
9047};
9bd46bc9 9048
629b3d75
MJ
9049class pass_lower_omp : public gimple_opt_pass
9050{
9051public:
9052 pass_lower_omp (gcc::context *ctxt)
9053 : gimple_opt_pass (pass_data_lower_omp, ctxt)
9054 {}
9bd46bc9 9055
629b3d75
MJ
9056 /* opt_pass methods: */
9057 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9bd46bc9 9058
629b3d75 9059}; // class pass_lower_omp
9bd46bc9 9060
629b3d75 9061} // anon namespace
9bd46bc9 9062
629b3d75
MJ
9063gimple_opt_pass *
9064make_pass_lower_omp (gcc::context *ctxt)
9065{
9066 return new pass_lower_omp (ctxt);
9bd46bc9 9067}
629b3d75
MJ
9068\f
9069/* The following is a utility to diagnose structured block violations.
9070 It is not part of the "omplower" pass, as that's invoked too late. It
9071 should be invoked by the respective front ends after gimplification. */
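/* Illustration, not from the original source: the two passes below reject
   jumps that enter or leave an OpenMP/OpenACC structured block, e.g.

	goto l;
     #pragma omp parallel
	{
	l:;
	}

   is reported as "invalid entry to OpenMP structured block", and a goto,
   switch or return that leaves such a block is diagnosed similarly.  */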
9bd46bc9 9072
629b3d75 9073static splay_tree all_labels;
9bd46bc9 9074
629b3d75
MJ
9075/* Check for mismatched contexts and generate an error if needed. Return
9076 true if an error is detected. */
9bd46bc9 9077
629b3d75
MJ
9078static bool
9079diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9080 gimple *branch_ctx, gimple *label_ctx)
9081{
9082 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9083 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9bd46bc9 9084
629b3d75
MJ
9085 if (label_ctx == branch_ctx)
9086 return false;
9bd46bc9 9087
629b3d75 9088 const char* kind = NULL;
9bd46bc9 9089
629b3d75 9090 if (flag_openacc)
9091    {
9092 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9093 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9094	{
9095 gcc_checking_assert (kind == NULL);
9096 kind = "OpenACC";
9bd46bc9
NS
9097 }
9098 }
629b3d75 9099 if (kind == NULL)
5b37e866 9100 {
0a734553 9101 gcc_checking_assert (flag_openmp || flag_openmp_simd);
629b3d75 9102 kind = "OpenMP";
5b37e866 9103 }
9bd46bc9 9104
01914336 9105 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
629b3d75
MJ
9106 so we could traverse it and issue a correct "exit" or "enter" error
9107 message upon a structured block violation.
c5a64cfe 9108
629b3d75
MJ
9109 We built the context by building a list with tree_cons'ing, but there is
9110 no easy counterpart in gimple tuples. It seems like far too much work
9111 for issuing exit/enter error messages. If someone really misses the
01914336 9112 distinct error message... patches welcome. */
c5a64cfe 9113
629b3d75
MJ
9114#if 0
9115 /* Try to avoid confusing the user by producing and error message
9116 with correct "exit" or "enter" verbiage. We prefer "exit"
9117 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9118 if (branch_ctx == NULL)
9119 exit_p = false;
9120 else
5b37e866 9121 {
629b3d75
MJ
9122 while (label_ctx)
9123 {
9124 if (TREE_VALUE (label_ctx) == branch_ctx)
9125 {
9126 exit_p = false;
9127 break;
9128 }
9129 label_ctx = TREE_CHAIN (label_ctx);
9130 }
5b37e866
NS
9131 }
9132
629b3d75
MJ
9133 if (exit_p)
9134 error ("invalid exit from %s structured block", kind);
9135 else
9136 error ("invalid entry to %s structured block", kind);
9137#endif
5b37e866 9138
629b3d75
MJ
9139 /* If it's obvious we have an invalid entry, be specific about the error. */
9140 if (branch_ctx == NULL)
9141 error ("invalid entry to %s structured block", kind);
9142 else
c5a64cfe 9143 {
629b3d75
MJ
9144 /* Otherwise, be vague and lazy, but efficient. */
9145 error ("invalid branch to/from %s structured block", kind);
c5a64cfe 9146 }
5b37e866 9147
629b3d75
MJ
9148 gsi_replace (gsi_p, gimple_build_nop (), false);
9149 return true;
c5a64cfe
NS
9150}
9151
629b3d75
MJ
9152/* Pass 1: Create a minimal tree of structured blocks, and record
9153 where each label is found. */
9bd46bc9 9154
629b3d75
MJ
9155static tree
9156diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9157 struct walk_stmt_info *wi)
9bd46bc9 9158{
629b3d75
MJ
9159 gimple *context = (gimple *) wi->info;
9160 gimple *inner_context;
9161 gimple *stmt = gsi_stmt (*gsi_p);
9bd46bc9 9162
629b3d75 9163 *handled_ops_p = true;
6e91acf8 9164
629b3d75
MJ
9165 switch (gimple_code (stmt))
9166 {
9167 WALK_SUBSTMTS;
6e91acf8 9168
629b3d75
MJ
9169 case GIMPLE_OMP_PARALLEL:
9170 case GIMPLE_OMP_TASK:
9171 case GIMPLE_OMP_SECTIONS:
9172 case GIMPLE_OMP_SINGLE:
9173 case GIMPLE_OMP_SECTION:
9174 case GIMPLE_OMP_MASTER:
9175 case GIMPLE_OMP_ORDERED:
9176 case GIMPLE_OMP_CRITICAL:
9177 case GIMPLE_OMP_TARGET:
9178 case GIMPLE_OMP_TEAMS:
9179 case GIMPLE_OMP_TASKGROUP:
9180 /* The minimal context here is just the current OMP construct. */
9181 inner_context = stmt;
9182 wi->info = inner_context;
9183 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9184 wi->info = context;
9185 break;
e5014671 9186
629b3d75
MJ
9187 case GIMPLE_OMP_FOR:
9188 inner_context = stmt;
9189 wi->info = inner_context;
9190 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9191 walk them. */
9192 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9193 diagnose_sb_1, NULL, wi);
9194 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9195 wi->info = context;
9196 break;
e5014671 9197
629b3d75
MJ
9198 case GIMPLE_LABEL:
9199 splay_tree_insert (all_labels,
9200 (splay_tree_key) gimple_label_label (
9201 as_a <glabel *> (stmt)),
9202 (splay_tree_value) context);
9203 break;
e5014671 9204
629b3d75
MJ
9205 default:
9206 break;
e5014671
NS
9207 }
9208
629b3d75 9209 return NULL_TREE;
e5014671
NS
9210}
9211
629b3d75
MJ
9212/* Pass 2: Check each branch and see if its context differs from that of
9213 the destination label's context. */
94829f87 9214
629b3d75
MJ
9215static tree
9216diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9217 struct walk_stmt_info *wi)
94829f87 9218{
629b3d75
MJ
9219 gimple *context = (gimple *) wi->info;
9220 splay_tree_node n;
9221 gimple *stmt = gsi_stmt (*gsi_p);
f8393eb0 9222
629b3d75 9223 *handled_ops_p = true;
f8393eb0 9224
629b3d75 9225 switch (gimple_code (stmt))
9bd46bc9 9226 {
629b3d75 9227 WALK_SUBSTMTS;
9bd46bc9 9228
629b3d75
MJ
9229 case GIMPLE_OMP_PARALLEL:
9230 case GIMPLE_OMP_TASK:
9231 case GIMPLE_OMP_SECTIONS:
9232 case GIMPLE_OMP_SINGLE:
9233 case GIMPLE_OMP_SECTION:
9234 case GIMPLE_OMP_MASTER:
9235 case GIMPLE_OMP_ORDERED:
9236 case GIMPLE_OMP_CRITICAL:
9237 case GIMPLE_OMP_TARGET:
9238 case GIMPLE_OMP_TEAMS:
9239 case GIMPLE_OMP_TASKGROUP:
9240 wi->info = stmt;
9241 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9242 wi->info = context;
9243 break;
e5014671 9244
629b3d75
MJ
9245 case GIMPLE_OMP_FOR:
9246 wi->info = stmt;
9247 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9248 walk them. */
9249 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9250 diagnose_sb_2, NULL, wi);
9251 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9252 wi->info = context;
9253 break;
e5014671 9254
629b3d75
MJ
9255 case GIMPLE_COND:
9256 {
9257 gcond *cond_stmt = as_a <gcond *> (stmt);
9258 tree lab = gimple_cond_true_label (cond_stmt);
9259 if (lab)
9bd46bc9 9260 {
629b3d75
MJ
9261 n = splay_tree_lookup (all_labels,
9262 (splay_tree_key) lab);
9263 diagnose_sb_0 (gsi_p, context,
9264 n ? (gimple *) n->value : NULL);
9bd46bc9 9265 }
629b3d75
MJ
9266 lab = gimple_cond_false_label (cond_stmt);
9267 if (lab)
9268 {
9269 n = splay_tree_lookup (all_labels,
9270 (splay_tree_key) lab);
9271 diagnose_sb_0 (gsi_p, context,
9272 n ? (gimple *) n->value : NULL);
9273 }
9274 }
9275 break;
9bd46bc9 9276
629b3d75
MJ
9277 case GIMPLE_GOTO:
9278 {
9279 tree lab = gimple_goto_dest (stmt);
9280 if (TREE_CODE (lab) != LABEL_DECL)
9281 break;
9282
9283 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9284 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9285 }
9286 break;
9bd46bc9 9287
629b3d75
MJ
9288 case GIMPLE_SWITCH:
9289 {
9290 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9291 unsigned int i;
9292 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9bd46bc9 9293 {
629b3d75
MJ
9294 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9295 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9296 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9297 break;
9bd46bc9 9298 }
9bd46bc9 9299 }
629b3d75 9300 break;
9bd46bc9 9301
629b3d75
MJ
9302 case GIMPLE_RETURN:
9303 diagnose_sb_0 (gsi_p, context, NULL);
9304 break;
94829f87 9305
629b3d75
MJ
9306 default:
9307 break;
94829f87
NS
9308 }
9309
629b3d75 9310 return NULL_TREE;
bd751975
NS
9311}
9312
629b3d75
MJ
9313static unsigned int
9314diagnose_omp_structured_block_errors (void)
94829f87 9315{
629b3d75
MJ
9316 struct walk_stmt_info wi;
9317 gimple_seq body = gimple_body (current_function_decl);
346a966e 9318
629b3d75 9319 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
94829f87 9320
629b3d75
MJ
9321 memset (&wi, 0, sizeof (wi));
9322 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
94829f87 9323
629b3d75
MJ
9324 memset (&wi, 0, sizeof (wi));
9325 wi.want_locations = true;
9326 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
94829f87 9327
629b3d75 9328 gimple_set_body (current_function_decl, body);
9669b00b 9329
629b3d75
MJ
9330 splay_tree_delete (all_labels);
9331 all_labels = NULL;
9669b00b 9332
9669b00b
AM
9333 return 0;
9334}
9335
9336namespace {
9337
629b3d75 9338const pass_data pass_data_diagnose_omp_blocks =
9669b00b
AM
9339{
9340 GIMPLE_PASS, /* type */
629b3d75 9341 "*diagnose_omp_blocks", /* name */
fd2b8c8b 9342 OPTGROUP_OMP, /* optinfo_flags */
9669b00b 9343 TV_NONE, /* tv_id */
629b3d75
MJ
9344 PROP_gimple_any, /* properties_required */
9345 0, /* properties_provided */
9669b00b
AM
9346 0, /* properties_destroyed */
9347 0, /* todo_flags_start */
629b3d75 9348 0, /* todo_flags_finish */
9669b00b
AM
9349};
9350
629b3d75 9351class pass_diagnose_omp_blocks : public gimple_opt_pass
9669b00b
AM
9352{
9353public:
629b3d75
MJ
9354 pass_diagnose_omp_blocks (gcc::context *ctxt)
9355 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9669b00b
AM
9356 {}
9357
9358 /* opt_pass methods: */
629b3d75
MJ
9359 virtual bool gate (function *)
9360 {
5e9d6aa4 9361 return flag_openacc || flag_openmp || flag_openmp_simd;
629b3d75 9362 }
9669b00b
AM
9363 virtual unsigned int execute (function *)
9364 {
629b3d75 9365 return diagnose_omp_structured_block_errors ();
4a38b02b
IV
9366 }
9367
629b3d75 9368}; // class pass_diagnose_omp_blocks
4a38b02b
IV
9369
9370} // anon namespace
9371
9372gimple_opt_pass *
629b3d75 9373make_pass_diagnose_omp_blocks (gcc::context *ctxt)
4a38b02b 9374{
629b3d75 9375 return new pass_diagnose_omp_blocks (ctxt);
4a38b02b 9376}
629b3d75 9377\f
4a38b02b 9378
953ff289 9379#include "gt-omp-low.h"