]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/omp-low.c
pr87874.c (em): Declare uint64_max as const unsigned long long int.
[thirdparty/gcc.git] / gcc / omp-low.c
CommitLineData
41dbbb37
TS
1/* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
953ff289
DN
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
85ec4feb 7 Copyright (C) 2005-2018 Free Software Foundation, Inc.
953ff289
DN
8
9This file is part of GCC.
10
11GCC is free software; you can redistribute it and/or modify it under
12the terms of the GNU General Public License as published by the Free
9dcd6f09 13Software Foundation; either version 3, or (at your option) any later
953ff289
DN
14version.
15
16GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17WARRANTY; without even the implied warranty of MERCHANTABILITY or
18FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19for more details.
20
21You should have received a copy of the GNU General Public License
9dcd6f09
NC
22along with GCC; see the file COPYING3. If not see
23<http://www.gnu.org/licenses/>. */
953ff289
DN
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
c7131fb2 28#include "backend.h"
957060b5 29#include "target.h"
953ff289 30#include "tree.h"
c7131fb2 31#include "gimple.h"
957060b5 32#include "tree-pass.h"
c7131fb2 33#include "ssa.h"
957060b5
AM
34#include "cgraph.h"
35#include "pretty-print.h"
36#include "diagnostic-core.h"
40e23961 37#include "fold-const.h"
d8a2d370 38#include "stor-layout.h"
2fb9a547
AM
39#include "internal-fn.h"
40#include "gimple-fold.h"
45b0be94 41#include "gimplify.h"
5be5c238 42#include "gimple-iterator.h"
18f429e2 43#include "gimplify-me.h"
5be5c238 44#include "gimple-walk.h"
726a989a 45#include "tree-iterator.h"
953ff289
DN
46#include "tree-inline.h"
47#include "langhooks.h"
442b4905 48#include "tree-dfa.h"
7a300452 49#include "tree-ssa.h"
6be42dd4 50#include "splay-tree.h"
629b3d75 51#include "omp-general.h"
0645c1a2 52#include "omp-low.h"
629b3d75 53#include "omp-grid.h"
4484a35a 54#include "gimple-low.h"
dd912cb8 55#include "symbol-summary.h"
1fe37220 56#include "tree-nested.h"
1f6be682 57#include "context.h"
41dbbb37 58#include "gomp-constants.h"
9bd46bc9 59#include "gimple-pretty-print.h"
13293add 60#include "hsa-common.h"
314e6352
ML
61#include "stringpool.h"
62#include "attribs.h"
953ff289 63
41dbbb37 64/* Lowering of OMP parallel and workshare constructs proceeds in two
953ff289
DN
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
c0220ea4 68 re-gimplifying things when variables have been replaced with complex
953ff289
DN
69 expressions.
70
7ebaeab5 71 Final code generation is done by pass_expand_omp. The flowgraph is
41dbbb37
TS
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
953ff289
DN
74
75/* Context structure. Used to store information about each parallel
76 directive in the code. */
77
a79683d5 78struct omp_context
953ff289
DN
79{
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
85
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
355fe088 88 gimple *stmt;
953ff289 89
b8698a0f 90 /* Map variables to fields in a structure that allows communication
953ff289
DN
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
96
a68ab351
JJ
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
104
953ff289
DN
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
108
acf0174b
JJ
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
111 tree cancel_label;
112
6e6cf7b0
JJ
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
116
953ff289
DN
117 /* Nesting depth of this context. Used to beautify error messages re
118 invalid gotos. The outermost ctx is depth 1, with depth 0 being
119 reserved for the main body of the function. */
120 int depth;
121
953ff289
DN
122 /* True if this parallel directive is nested within another. */
123 bool is_nested;
acf0174b
JJ
124
125 /* True if this construct can be cancelled. */
126 bool cancellable;
a79683d5 127};
953ff289 128
953ff289 129static splay_tree all_contexts;
a68ab351 130static int taskreg_nesting_level;
acf0174b 131static int target_nesting_level;
a68ab351 132static bitmap task_shared_vars;
5771c391 133static vec<omp_context *> taskreg_contexts;
953ff289 134
26127932 135static void scan_omp (gimple_seq *, omp_context *);
726a989a
RB
136static tree scan_omp_1_op (tree *, int *, void *);
137
138#define WALK_SUBSTMTS \
139 case GIMPLE_BIND: \
140 case GIMPLE_TRY: \
141 case GIMPLE_CATCH: \
142 case GIMPLE_EH_FILTER: \
0a35513e 143 case GIMPLE_TRANSACTION: \
726a989a
RB
144 /* The sub-statements for these should be walked. */ \
145 *handled_ops_p = false; \
146 break;
147
e4834818
NS
148/* Return true if CTX corresponds to an oacc parallel region. */
149
150static bool
151is_oacc_parallel (omp_context *ctx)
152{
153 enum gimple_code outer_type = gimple_code (ctx->stmt);
154 return ((outer_type == GIMPLE_OMP_TARGET)
155 && (gimple_omp_target_kind (ctx->stmt)
156 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
157}
158
159/* Return true if CTX corresponds to an oacc kernels region. */
160
161static bool
162is_oacc_kernels (omp_context *ctx)
163{
164 enum gimple_code outer_type = gimple_code (ctx->stmt);
165 return ((outer_type == GIMPLE_OMP_TARGET)
166 && (gimple_omp_target_kind (ctx->stmt)
167 == GF_OMP_TARGET_KIND_OACC_KERNELS));
168}
169
d9a6bd32
JJ
170/* If DECL is the artificial dummy VAR_DECL created for non-static
171 data member privatization, return the underlying "this" parameter,
172 otherwise return NULL. */
173
174tree
175omp_member_access_dummy_var (tree decl)
176{
177 if (!VAR_P (decl)
178 || !DECL_ARTIFICIAL (decl)
179 || !DECL_IGNORED_P (decl)
180 || !DECL_HAS_VALUE_EXPR_P (decl)
181 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
182 return NULL_TREE;
183
184 tree v = DECL_VALUE_EXPR (decl);
185 if (TREE_CODE (v) != COMPONENT_REF)
186 return NULL_TREE;
187
188 while (1)
189 switch (TREE_CODE (v))
190 {
191 case COMPONENT_REF:
192 case MEM_REF:
193 case INDIRECT_REF:
194 CASE_CONVERT:
195 case POINTER_PLUS_EXPR:
196 v = TREE_OPERAND (v, 0);
197 continue;
198 case PARM_DECL:
199 if (DECL_CONTEXT (v) == current_function_decl
200 && DECL_ARTIFICIAL (v)
201 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
202 return v;
203 return NULL_TREE;
204 default:
205 return NULL_TREE;
206 }
207}
208
209/* Helper for unshare_and_remap, called through walk_tree. */
210
211static tree
212unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
213{
214 tree *pair = (tree *) data;
215 if (*tp == pair[0])
216 {
217 *tp = unshare_expr (pair[1]);
218 *walk_subtrees = 0;
219 }
220 else if (IS_TYPE_OR_DECL_P (*tp))
221 *walk_subtrees = 0;
222 return NULL_TREE;
223}
224
225/* Return unshare_expr (X) with all occurrences of FROM
226 replaced with TO. */
227
228static tree
229unshare_and_remap (tree x, tree from, tree to)
230{
231 tree pair[2] = { from, to };
232 x = unshare_expr (x);
233 walk_tree (&x, unshare_and_remap_1, pair, NULL);
234 return x;
235}
236
726a989a
RB
237/* Convenience function for calling scan_omp_1_op on tree operands. */
238
239static inline tree
240scan_omp_op (tree *tp, omp_context *ctx)
241{
242 struct walk_stmt_info wi;
243
244 memset (&wi, 0, sizeof (wi));
245 wi.info = ctx;
246 wi.want_locations = true;
247
248 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
249}
250
355a7673 251static void lower_omp (gimple_seq *, omp_context *);
8ca5b2a2
JJ
252static tree lookup_decl_in_outer_ctx (tree, omp_context *);
253static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
953ff289 254
953ff289
DN
255/* Return true if CTX is for an omp parallel. */
256
257static inline bool
258is_parallel_ctx (omp_context *ctx)
259{
726a989a 260 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
953ff289
DN
261}
262
50674e96 263
a68ab351
JJ
264/* Return true if CTX is for an omp task. */
265
266static inline bool
267is_task_ctx (omp_context *ctx)
268{
726a989a 269 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
a68ab351
JJ
270}
271
272
d9a6bd32
JJ
273/* Return true if CTX is for an omp taskloop. */
274
275static inline bool
276is_taskloop_ctx (omp_context *ctx)
277{
278 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
279 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
280}
281
282
a68ab351
JJ
283/* Return true if CTX is for an omp parallel or omp task. */
284
285static inline bool
286is_taskreg_ctx (omp_context *ctx)
287{
d9a6bd32 288 return is_parallel_ctx (ctx) || is_task_ctx (ctx);
a68ab351
JJ
289}
290
953ff289
DN
291/* Return true if EXPR is variable sized. */
292
293static inline bool
22ea9ec0 294is_variable_sized (const_tree expr)
953ff289
DN
295{
296 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
297}
298
41dbbb37 299/* Lookup variables. The "maybe" form
953ff289
DN
300 allows for the variable form to not have been entered, otherwise we
301 assert that the variable must have been entered. */
302
303static inline tree
304lookup_decl (tree var, omp_context *ctx)
305{
b787e7a2 306 tree *n = ctx->cb.decl_map->get (var);
6be42dd4 307 return *n;
953ff289
DN
308}
309
310static inline tree
7c8f7639 311maybe_lookup_decl (const_tree var, omp_context *ctx)
953ff289 312{
b787e7a2 313 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
6be42dd4 314 return n ? *n : NULL_TREE;
953ff289
DN
315}
316
317static inline tree
318lookup_field (tree var, omp_context *ctx)
319{
320 splay_tree_node n;
321 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
322 return (tree) n->value;
323}
324
a68ab351 325static inline tree
d9a6bd32 326lookup_sfield (splay_tree_key key, omp_context *ctx)
a68ab351
JJ
327{
328 splay_tree_node n;
329 n = splay_tree_lookup (ctx->sfield_map
d9a6bd32 330 ? ctx->sfield_map : ctx->field_map, key);
a68ab351
JJ
331 return (tree) n->value;
332}
333
953ff289 334static inline tree
d9a6bd32
JJ
335lookup_sfield (tree var, omp_context *ctx)
336{
337 return lookup_sfield ((splay_tree_key) var, ctx);
338}
339
340static inline tree
341maybe_lookup_field (splay_tree_key key, omp_context *ctx)
953ff289
DN
342{
343 splay_tree_node n;
d9a6bd32 344 n = splay_tree_lookup (ctx->field_map, key);
953ff289
DN
345 return n ? (tree) n->value : NULL_TREE;
346}
347
d9a6bd32
JJ
348static inline tree
349maybe_lookup_field (tree var, omp_context *ctx)
350{
351 return maybe_lookup_field ((splay_tree_key) var, ctx);
352}
353
7c8f7639
JJ
354/* Return true if DECL should be copied by pointer. SHARED_CTX is
355 the parallel context if DECL is to be shared. */
953ff289
DN
356
357static bool
a68ab351 358use_pointer_for_field (tree decl, omp_context *shared_ctx)
953ff289 359{
9dc5773f
JJ
360 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
361 || TYPE_ATOMIC (TREE_TYPE (decl)))
953ff289
DN
362 return true;
363
6fc0bb99 364 /* We can only use copy-in/copy-out semantics for shared variables
953ff289 365 when we know the value is not accessible from an outer scope. */
7c8f7639 366 if (shared_ctx)
953ff289 367 {
41dbbb37
TS
368 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
369
953ff289
DN
370 /* ??? Trivially accessible from anywhere. But why would we even
371 be passing an address in this case? Should we simply assert
372 this to be false, or should we have a cleanup pass that removes
373 these from the list of mappings? */
374 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
375 return true;
376
377 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
378 without analyzing the expression whether or not its location
379 is accessible to anyone else. In the case of nested parallel
380 regions it certainly may be. */
077b0dfb 381 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
953ff289
DN
382 return true;
383
384 /* Do not use copy-in/copy-out for variables that have their
385 address taken. */
386 if (TREE_ADDRESSABLE (decl))
387 return true;
7c8f7639 388
6d840d99
JJ
389 /* lower_send_shared_vars only uses copy-in, but not copy-out
390 for these. */
391 if (TREE_READONLY (decl)
392 || ((TREE_CODE (decl) == RESULT_DECL
393 || TREE_CODE (decl) == PARM_DECL)
394 && DECL_BY_REFERENCE (decl)))
395 return false;
396
7c8f7639
JJ
397 /* Disallow copy-in/out in nested parallel if
398 decl is shared in outer parallel, otherwise
399 each thread could store the shared variable
400 in its own copy-in location, making the
401 variable no longer really shared. */
6d840d99 402 if (shared_ctx->is_nested)
7c8f7639
JJ
403 {
404 omp_context *up;
405
406 for (up = shared_ctx->outer; up; up = up->outer)
d9c194cb 407 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
7c8f7639
JJ
408 break;
409
d9c194cb 410 if (up)
7c8f7639
JJ
411 {
412 tree c;
413
726a989a 414 for (c = gimple_omp_taskreg_clauses (up->stmt);
7c8f7639
JJ
415 c; c = OMP_CLAUSE_CHAIN (c))
416 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
417 && OMP_CLAUSE_DECL (c) == decl)
418 break;
419
420 if (c)
25142650 421 goto maybe_mark_addressable_and_ret;
7c8f7639
JJ
422 }
423 }
a68ab351 424
6d840d99 425 /* For tasks avoid using copy-in/out. As tasks can be
a68ab351
JJ
426 deferred or executed in different thread, when GOMP_task
427 returns, the task hasn't necessarily terminated. */
6d840d99 428 if (is_task_ctx (shared_ctx))
a68ab351 429 {
25142650
JJ
430 tree outer;
431 maybe_mark_addressable_and_ret:
432 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
d9a6bd32 433 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
a68ab351
JJ
434 {
435 /* Taking address of OUTER in lower_send_shared_vars
436 might need regimplification of everything that uses the
437 variable. */
438 if (!task_shared_vars)
439 task_shared_vars = BITMAP_ALLOC (NULL);
440 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
441 TREE_ADDRESSABLE (outer) = 1;
442 }
443 return true;
444 }
953ff289
DN
445 }
446
447 return false;
448}
449
917948d3
ZD
450/* Construct a new automatic decl similar to VAR. */
451
452static tree
453omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
454{
455 tree copy = copy_var_decl (var, name, type);
456
457 DECL_CONTEXT (copy) = current_function_decl;
910ad8de 458 DECL_CHAIN (copy) = ctx->block_vars;
d9a6bd32
JJ
459 /* If VAR is listed in task_shared_vars, it means it wasn't
460 originally addressable and is just because task needs to take
461 it's address. But we don't need to take address of privatizations
462 from that var. */
463 if (TREE_ADDRESSABLE (var)
464 && task_shared_vars
465 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
466 TREE_ADDRESSABLE (copy) = 0;
953ff289
DN
467 ctx->block_vars = copy;
468
469 return copy;
470}
471
472static tree
473omp_copy_decl_1 (tree var, omp_context *ctx)
474{
475 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
476}
477
a9a58711
JJ
478/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
479 as appropriate. */
480static tree
481omp_build_component_ref (tree obj, tree field)
482{
483 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
484 if (TREE_THIS_VOLATILE (field))
485 TREE_THIS_VOLATILE (ret) |= 1;
486 if (TREE_READONLY (field))
487 TREE_READONLY (ret) |= 1;
488 return ret;
489}
490
953ff289
DN
491/* Build tree nodes to access the field for VAR on the receiver side. */
492
493static tree
494build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
495{
496 tree x, field = lookup_field (var, ctx);
497
498 /* If the receiver record type was remapped in the child function,
499 remap the field into the new record type. */
500 x = maybe_lookup_field (field, ctx);
501 if (x != NULL)
502 field = x;
503
70f34814 504 x = build_simple_mem_ref (ctx->receiver_decl);
f1b9b669 505 TREE_THIS_NOTRAP (x) = 1;
a9a58711 506 x = omp_build_component_ref (x, field);
953ff289 507 if (by_ref)
096b85f4
TV
508 {
509 x = build_simple_mem_ref (x);
510 TREE_THIS_NOTRAP (x) = 1;
511 }
953ff289
DN
512
513 return x;
514}
515
516/* Build tree nodes to access VAR in the scope outer to CTX. In the case
517 of a parallel, this is a component reference; for workshare constructs
518 this is some variable. */
519
520static tree
c39dad64
JJ
521build_outer_var_ref (tree var, omp_context *ctx,
522 enum omp_clause_code code = OMP_CLAUSE_ERROR)
953ff289
DN
523{
524 tree x;
525
8ca5b2a2 526 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
953ff289
DN
527 x = var;
528 else if (is_variable_sized (var))
529 {
530 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
c39dad64 531 x = build_outer_var_ref (x, ctx, code);
70f34814 532 x = build_simple_mem_ref (x);
953ff289 533 }
a68ab351 534 else if (is_taskreg_ctx (ctx))
953ff289 535 {
7c8f7639 536 bool by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
537 x = build_receiver_ref (var, by_ref, ctx);
538 }
c39dad64
JJ
539 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
540 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
541 || (code == OMP_CLAUSE_PRIVATE
542 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
543 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
544 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
74bf76ed 545 {
c39dad64
JJ
546 /* #pragma omp simd isn't a worksharing construct, and can reference
547 even private vars in its linear etc. clauses.
548 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
549 to private vars in all worksharing constructs. */
74bf76ed
JJ
550 x = NULL_TREE;
551 if (ctx->outer && is_taskreg_ctx (ctx))
552 x = lookup_decl (var, ctx->outer);
553 else if (ctx->outer)
f3b331d1 554 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
74bf76ed
JJ
555 if (x == NULL_TREE)
556 x = var;
557 }
c39dad64 558 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
d9a6bd32
JJ
559 {
560 gcc_assert (ctx->outer);
561 splay_tree_node n
562 = splay_tree_lookup (ctx->outer->field_map,
563 (splay_tree_key) &DECL_UID (var));
564 if (n == NULL)
565 {
566 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
567 x = var;
568 else
569 x = lookup_decl (var, ctx->outer);
570 }
571 else
572 {
573 tree field = (tree) n->value;
574 /* If the receiver record type was remapped in the child function,
575 remap the field into the new record type. */
576 x = maybe_lookup_field (field, ctx->outer);
577 if (x != NULL)
578 field = x;
579
580 x = build_simple_mem_ref (ctx->outer->receiver_decl);
581 x = omp_build_component_ref (x, field);
582 if (use_pointer_for_field (var, ctx->outer))
583 x = build_simple_mem_ref (x);
584 }
585 }
953ff289 586 else if (ctx->outer)
b2b40051
MJ
587 {
588 omp_context *outer = ctx->outer;
589 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
590 {
591 outer = outer->outer;
592 gcc_assert (outer
593 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
594 }
c39dad64 595 x = lookup_decl (var, outer);
b2b40051 596 }
629b3d75 597 else if (omp_is_reference (var))
eeb1d9e0
JJ
598 /* This can happen with orphaned constructs. If var is reference, it is
599 possible it is shared and as such valid. */
600 x = var;
d9a6bd32
JJ
601 else if (omp_member_access_dummy_var (var))
602 x = var;
953ff289
DN
603 else
604 gcc_unreachable ();
605
d9a6bd32
JJ
606 if (x == var)
607 {
608 tree t = omp_member_access_dummy_var (var);
609 if (t)
610 {
611 x = DECL_VALUE_EXPR (var);
612 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
613 if (o != t)
614 x = unshare_and_remap (x, t, o);
615 else
616 x = unshare_expr (x);
617 }
618 }
619
629b3d75 620 if (omp_is_reference (var))
70f34814 621 x = build_simple_mem_ref (x);
953ff289
DN
622
623 return x;
624}
625
626/* Build tree nodes to access the field for VAR on the sender side. */
627
628static tree
d9a6bd32 629build_sender_ref (splay_tree_key key, omp_context *ctx)
953ff289 630{
d9a6bd32 631 tree field = lookup_sfield (key, ctx);
a9a58711 632 return omp_build_component_ref (ctx->sender_decl, field);
953ff289
DN
633}
634
d9a6bd32
JJ
635static tree
636build_sender_ref (tree var, omp_context *ctx)
637{
638 return build_sender_ref ((splay_tree_key) var, ctx);
639}
640
86938de6
TV
641/* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
642 BASE_POINTERS_RESTRICT, declare the field with restrict. */
953ff289
DN
643
644static void
829c6349 645install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
953ff289 646{
a68ab351 647 tree field, type, sfield = NULL_TREE;
d9a6bd32 648 splay_tree_key key = (splay_tree_key) var;
953ff289 649
d9a6bd32
JJ
650 if ((mask & 8) != 0)
651 {
652 key = (splay_tree_key) &DECL_UID (var);
653 gcc_checking_assert (key != (splay_tree_key) var);
654 }
a68ab351 655 gcc_assert ((mask & 1) == 0
d9a6bd32 656 || !splay_tree_lookup (ctx->field_map, key));
a68ab351 657 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
d9a6bd32 658 || !splay_tree_lookup (ctx->sfield_map, key));
41dbbb37
TS
659 gcc_assert ((mask & 3) == 3
660 || !is_gimple_omp_oacc (ctx->stmt));
953ff289
DN
661
662 type = TREE_TYPE (var);
8498c16b
TV
663 /* Prevent redeclaring the var in the split-off function with a restrict
664 pointer type. Note that we only clear type itself, restrict qualifiers in
665 the pointed-to type will be ignored by points-to analysis. */
666 if (POINTER_TYPE_P (type)
667 && TYPE_RESTRICT (type))
668 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
669
acf0174b
JJ
670 if (mask & 4)
671 {
672 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
673 type = build_pointer_type (build_pointer_type (type));
674 }
675 else if (by_ref)
829c6349 676 type = build_pointer_type (type);
629b3d75 677 else if ((mask & 3) == 1 && omp_is_reference (var))
a68ab351 678 type = TREE_TYPE (type);
953ff289 679
c2255bc4
AH
680 field = build_decl (DECL_SOURCE_LOCATION (var),
681 FIELD_DECL, DECL_NAME (var), type);
953ff289
DN
682
683 /* Remember what variable this field was created for. This does have a
684 side effect of making dwarf2out ignore this member, so for helpful
685 debugging we clear it later in delete_omp_context. */
686 DECL_ABSTRACT_ORIGIN (field) = var;
a68ab351
JJ
687 if (type == TREE_TYPE (var))
688 {
fe37c7af 689 SET_DECL_ALIGN (field, DECL_ALIGN (var));
a68ab351
JJ
690 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
691 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
692 }
693 else
fe37c7af 694 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
953ff289 695
a68ab351
JJ
696 if ((mask & 3) == 3)
697 {
698 insert_field_into_struct (ctx->record_type, field);
699 if (ctx->srecord_type)
700 {
c2255bc4
AH
701 sfield = build_decl (DECL_SOURCE_LOCATION (var),
702 FIELD_DECL, DECL_NAME (var), type);
a68ab351 703 DECL_ABSTRACT_ORIGIN (sfield) = var;
fe37c7af 704 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
a68ab351
JJ
705 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
706 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
707 insert_field_into_struct (ctx->srecord_type, sfield);
708 }
709 }
710 else
711 {
712 if (ctx->srecord_type == NULL_TREE)
713 {
714 tree t;
715
716 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
717 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
718 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
719 {
d9a6bd32 720 sfield = build_decl (DECL_SOURCE_LOCATION (t),
c2255bc4 721 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
a68ab351
JJ
722 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
723 insert_field_into_struct (ctx->srecord_type, sfield);
724 splay_tree_insert (ctx->sfield_map,
725 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
726 (splay_tree_value) sfield);
727 }
728 }
729 sfield = field;
730 insert_field_into_struct ((mask & 1) ? ctx->record_type
731 : ctx->srecord_type, field);
732 }
953ff289 733
a68ab351 734 if (mask & 1)
d9a6bd32 735 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
a68ab351 736 if ((mask & 2) && ctx->sfield_map)
d9a6bd32 737 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
953ff289
DN
738}
739
740static tree
741install_var_local (tree var, omp_context *ctx)
742{
743 tree new_var = omp_copy_decl_1 (var, ctx);
744 insert_decl_map (&ctx->cb, var, new_var);
745 return new_var;
746}
747
748/* Adjust the replacement for DECL in CTX for the new context. This means
749 copying the DECL_VALUE_EXPR, and fixing up the type. */
750
751static void
752fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
753{
754 tree new_decl, size;
755
756 new_decl = lookup_decl (decl, ctx);
757
758 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
759
760 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
761 && DECL_HAS_VALUE_EXPR_P (decl))
762 {
763 tree ve = DECL_VALUE_EXPR (decl);
726a989a 764 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
953ff289
DN
765 SET_DECL_VALUE_EXPR (new_decl, ve);
766 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
767 }
768
769 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
770 {
771 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
772 if (size == error_mark_node)
773 size = TYPE_SIZE (TREE_TYPE (new_decl));
774 DECL_SIZE (new_decl) = size;
775
776 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
777 if (size == error_mark_node)
778 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
779 DECL_SIZE_UNIT (new_decl) = size;
780 }
781}
782
783/* The callback for remap_decl. Search all containing contexts for a
784 mapping of the variable; this avoids having to duplicate the splay
785 tree ahead of time. We know a mapping doesn't already exist in the
786 given context. Create new mappings to implement default semantics. */
787
788static tree
789omp_copy_decl (tree var, copy_body_data *cb)
790{
791 omp_context *ctx = (omp_context *) cb;
792 tree new_var;
793
953ff289
DN
794 if (TREE_CODE (var) == LABEL_DECL)
795 {
50aa16c3
JJ
796 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
797 return var;
c2255bc4 798 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
50674e96 799 DECL_CONTEXT (new_var) = current_function_decl;
953ff289
DN
800 insert_decl_map (&ctx->cb, var, new_var);
801 return new_var;
802 }
803
a68ab351 804 while (!is_taskreg_ctx (ctx))
953ff289
DN
805 {
806 ctx = ctx->outer;
807 if (ctx == NULL)
808 return var;
809 new_var = maybe_lookup_decl (var, ctx);
810 if (new_var)
811 return new_var;
812 }
813
8ca5b2a2
JJ
814 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
815 return var;
816
953ff289
DN
817 return error_mark_node;
818}
819
629b3d75 820/* Create a new context, with OUTER_CTX being the surrounding context. */
50674e96 821
629b3d75
MJ
822static omp_context *
823new_omp_context (gimple *stmt, omp_context *outer_ctx)
50674e96 824{
629b3d75 825 omp_context *ctx = XCNEW (omp_context);
50674e96 826
629b3d75
MJ
827 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
828 (splay_tree_value) ctx);
829 ctx->stmt = stmt;
50674e96 830
629b3d75 831 if (outer_ctx)
777f7f9a 832 {
629b3d75
MJ
833 ctx->outer = outer_ctx;
834 ctx->cb = outer_ctx->cb;
835 ctx->cb.block = NULL;
836 ctx->depth = outer_ctx->depth + 1;
953ff289
DN
837 }
838 else
839 {
840 ctx->cb.src_fn = current_function_decl;
841 ctx->cb.dst_fn = current_function_decl;
d52f5295 842 ctx->cb.src_node = cgraph_node::get (current_function_decl);
fe660d7b 843 gcc_checking_assert (ctx->cb.src_node);
953ff289
DN
844 ctx->cb.dst_node = ctx->cb.src_node;
845 ctx->cb.src_cfun = cfun;
846 ctx->cb.copy_decl = omp_copy_decl;
1d65f45c 847 ctx->cb.eh_lp_nr = 0;
953ff289
DN
848 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
849 ctx->depth = 1;
850 }
851
b787e7a2 852 ctx->cb.decl_map = new hash_map<tree, tree>;
953ff289
DN
853
854 return ctx;
855}
856
726a989a 857static gimple_seq maybe_catch_exception (gimple_seq);
2368a460
JJ
858
859/* Finalize task copyfn. */
860
861static void
538dd0b7 862finalize_task_copyfn (gomp_task *task_stmt)
2368a460
JJ
863{
864 struct function *child_cfun;
af16bc76 865 tree child_fn;
355a7673 866 gimple_seq seq = NULL, new_seq;
538dd0b7 867 gbind *bind;
2368a460 868
726a989a 869 child_fn = gimple_omp_task_copy_fn (task_stmt);
2368a460
JJ
870 if (child_fn == NULL_TREE)
871 return;
872
873 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
d7ed20db 874 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
2368a460 875
2368a460 876 push_cfun (child_cfun);
3ad065ef 877 bind = gimplify_body (child_fn, false);
726a989a
RB
878 gimple_seq_add_stmt (&seq, bind);
879 new_seq = maybe_catch_exception (seq);
880 if (new_seq != seq)
881 {
882 bind = gimple_build_bind (NULL, new_seq, NULL);
355a7673 883 seq = NULL;
726a989a
RB
884 gimple_seq_add_stmt (&seq, bind);
885 }
886 gimple_set_body (child_fn, seq);
2368a460 887 pop_cfun ();
2368a460 888
d7ed20db 889 /* Inform the callgraph about the new function. */
edafad14
TV
890 cgraph_node *node = cgraph_node::get_create (child_fn);
891 node->parallelized_function = 1;
d52f5295 892 cgraph_node::add_new_function (child_fn, false);
2368a460
JJ
893}
894
953ff289
DN
895/* Destroy a omp_context data structures. Called through the splay tree
896 value delete callback. */
897
898static void
899delete_omp_context (splay_tree_value value)
900{
901 omp_context *ctx = (omp_context *) value;
902
b787e7a2 903 delete ctx->cb.decl_map;
953ff289
DN
904
905 if (ctx->field_map)
906 splay_tree_delete (ctx->field_map);
a68ab351
JJ
907 if (ctx->sfield_map)
908 splay_tree_delete (ctx->sfield_map);
953ff289
DN
909
910 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
911 it produces corrupt debug information. */
912 if (ctx->record_type)
913 {
914 tree t;
910ad8de 915 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
953ff289
DN
916 DECL_ABSTRACT_ORIGIN (t) = NULL;
917 }
a68ab351
JJ
918 if (ctx->srecord_type)
919 {
920 tree t;
910ad8de 921 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
a68ab351
JJ
922 DECL_ABSTRACT_ORIGIN (t) = NULL;
923 }
953ff289 924
2368a460 925 if (is_task_ctx (ctx))
538dd0b7 926 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
2368a460 927
953ff289
DN
928 XDELETE (ctx);
929}
930
931/* Fix up RECEIVER_DECL with a type that has been remapped to the child
932 context. */
933
934static void
935fixup_child_record_type (omp_context *ctx)
936{
937 tree f, type = ctx->record_type;
938
b2b40051
MJ
939 if (!ctx->receiver_decl)
940 return;
953ff289
DN
941 /* ??? It isn't sufficient to just call remap_type here, because
942 variably_modified_type_p doesn't work the way we expect for
943 record types. Testing each field for whether it needs remapping
944 and creating a new record by hand works, however. */
910ad8de 945 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
953ff289
DN
946 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
947 break;
948 if (f)
949 {
950 tree name, new_fields = NULL;
951
952 type = lang_hooks.types.make_type (RECORD_TYPE);
953 name = DECL_NAME (TYPE_NAME (ctx->record_type));
c2255bc4
AH
954 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
955 TYPE_DECL, name, type);
953ff289
DN
956 TYPE_NAME (type) = name;
957
910ad8de 958 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
953ff289
DN
959 {
960 tree new_f = copy_node (f);
961 DECL_CONTEXT (new_f) = type;
962 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
910ad8de 963 DECL_CHAIN (new_f) = new_fields;
726a989a
RB
964 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
965 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
966 &ctx->cb, NULL);
967 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
968 &ctx->cb, NULL);
953ff289
DN
969 new_fields = new_f;
970
971 /* Arrange to be able to look up the receiver field
972 given the sender field. */
973 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
974 (splay_tree_value) new_f);
975 }
976 TYPE_FIELDS (type) = nreverse (new_fields);
977 layout_type (type);
978 }
979
d9a6bd32
JJ
980 /* In a target region we never modify any of the pointers in *.omp_data_i,
981 so attempt to help the optimizers. */
982 if (is_gimple_omp_offloaded (ctx->stmt))
983 type = build_qualified_type (type, TYPE_QUAL_CONST);
984
a2a2fe4b
RB
985 TREE_TYPE (ctx->receiver_decl)
986 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
953ff289
DN
987}
988
989/* Instantiate decls as necessary in CTX to satisfy the data sharing
829c6349 990 specified by CLAUSES. */
953ff289
DN
991
992static void
829c6349 993scan_sharing_clauses (tree clauses, omp_context *ctx)
953ff289
DN
994{
995 tree c, decl;
996 bool scan_array_reductions = false;
997
998 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
999 {
1000 bool by_ref;
1001
aaf46ef9 1002 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
1003 {
1004 case OMP_CLAUSE_PRIVATE:
1005 decl = OMP_CLAUSE_DECL (c);
a68ab351
JJ
1006 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1007 goto do_private;
1008 else if (!is_variable_sized (decl))
953ff289
DN
1009 install_var_local (decl, ctx);
1010 break;
1011
1012 case OMP_CLAUSE_SHARED:
9cf32741 1013 decl = OMP_CLAUSE_DECL (c);
acf0174b
JJ
1014 /* Ignore shared directives in teams construct. */
1015 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
9cf32741
JJ
1016 {
1017 /* Global variables don't need to be copied,
1018 the receiver side will use them directly. */
1019 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1020 if (is_global_var (odecl))
1021 break;
1022 insert_decl_map (&ctx->cb, decl, odecl);
1023 break;
1024 }
a68ab351 1025 gcc_assert (is_taskreg_ctx (ctx));
5da250fc
JJ
1026 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1027 || !is_variable_sized (decl));
8ca5b2a2
JJ
1028 /* Global variables don't need to be copied,
1029 the receiver side will use them directly. */
1030 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1031 break;
d9a6bd32 1032 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1a80d6b8
JJ
1033 {
1034 use_pointer_for_field (decl, ctx);
1035 break;
1036 }
1037 by_ref = use_pointer_for_field (decl, NULL);
1038 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
953ff289
DN
1039 || TREE_ADDRESSABLE (decl)
1040 || by_ref
629b3d75 1041 || omp_is_reference (decl))
953ff289 1042 {
1a80d6b8 1043 by_ref = use_pointer_for_field (decl, ctx);
a68ab351 1044 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1045 install_var_local (decl, ctx);
1046 break;
1047 }
1048 /* We don't need to copy const scalar vars back. */
aaf46ef9 1049 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
953ff289
DN
1050 goto do_private;
1051
d9a6bd32
JJ
1052 case OMP_CLAUSE_REDUCTION:
1053 decl = OMP_CLAUSE_DECL (c);
1054 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1055 && TREE_CODE (decl) == MEM_REF)
1056 {
1057 tree t = TREE_OPERAND (decl, 0);
e01d41e5
JJ
1058 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1059 t = TREE_OPERAND (t, 0);
d9a6bd32
JJ
1060 if (TREE_CODE (t) == INDIRECT_REF
1061 || TREE_CODE (t) == ADDR_EXPR)
1062 t = TREE_OPERAND (t, 0);
1063 install_var_local (t, ctx);
1064 if (is_taskreg_ctx (ctx)
1065 && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1066 && !is_variable_sized (t))
1067 {
1068 by_ref = use_pointer_for_field (t, ctx);
1069 install_var_field (t, by_ref, 3, ctx);
1070 }
1071 break;
1072 }
1073 goto do_private;
1074
953ff289
DN
1075 case OMP_CLAUSE_LASTPRIVATE:
1076 /* Let the corresponding firstprivate clause create
1077 the variable. */
1078 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1079 break;
1080 /* FALLTHRU */
1081
1082 case OMP_CLAUSE_FIRSTPRIVATE:
74bf76ed 1083 case OMP_CLAUSE_LINEAR:
953ff289
DN
1084 decl = OMP_CLAUSE_DECL (c);
1085 do_private:
d9a6bd32
JJ
1086 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1087 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1088 && is_gimple_omp_offloaded (ctx->stmt))
1089 {
1090 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
629b3d75 1091 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
d9a6bd32
JJ
1092 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1093 install_var_field (decl, true, 3, ctx);
1094 else
1095 install_var_field (decl, false, 3, ctx);
1096 }
953ff289 1097 if (is_variable_sized (decl))
953ff289 1098 {
a68ab351
JJ
1099 if (is_task_ctx (ctx))
1100 install_var_field (decl, false, 1, ctx);
1101 break;
1102 }
1103 else if (is_taskreg_ctx (ctx))
1104 {
1105 bool global
1106 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
7c8f7639 1107 by_ref = use_pointer_for_field (decl, NULL);
a68ab351
JJ
1108
1109 if (is_task_ctx (ctx)
629b3d75 1110 && (global || by_ref || omp_is_reference (decl)))
a68ab351
JJ
1111 {
1112 install_var_field (decl, false, 1, ctx);
1113 if (!global)
1114 install_var_field (decl, by_ref, 2, ctx);
1115 }
1116 else if (!global)
1117 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1118 }
1119 install_var_local (decl, ctx);
1120 break;
1121
d9a6bd32
JJ
1122 case OMP_CLAUSE_USE_DEVICE_PTR:
1123 decl = OMP_CLAUSE_DECL (c);
1124 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1125 install_var_field (decl, true, 3, ctx);
1126 else
1127 install_var_field (decl, false, 3, ctx);
1128 if (DECL_SIZE (decl)
1129 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1130 {
1131 tree decl2 = DECL_VALUE_EXPR (decl);
1132 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1133 decl2 = TREE_OPERAND (decl2, 0);
1134 gcc_assert (DECL_P (decl2));
1135 install_var_local (decl2, ctx);
1136 }
1137 install_var_local (decl, ctx);
1138 break;
1139
1140 case OMP_CLAUSE_IS_DEVICE_PTR:
1141 decl = OMP_CLAUSE_DECL (c);
1142 goto do_private;
1143
acf0174b 1144 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32 1145 gcc_assert (is_taskreg_ctx (ctx));
acf0174b
JJ
1146 decl = OMP_CLAUSE_DECL (c);
1147 install_var_field (decl, false, 3, ctx);
1148 install_var_local (decl, ctx);
1149 break;
1150
953ff289 1151 case OMP_CLAUSE_COPYPRIVATE:
953ff289
DN
1152 case OMP_CLAUSE_COPYIN:
1153 decl = OMP_CLAUSE_DECL (c);
7c8f7639 1154 by_ref = use_pointer_for_field (decl, NULL);
a68ab351 1155 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1156 break;
1157
20906c66 1158 case OMP_CLAUSE_FINAL:
953ff289
DN
1159 case OMP_CLAUSE_IF:
1160 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
1161 case OMP_CLAUSE_NUM_TEAMS:
1162 case OMP_CLAUSE_THREAD_LIMIT:
1163 case OMP_CLAUSE_DEVICE:
953ff289 1164 case OMP_CLAUSE_SCHEDULE:
acf0174b
JJ
1165 case OMP_CLAUSE_DIST_SCHEDULE:
1166 case OMP_CLAUSE_DEPEND:
d9a6bd32
JJ
1167 case OMP_CLAUSE_PRIORITY:
1168 case OMP_CLAUSE_GRAINSIZE:
1169 case OMP_CLAUSE_NUM_TASKS:
41dbbb37
TS
1170 case OMP_CLAUSE_NUM_GANGS:
1171 case OMP_CLAUSE_NUM_WORKERS:
1172 case OMP_CLAUSE_VECTOR_LENGTH:
953ff289 1173 if (ctx->outer)
726a989a 1174 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
953ff289
DN
1175 break;
1176
acf0174b
JJ
1177 case OMP_CLAUSE_TO:
1178 case OMP_CLAUSE_FROM:
1179 case OMP_CLAUSE_MAP:
1180 if (ctx->outer)
1181 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1182 decl = OMP_CLAUSE_DECL (c);
1183 /* Global variables with "omp declare target" attribute
1184 don't need to be copied, the receiver side will use them
4a38b02b 1185 directly. However, global variables with "omp declare target link"
5883c5cc 1186 attribute need to be copied. Or when ALWAYS modifier is used. */
acf0174b
JJ
1187 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1188 && DECL_P (decl)
e01d41e5
JJ
1189 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1190 && (OMP_CLAUSE_MAP_KIND (c)
1191 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32 1192 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
5883c5cc
JJ
1193 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1194 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1195 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
acf0174b 1196 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
4a38b02b
IV
1197 && varpool_node::get_create (decl)->offloadable
1198 && !lookup_attribute ("omp declare target link",
1199 DECL_ATTRIBUTES (decl)))
acf0174b
JJ
1200 break;
1201 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
41dbbb37 1202 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
acf0174b 1203 {
41dbbb37
TS
1204 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1205 not offloaded; there is nothing to map for those. */
1206 if (!is_gimple_omp_offloaded (ctx->stmt)
b8910447
JJ
1207 && !POINTER_TYPE_P (TREE_TYPE (decl))
1208 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
acf0174b
JJ
1209 break;
1210 }
d9a6bd32 1211 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
e01d41e5
JJ
1212 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1213 || (OMP_CLAUSE_MAP_KIND (c)
1214 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
d9a6bd32
JJ
1215 {
1216 if (TREE_CODE (decl) == COMPONENT_REF
1217 || (TREE_CODE (decl) == INDIRECT_REF
1218 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1219 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1220 == REFERENCE_TYPE)))
1221 break;
1222 if (DECL_SIZE (decl)
1223 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1224 {
1225 tree decl2 = DECL_VALUE_EXPR (decl);
1226 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1227 decl2 = TREE_OPERAND (decl2, 0);
1228 gcc_assert (DECL_P (decl2));
1229 install_var_local (decl2, ctx);
1230 }
1231 install_var_local (decl, ctx);
1232 break;
1233 }
acf0174b
JJ
1234 if (DECL_P (decl))
1235 {
1236 if (DECL_SIZE (decl)
1237 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1238 {
1239 tree decl2 = DECL_VALUE_EXPR (decl);
1240 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1241 decl2 = TREE_OPERAND (decl2, 0);
1242 gcc_assert (DECL_P (decl2));
e01d41e5 1243 install_var_field (decl2, true, 3, ctx);
acf0174b
JJ
1244 install_var_local (decl2, ctx);
1245 install_var_local (decl, ctx);
1246 }
1247 else
1248 {
1249 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
41dbbb37 1250 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
acf0174b
JJ
1251 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1252 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1253 install_var_field (decl, true, 7, ctx);
1254 else
829c6349 1255 install_var_field (decl, true, 3, ctx);
c42cfb5c
CP
1256 if (is_gimple_omp_offloaded (ctx->stmt)
1257 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
acf0174b
JJ
1258 install_var_local (decl, ctx);
1259 }
1260 }
1261 else
1262 {
1263 tree base = get_base_address (decl);
1264 tree nc = OMP_CLAUSE_CHAIN (c);
1265 if (DECL_P (base)
1266 && nc != NULL_TREE
1267 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1268 && OMP_CLAUSE_DECL (nc) == base
41dbbb37 1269 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
acf0174b
JJ
1270 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1271 {
1272 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1273 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1274 }
1275 else
1276 {
f014c653
JJ
1277 if (ctx->outer)
1278 {
1279 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1280 decl = OMP_CLAUSE_DECL (c);
1281 }
acf0174b
JJ
1282 gcc_assert (!splay_tree_lookup (ctx->field_map,
1283 (splay_tree_key) decl));
1284 tree field
1285 = build_decl (OMP_CLAUSE_LOCATION (c),
1286 FIELD_DECL, NULL_TREE, ptr_type_node);
fe37c7af 1287 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
acf0174b
JJ
1288 insert_field_into_struct (ctx->record_type, field);
1289 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1290 (splay_tree_value) field);
1291 }
1292 }
1293 break;
1294
b2b40051
MJ
1295 case OMP_CLAUSE__GRIDDIM_:
1296 if (ctx->outer)
1297 {
1298 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1299 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1300 }
1301 break;
1302
953ff289
DN
1303 case OMP_CLAUSE_NOWAIT:
1304 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
1305 case OMP_CLAUSE_COLLAPSE:
1306 case OMP_CLAUSE_UNTIED:
20906c66 1307 case OMP_CLAUSE_MERGEABLE:
acf0174b 1308 case OMP_CLAUSE_PROC_BIND:
74bf76ed 1309 case OMP_CLAUSE_SAFELEN:
d9a6bd32
JJ
1310 case OMP_CLAUSE_SIMDLEN:
1311 case OMP_CLAUSE_THREADS:
1312 case OMP_CLAUSE_SIMD:
1313 case OMP_CLAUSE_NOGROUP:
1314 case OMP_CLAUSE_DEFAULTMAP:
41dbbb37
TS
1315 case OMP_CLAUSE_ASYNC:
1316 case OMP_CLAUSE_WAIT:
1317 case OMP_CLAUSE_GANG:
1318 case OMP_CLAUSE_WORKER:
1319 case OMP_CLAUSE_VECTOR:
c5a64cfe
NS
1320 case OMP_CLAUSE_INDEPENDENT:
1321 case OMP_CLAUSE_AUTO:
1322 case OMP_CLAUSE_SEQ:
02889d23 1323 case OMP_CLAUSE_TILE:
6c7509bc 1324 case OMP_CLAUSE__SIMT_:
8a4674bb 1325 case OMP_CLAUSE_DEFAULT:
829c6349
CLT
1326 case OMP_CLAUSE_IF_PRESENT:
1327 case OMP_CLAUSE_FINALIZE:
953ff289
DN
1328 break;
1329
acf0174b
JJ
1330 case OMP_CLAUSE_ALIGNED:
1331 decl = OMP_CLAUSE_DECL (c);
1332 if (is_global_var (decl)
1333 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1334 install_var_local (decl, ctx);
1335 break;
1336
41dbbb37 1337 case OMP_CLAUSE__CACHE_:
953ff289
DN
1338 default:
1339 gcc_unreachable ();
1340 }
1341 }
1342
1343 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1344 {
aaf46ef9 1345 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
1346 {
1347 case OMP_CLAUSE_LASTPRIVATE:
1348 /* Let the corresponding firstprivate clause create
1349 the variable. */
726a989a 1350 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
a68ab351 1351 scan_array_reductions = true;
953ff289
DN
1352 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1353 break;
1354 /* FALLTHRU */
1355
953ff289 1356 case OMP_CLAUSE_FIRSTPRIVATE:
41dbbb37 1357 case OMP_CLAUSE_PRIVATE:
74bf76ed 1358 case OMP_CLAUSE_LINEAR:
d9a6bd32 1359 case OMP_CLAUSE_IS_DEVICE_PTR:
953ff289
DN
1360 decl = OMP_CLAUSE_DECL (c);
1361 if (is_variable_sized (decl))
d9a6bd32
JJ
1362 {
1363 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1364 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1365 && is_gimple_omp_offloaded (ctx->stmt))
1366 {
1367 tree decl2 = DECL_VALUE_EXPR (decl);
1368 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1369 decl2 = TREE_OPERAND (decl2, 0);
1370 gcc_assert (DECL_P (decl2));
1371 install_var_local (decl2, ctx);
1372 fixup_remapped_decl (decl2, ctx, false);
1373 }
1374 install_var_local (decl, ctx);
1375 }
953ff289 1376 fixup_remapped_decl (decl, ctx,
aaf46ef9 1377 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
953ff289 1378 && OMP_CLAUSE_PRIVATE_DEBUG (c));
d9a6bd32
JJ
1379 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1380 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
953ff289 1381 scan_array_reductions = true;
d9a6bd32
JJ
1382 break;
1383
1384 case OMP_CLAUSE_REDUCTION:
1385 decl = OMP_CLAUSE_DECL (c);
1386 if (TREE_CODE (decl) != MEM_REF)
1387 {
1388 if (is_variable_sized (decl))
1389 install_var_local (decl, ctx);
1390 fixup_remapped_decl (decl, ctx, false);
1391 }
1392 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
f7468577 1393 scan_array_reductions = true;
953ff289
DN
1394 break;
1395
1396 case OMP_CLAUSE_SHARED:
acf0174b
JJ
1397 /* Ignore shared directives in teams construct. */
1398 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1399 break;
953ff289 1400 decl = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
1401 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1402 break;
1403 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1404 {
1405 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1406 ctx->outer)))
1407 break;
1408 bool by_ref = use_pointer_for_field (decl, ctx);
1409 install_var_field (decl, by_ref, 11, ctx);
1410 break;
1411 }
1412 fixup_remapped_decl (decl, ctx, false);
953ff289
DN
1413 break;
1414
acf0174b 1415 case OMP_CLAUSE_MAP:
41dbbb37 1416 if (!is_gimple_omp_offloaded (ctx->stmt))
acf0174b
JJ
1417 break;
1418 decl = OMP_CLAUSE_DECL (c);
1419 if (DECL_P (decl)
e01d41e5
JJ
1420 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1421 && (OMP_CLAUSE_MAP_KIND (c)
1422 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32 1423 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
acf0174b 1424 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1f6be682 1425 && varpool_node::get_create (decl)->offloadable)
acf0174b
JJ
1426 break;
1427 if (DECL_P (decl))
1428 {
d9a6bd32
JJ
1429 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1430 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
acf0174b
JJ
1431 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1432 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1433 {
1434 tree new_decl = lookup_decl (decl, ctx);
1435 TREE_TYPE (new_decl)
1436 = remap_type (TREE_TYPE (decl), &ctx->cb);
1437 }
1438 else if (DECL_SIZE (decl)
1439 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1440 {
1441 tree decl2 = DECL_VALUE_EXPR (decl);
1442 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1443 decl2 = TREE_OPERAND (decl2, 0);
1444 gcc_assert (DECL_P (decl2));
1445 fixup_remapped_decl (decl2, ctx, false);
1446 fixup_remapped_decl (decl, ctx, true);
1447 }
1448 else
1449 fixup_remapped_decl (decl, ctx, false);
1450 }
1451 break;
1452
953ff289
DN
1453 case OMP_CLAUSE_COPYPRIVATE:
1454 case OMP_CLAUSE_COPYIN:
1455 case OMP_CLAUSE_DEFAULT:
1456 case OMP_CLAUSE_IF:
1457 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
1458 case OMP_CLAUSE_NUM_TEAMS:
1459 case OMP_CLAUSE_THREAD_LIMIT:
1460 case OMP_CLAUSE_DEVICE:
953ff289 1461 case OMP_CLAUSE_SCHEDULE:
acf0174b 1462 case OMP_CLAUSE_DIST_SCHEDULE:
953ff289
DN
1463 case OMP_CLAUSE_NOWAIT:
1464 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
1465 case OMP_CLAUSE_COLLAPSE:
1466 case OMP_CLAUSE_UNTIED:
20906c66
JJ
1467 case OMP_CLAUSE_FINAL:
1468 case OMP_CLAUSE_MERGEABLE:
acf0174b 1469 case OMP_CLAUSE_PROC_BIND:
74bf76ed 1470 case OMP_CLAUSE_SAFELEN:
d9a6bd32 1471 case OMP_CLAUSE_SIMDLEN:
acf0174b
JJ
1472 case OMP_CLAUSE_ALIGNED:
1473 case OMP_CLAUSE_DEPEND:
1474 case OMP_CLAUSE__LOOPTEMP_:
1475 case OMP_CLAUSE_TO:
1476 case OMP_CLAUSE_FROM:
d9a6bd32
JJ
1477 case OMP_CLAUSE_PRIORITY:
1478 case OMP_CLAUSE_GRAINSIZE:
1479 case OMP_CLAUSE_NUM_TASKS:
1480 case OMP_CLAUSE_THREADS:
1481 case OMP_CLAUSE_SIMD:
1482 case OMP_CLAUSE_NOGROUP:
1483 case OMP_CLAUSE_DEFAULTMAP:
1484 case OMP_CLAUSE_USE_DEVICE_PTR:
41dbbb37
TS
1485 case OMP_CLAUSE_ASYNC:
1486 case OMP_CLAUSE_WAIT:
1487 case OMP_CLAUSE_NUM_GANGS:
1488 case OMP_CLAUSE_NUM_WORKERS:
1489 case OMP_CLAUSE_VECTOR_LENGTH:
1490 case OMP_CLAUSE_GANG:
1491 case OMP_CLAUSE_WORKER:
1492 case OMP_CLAUSE_VECTOR:
c5a64cfe
NS
1493 case OMP_CLAUSE_INDEPENDENT:
1494 case OMP_CLAUSE_AUTO:
1495 case OMP_CLAUSE_SEQ:
02889d23 1496 case OMP_CLAUSE_TILE:
b2b40051 1497 case OMP_CLAUSE__GRIDDIM_:
6c7509bc 1498 case OMP_CLAUSE__SIMT_:
829c6349
CLT
1499 case OMP_CLAUSE_IF_PRESENT:
1500 case OMP_CLAUSE_FINALIZE:
41dbbb37
TS
1501 break;
1502
41dbbb37 1503 case OMP_CLAUSE__CACHE_:
953ff289
DN
1504 default:
1505 gcc_unreachable ();
1506 }
1507 }
1508
41dbbb37
TS
1509 gcc_checking_assert (!scan_array_reductions
1510 || !is_gimple_omp_oacc (ctx->stmt));
953ff289 1511 if (scan_array_reductions)
6b37bdaf
PP
1512 {
1513 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1514 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1515 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1516 {
1517 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1518 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1519 }
1520 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1521 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1522 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1523 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1524 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1525 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1526 }
953ff289
DN
1527}
1528
5e9d6aa4 1529/* Create a new name for omp child function. Returns an identifier. */
953ff289 1530
953ff289 1531static tree
5e9d6aa4 1532create_omp_child_function_name (bool task_copy)
953ff289 1533{
7958186b
MP
1534 return clone_function_name_numbered (current_function_decl,
1535 task_copy ? "_omp_cpyfn" : "_omp_fn");
9a771876
JJ
1536}
1537
9669b00b
AM
1538/* Return true if CTX may belong to offloaded code: either if current function
1539 is offloaded, or any enclosing context corresponds to a target region. */
1540
1541static bool
1542omp_maybe_offloaded_ctx (omp_context *ctx)
1543{
1544 if (cgraph_node::get (current_function_decl)->offloadable)
1545 return true;
1546 for (; ctx; ctx = ctx->outer)
1547 if (is_gimple_omp_offloaded (ctx->stmt))
1548 return true;
1549 return false;
1550}
1551
953ff289
DN
1552/* Build a decl for the omp child function. It'll not contain a body
1553 yet, just the bare decl. */
1554
1555static void
a68ab351 1556create_omp_child_function (omp_context *ctx, bool task_copy)
953ff289
DN
1557{
1558 tree decl, type, name, t;
1559
5e9d6aa4 1560 name = create_omp_child_function_name (task_copy);
a68ab351
JJ
1561 if (task_copy)
1562 type = build_function_type_list (void_type_node, ptr_type_node,
1563 ptr_type_node, NULL_TREE);
1564 else
1565 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
953ff289 1566
9a771876 1567 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
953ff289 1568
41dbbb37
TS
1569 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1570 || !task_copy);
a68ab351
JJ
1571 if (!task_copy)
1572 ctx->cb.dst_fn = decl;
1573 else
726a989a 1574 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
953ff289
DN
1575
1576 TREE_STATIC (decl) = 1;
1577 TREE_USED (decl) = 1;
1578 DECL_ARTIFICIAL (decl) = 1;
1579 DECL_IGNORED_P (decl) = 0;
1580 TREE_PUBLIC (decl) = 0;
1581 DECL_UNINLINABLE (decl) = 1;
1582 DECL_EXTERNAL (decl) = 0;
1583 DECL_CONTEXT (decl) = NULL_TREE;
50674e96 1584 DECL_INITIAL (decl) = make_node (BLOCK);
01771d43 1585 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
5c38262d 1586 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
f1542d9a
JJ
1587 /* Remove omp declare simd attribute from the new attributes. */
1588 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1589 {
1590 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1591 a = a2;
1592 a = TREE_CHAIN (a);
1593 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1594 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1595 *p = TREE_CHAIN (*p);
1596 else
1597 {
1598 tree chain = TREE_CHAIN (*p);
1599 *p = copy_node (*p);
1600 p = &TREE_CHAIN (*p);
1601 *p = chain;
1602 }
1603 }
5c38262d
JJ
1604 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1605 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1606 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1607 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1608 DECL_FUNCTION_VERSIONED (decl)
1609 = DECL_FUNCTION_VERSIONED (current_function_decl);
1610
9669b00b 1611 if (omp_maybe_offloaded_ctx (ctx))
acf0174b 1612 {
9669b00b
AM
1613 cgraph_node::get_create (decl)->offloadable = 1;
1614 if (ENABLE_OFFLOADING)
1615 g->have_offload = true;
acf0174b 1616 }
953ff289 1617
d7823208
BS
1618 if (cgraph_node::get_create (decl)->offloadable
1619 && !lookup_attribute ("omp declare target",
1620 DECL_ATTRIBUTES (current_function_decl)))
9669b00b
AM
1621 {
1622 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1623 ? "omp target entrypoint"
1624 : "omp declare target");
1625 DECL_ATTRIBUTES (decl)
1626 = tree_cons (get_identifier (target_attr),
1627 NULL_TREE, DECL_ATTRIBUTES (decl));
1628 }
d7823208 1629
c2255bc4
AH
1630 t = build_decl (DECL_SOURCE_LOCATION (decl),
1631 RESULT_DECL, NULL_TREE, void_type_node);
953ff289
DN
1632 DECL_ARTIFICIAL (t) = 1;
1633 DECL_IGNORED_P (t) = 1;
07485407 1634 DECL_CONTEXT (t) = decl;
953ff289
DN
1635 DECL_RESULT (decl) = t;
1636
9a771876
JJ
1637 tree data_name = get_identifier (".omp_data_i");
1638 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1639 ptr_type_node);
953ff289 1640 DECL_ARTIFICIAL (t) = 1;
cd3f04c8 1641 DECL_NAMELESS (t) = 1;
953ff289 1642 DECL_ARG_TYPE (t) = ptr_type_node;
50674e96 1643 DECL_CONTEXT (t) = current_function_decl;
953ff289 1644 TREE_USED (t) = 1;
d9a6bd32 1645 TREE_READONLY (t) = 1;
953ff289 1646 DECL_ARGUMENTS (decl) = t;
a68ab351
JJ
1647 if (!task_copy)
1648 ctx->receiver_decl = t;
1649 else
1650 {
c2255bc4
AH
1651 t = build_decl (DECL_SOURCE_LOCATION (decl),
1652 PARM_DECL, get_identifier (".omp_data_o"),
a68ab351
JJ
1653 ptr_type_node);
1654 DECL_ARTIFICIAL (t) = 1;
cd3f04c8 1655 DECL_NAMELESS (t) = 1;
a68ab351
JJ
1656 DECL_ARG_TYPE (t) = ptr_type_node;
1657 DECL_CONTEXT (t) = current_function_decl;
1658 TREE_USED (t) = 1;
628c189e 1659 TREE_ADDRESSABLE (t) = 1;
910ad8de 1660 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
a68ab351
JJ
1661 DECL_ARGUMENTS (decl) = t;
1662 }
953ff289 1663
b8698a0f 1664 /* Allocate memory for the function structure. The call to
50674e96 1665 allocate_struct_function clobbers CFUN, so we need to restore
953ff289 1666 it afterward. */
db2960f4 1667 push_struct_function (decl);
726a989a 1668 cfun->function_end_locus = gimple_location (ctx->stmt);
381cdae4 1669 init_tree_ssa (cfun);
db2960f4 1670 pop_cfun ();
953ff289
DN
1671}
1672
acf0174b
JJ
1673/* Callback for walk_gimple_seq. Check if combined parallel
1674 contains gimple_omp_for_combined_into_p OMP_FOR. */
1675
629b3d75
MJ
1676tree
1677omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1678 bool *handled_ops_p,
1679 struct walk_stmt_info *wi)
acf0174b 1680{
355fe088 1681 gimple *stmt = gsi_stmt (*gsi_p);
acf0174b
JJ
1682
1683 *handled_ops_p = true;
1684 switch (gimple_code (stmt))
1685 {
1686 WALK_SUBSTMTS;
1687
1688 case GIMPLE_OMP_FOR:
1689 if (gimple_omp_for_combined_into_p (stmt)
d9a6bd32
JJ
1690 && gimple_omp_for_kind (stmt)
1691 == *(const enum gf_mask *) (wi->info))
acf0174b
JJ
1692 {
1693 wi->info = stmt;
1694 return integer_zero_node;
1695 }
1696 break;
1697 default:
1698 break;
1699 }
1700 return NULL;
1701}
1702
d9a6bd32
JJ
1703/* Add _LOOPTEMP_ clauses on OpenMP parallel or task. */
1704
1705static void
1706add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1707 omp_context *outer_ctx)
1708{
1709 struct walk_stmt_info wi;
1710
1711 memset (&wi, 0, sizeof (wi));
1712 wi.val_only = true;
1713 wi.info = (void *) &msk;
629b3d75 1714 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
d9a6bd32
JJ
1715 if (wi.info != (void *) &msk)
1716 {
1717 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1718 struct omp_for_data fd;
629b3d75 1719 omp_extract_for_data (for_stmt, &fd, NULL);
d9a6bd32
JJ
1720 /* We need two temporaries with fd.loop.v type (istart/iend)
1721 and then (fd.collapse - 1) temporaries with the same
1722 type for count2 ... countN-1 vars if not constant. */
1723 size_t count = 2, i;
1724 tree type = fd.iter_type;
1725 if (fd.collapse > 1
1726 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1727 {
1728 count += fd.collapse - 1;
e01d41e5 1729 /* If there are lastprivate clauses on the inner
d9a6bd32
JJ
1730 GIMPLE_OMP_FOR, add one more temporaries for the total number
1731 of iterations (product of count1 ... countN-1). */
629b3d75 1732 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
e01d41e5
JJ
1733 OMP_CLAUSE_LASTPRIVATE))
1734 count++;
1735 else if (msk == GF_OMP_FOR_KIND_FOR
629b3d75 1736 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
e01d41e5 1737 OMP_CLAUSE_LASTPRIVATE))
d9a6bd32
JJ
1738 count++;
1739 }
1740 for (i = 0; i < count; i++)
1741 {
1742 tree temp = create_tmp_var (type);
1743 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1744 insert_decl_map (&outer_ctx->cb, temp, temp);
1745 OMP_CLAUSE_DECL (c) = temp;
1746 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1747 gimple_omp_taskreg_set_clauses (stmt, c);
1748 }
1749 }
1750}
1751
953ff289
DN
1752/* Scan an OpenMP parallel directive. */
1753
1754static void
726a989a 1755scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
953ff289
DN
1756{
1757 omp_context *ctx;
1758 tree name;
538dd0b7 1759 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
953ff289
DN
1760
1761 /* Ignore parallel directives with empty bodies, unless there
1762 are copyin clauses. */
1763 if (optimize > 0
726a989a 1764 && empty_body_p (gimple_omp_body (stmt))
629b3d75 1765 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
726a989a 1766 OMP_CLAUSE_COPYIN) == NULL)
953ff289 1767 {
726a989a 1768 gsi_replace (gsi, gimple_build_nop (), false);
953ff289
DN
1769 return;
1770 }
1771
acf0174b 1772 if (gimple_omp_parallel_combined_p (stmt))
d9a6bd32 1773 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
acf0174b 1774
726a989a 1775 ctx = new_omp_context (stmt, outer_ctx);
5771c391 1776 taskreg_contexts.safe_push (ctx);
a68ab351 1777 if (taskreg_nesting_level > 1)
50674e96 1778 ctx->is_nested = true;
953ff289 1779 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
953ff289 1780 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
953ff289 1781 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
1782 name = build_decl (gimple_location (stmt),
1783 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
1784 DECL_ARTIFICIAL (name) = 1;
1785 DECL_NAMELESS (name) = 1;
953ff289 1786 TYPE_NAME (ctx->record_type) = name;
f7484978 1787 TYPE_ARTIFICIAL (ctx->record_type) = 1;
b2b40051
MJ
1788 if (!gimple_omp_parallel_grid_phony (stmt))
1789 {
1790 create_omp_child_function (ctx, false);
1791 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1792 }
953ff289 1793
726a989a 1794 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
26127932 1795 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
1796
1797 if (TYPE_FIELDS (ctx->record_type) == NULL)
1798 ctx->record_type = ctx->receiver_decl = NULL;
953ff289
DN
1799}
1800
a68ab351
JJ
1801/* Scan an OpenMP task directive. */
1802
1803static void
726a989a 1804scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
a68ab351
JJ
1805{
1806 omp_context *ctx;
726a989a 1807 tree name, t;
538dd0b7 1808 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
a68ab351 1809
fbc698e0
JJ
1810 /* Ignore task directives with empty bodies, unless they have depend
1811 clause. */
a68ab351 1812 if (optimize > 0
fbc698e0
JJ
1813 && empty_body_p (gimple_omp_body (stmt))
1814 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
a68ab351 1815 {
726a989a 1816 gsi_replace (gsi, gimple_build_nop (), false);
a68ab351
JJ
1817 return;
1818 }
1819
d9a6bd32
JJ
1820 if (gimple_omp_task_taskloop_p (stmt))
1821 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1822
726a989a 1823 ctx = new_omp_context (stmt, outer_ctx);
5771c391 1824 taskreg_contexts.safe_push (ctx);
a68ab351
JJ
1825 if (taskreg_nesting_level > 1)
1826 ctx->is_nested = true;
1827 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
a68ab351
JJ
1828 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1829 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
1830 name = build_decl (gimple_location (stmt),
1831 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
1832 DECL_ARTIFICIAL (name) = 1;
1833 DECL_NAMELESS (name) = 1;
a68ab351 1834 TYPE_NAME (ctx->record_type) = name;
f7484978 1835 TYPE_ARTIFICIAL (ctx->record_type) = 1;
a68ab351 1836 create_omp_child_function (ctx, false);
726a989a 1837 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
a68ab351 1838
726a989a 1839 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
a68ab351
JJ
1840
1841 if (ctx->srecord_type)
1842 {
1843 name = create_tmp_var_name (".omp_data_a");
c2255bc4
AH
1844 name = build_decl (gimple_location (stmt),
1845 TYPE_DECL, name, ctx->srecord_type);
cd3f04c8
JJ
1846 DECL_ARTIFICIAL (name) = 1;
1847 DECL_NAMELESS (name) = 1;
a68ab351 1848 TYPE_NAME (ctx->srecord_type) = name;
f7484978 1849 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
a68ab351
JJ
1850 create_omp_child_function (ctx, true);
1851 }
1852
26127932 1853 scan_omp (gimple_omp_body_ptr (stmt), ctx);
a68ab351
JJ
1854
1855 if (TYPE_FIELDS (ctx->record_type) == NULL)
1856 {
1857 ctx->record_type = ctx->receiver_decl = NULL;
726a989a
RB
1858 t = build_int_cst (long_integer_type_node, 0);
1859 gimple_omp_task_set_arg_size (stmt, t);
1860 t = build_int_cst (long_integer_type_node, 1);
1861 gimple_omp_task_set_arg_align (stmt, t);
a68ab351 1862 }
5771c391
JJ
1863}
1864
655e5265
JJ
1865/* Helper function for finish_taskreg_scan, called through walk_tree.
1866 If maybe_lookup_decl_in_outer_context returns non-NULL for some
1867 tree, replace it in the expression. */
1868
1869static tree
1870finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1871{
1872 if (VAR_P (*tp))
1873 {
1874 omp_context *ctx = (omp_context *) data;
1875 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1876 if (t != *tp)
1877 {
1878 if (DECL_HAS_VALUE_EXPR_P (t))
1879 t = unshare_expr (DECL_VALUE_EXPR (t));
1880 *tp = t;
1881 }
1882 *walk_subtrees = 0;
1883 }
1884 else if (IS_TYPE_OR_DECL_P (*tp))
1885 *walk_subtrees = 0;
1886 return NULL_TREE;
1887}
5771c391
JJ
1888
1889/* If any decls have been made addressable during scan_omp,
1890 adjust their fields if needed, and layout record types
1891 of parallel/task constructs. */
1892
1893static void
1894finish_taskreg_scan (omp_context *ctx)
1895{
1896 if (ctx->record_type == NULL_TREE)
1897 return;
1898
1899 /* If any task_shared_vars were needed, verify all
1900 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1901 statements if use_pointer_for_field hasn't changed
1902 because of that. If it did, update field types now. */
1903 if (task_shared_vars)
1904 {
1905 tree c;
1906
1907 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1908 c; c = OMP_CLAUSE_CHAIN (c))
d9a6bd32
JJ
1909 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1910 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5771c391
JJ
1911 {
1912 tree decl = OMP_CLAUSE_DECL (c);
1913
1914 /* Global variables don't need to be copied,
1915 the receiver side will use them directly. */
1916 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1917 continue;
1918 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1919 || !use_pointer_for_field (decl, ctx))
1920 continue;
1921 tree field = lookup_field (decl, ctx);
1922 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1923 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1924 continue;
1925 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1926 TREE_THIS_VOLATILE (field) = 0;
1927 DECL_USER_ALIGN (field) = 0;
fe37c7af 1928 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
5771c391 1929 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
fe37c7af 1930 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
5771c391
JJ
1931 if (ctx->srecord_type)
1932 {
1933 tree sfield = lookup_sfield (decl, ctx);
1934 TREE_TYPE (sfield) = TREE_TYPE (field);
1935 TREE_THIS_VOLATILE (sfield) = 0;
1936 DECL_USER_ALIGN (sfield) = 0;
fe37c7af 1937 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
5771c391 1938 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
fe37c7af 1939 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
5771c391
JJ
1940 }
1941 }
1942 }
1943
1944 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1945 {
1946 layout_type (ctx->record_type);
1947 fixup_child_record_type (ctx);
1948 }
a68ab351
JJ
1949 else
1950 {
5771c391 1951 location_t loc = gimple_location (ctx->stmt);
a68ab351
JJ
1952 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1953 /* Move VLA fields to the end. */
1954 p = &TYPE_FIELDS (ctx->record_type);
1955 while (*p)
1956 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1957 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1958 {
1959 *q = *p;
1960 *p = TREE_CHAIN (*p);
1961 TREE_CHAIN (*q) = NULL_TREE;
1962 q = &TREE_CHAIN (*q);
1963 }
1964 else
910ad8de 1965 p = &DECL_CHAIN (*p);
a68ab351 1966 *p = vla_fields;
d9a6bd32
JJ
1967 if (gimple_omp_task_taskloop_p (ctx->stmt))
1968 {
1969 /* Move fields corresponding to first and second _looptemp_
1970 clause first. There are filled by GOMP_taskloop
1971 and thus need to be in specific positions. */
1972 tree c1 = gimple_omp_task_clauses (ctx->stmt);
629b3d75
MJ
1973 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1974 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
d9a6bd32
JJ
1975 OMP_CLAUSE__LOOPTEMP_);
1976 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1977 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1978 p = &TYPE_FIELDS (ctx->record_type);
1979 while (*p)
1980 if (*p == f1 || *p == f2)
1981 *p = DECL_CHAIN (*p);
1982 else
1983 p = &DECL_CHAIN (*p);
1984 DECL_CHAIN (f1) = f2;
1985 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
1986 TYPE_FIELDS (ctx->record_type) = f1;
1987 if (ctx->srecord_type)
1988 {
1989 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
1990 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
1991 p = &TYPE_FIELDS (ctx->srecord_type);
1992 while (*p)
1993 if (*p == f1 || *p == f2)
1994 *p = DECL_CHAIN (*p);
1995 else
1996 p = &DECL_CHAIN (*p);
1997 DECL_CHAIN (f1) = f2;
1998 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
1999 TYPE_FIELDS (ctx->srecord_type) = f1;
2000 }
2001 }
a68ab351
JJ
2002 layout_type (ctx->record_type);
2003 fixup_child_record_type (ctx);
2004 if (ctx->srecord_type)
2005 layout_type (ctx->srecord_type);
5771c391
JJ
2006 tree t = fold_convert_loc (loc, long_integer_type_node,
2007 TYPE_SIZE_UNIT (ctx->record_type));
655e5265
JJ
2008 if (TREE_CODE (t) != INTEGER_CST)
2009 {
2010 t = unshare_expr (t);
2011 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2012 }
5771c391 2013 gimple_omp_task_set_arg_size (ctx->stmt, t);
726a989a 2014 t = build_int_cst (long_integer_type_node,
a68ab351 2015 TYPE_ALIGN_UNIT (ctx->record_type));
5771c391 2016 gimple_omp_task_set_arg_align (ctx->stmt, t);
a68ab351
JJ
2017 }
2018}
2019
e4834818 2020/* Find the enclosing offload context. */
953ff289 2021
41dbbb37
TS
2022static omp_context *
2023enclosing_target_ctx (omp_context *ctx)
2024{
e4834818
NS
2025 for (; ctx; ctx = ctx->outer)
2026 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2027 break;
2028
41dbbb37
TS
2029 return ctx;
2030}
2031
e4834818
NS
2032/* Return true if ctx is part of an oacc kernels region. */
2033
41dbbb37 2034static bool
e4834818 2035ctx_in_oacc_kernels_region (omp_context *ctx)
41dbbb37 2036{
e4834818
NS
2037 for (;ctx != NULL; ctx = ctx->outer)
2038 {
2039 gimple *stmt = ctx->stmt;
2040 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2041 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2042 return true;
2043 }
2044
2045 return false;
2046}
2047
2048/* Check the parallelism clauses inside a kernels regions.
2049 Until kernels handling moves to use the same loop indirection
2050 scheme as parallel, we need to do this checking early. */
2051
2052static unsigned
2053check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2054{
2055 bool checking = true;
2056 unsigned outer_mask = 0;
2057 unsigned this_mask = 0;
2058 bool has_seq = false, has_auto = false;
2059
2060 if (ctx->outer)
2061 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2062 if (!stmt)
2063 {
2064 checking = false;
2065 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2066 return outer_mask;
2067 stmt = as_a <gomp_for *> (ctx->stmt);
2068 }
2069
2070 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2071 {
2072 switch (OMP_CLAUSE_CODE (c))
2073 {
2074 case OMP_CLAUSE_GANG:
2075 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2076 break;
2077 case OMP_CLAUSE_WORKER:
2078 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2079 break;
2080 case OMP_CLAUSE_VECTOR:
2081 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2082 break;
2083 case OMP_CLAUSE_SEQ:
2084 has_seq = true;
2085 break;
2086 case OMP_CLAUSE_AUTO:
2087 has_auto = true;
2088 break;
2089 default:
2090 break;
2091 }
2092 }
2093
2094 if (checking)
2095 {
2096 if (has_seq && (this_mask || has_auto))
2097 error_at (gimple_location (stmt), "%<seq%> overrides other"
2098 " OpenACC loop specifiers");
2099 else if (has_auto && this_mask)
2100 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2101 " OpenACC loop specifiers");
2102
2103 if (this_mask & outer_mask)
2104 error_at (gimple_location (stmt), "inner loop uses same"
2105 " OpenACC parallelism as containing loop");
2106 }
2107
2108 return outer_mask | this_mask;
41dbbb37
TS
2109}
2110
2111/* Scan a GIMPLE_OMP_FOR. */
953ff289 2112
6e6cf7b0 2113static omp_context *
538dd0b7 2114scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
953ff289 2115{
50674e96 2116 omp_context *ctx;
726a989a 2117 size_t i;
41dbbb37
TS
2118 tree clauses = gimple_omp_for_clauses (stmt);
2119
50674e96 2120 ctx = new_omp_context (stmt, outer_ctx);
953ff289 2121
41dbbb37
TS
2122 if (is_gimple_omp_oacc (stmt))
2123 {
e4834818
NS
2124 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2125
2126 if (!tgt || is_oacc_parallel (tgt))
2127 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2128 {
2129 char const *check = NULL;
2130
2131 switch (OMP_CLAUSE_CODE (c))
2132 {
2133 case OMP_CLAUSE_GANG:
2134 check = "gang";
2135 break;
2136
2137 case OMP_CLAUSE_WORKER:
2138 check = "worker";
2139 break;
2140
2141 case OMP_CLAUSE_VECTOR:
2142 check = "vector";
2143 break;
2144
2145 default:
2146 break;
2147 }
2148
2149 if (check && OMP_CLAUSE_OPERAND (c, 0))
2150 error_at (gimple_location (stmt),
2151 "argument not permitted on %qs clause in"
2152 " OpenACC %<parallel%>", check);
2153 }
2154
2155 if (tgt && is_oacc_kernels (tgt))
2156 {
2157 /* Strip out reductions, as they are not handled yet. */
2158 tree *prev_ptr = &clauses;
2159
2160 while (tree probe = *prev_ptr)
41dbbb37 2161 {
e4834818
NS
2162 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2163
2164 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2165 *prev_ptr = *next_ptr;
2166 else
2167 prev_ptr = next_ptr;
41dbbb37 2168 }
e4834818
NS
2169
2170 gimple_omp_for_set_clauses (stmt, clauses);
2171 check_oacc_kernel_gwv (stmt, ctx);
41dbbb37
TS
2172 }
2173 }
2174
2175 scan_sharing_clauses (clauses, ctx);
953ff289 2176
26127932 2177 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
726a989a 2178 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
a68ab351 2179 {
726a989a
RB
2180 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2181 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2182 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2183 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
a68ab351 2184 }
26127932 2185 scan_omp (gimple_omp_body_ptr (stmt), ctx);
6e6cf7b0 2186 return ctx;
953ff289
DN
2187}
2188
6c7509bc
JJ
2189/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2190
2191static void
2192scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2193 omp_context *outer_ctx)
2194{
2195 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2196 gsi_replace (gsi, bind, false);
2197 gimple_seq seq = NULL;
2198 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2199 tree cond = create_tmp_var_raw (integer_type_node);
2200 DECL_CONTEXT (cond) = current_function_decl;
2201 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2202 gimple_bind_set_vars (bind, cond);
2203 gimple_call_set_lhs (g, cond);
2204 gimple_seq_add_stmt (&seq, g);
2205 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2206 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2207 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2208 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2209 gimple_seq_add_stmt (&seq, g);
2210 g = gimple_build_label (lab1);
2211 gimple_seq_add_stmt (&seq, g);
2212 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2213 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2214 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2215 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2216 gimple_omp_for_set_clauses (new_stmt, clause);
2217 gimple_seq_add_stmt (&seq, new_stmt);
2218 g = gimple_build_goto (lab3);
2219 gimple_seq_add_stmt (&seq, g);
2220 g = gimple_build_label (lab2);
2221 gimple_seq_add_stmt (&seq, g);
2222 gimple_seq_add_stmt (&seq, stmt);
2223 g = gimple_build_label (lab3);
2224 gimple_seq_add_stmt (&seq, g);
2225 gimple_bind_set_body (bind, seq);
2226 update_stmt (bind);
2227 scan_omp_for (new_stmt, outer_ctx);
6e6cf7b0 2228 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
6c7509bc
JJ
2229}
2230
953ff289
DN
2231/* Scan an OpenMP sections directive. */
2232
2233static void
538dd0b7 2234scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
953ff289 2235{
953ff289
DN
2236 omp_context *ctx;
2237
2238 ctx = new_omp_context (stmt, outer_ctx);
726a989a 2239 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
26127932 2240 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2241}
2242
2243/* Scan an OpenMP single directive. */
2244
2245static void
538dd0b7 2246scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
953ff289 2247{
953ff289
DN
2248 omp_context *ctx;
2249 tree name;
2250
2251 ctx = new_omp_context (stmt, outer_ctx);
2252 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2253 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2254 name = create_tmp_var_name (".omp_copy_s");
c2255bc4
AH
2255 name = build_decl (gimple_location (stmt),
2256 TYPE_DECL, name, ctx->record_type);
953ff289
DN
2257 TYPE_NAME (ctx->record_type) = name;
2258
726a989a 2259 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
26127932 2260 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2261
2262 if (TYPE_FIELDS (ctx->record_type) == NULL)
2263 ctx->record_type = NULL;
2264 else
2265 layout_type (ctx->record_type);
2266}
2267
41dbbb37 2268/* Scan a GIMPLE_OMP_TARGET. */
acf0174b
JJ
2269
2270static void
538dd0b7 2271scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
acf0174b
JJ
2272{
2273 omp_context *ctx;
2274 tree name;
41dbbb37
TS
2275 bool offloaded = is_gimple_omp_offloaded (stmt);
2276 tree clauses = gimple_omp_target_clauses (stmt);
acf0174b
JJ
2277
2278 ctx = new_omp_context (stmt, outer_ctx);
2279 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
acf0174b
JJ
2280 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2281 name = create_tmp_var_name (".omp_data_t");
2282 name = build_decl (gimple_location (stmt),
2283 TYPE_DECL, name, ctx->record_type);
2284 DECL_ARTIFICIAL (name) = 1;
2285 DECL_NAMELESS (name) = 1;
2286 TYPE_NAME (ctx->record_type) = name;
f7484978 2287 TYPE_ARTIFICIAL (ctx->record_type) = 1;
86938de6 2288
41dbbb37 2289 if (offloaded)
acf0174b
JJ
2290 {
2291 create_omp_child_function (ctx, false);
2292 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2293 }
2294
829c6349 2295 scan_sharing_clauses (clauses, ctx);
acf0174b
JJ
2296 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2297
2298 if (TYPE_FIELDS (ctx->record_type) == NULL)
2299 ctx->record_type = ctx->receiver_decl = NULL;
2300 else
2301 {
2302 TYPE_FIELDS (ctx->record_type)
2303 = nreverse (TYPE_FIELDS (ctx->record_type));
b2b29377
MM
2304 if (flag_checking)
2305 {
2306 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2307 for (tree field = TYPE_FIELDS (ctx->record_type);
2308 field;
2309 field = DECL_CHAIN (field))
2310 gcc_assert (DECL_ALIGN (field) == align);
2311 }
acf0174b 2312 layout_type (ctx->record_type);
41dbbb37 2313 if (offloaded)
acf0174b
JJ
2314 fixup_child_record_type (ctx);
2315 }
2316}
2317
2318/* Scan an OpenMP teams directive. */
2319
2320static void
538dd0b7 2321scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
acf0174b
JJ
2322{
2323 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2324 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2325 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2326}
953ff289 2327
41dbbb37 2328/* Check nesting restrictions. */
26127932 2329static bool
355fe088 2330check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
a6fc8e21 2331{
d9a6bd32
JJ
2332 tree c;
2333
b2b40051
MJ
2334 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2335 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2336 the original copy of its contents. */
2337 return true;
2338
41dbbb37
TS
2339 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2340 inside an OpenACC CTX. */
2341 if (!(is_gimple_omp (stmt)
640b7e74
TV
2342 && is_gimple_omp_oacc (stmt))
2343 /* Except for atomic codes that we share with OpenMP. */
2344 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2345 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2346 {
629b3d75 2347 if (oacc_get_fn_attrib (cfun->decl) != NULL)
640b7e74
TV
2348 {
2349 error_at (gimple_location (stmt),
2350 "non-OpenACC construct inside of OpenACC routine");
2351 return false;
2352 }
2353 else
2354 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2355 if (is_gimple_omp (octx->stmt)
2356 && is_gimple_omp_oacc (octx->stmt))
2357 {
2358 error_at (gimple_location (stmt),
2359 "non-OpenACC construct inside of OpenACC region");
2360 return false;
2361 }
41dbbb37
TS
2362 }
2363
74bf76ed
JJ
2364 if (ctx != NULL)
2365 {
2366 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 2367 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
74bf76ed 2368 {
d9a6bd32
JJ
2369 c = NULL_TREE;
2370 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2371 {
2372 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
629b3d75 2373 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
d9f4ea18 2374 {
629b3d75 2375 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
d9f4ea18
JJ
2376 && (ctx->outer == NULL
2377 || !gimple_omp_for_combined_into_p (ctx->stmt)
2378 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2379 || (gimple_omp_for_kind (ctx->outer->stmt)
2380 != GF_OMP_FOR_KIND_FOR)
2381 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2382 {
2383 error_at (gimple_location (stmt),
2384 "%<ordered simd threads%> must be closely "
2385 "nested inside of %<for simd%> region");
2386 return false;
2387 }
2388 return true;
2389 }
d9a6bd32 2390 }
74bf76ed 2391 error_at (gimple_location (stmt),
d9a6bd32 2392 "OpenMP constructs other than %<#pragma omp ordered simd%>"
d9f4ea18 2393 " may not be nested inside %<simd%> region");
74bf76ed
JJ
2394 return false;
2395 }
acf0174b
JJ
2396 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2397 {
2398 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
56b1c60e
MJ
2399 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2400 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
acf0174b
JJ
2401 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2402 {
2403 error_at (gimple_location (stmt),
d9f4ea18
JJ
2404 "only %<distribute%> or %<parallel%> regions are "
2405 "allowed to be strictly nested inside %<teams%> "
2406 "region");
acf0174b
JJ
2407 return false;
2408 }
2409 }
74bf76ed 2410 }
726a989a 2411 switch (gimple_code (stmt))
a6fc8e21 2412 {
726a989a 2413 case GIMPLE_OMP_FOR:
0aadce73 2414 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
74bf76ed 2415 return true;
acf0174b
JJ
2416 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2417 {
2418 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2419 {
2420 error_at (gimple_location (stmt),
d9f4ea18
JJ
2421 "%<distribute%> region must be strictly nested "
2422 "inside %<teams%> construct");
acf0174b
JJ
2423 return false;
2424 }
2425 return true;
2426 }
d9a6bd32
JJ
2427 /* We split taskloop into task and nested taskloop in it. */
2428 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2429 return true;
68d58afb
NS
2430 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2431 {
2432 bool ok = false;
01914336 2433
68d58afb
NS
2434 if (ctx)
2435 switch (gimple_code (ctx->stmt))
2436 {
2437 case GIMPLE_OMP_FOR:
2438 ok = (gimple_omp_for_kind (ctx->stmt)
2439 == GF_OMP_FOR_KIND_OACC_LOOP);
2440 break;
2441
2442 case GIMPLE_OMP_TARGET:
2443 switch (gimple_omp_target_kind (ctx->stmt))
2444 {
2445 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2446 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2447 ok = true;
2448 break;
2449
2450 default:
2451 break;
2452 }
2453
2454 default:
2455 break;
2456 }
629b3d75 2457 else if (oacc_get_fn_attrib (current_function_decl))
68d58afb
NS
2458 ok = true;
2459 if (!ok)
2460 {
2461 error_at (gimple_location (stmt),
2462 "OpenACC loop directive must be associated with"
2463 " an OpenACC compute region");
2464 return false;
2465 }
2466 }
acf0174b
JJ
2467 /* FALLTHRU */
2468 case GIMPLE_CALL:
2469 if (is_gimple_call (stmt)
2470 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2471 == BUILT_IN_GOMP_CANCEL
2472 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2473 == BUILT_IN_GOMP_CANCELLATION_POINT))
2474 {
2475 const char *bad = NULL;
2476 const char *kind = NULL;
d9f4ea18
JJ
2477 const char *construct
2478 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2479 == BUILT_IN_GOMP_CANCEL)
2480 ? "#pragma omp cancel"
2481 : "#pragma omp cancellation point";
acf0174b
JJ
2482 if (ctx == NULL)
2483 {
2484 error_at (gimple_location (stmt), "orphaned %qs construct",
d9f4ea18 2485 construct);
acf0174b
JJ
2486 return false;
2487 }
9541ffee 2488 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
9439e9a1 2489 ? tree_to_shwi (gimple_call_arg (stmt, 0))
acf0174b
JJ
2490 : 0)
2491 {
2492 case 1:
2493 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2494 bad = "#pragma omp parallel";
2495 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2496 == BUILT_IN_GOMP_CANCEL
2497 && !integer_zerop (gimple_call_arg (stmt, 1)))
2498 ctx->cancellable = true;
2499 kind = "parallel";
2500 break;
2501 case 2:
2502 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2503 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2504 bad = "#pragma omp for";
2505 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2506 == BUILT_IN_GOMP_CANCEL
2507 && !integer_zerop (gimple_call_arg (stmt, 1)))
2508 {
2509 ctx->cancellable = true;
629b3d75 2510 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
acf0174b
JJ
2511 OMP_CLAUSE_NOWAIT))
2512 warning_at (gimple_location (stmt), 0,
2513 "%<#pragma omp cancel for%> inside "
2514 "%<nowait%> for construct");
629b3d75 2515 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
acf0174b
JJ
2516 OMP_CLAUSE_ORDERED))
2517 warning_at (gimple_location (stmt), 0,
2518 "%<#pragma omp cancel for%> inside "
2519 "%<ordered%> for construct");
2520 }
2521 kind = "for";
2522 break;
2523 case 4:
2524 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2525 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2526 bad = "#pragma omp sections";
2527 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2528 == BUILT_IN_GOMP_CANCEL
2529 && !integer_zerop (gimple_call_arg (stmt, 1)))
2530 {
2531 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2532 {
2533 ctx->cancellable = true;
629b3d75 2534 if (omp_find_clause (gimple_omp_sections_clauses
acf0174b
JJ
2535 (ctx->stmt),
2536 OMP_CLAUSE_NOWAIT))
2537 warning_at (gimple_location (stmt), 0,
2538 "%<#pragma omp cancel sections%> inside "
2539 "%<nowait%> sections construct");
2540 }
2541 else
2542 {
2543 gcc_assert (ctx->outer
2544 && gimple_code (ctx->outer->stmt)
2545 == GIMPLE_OMP_SECTIONS);
2546 ctx->outer->cancellable = true;
629b3d75 2547 if (omp_find_clause (gimple_omp_sections_clauses
acf0174b
JJ
2548 (ctx->outer->stmt),
2549 OMP_CLAUSE_NOWAIT))
2550 warning_at (gimple_location (stmt), 0,
2551 "%<#pragma omp cancel sections%> inside "
2552 "%<nowait%> sections construct");
2553 }
2554 }
2555 kind = "sections";
2556 break;
2557 case 8:
2558 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2559 bad = "#pragma omp task";
2560 else
d9f4ea18
JJ
2561 {
2562 for (omp_context *octx = ctx->outer;
2563 octx; octx = octx->outer)
2564 {
2565 switch (gimple_code (octx->stmt))
2566 {
2567 case GIMPLE_OMP_TASKGROUP:
2568 break;
2569 case GIMPLE_OMP_TARGET:
2570 if (gimple_omp_target_kind (octx->stmt)
2571 != GF_OMP_TARGET_KIND_REGION)
2572 continue;
2573 /* FALLTHRU */
2574 case GIMPLE_OMP_PARALLEL:
2575 case GIMPLE_OMP_TEAMS:
2576 error_at (gimple_location (stmt),
2577 "%<%s taskgroup%> construct not closely "
2578 "nested inside of %<taskgroup%> region",
2579 construct);
2580 return false;
2581 default:
2582 continue;
2583 }
2584 break;
2585 }
2586 ctx->cancellable = true;
2587 }
acf0174b
JJ
2588 kind = "taskgroup";
2589 break;
2590 default:
2591 error_at (gimple_location (stmt), "invalid arguments");
2592 return false;
2593 }
2594 if (bad)
2595 {
2596 error_at (gimple_location (stmt),
2597 "%<%s %s%> construct not closely nested inside of %qs",
d9f4ea18 2598 construct, kind, bad);
acf0174b
JJ
2599 return false;
2600 }
2601 }
74bf76ed 2602 /* FALLTHRU */
726a989a
RB
2603 case GIMPLE_OMP_SECTIONS:
2604 case GIMPLE_OMP_SINGLE:
a6fc8e21 2605 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2606 switch (gimple_code (ctx->stmt))
a6fc8e21 2607 {
726a989a 2608 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2609 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2610 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2611 break;
2612 /* FALLTHRU */
726a989a
RB
2613 case GIMPLE_OMP_SECTIONS:
2614 case GIMPLE_OMP_SINGLE:
2615 case GIMPLE_OMP_ORDERED:
2616 case GIMPLE_OMP_MASTER:
2617 case GIMPLE_OMP_TASK:
acf0174b 2618 case GIMPLE_OMP_CRITICAL:
726a989a 2619 if (is_gimple_call (stmt))
a68ab351 2620 {
acf0174b
JJ
2621 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2622 != BUILT_IN_GOMP_BARRIER)
2623 return true;
26127932
JJ
2624 error_at (gimple_location (stmt),
2625 "barrier region may not be closely nested inside "
d9f4ea18
JJ
2626 "of work-sharing, %<critical%>, %<ordered%>, "
2627 "%<master%>, explicit %<task%> or %<taskloop%> "
2628 "region");
26127932 2629 return false;
a68ab351 2630 }
26127932
JJ
2631 error_at (gimple_location (stmt),
2632 "work-sharing region may not be closely nested inside "
d9f4ea18
JJ
2633 "of work-sharing, %<critical%>, %<ordered%>, "
2634 "%<master%>, explicit %<task%> or %<taskloop%> region");
26127932 2635 return false;
726a989a 2636 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2637 case GIMPLE_OMP_TEAMS:
26127932 2638 return true;
d9f4ea18
JJ
2639 case GIMPLE_OMP_TARGET:
2640 if (gimple_omp_target_kind (ctx->stmt)
2641 == GF_OMP_TARGET_KIND_REGION)
2642 return true;
2643 break;
a6fc8e21
JJ
2644 default:
2645 break;
2646 }
2647 break;
726a989a 2648 case GIMPLE_OMP_MASTER:
a6fc8e21 2649 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2650 switch (gimple_code (ctx->stmt))
a6fc8e21 2651 {
726a989a 2652 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2653 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2654 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2655 break;
2656 /* FALLTHRU */
726a989a
RB
2657 case GIMPLE_OMP_SECTIONS:
2658 case GIMPLE_OMP_SINGLE:
2659 case GIMPLE_OMP_TASK:
26127932 2660 error_at (gimple_location (stmt),
d9f4ea18
JJ
2661 "%<master%> region may not be closely nested inside "
2662 "of work-sharing, explicit %<task%> or %<taskloop%> "
2663 "region");
26127932 2664 return false;
726a989a 2665 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2666 case GIMPLE_OMP_TEAMS:
26127932 2667 return true;
d9f4ea18
JJ
2668 case GIMPLE_OMP_TARGET:
2669 if (gimple_omp_target_kind (ctx->stmt)
2670 == GF_OMP_TARGET_KIND_REGION)
2671 return true;
2672 break;
a6fc8e21
JJ
2673 default:
2674 break;
2675 }
2676 break;
d9a6bd32
JJ
2677 case GIMPLE_OMP_TASK:
2678 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2679 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2680 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2681 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2682 {
2683 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2684 error_at (OMP_CLAUSE_LOCATION (c),
2685 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2686 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2687 return false;
2688 }
2689 break;
726a989a 2690 case GIMPLE_OMP_ORDERED:
d9a6bd32
JJ
2691 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2692 c; c = OMP_CLAUSE_CHAIN (c))
2693 {
2694 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2695 {
2696 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
d9f4ea18 2697 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
d9a6bd32
JJ
2698 continue;
2699 }
2700 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2701 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2702 || kind == OMP_CLAUSE_DEPEND_SINK)
2703 {
2704 tree oclause;
2705 /* Look for containing ordered(N) loop. */
2706 if (ctx == NULL
2707 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2708 || (oclause
629b3d75 2709 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
d9a6bd32
JJ
2710 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2711 {
2712 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2713 "%<ordered%> construct with %<depend%> clause "
2714 "must be closely nested inside an %<ordered%> "
2715 "loop");
d9a6bd32
JJ
2716 return false;
2717 }
2718 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2719 {
2720 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2721 "%<ordered%> construct with %<depend%> clause "
2722 "must be closely nested inside a loop with "
2723 "%<ordered%> clause with a parameter");
d9a6bd32
JJ
2724 return false;
2725 }
2726 }
2727 else
2728 {
2729 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2730 "invalid depend kind in omp %<ordered%> %<depend%>");
2731 return false;
2732 }
2733 }
2734 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
629b3d75 2735 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
d9f4ea18
JJ
2736 {
2737 /* ordered simd must be closely nested inside of simd region,
2738 and simd region must not encounter constructs other than
2739 ordered simd, therefore ordered simd may be either orphaned,
2740 or ctx->stmt must be simd. The latter case is handled already
2741 earlier. */
2742 if (ctx != NULL)
2743 {
2744 error_at (gimple_location (stmt),
2745 "%<ordered%> %<simd%> must be closely nested inside "
2746 "%<simd%> region");
d9a6bd32
JJ
2747 return false;
2748 }
2749 }
a6fc8e21 2750 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2751 switch (gimple_code (ctx->stmt))
a6fc8e21 2752 {
726a989a
RB
2753 case GIMPLE_OMP_CRITICAL:
2754 case GIMPLE_OMP_TASK:
d9f4ea18
JJ
2755 case GIMPLE_OMP_ORDERED:
2756 ordered_in_taskloop:
26127932 2757 error_at (gimple_location (stmt),
d9f4ea18
JJ
2758 "%<ordered%> region may not be closely nested inside "
2759 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2760 "%<taskloop%> region");
26127932 2761 return false;
726a989a 2762 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2763 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2764 goto ordered_in_taskloop;
173670e2
JJ
2765 tree o;
2766 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2767 OMP_CLAUSE_ORDERED);
2768 if (o == NULL)
26127932
JJ
2769 {
2770 error_at (gimple_location (stmt),
d9f4ea18
JJ
2771 "%<ordered%> region must be closely nested inside "
2772 "a loop region with an %<ordered%> clause");
26127932
JJ
2773 return false;
2774 }
173670e2
JJ
2775 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
2776 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
2777 {
2778 error_at (gimple_location (stmt),
2779 "%<ordered%> region without %<depend%> clause may "
2780 "not be closely nested inside a loop region with "
2781 "an %<ordered%> clause with a parameter");
2782 return false;
2783 }
26127932 2784 return true;
d9f4ea18
JJ
2785 case GIMPLE_OMP_TARGET:
2786 if (gimple_omp_target_kind (ctx->stmt)
2787 != GF_OMP_TARGET_KIND_REGION)
2788 break;
2789 /* FALLTHRU */
726a989a 2790 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2791 case GIMPLE_OMP_TEAMS:
acf0174b 2792 error_at (gimple_location (stmt),
d9f4ea18
JJ
2793 "%<ordered%> region must be closely nested inside "
2794 "a loop region with an %<ordered%> clause");
acf0174b 2795 return false;
a6fc8e21
JJ
2796 default:
2797 break;
2798 }
2799 break;
726a989a 2800 case GIMPLE_OMP_CRITICAL:
538dd0b7
DM
2801 {
2802 tree this_stmt_name
2803 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2804 for (; ctx != NULL; ctx = ctx->outer)
2805 if (gomp_critical *other_crit
2806 = dyn_cast <gomp_critical *> (ctx->stmt))
2807 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2808 {
2809 error_at (gimple_location (stmt),
d9f4ea18
JJ
2810 "%<critical%> region may not be nested inside "
2811 "a %<critical%> region with the same name");
538dd0b7
DM
2812 return false;
2813 }
2814 }
a6fc8e21 2815 break;
acf0174b
JJ
2816 case GIMPLE_OMP_TEAMS:
2817 if (ctx == NULL
2818 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2819 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2820 {
2821 error_at (gimple_location (stmt),
d9f4ea18
JJ
2822 "%<teams%> construct not closely nested inside of "
2823 "%<target%> construct");
acf0174b
JJ
2824 return false;
2825 }
2826 break;
f014c653 2827 case GIMPLE_OMP_TARGET:
d9a6bd32
JJ
2828 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2829 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2830 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2831 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2832 {
2833 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2834 error_at (OMP_CLAUSE_LOCATION (c),
2835 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2836 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2837 return false;
2838 }
640b7e74 2839 if (is_gimple_omp_offloaded (stmt)
629b3d75 2840 && oacc_get_fn_attrib (cfun->decl) != NULL)
640b7e74
TV
2841 {
2842 error_at (gimple_location (stmt),
2843 "OpenACC region inside of OpenACC routine, nested "
2844 "parallelism not supported yet");
2845 return false;
2846 }
f014c653 2847 for (; ctx != NULL; ctx = ctx->outer)
41dbbb37
TS
2848 {
2849 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2850 {
2851 if (is_gimple_omp (stmt)
2852 && is_gimple_omp_oacc (stmt)
2853 && is_gimple_omp (ctx->stmt))
2854 {
2855 error_at (gimple_location (stmt),
2856 "OpenACC construct inside of non-OpenACC region");
2857 return false;
2858 }
2859 continue;
2860 }
2861
2862 const char *stmt_name, *ctx_stmt_name;
2863 switch (gimple_omp_target_kind (stmt))
2864 {
2865 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2866 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2867 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
d9a6bd32
JJ
2868 case GF_OMP_TARGET_KIND_ENTER_DATA:
2869 stmt_name = "target enter data"; break;
2870 case GF_OMP_TARGET_KIND_EXIT_DATA:
2871 stmt_name = "target exit data"; break;
41dbbb37
TS
2872 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2873 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2874 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2875 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
d9a6bd32
JJ
2876 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2877 stmt_name = "enter/exit data"; break;
37d5ad46
JB
2878 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2879 break;
41dbbb37
TS
2880 default: gcc_unreachable ();
2881 }
2882 switch (gimple_omp_target_kind (ctx->stmt))
2883 {
2884 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2885 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
d9a6bd32
JJ
2886 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2887 ctx_stmt_name = "parallel"; break;
2888 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2889 ctx_stmt_name = "kernels"; break;
41dbbb37 2890 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
37d5ad46
JB
2891 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2892 ctx_stmt_name = "host_data"; break;
41dbbb37
TS
2893 default: gcc_unreachable ();
2894 }
2895
2896 /* OpenACC/OpenMP mismatch? */
2897 if (is_gimple_omp_oacc (stmt)
2898 != is_gimple_omp_oacc (ctx->stmt))
2899 {
2900 error_at (gimple_location (stmt),
d9f4ea18 2901 "%s %qs construct inside of %s %qs region",
41dbbb37
TS
2902 (is_gimple_omp_oacc (stmt)
2903 ? "OpenACC" : "OpenMP"), stmt_name,
2904 (is_gimple_omp_oacc (ctx->stmt)
2905 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2906 return false;
2907 }
2908 if (is_gimple_omp_offloaded (ctx->stmt))
2909 {
2910 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2911 if (is_gimple_omp_oacc (ctx->stmt))
2912 {
2913 error_at (gimple_location (stmt),
d9f4ea18 2914 "%qs construct inside of %qs region",
41dbbb37
TS
2915 stmt_name, ctx_stmt_name);
2916 return false;
2917 }
2918 else
2919 {
41dbbb37 2920 warning_at (gimple_location (stmt), 0,
d9f4ea18 2921 "%qs construct inside of %qs region",
41dbbb37
TS
2922 stmt_name, ctx_stmt_name);
2923 }
2924 }
2925 }
f014c653 2926 break;
a6fc8e21
JJ
2927 default:
2928 break;
2929 }
26127932 2930 return true;
a6fc8e21
JJ
2931}
2932
2933
726a989a
RB
2934/* Helper function scan_omp.
2935
2936 Callback for walk_tree or operators in walk_gimple_stmt used to
41dbbb37 2937 scan for OMP directives in TP. */
953ff289
DN
2938
2939static tree
726a989a 2940scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
953ff289 2941{
d3bfe4de
KG
2942 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2943 omp_context *ctx = (omp_context *) wi->info;
953ff289
DN
2944 tree t = *tp;
2945
726a989a
RB
2946 switch (TREE_CODE (t))
2947 {
2948 case VAR_DECL:
2949 case PARM_DECL:
2950 case LABEL_DECL:
2951 case RESULT_DECL:
2952 if (ctx)
b2b40051
MJ
2953 {
2954 tree repl = remap_decl (t, &ctx->cb);
2955 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
2956 *tp = repl;
2957 }
726a989a
RB
2958 break;
2959
2960 default:
2961 if (ctx && TYPE_P (t))
2962 *tp = remap_type (t, &ctx->cb);
2963 else if (!DECL_P (t))
a900ae6b
JJ
2964 {
2965 *walk_subtrees = 1;
2966 if (ctx)
70f34814
RG
2967 {
2968 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
2969 if (tem != TREE_TYPE (t))
2970 {
2971 if (TREE_CODE (t) == INTEGER_CST)
8e6cdc90 2972 *tp = wide_int_to_tree (tem, wi::to_wide (t));
70f34814
RG
2973 else
2974 TREE_TYPE (t) = tem;
2975 }
2976 }
a900ae6b 2977 }
726a989a
RB
2978 break;
2979 }
2980
2981 return NULL_TREE;
2982}
2983
c02065fc
AH
2984/* Return true if FNDECL is a setjmp or a longjmp. */
2985
2986static bool
2987setjmp_or_longjmp_p (const_tree fndecl)
2988{
3d78e008
ML
2989 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
2990 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
c02065fc
AH
2991 return true;
2992
2993 tree declname = DECL_NAME (fndecl);
2994 if (!declname)
2995 return false;
2996 const char *name = IDENTIFIER_POINTER (declname);
2997 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
2998}
2999
726a989a
RB
3000
3001/* Helper function for scan_omp.
3002
41dbbb37 3003 Callback for walk_gimple_stmt used to scan for OMP directives in
726a989a
RB
3004 the current statement in GSI. */
3005
3006static tree
3007scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3008 struct walk_stmt_info *wi)
3009{
355fe088 3010 gimple *stmt = gsi_stmt (*gsi);
726a989a
RB
3011 omp_context *ctx = (omp_context *) wi->info;
3012
3013 if (gimple_has_location (stmt))
3014 input_location = gimple_location (stmt);
953ff289 3015
41dbbb37 3016 /* Check the nesting restrictions. */
acf0174b
JJ
3017 bool remove = false;
3018 if (is_gimple_omp (stmt))
3019 remove = !check_omp_nesting_restrictions (stmt, ctx);
3020 else if (is_gimple_call (stmt))
3021 {
3022 tree fndecl = gimple_call_fndecl (stmt);
c02065fc
AH
3023 if (fndecl)
3024 {
3025 if (setjmp_or_longjmp_p (fndecl)
3026 && ctx
3027 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 3028 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
c02065fc
AH
3029 {
3030 remove = true;
3031 error_at (gimple_location (stmt),
3032 "setjmp/longjmp inside simd construct");
3033 }
3034 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3035 switch (DECL_FUNCTION_CODE (fndecl))
3036 {
3037 case BUILT_IN_GOMP_BARRIER:
3038 case BUILT_IN_GOMP_CANCEL:
3039 case BUILT_IN_GOMP_CANCELLATION_POINT:
3040 case BUILT_IN_GOMP_TASKYIELD:
3041 case BUILT_IN_GOMP_TASKWAIT:
3042 case BUILT_IN_GOMP_TASKGROUP_START:
3043 case BUILT_IN_GOMP_TASKGROUP_END:
3044 remove = !check_omp_nesting_restrictions (stmt, ctx);
3045 break;
3046 default:
3047 break;
3048 }
3049 }
acf0174b
JJ
3050 }
3051 if (remove)
3052 {
3053 stmt = gimple_build_nop ();
3054 gsi_replace (gsi, stmt, false);
a68ab351 3055 }
a6fc8e21 3056
726a989a
RB
3057 *handled_ops_p = true;
3058
3059 switch (gimple_code (stmt))
953ff289 3060 {
726a989a 3061 case GIMPLE_OMP_PARALLEL:
a68ab351 3062 taskreg_nesting_level++;
726a989a 3063 scan_omp_parallel (gsi, ctx);
a68ab351
JJ
3064 taskreg_nesting_level--;
3065 break;
3066
726a989a 3067 case GIMPLE_OMP_TASK:
a68ab351 3068 taskreg_nesting_level++;
726a989a 3069 scan_omp_task (gsi, ctx);
a68ab351 3070 taskreg_nesting_level--;
953ff289
DN
3071 break;
3072
726a989a 3073 case GIMPLE_OMP_FOR:
6c7509bc
JJ
3074 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3075 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3076 && omp_maybe_offloaded_ctx (ctx)
3077 && omp_max_simt_vf ())
3078 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3079 else
3080 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
953ff289
DN
3081 break;
3082
726a989a 3083 case GIMPLE_OMP_SECTIONS:
538dd0b7 3084 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
953ff289
DN
3085 break;
3086
726a989a 3087 case GIMPLE_OMP_SINGLE:
538dd0b7 3088 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
953ff289
DN
3089 break;
3090
726a989a
RB
3091 case GIMPLE_OMP_SECTION:
3092 case GIMPLE_OMP_MASTER:
acf0174b 3093 case GIMPLE_OMP_TASKGROUP:
726a989a
RB
3094 case GIMPLE_OMP_ORDERED:
3095 case GIMPLE_OMP_CRITICAL:
b2b40051 3096 case GIMPLE_OMP_GRID_BODY:
726a989a 3097 ctx = new_omp_context (stmt, ctx);
26127932 3098 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
3099 break;
3100
acf0174b 3101 case GIMPLE_OMP_TARGET:
538dd0b7 3102 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
acf0174b
JJ
3103 break;
3104
3105 case GIMPLE_OMP_TEAMS:
538dd0b7 3106 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
acf0174b
JJ
3107 break;
3108
726a989a 3109 case GIMPLE_BIND:
953ff289
DN
3110 {
3111 tree var;
953ff289 3112
726a989a
RB
3113 *handled_ops_p = false;
3114 if (ctx)
538dd0b7
DM
3115 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3116 var ;
3117 var = DECL_CHAIN (var))
726a989a 3118 insert_decl_map (&ctx->cb, var, var);
953ff289
DN
3119 }
3120 break;
953ff289 3121 default:
726a989a 3122 *handled_ops_p = false;
953ff289
DN
3123 break;
3124 }
3125
3126 return NULL_TREE;
3127}
3128
3129
726a989a 3130/* Scan all the statements starting at the current statement. CTX
41dbbb37 3131 contains context information about the OMP directives and
726a989a 3132 clauses found during the scan. */
953ff289
DN
3133
3134static void
26127932 3135scan_omp (gimple_seq *body_p, omp_context *ctx)
953ff289
DN
3136{
3137 location_t saved_location;
3138 struct walk_stmt_info wi;
3139
3140 memset (&wi, 0, sizeof (wi));
953ff289 3141 wi.info = ctx;
953ff289
DN
3142 wi.want_locations = true;
3143
3144 saved_location = input_location;
26127932 3145 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
953ff289
DN
3146 input_location = saved_location;
3147}
3148\f
3149/* Re-gimplification and code generation routines. */
3150
6724f8a6
JJ
3151/* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3152 of BIND if in a method. */
3153
3154static void
3155maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3156{
3157 if (DECL_ARGUMENTS (current_function_decl)
3158 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3159 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3160 == POINTER_TYPE))
3161 {
3162 tree vars = gimple_bind_vars (bind);
3163 for (tree *pvar = &vars; *pvar; )
3164 if (omp_member_access_dummy_var (*pvar))
3165 *pvar = DECL_CHAIN (*pvar);
3166 else
3167 pvar = &DECL_CHAIN (*pvar);
3168 gimple_bind_set_vars (bind, vars);
3169 }
3170}
3171
3172/* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3173 block and its subblocks. */
3174
3175static void
3176remove_member_access_dummy_vars (tree block)
3177{
3178 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3179 if (omp_member_access_dummy_var (*pvar))
3180 *pvar = DECL_CHAIN (*pvar);
3181 else
3182 pvar = &DECL_CHAIN (*pvar);
3183
3184 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3185 remove_member_access_dummy_vars (block);
3186}
3187
953ff289
DN
3188/* If a context was created for STMT when it was scanned, return it. */
3189
3190static omp_context *
355fe088 3191maybe_lookup_ctx (gimple *stmt)
953ff289
DN
3192{
3193 splay_tree_node n;
3194 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3195 return n ? (omp_context *) n->value : NULL;
3196}
3197
50674e96
DN
3198
3199/* Find the mapping for DECL in CTX or the immediately enclosing
3200 context that has a mapping for DECL.
3201
3202 If CTX is a nested parallel directive, we may have to use the decl
3203 mappings created in CTX's parent context. Suppose that we have the
3204 following parallel nesting (variable UIDs showed for clarity):
3205
3206 iD.1562 = 0;
3207 #omp parallel shared(iD.1562) -> outer parallel
3208 iD.1562 = iD.1562 + 1;
3209
3210 #omp parallel shared (iD.1562) -> inner parallel
3211 iD.1562 = iD.1562 - 1;
3212
3213 Each parallel structure will create a distinct .omp_data_s structure
3214 for copying iD.1562 in/out of the directive:
3215
3216 outer parallel .omp_data_s.1.i -> iD.1562
3217 inner parallel .omp_data_s.2.i -> iD.1562
3218
3219 A shared variable mapping will produce a copy-out operation before
3220 the parallel directive and a copy-in operation after it. So, in
3221 this case we would have:
3222
3223 iD.1562 = 0;
3224 .omp_data_o.1.i = iD.1562;
3225 #omp parallel shared(iD.1562) -> outer parallel
3226 .omp_data_i.1 = &.omp_data_o.1
3227 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3228
3229 .omp_data_o.2.i = iD.1562; -> **
3230 #omp parallel shared(iD.1562) -> inner parallel
3231 .omp_data_i.2 = &.omp_data_o.2
3232 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3233
3234
3235 ** This is a problem. The symbol iD.1562 cannot be referenced
3236 inside the body of the outer parallel region. But since we are
3237 emitting this copy operation while expanding the inner parallel
3238 directive, we need to access the CTX structure of the outer
3239 parallel directive to get the correct mapping:
3240
3241 .omp_data_o.2.i = .omp_data_i.1->i
3242
3243 Since there may be other workshare or parallel directives enclosing
3244 the parallel directive, it may be necessary to walk up the context
3245 parent chain. This is not a problem in general because nested
3246 parallelism happens only rarely. */
3247
3248static tree
3249lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3250{
3251 tree t;
3252 omp_context *up;
3253
50674e96
DN
3254 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3255 t = maybe_lookup_decl (decl, up);
3256
d2dda7fe 3257 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
50674e96 3258
64964499 3259 return t ? t : decl;
50674e96
DN
3260}
3261
3262
8ca5b2a2
JJ
3263/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3264 in outer contexts. */
3265
3266static tree
3267maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3268{
3269 tree t = NULL;
3270 omp_context *up;
3271
d2dda7fe
JJ
3272 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3273 t = maybe_lookup_decl (decl, up);
8ca5b2a2
JJ
3274
3275 return t ? t : decl;
3276}
3277
3278
f2c9f71d 3279/* Construct the initialization value for reduction operation OP. */
953ff289
DN
3280
3281tree
f2c9f71d 3282omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
953ff289 3283{
f2c9f71d 3284 switch (op)
953ff289
DN
3285 {
3286 case PLUS_EXPR:
3287 case MINUS_EXPR:
3288 case BIT_IOR_EXPR:
3289 case BIT_XOR_EXPR:
3290 case TRUTH_OR_EXPR:
3291 case TRUTH_ORIF_EXPR:
3292 case TRUTH_XOR_EXPR:
3293 case NE_EXPR:
e8160c9a 3294 return build_zero_cst (type);
953ff289
DN
3295
3296 case MULT_EXPR:
3297 case TRUTH_AND_EXPR:
3298 case TRUTH_ANDIF_EXPR:
3299 case EQ_EXPR:
db3927fb 3300 return fold_convert_loc (loc, type, integer_one_node);
953ff289
DN
3301
3302 case BIT_AND_EXPR:
db3927fb 3303 return fold_convert_loc (loc, type, integer_minus_one_node);
953ff289
DN
3304
3305 case MAX_EXPR:
3306 if (SCALAR_FLOAT_TYPE_P (type))
3307 {
3308 REAL_VALUE_TYPE max, min;
3d3dbadd 3309 if (HONOR_INFINITIES (type))
953ff289
DN
3310 {
3311 real_inf (&max);
3312 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3313 }
3314 else
3315 real_maxval (&min, 1, TYPE_MODE (type));
3316 return build_real (type, min);
3317 }
3ff2d74e
TV
3318 else if (POINTER_TYPE_P (type))
3319 {
3320 wide_int min
3321 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3322 return wide_int_to_tree (type, min);
3323 }
953ff289
DN
3324 else
3325 {
3326 gcc_assert (INTEGRAL_TYPE_P (type));
3327 return TYPE_MIN_VALUE (type);
3328 }
3329
3330 case MIN_EXPR:
3331 if (SCALAR_FLOAT_TYPE_P (type))
3332 {
3333 REAL_VALUE_TYPE max;
3d3dbadd 3334 if (HONOR_INFINITIES (type))
953ff289
DN
3335 real_inf (&max);
3336 else
3337 real_maxval (&max, 0, TYPE_MODE (type));
3338 return build_real (type, max);
3339 }
3ff2d74e
TV
3340 else if (POINTER_TYPE_P (type))
3341 {
3342 wide_int max
3343 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3344 return wide_int_to_tree (type, max);
3345 }
953ff289
DN
3346 else
3347 {
3348 gcc_assert (INTEGRAL_TYPE_P (type));
3349 return TYPE_MAX_VALUE (type);
3350 }
3351
3352 default:
3353 gcc_unreachable ();
3354 }
3355}
3356
f2c9f71d
TS
3357/* Construct the initialization value for reduction CLAUSE. */
3358
3359tree
3360omp_reduction_init (tree clause, tree type)
3361{
3362 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3363 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3364}
3365
acf0174b
JJ
3366/* Return alignment to be assumed for var in CLAUSE, which should be
3367 OMP_CLAUSE_ALIGNED. */
3368
3369static tree
3370omp_clause_aligned_alignment (tree clause)
3371{
3372 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3373 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3374
3375 /* Otherwise return implementation defined alignment. */
3376 unsigned int al = 1;
16d22000 3377 opt_scalar_mode mode_iter;
86e36728
RS
3378 auto_vector_sizes sizes;
3379 targetm.vectorize.autovectorize_vector_sizes (&sizes);
3380 poly_uint64 vs = 0;
3381 for (unsigned int i = 0; i < sizes.length (); ++i)
3382 vs = ordered_max (vs, sizes[i]);
acf0174b
JJ
3383 static enum mode_class classes[]
3384 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3385 for (int i = 0; i < 4; i += 2)
16d22000
RS
3386 /* The for loop above dictates that we only walk through scalar classes. */
3387 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
acf0174b 3388 {
16d22000
RS
3389 scalar_mode mode = mode_iter.require ();
3390 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
acf0174b
JJ
3391 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3392 continue;
86e36728
RS
3393 while (maybe_ne (vs, 0U)
3394 && known_lt (GET_MODE_SIZE (vmode), vs)
490d0f6c
RS
3395 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3396 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
01914336 3397
acf0174b
JJ
3398 tree type = lang_hooks.types.type_for_mode (mode, 1);
3399 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3400 continue;
cf098191
RS
3401 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3402 GET_MODE_SIZE (mode));
86e36728 3403 type = build_vector_type (type, nelts);
acf0174b
JJ
3404 if (TYPE_MODE (type) != vmode)
3405 continue;
3406 if (TYPE_ALIGN_UNIT (type) > al)
3407 al = TYPE_ALIGN_UNIT (type);
3408 }
3409 return build_int_cst (integer_type_node, al);
3410}
3411
6943af07
AM
3412
3413/* This structure is part of the interface between lower_rec_simd_input_clauses
3414 and lower_rec_input_clauses. */
3415
3416struct omplow_simd_context {
9d2f08ab 3417 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
6943af07
AM
3418 tree idx;
3419 tree lane;
0c6b03b5
AM
3420 vec<tree, va_heap> simt_eargs;
3421 gimple_seq simt_dlist;
9d2f08ab 3422 poly_uint64_pod max_vf;
6943af07
AM
3423 bool is_simt;
3424};
3425
74bf76ed
JJ
3426/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3427 privatization. */
3428
3429static bool
6943af07
AM
3430lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3431 omplow_simd_context *sctx, tree &ivar, tree &lvar)
74bf76ed 3432{
9d2f08ab 3433 if (known_eq (sctx->max_vf, 0U))
74bf76ed 3434 {
6943af07 3435 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
9d2f08ab 3436 if (maybe_gt (sctx->max_vf, 1U))
74bf76ed 3437 {
629b3d75 3438 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
74bf76ed 3439 OMP_CLAUSE_SAFELEN);
9d2f08ab
RS
3440 if (c)
3441 {
3442 poly_uint64 safe_len;
3443 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3444 || maybe_lt (safe_len, 1U))
3445 sctx->max_vf = 1;
3446 else
3447 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3448 }
74bf76ed 3449 }
9d2f08ab 3450 if (maybe_gt (sctx->max_vf, 1U))
74bf76ed 3451 {
6943af07
AM
3452 sctx->idx = create_tmp_var (unsigned_type_node);
3453 sctx->lane = create_tmp_var (unsigned_type_node);
74bf76ed
JJ
3454 }
3455 }
9d2f08ab 3456 if (known_eq (sctx->max_vf, 1U))
74bf76ed
JJ
3457 return false;
3458
0c6b03b5
AM
3459 if (sctx->is_simt)
3460 {
3461 if (is_gimple_reg (new_var))
3462 {
3463 ivar = lvar = new_var;
3464 return true;
3465 }
3466 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3467 ivar = lvar = create_tmp_var (type);
3468 TREE_ADDRESSABLE (ivar) = 1;
3469 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3470 NULL, DECL_ATTRIBUTES (ivar));
3471 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3472 tree clobber = build_constructor (type, NULL);
3473 TREE_THIS_VOLATILE (clobber) = 1;
3474 gimple *g = gimple_build_assign (ivar, clobber);
3475 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3476 }
3477 else
3478 {
3479 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3480 tree avar = create_tmp_var_raw (atype);
3481 if (TREE_ADDRESSABLE (new_var))
3482 TREE_ADDRESSABLE (avar) = 1;
3483 DECL_ATTRIBUTES (avar)
3484 = tree_cons (get_identifier ("omp simd array"), NULL,
3485 DECL_ATTRIBUTES (avar));
3486 gimple_add_tmp_var (avar);
3487 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3488 NULL_TREE, NULL_TREE);
3489 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3490 NULL_TREE, NULL_TREE);
3491 }
acf0174b
JJ
3492 if (DECL_P (new_var))
3493 {
3494 SET_DECL_VALUE_EXPR (new_var, lvar);
3495 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3496 }
74bf76ed
JJ
3497 return true;
3498}
3499
decaaec8
JJ
3500/* Helper function of lower_rec_input_clauses. For a reference
3501 in simd reduction, add an underlying variable it will reference. */
3502
3503static void
3504handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3505{
3506 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3507 if (TREE_CONSTANT (z))
3508 {
d9a6bd32
JJ
3509 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3510 get_name (new_vard));
decaaec8
JJ
3511 gimple_add_tmp_var (z);
3512 TREE_ADDRESSABLE (z) = 1;
3513 z = build_fold_addr_expr_loc (loc, z);
3514 gimplify_assign (new_vard, z, ilist);
3515 }
3516}
3517
953ff289
DN
3518/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3519 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3520 private variables. Initialization statements go in ILIST, while calls
3521 to destructors go in DLIST. */
3522
3523static void
726a989a 3524lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
acf0174b 3525 omp_context *ctx, struct omp_for_data *fd)
953ff289 3526{
5039610b 3527 tree c, dtor, copyin_seq, x, ptr;
953ff289 3528 bool copyin_by_ref = false;
8ca5b2a2 3529 bool lastprivate_firstprivate = false;
acf0174b 3530 bool reduction_omp_orig_ref = false;
953ff289 3531 int pass;
74bf76ed 3532 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 3533 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
6943af07 3534 omplow_simd_context sctx = omplow_simd_context ();
0c6b03b5
AM
3535 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3536 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
9669b00b 3537 gimple_seq llist[3] = { };
953ff289 3538
953ff289 3539 copyin_seq = NULL;
6943af07 3540 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
953ff289 3541
74bf76ed
JJ
3542 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3543 with data sharing clauses referencing variable sized vars. That
3544 is unnecessarily hard to support and very unlikely to result in
3545 vectorized code anyway. */
3546 if (is_simd)
3547 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3548 switch (OMP_CLAUSE_CODE (c))
3549 {
da6f124d
JJ
3550 case OMP_CLAUSE_LINEAR:
3551 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6943af07 3552 sctx.max_vf = 1;
da6f124d 3553 /* FALLTHRU */
74bf76ed
JJ
3554 case OMP_CLAUSE_PRIVATE:
3555 case OMP_CLAUSE_FIRSTPRIVATE:
3556 case OMP_CLAUSE_LASTPRIVATE:
74bf76ed 3557 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
6943af07 3558 sctx.max_vf = 1;
74bf76ed 3559 break;
d9a6bd32
JJ
3560 case OMP_CLAUSE_REDUCTION:
3561 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3562 || is_variable_sized (OMP_CLAUSE_DECL (c)))
6943af07 3563 sctx.max_vf = 1;
d9a6bd32 3564 break;
74bf76ed
JJ
3565 default:
3566 continue;
3567 }
3568
0c6b03b5 3569 /* Add a placeholder for simduid. */
9d2f08ab 3570 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
0c6b03b5
AM
3571 sctx.simt_eargs.safe_push (NULL_TREE);
3572
953ff289
DN
3573 /* Do all the fixed sized types in the first pass, and the variable sized
3574 types in the second pass. This makes sure that the scalar arguments to
b8698a0f 3575 the variable sized types are processed before we use them in the
953ff289
DN
3576 variable sized operations. */
3577 for (pass = 0; pass < 2; ++pass)
3578 {
3579 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3580 {
aaf46ef9 3581 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
953ff289
DN
3582 tree var, new_var;
3583 bool by_ref;
db3927fb 3584 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289
DN
3585
3586 switch (c_kind)
3587 {
3588 case OMP_CLAUSE_PRIVATE:
3589 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3590 continue;
3591 break;
3592 case OMP_CLAUSE_SHARED:
acf0174b
JJ
3593 /* Ignore shared directives in teams construct. */
3594 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3595 continue;
8ca5b2a2
JJ
3596 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3597 {
d9a6bd32
JJ
3598 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3599 || is_global_var (OMP_CLAUSE_DECL (c)));
8ca5b2a2
JJ
3600 continue;
3601 }
953ff289 3602 case OMP_CLAUSE_FIRSTPRIVATE:
953ff289 3603 case OMP_CLAUSE_COPYIN:
d9a6bd32 3604 break;
acf0174b 3605 case OMP_CLAUSE_LINEAR:
d9a6bd32
JJ
3606 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3607 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3608 lastprivate_firstprivate = true;
acf0174b 3609 break;
953ff289 3610 case OMP_CLAUSE_REDUCTION:
acf0174b
JJ
3611 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3612 reduction_omp_orig_ref = true;
953ff289 3613 break;
acf0174b 3614 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32 3615 /* Handle _looptemp_ clauses only on parallel/task. */
acf0174b
JJ
3616 if (fd)
3617 continue;
74bf76ed 3618 break;
077b0dfb 3619 case OMP_CLAUSE_LASTPRIVATE:
8ca5b2a2
JJ
3620 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3621 {
3622 lastprivate_firstprivate = true;
d9a6bd32 3623 if (pass != 0 || is_taskloop_ctx (ctx))
8ca5b2a2
JJ
3624 continue;
3625 }
92d28cbb
JJ
3626 /* Even without corresponding firstprivate, if
3627 decl is Fortran allocatable, it needs outer var
3628 reference. */
3629 else if (pass == 0
3630 && lang_hooks.decls.omp_private_outer_ref
3631 (OMP_CLAUSE_DECL (c)))
3632 lastprivate_firstprivate = true;
077b0dfb 3633 break;
acf0174b
JJ
3634 case OMP_CLAUSE_ALIGNED:
3635 if (pass == 0)
3636 continue;
3637 var = OMP_CLAUSE_DECL (c);
3638 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3639 && !is_global_var (var))
3640 {
3641 new_var = maybe_lookup_decl (var, ctx);
3642 if (new_var == NULL_TREE)
3643 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3644 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
37e373c2
JJ
3645 tree alarg = omp_clause_aligned_alignment (c);
3646 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3647 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
acf0174b
JJ
3648 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3649 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3650 gimplify_and_add (x, ilist);
3651 }
3652 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3653 && is_global_var (var))
3654 {
3655 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3656 new_var = lookup_decl (var, ctx);
3657 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3658 t = build_fold_addr_expr_loc (clause_loc, t);
3659 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
37e373c2
JJ
3660 tree alarg = omp_clause_aligned_alignment (c);
3661 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3662 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
acf0174b 3663 t = fold_convert_loc (clause_loc, ptype, t);
b731b390 3664 x = create_tmp_var (ptype);
acf0174b
JJ
3665 t = build2 (MODIFY_EXPR, ptype, x, t);
3666 gimplify_and_add (t, ilist);
3667 t = build_simple_mem_ref_loc (clause_loc, x);
3668 SET_DECL_VALUE_EXPR (new_var, t);
3669 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3670 }
3671 continue;
953ff289
DN
3672 default:
3673 continue;
3674 }
3675
3676 new_var = var = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
3677 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3678 {
3679 var = TREE_OPERAND (var, 0);
e01d41e5
JJ
3680 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3681 var = TREE_OPERAND (var, 0);
d9a6bd32
JJ
3682 if (TREE_CODE (var) == INDIRECT_REF
3683 || TREE_CODE (var) == ADDR_EXPR)
3684 var = TREE_OPERAND (var, 0);
3685 if (is_variable_sized (var))
3686 {
3687 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3688 var = DECL_VALUE_EXPR (var);
3689 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3690 var = TREE_OPERAND (var, 0);
3691 gcc_assert (DECL_P (var));
3692 }
3693 new_var = var;
3694 }
953ff289
DN
3695 if (c_kind != OMP_CLAUSE_COPYIN)
3696 new_var = lookup_decl (var, ctx);
3697
3698 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3699 {
3700 if (pass != 0)
3701 continue;
3702 }
d9a6bd32
JJ
3703 /* C/C++ array section reductions. */
3704 else if (c_kind == OMP_CLAUSE_REDUCTION
3705 && var != OMP_CLAUSE_DECL (c))
953ff289
DN
3706 {
3707 if (pass == 0)
3708 continue;
3709
e01d41e5 3710 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
d9a6bd32 3711 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
e01d41e5
JJ
3712 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3713 {
3714 tree b = TREE_OPERAND (orig_var, 1);
3715 b = maybe_lookup_decl (b, ctx);
3716 if (b == NULL)
3717 {
3718 b = TREE_OPERAND (orig_var, 1);
3719 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3720 }
3721 if (integer_zerop (bias))
3722 bias = b;
3723 else
3724 {
3725 bias = fold_convert_loc (clause_loc,
3726 TREE_TYPE (b), bias);
3727 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3728 TREE_TYPE (b), b, bias);
3729 }
3730 orig_var = TREE_OPERAND (orig_var, 0);
3731 }
d9a6bd32
JJ
3732 if (TREE_CODE (orig_var) == INDIRECT_REF
3733 || TREE_CODE (orig_var) == ADDR_EXPR)
3734 orig_var = TREE_OPERAND (orig_var, 0);
3735 tree d = OMP_CLAUSE_DECL (c);
3736 tree type = TREE_TYPE (d);
3737 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3738 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3739 const char *name = get_name (orig_var);
3740 if (TREE_CONSTANT (v))
a68ab351 3741 {
d9a6bd32
JJ
3742 x = create_tmp_var_raw (type, name);
3743 gimple_add_tmp_var (x);
3744 TREE_ADDRESSABLE (x) = 1;
3745 x = build_fold_addr_expr_loc (clause_loc, x);
3746 }
3747 else
3748 {
3749 tree atmp
3750 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3751 tree t = maybe_lookup_decl (v, ctx);
3752 if (t)
3753 v = t;
3754 else
3755 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3756 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3757 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3758 TREE_TYPE (v), v,
3759 build_int_cst (TREE_TYPE (v), 1));
3760 t = fold_build2_loc (clause_loc, MULT_EXPR,
3761 TREE_TYPE (v), t,
3762 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3763 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3764 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3765 }
3766
3767 tree ptype = build_pointer_type (TREE_TYPE (type));
3768 x = fold_convert_loc (clause_loc, ptype, x);
3769 tree y = create_tmp_var (ptype, name);
3770 gimplify_assign (y, x, ilist);
3771 x = y;
e01d41e5
JJ
3772 tree yb = y;
3773
3774 if (!integer_zerop (bias))
3775 {
48a78aee
JJ
3776 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3777 bias);
3778 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3779 x);
3780 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3781 pointer_sized_int_node, yb, bias);
3782 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
e01d41e5
JJ
3783 yb = create_tmp_var (ptype, name);
3784 gimplify_assign (yb, x, ilist);
3785 x = yb;
3786 }
3787
3788 d = TREE_OPERAND (d, 0);
3789 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3790 d = TREE_OPERAND (d, 0);
3791 if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32
JJ
3792 {
3793 if (orig_var != var)
3794 {
3795 gcc_assert (is_variable_sized (orig_var));
3796 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3797 x);
3798 gimplify_assign (new_var, x, ilist);
3799 tree new_orig_var = lookup_decl (orig_var, ctx);
3800 tree t = build_fold_indirect_ref (new_var);
3801 DECL_IGNORED_P (new_var) = 0;
3802 TREE_THIS_NOTRAP (t);
3803 SET_DECL_VALUE_EXPR (new_orig_var, t);
3804 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3805 }
3806 else
3807 {
3808 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3809 build_int_cst (ptype, 0));
3810 SET_DECL_VALUE_EXPR (new_var, x);
3811 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3812 }
3813 }
3814 else
3815 {
3816 gcc_assert (orig_var == var);
e01d41e5 3817 if (TREE_CODE (d) == INDIRECT_REF)
d9a6bd32
JJ
3818 {
3819 x = create_tmp_var (ptype, name);
3820 TREE_ADDRESSABLE (x) = 1;
e01d41e5 3821 gimplify_assign (x, yb, ilist);
d9a6bd32
JJ
3822 x = build_fold_addr_expr_loc (clause_loc, x);
3823 }
3824 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3825 gimplify_assign (new_var, x, ilist);
3826 }
3827 tree y1 = create_tmp_var (ptype, NULL);
3828 gimplify_assign (y1, y, ilist);
3829 tree i2 = NULL_TREE, y2 = NULL_TREE;
3830 tree body2 = NULL_TREE, end2 = NULL_TREE;
3831 tree y3 = NULL_TREE, y4 = NULL_TREE;
3832 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3833 {
3834 y2 = create_tmp_var (ptype, NULL);
3835 gimplify_assign (y2, y, ilist);
3836 tree ref = build_outer_var_ref (var, ctx);
3837 /* For ref build_outer_var_ref already performs this. */
e01d41e5 3838 if (TREE_CODE (d) == INDIRECT_REF)
629b3d75 3839 gcc_assert (omp_is_reference (var));
e01d41e5 3840 else if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32 3841 ref = build_fold_addr_expr (ref);
629b3d75 3842 else if (omp_is_reference (var))
d9a6bd32
JJ
3843 ref = build_fold_addr_expr (ref);
3844 ref = fold_convert_loc (clause_loc, ptype, ref);
3845 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3846 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3847 {
3848 y3 = create_tmp_var (ptype, NULL);
3849 gimplify_assign (y3, unshare_expr (ref), ilist);
3850 }
3851 if (is_simd)
3852 {
3853 y4 = create_tmp_var (ptype, NULL);
3854 gimplify_assign (y4, ref, dlist);
3855 }
3856 }
3857 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3858 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3859 tree body = create_artificial_label (UNKNOWN_LOCATION);
3860 tree end = create_artificial_label (UNKNOWN_LOCATION);
3861 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3862 if (y2)
3863 {
3864 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3865 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3866 body2 = create_artificial_label (UNKNOWN_LOCATION);
3867 end2 = create_artificial_label (UNKNOWN_LOCATION);
3868 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3869 }
3870 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3871 {
3872 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3873 tree decl_placeholder
3874 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3875 SET_DECL_VALUE_EXPR (decl_placeholder,
3876 build_simple_mem_ref (y1));
3877 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3878 SET_DECL_VALUE_EXPR (placeholder,
3879 y3 ? build_simple_mem_ref (y3)
3880 : error_mark_node);
3881 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3882 x = lang_hooks.decls.omp_clause_default_ctor
3883 (c, build_simple_mem_ref (y1),
3884 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3885 if (x)
3886 gimplify_and_add (x, ilist);
3887 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3888 {
3889 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3890 lower_omp (&tseq, ctx);
3891 gimple_seq_add_seq (ilist, tseq);
3892 }
3893 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3894 if (is_simd)
3895 {
3896 SET_DECL_VALUE_EXPR (decl_placeholder,
3897 build_simple_mem_ref (y2));
3898 SET_DECL_VALUE_EXPR (placeholder,
3899 build_simple_mem_ref (y4));
3900 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3901 lower_omp (&tseq, ctx);
3902 gimple_seq_add_seq (dlist, tseq);
3903 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3904 }
3905 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3906 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3907 x = lang_hooks.decls.omp_clause_dtor
3908 (c, build_simple_mem_ref (y2));
3909 if (x)
3910 {
3911 gimple_seq tseq = NULL;
3912 dtor = x;
3913 gimplify_stmt (&dtor, &tseq);
3914 gimple_seq_add_seq (dlist, tseq);
3915 }
3916 }
3917 else
3918 {
3919 x = omp_reduction_init (c, TREE_TYPE (type));
3920 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3921
3922 /* reduction(-:var) sums up the partial results, so it
3923 acts identically to reduction(+:var). */
3924 if (code == MINUS_EXPR)
3925 code = PLUS_EXPR;
3926
3927 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3928 if (is_simd)
3929 {
3930 x = build2 (code, TREE_TYPE (type),
3931 build_simple_mem_ref (y4),
3932 build_simple_mem_ref (y2));
3933 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3934 }
3935 }
3936 gimple *g
3937 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3938 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3939 gimple_seq_add_stmt (ilist, g);
3940 if (y3)
3941 {
3942 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3943 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3944 gimple_seq_add_stmt (ilist, g);
3945 }
3946 g = gimple_build_assign (i, PLUS_EXPR, i,
3947 build_int_cst (TREE_TYPE (i), 1));
3948 gimple_seq_add_stmt (ilist, g);
3949 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3950 gimple_seq_add_stmt (ilist, g);
3951 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3952 if (y2)
3953 {
3954 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3955 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3956 gimple_seq_add_stmt (dlist, g);
3957 if (y4)
3958 {
3959 g = gimple_build_assign
3960 (y4, POINTER_PLUS_EXPR, y4,
3961 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3962 gimple_seq_add_stmt (dlist, g);
3963 }
3964 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3965 build_int_cst (TREE_TYPE (i2), 1));
3966 gimple_seq_add_stmt (dlist, g);
3967 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3968 gimple_seq_add_stmt (dlist, g);
3969 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3970 }
3971 continue;
3972 }
3973 else if (is_variable_sized (var))
3974 {
3975 /* For variable sized types, we need to allocate the
3976 actual storage here. Call alloca and store the
3977 result in the pointer decl that we created elsewhere. */
3978 if (pass == 0)
3979 continue;
3980
3981 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3982 {
3983 gcall *stmt;
3984 tree tmp, atmp;
3985
3986 ptr = DECL_VALUE_EXPR (new_var);
3987 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3988 ptr = TREE_OPERAND (ptr, 0);
a68ab351
JJ
3989 gcc_assert (DECL_P (ptr));
3990 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
726a989a
RB
3991
3992 /* void *tmp = __builtin_alloca */
d9a6bd32
JJ
3993 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3994 stmt = gimple_build_call (atmp, 2, x,
3995 size_int (DECL_ALIGN (var)));
b731b390 3996 tmp = create_tmp_var_raw (ptr_type_node);
726a989a
RB
3997 gimple_add_tmp_var (tmp);
3998 gimple_call_set_lhs (stmt, tmp);
3999
4000 gimple_seq_add_stmt (ilist, stmt);
4001
db3927fb 4002 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
726a989a 4003 gimplify_assign (ptr, x, ilist);
a68ab351 4004 }
953ff289 4005 }
629b3d75 4006 else if (omp_is_reference (var))
953ff289 4007 {
50674e96
DN
4008 /* For references that are being privatized for Fortran,
4009 allocate new backing storage for the new pointer
4010 variable. This allows us to avoid changing all the
4011 code that expects a pointer to something that expects
acf0174b 4012 a direct variable. */
953ff289
DN
4013 if (pass == 0)
4014 continue;
4015
4016 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
a68ab351
JJ
4017 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4018 {
4019 x = build_receiver_ref (var, false, ctx);
db3927fb 4020 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
4021 }
4022 else if (TREE_CONSTANT (x))
953ff289 4023 {
decaaec8
JJ
4024 /* For reduction in SIMD loop, defer adding the
4025 initialization of the reference, because if we decide
4026 to use SIMD array for it, the initilization could cause
4027 expansion ICE. */
4028 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4ceffa27
JJ
4029 x = NULL_TREE;
4030 else
4031 {
4ceffa27 4032 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
d9a6bd32 4033 get_name (var));
4ceffa27
JJ
4034 gimple_add_tmp_var (x);
4035 TREE_ADDRESSABLE (x) = 1;
4036 x = build_fold_addr_expr_loc (clause_loc, x);
4037 }
953ff289
DN
4038 }
4039 else
4040 {
d9a6bd32
JJ
4041 tree atmp
4042 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4043 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4044 tree al = size_int (TYPE_ALIGN (rtype));
4045 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
953ff289
DN
4046 }
4047
4ceffa27
JJ
4048 if (x)
4049 {
4050 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4051 gimplify_assign (new_var, x, ilist);
4052 }
953ff289 4053
70f34814 4054 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289
DN
4055 }
4056 else if (c_kind == OMP_CLAUSE_REDUCTION
4057 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4058 {
4059 if (pass == 0)
4060 continue;
4061 }
4062 else if (pass != 0)
4063 continue;
4064
aaf46ef9 4065 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
4066 {
4067 case OMP_CLAUSE_SHARED:
acf0174b
JJ
4068 /* Ignore shared directives in teams construct. */
4069 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4070 continue;
8ca5b2a2
JJ
4071 /* Shared global vars are just accessed directly. */
4072 if (is_global_var (new_var))
4073 break;
d9a6bd32
JJ
4074 /* For taskloop firstprivate/lastprivate, represented
4075 as firstprivate and shared clause on the task, new_var
4076 is the firstprivate var. */
4077 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4078 break;
953ff289
DN
4079 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4080 needs to be delayed until after fixup_child_record_type so
4081 that we get the correct type during the dereference. */
7c8f7639 4082 by_ref = use_pointer_for_field (var, ctx);
953ff289
DN
4083 x = build_receiver_ref (var, by_ref, ctx);
4084 SET_DECL_VALUE_EXPR (new_var, x);
4085 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4086
4087 /* ??? If VAR is not passed by reference, and the variable
4088 hasn't been initialized yet, then we'll get a warning for
4089 the store into the omp_data_s structure. Ideally, we'd be
b8698a0f 4090 able to notice this and not store anything at all, but
953ff289
DN
4091 we're generating code too early. Suppress the warning. */
4092 if (!by_ref)
4093 TREE_NO_WARNING (var) = 1;
4094 break;
4095
4096 case OMP_CLAUSE_LASTPRIVATE:
4097 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4098 break;
4099 /* FALLTHRU */
4100
4101 case OMP_CLAUSE_PRIVATE:
a68ab351
JJ
4102 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4103 x = build_outer_var_ref (var, ctx);
4104 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4105 {
4106 if (is_task_ctx (ctx))
4107 x = build_receiver_ref (var, false, ctx);
4108 else
c39dad64 4109 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
a68ab351
JJ
4110 }
4111 else
4112 x = NULL;
74bf76ed 4113 do_private:
acf0174b 4114 tree nx;
d9a6bd32
JJ
4115 nx = lang_hooks.decls.omp_clause_default_ctor
4116 (c, unshare_expr (new_var), x);
74bf76ed
JJ
4117 if (is_simd)
4118 {
4119 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
acf0174b 4120 if ((TREE_ADDRESSABLE (new_var) || nx || y
74bf76ed 4121 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
6943af07
AM
4122 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4123 ivar, lvar))
74bf76ed 4124 {
acf0174b 4125 if (nx)
74bf76ed
JJ
4126 x = lang_hooks.decls.omp_clause_default_ctor
4127 (c, unshare_expr (ivar), x);
acf0174b 4128 if (nx && x)
74bf76ed
JJ
4129 gimplify_and_add (x, &llist[0]);
4130 if (y)
4131 {
4132 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4133 if (y)
4134 {
4135 gimple_seq tseq = NULL;
4136
4137 dtor = y;
4138 gimplify_stmt (&dtor, &tseq);
4139 gimple_seq_add_seq (&llist[1], tseq);
4140 }
4141 }
4142 break;
4143 }
4144 }
acf0174b
JJ
4145 if (nx)
4146 gimplify_and_add (nx, ilist);
953ff289
DN
4147 /* FALLTHRU */
4148
4149 do_dtor:
4150 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4151 if (x)
4152 {
726a989a
RB
4153 gimple_seq tseq = NULL;
4154
953ff289 4155 dtor = x;
726a989a 4156 gimplify_stmt (&dtor, &tseq);
355a7673 4157 gimple_seq_add_seq (dlist, tseq);
953ff289
DN
4158 }
4159 break;
4160
74bf76ed
JJ
4161 case OMP_CLAUSE_LINEAR:
4162 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4163 goto do_firstprivate;
4164 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4165 x = NULL;
4166 else
4167 x = build_outer_var_ref (var, ctx);
4168 goto do_private;
4169
953ff289 4170 case OMP_CLAUSE_FIRSTPRIVATE:
a68ab351
JJ
4171 if (is_task_ctx (ctx))
4172 {
629b3d75 4173 if (omp_is_reference (var) || is_variable_sized (var))
a68ab351
JJ
4174 goto do_dtor;
4175 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4176 ctx))
4177 || use_pointer_for_field (var, NULL))
4178 {
4179 x = build_receiver_ref (var, false, ctx);
4180 SET_DECL_VALUE_EXPR (new_var, x);
4181 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4182 goto do_dtor;
4183 }
4184 }
74bf76ed 4185 do_firstprivate:
953ff289 4186 x = build_outer_var_ref (var, ctx);
74bf76ed
JJ
4187 if (is_simd)
4188 {
acf0174b
JJ
4189 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4190 && gimple_omp_for_combined_into_p (ctx->stmt))
4191 {
da6f124d
JJ
4192 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4193 tree stept = TREE_TYPE (t);
629b3d75 4194 tree ct = omp_find_clause (clauses,
da6f124d
JJ
4195 OMP_CLAUSE__LOOPTEMP_);
4196 gcc_assert (ct);
4197 tree l = OMP_CLAUSE_DECL (ct);
56ad0e38
JJ
4198 tree n1 = fd->loop.n1;
4199 tree step = fd->loop.step;
4200 tree itype = TREE_TYPE (l);
4201 if (POINTER_TYPE_P (itype))
4202 itype = signed_type_for (itype);
4203 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4204 if (TYPE_UNSIGNED (itype)
4205 && fd->loop.cond_code == GT_EXPR)
4206 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4207 fold_build1 (NEGATE_EXPR, itype, l),
4208 fold_build1 (NEGATE_EXPR,
4209 itype, step));
4210 else
4211 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
acf0174b
JJ
4212 t = fold_build2 (MULT_EXPR, stept,
4213 fold_convert (stept, l), t);
da6f124d
JJ
4214
4215 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4216 {
4217 x = lang_hooks.decls.omp_clause_linear_ctor
4218 (c, new_var, x, t);
4219 gimplify_and_add (x, ilist);
4220 goto do_dtor;
4221 }
4222
acf0174b
JJ
4223 if (POINTER_TYPE_P (TREE_TYPE (x)))
4224 x = fold_build2 (POINTER_PLUS_EXPR,
4225 TREE_TYPE (x), x, t);
4226 else
4227 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4228 }
4229
74bf76ed
JJ
4230 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4231 || TREE_ADDRESSABLE (new_var))
6943af07
AM
4232 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4233 ivar, lvar))
74bf76ed
JJ
4234 {
4235 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4236 {
b731b390 4237 tree iv = create_tmp_var (TREE_TYPE (new_var));
74bf76ed
JJ
4238 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4239 gimplify_and_add (x, ilist);
4240 gimple_stmt_iterator gsi
4241 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
538dd0b7 4242 gassign *g
74bf76ed
JJ
4243 = gimple_build_assign (unshare_expr (lvar), iv);
4244 gsi_insert_before_without_update (&gsi, g,
4245 GSI_SAME_STMT);
da6f124d 4246 tree t = OMP_CLAUSE_LINEAR_STEP (c);
74bf76ed
JJ
4247 enum tree_code code = PLUS_EXPR;
4248 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4249 code = POINTER_PLUS_EXPR;
0d0e4a03 4250 g = gimple_build_assign (iv, code, iv, t);
74bf76ed
JJ
4251 gsi_insert_before_without_update (&gsi, g,
4252 GSI_SAME_STMT);
4253 break;
4254 }
4255 x = lang_hooks.decls.omp_clause_copy_ctor
4256 (c, unshare_expr (ivar), x);
4257 gimplify_and_add (x, &llist[0]);
4258 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4259 if (x)
4260 {
4261 gimple_seq tseq = NULL;
4262
4263 dtor = x;
4264 gimplify_stmt (&dtor, &tseq);
4265 gimple_seq_add_seq (&llist[1], tseq);
4266 }
4267 break;
4268 }
4269 }
d9a6bd32
JJ
4270 x = lang_hooks.decls.omp_clause_copy_ctor
4271 (c, unshare_expr (new_var), x);
953ff289
DN
4272 gimplify_and_add (x, ilist);
4273 goto do_dtor;
953ff289 4274
acf0174b 4275 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32 4276 gcc_assert (is_taskreg_ctx (ctx));
acf0174b
JJ
4277 x = build_outer_var_ref (var, ctx);
4278 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4279 gimplify_and_add (x, ilist);
4280 break;
4281
953ff289 4282 case OMP_CLAUSE_COPYIN:
7c8f7639 4283 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
4284 x = build_receiver_ref (var, by_ref, ctx);
4285 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4286 append_to_statement_list (x, &copyin_seq);
4287 copyin_by_ref |= by_ref;
4288 break;
4289
4290 case OMP_CLAUSE_REDUCTION:
e5014671
NS
4291 /* OpenACC reductions are initialized using the
4292 GOACC_REDUCTION internal function. */
4293 if (is_gimple_omp_oacc (ctx->stmt))
4294 break;
953ff289
DN
4295 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4296 {
a68ab351 4297 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
355fe088 4298 gimple *tseq;
a68ab351
JJ
4299 x = build_outer_var_ref (var, ctx);
4300
629b3d75 4301 if (omp_is_reference (var)
acf0174b
JJ
4302 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4303 TREE_TYPE (x)))
db3927fb 4304 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
4305 SET_DECL_VALUE_EXPR (placeholder, x);
4306 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
acf0174b 4307 tree new_vard = new_var;
629b3d75 4308 if (omp_is_reference (var))
acf0174b
JJ
4309 {
4310 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4311 new_vard = TREE_OPERAND (new_var, 0);
4312 gcc_assert (DECL_P (new_vard));
4313 }
74bf76ed 4314 if (is_simd
6943af07
AM
4315 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4316 ivar, lvar))
74bf76ed 4317 {
acf0174b
JJ
4318 if (new_vard == new_var)
4319 {
4320 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4321 SET_DECL_VALUE_EXPR (new_var, ivar);
4322 }
4323 else
4324 {
4325 SET_DECL_VALUE_EXPR (new_vard,
4326 build_fold_addr_expr (ivar));
4327 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4328 }
4329 x = lang_hooks.decls.omp_clause_default_ctor
4330 (c, unshare_expr (ivar),
4331 build_outer_var_ref (var, ctx));
4332 if (x)
4333 gimplify_and_add (x, &llist[0]);
4334 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4335 {
4336 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4337 lower_omp (&tseq, ctx);
4338 gimple_seq_add_seq (&llist[0], tseq);
4339 }
4340 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4341 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4342 lower_omp (&tseq, ctx);
4343 gimple_seq_add_seq (&llist[1], tseq);
4344 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4345 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4346 if (new_vard == new_var)
4347 SET_DECL_VALUE_EXPR (new_var, lvar);
4348 else
4349 SET_DECL_VALUE_EXPR (new_vard,
4350 build_fold_addr_expr (lvar));
4351 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4352 if (x)
4353 {
4354 tseq = NULL;
4355 dtor = x;
4356 gimplify_stmt (&dtor, &tseq);
4357 gimple_seq_add_seq (&llist[1], tseq);
4358 }
4359 break;
4360 }
4ceffa27
JJ
4361 /* If this is a reference to constant size reduction var
4362 with placeholder, we haven't emitted the initializer
4363 for it because it is undesirable if SIMD arrays are used.
4364 But if they aren't used, we need to emit the deferred
4365 initialization now. */
629b3d75 4366 else if (omp_is_reference (var) && is_simd)
decaaec8 4367 handle_simd_reference (clause_loc, new_vard, ilist);
acf0174b 4368 x = lang_hooks.decls.omp_clause_default_ctor
92d28cbb
JJ
4369 (c, unshare_expr (new_var),
4370 build_outer_var_ref (var, ctx));
acf0174b
JJ
4371 if (x)
4372 gimplify_and_add (x, ilist);
4373 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4374 {
4375 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4376 lower_omp (&tseq, ctx);
4377 gimple_seq_add_seq (ilist, tseq);
4378 }
4379 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4380 if (is_simd)
4381 {
4382 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4383 lower_omp (&tseq, ctx);
4384 gimple_seq_add_seq (dlist, tseq);
4385 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4386 }
4387 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4388 goto do_dtor;
4389 }
4390 else
4391 {
4392 x = omp_reduction_init (c, TREE_TYPE (new_var));
4393 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
e9792e1d
JJ
4394 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4395
4396 /* reduction(-:var) sums up the partial results, so it
4397 acts identically to reduction(+:var). */
4398 if (code == MINUS_EXPR)
4399 code = PLUS_EXPR;
4400
decaaec8 4401 tree new_vard = new_var;
629b3d75 4402 if (is_simd && omp_is_reference (var))
decaaec8
JJ
4403 {
4404 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4405 new_vard = TREE_OPERAND (new_var, 0);
4406 gcc_assert (DECL_P (new_vard));
4407 }
acf0174b 4408 if (is_simd
6943af07
AM
4409 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4410 ivar, lvar))
acf0174b 4411 {
acf0174b
JJ
4412 tree ref = build_outer_var_ref (var, ctx);
4413
4414 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4415
6943af07 4416 if (sctx.is_simt)
9669b00b
AM
4417 {
4418 if (!simt_lane)
4419 simt_lane = create_tmp_var (unsigned_type_node);
4420 x = build_call_expr_internal_loc
4421 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4422 TREE_TYPE (ivar), 2, ivar, simt_lane);
4423 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4424 gimplify_assign (ivar, x, &llist[2]);
4425 }
acf0174b 4426 x = build2 (code, TREE_TYPE (ref), ref, ivar);
74bf76ed
JJ
4427 ref = build_outer_var_ref (var, ctx);
4428 gimplify_assign (ref, x, &llist[1]);
decaaec8
JJ
4429
4430 if (new_vard != new_var)
4431 {
4432 SET_DECL_VALUE_EXPR (new_vard,
4433 build_fold_addr_expr (lvar));
4434 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4435 }
74bf76ed
JJ
4436 }
4437 else
4438 {
629b3d75 4439 if (omp_is_reference (var) && is_simd)
decaaec8 4440 handle_simd_reference (clause_loc, new_vard, ilist);
74bf76ed
JJ
4441 gimplify_assign (new_var, x, ilist);
4442 if (is_simd)
e9792e1d
JJ
4443 {
4444 tree ref = build_outer_var_ref (var, ctx);
4445
4446 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4447 ref = build_outer_var_ref (var, ctx);
4448 gimplify_assign (ref, x, dlist);
4449 }
74bf76ed 4450 }
953ff289
DN
4451 }
4452 break;
4453
4454 default:
4455 gcc_unreachable ();
4456 }
4457 }
4458 }
4459
9d2f08ab 4460 if (known_eq (sctx.max_vf, 1U))
0c6b03b5
AM
4461 sctx.is_simt = false;
4462
4463 if (sctx.lane || sctx.is_simt)
74bf76ed 4464 {
0c6b03b5 4465 uid = create_tmp_var (ptr_type_node, "simduid");
8928eff3
JJ
4466 /* Don't want uninit warnings on simduid, it is always uninitialized,
4467 but we use it not for the value, but for the DECL_UID only. */
4468 TREE_NO_WARNING (uid) = 1;
0c6b03b5
AM
4469 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4470 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4471 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4472 gimple_omp_for_set_clauses (ctx->stmt, c);
4473 }
4474 /* Emit calls denoting privatized variables and initializing a pointer to
4475 structure that holds private variables as fields after ompdevlow pass. */
4476 if (sctx.is_simt)
4477 {
4478 sctx.simt_eargs[0] = uid;
4479 gimple *g
4480 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4481 gimple_call_set_lhs (g, uid);
4482 gimple_seq_add_stmt (ilist, g);
4483 sctx.simt_eargs.release ();
4484
4485 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4486 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4487 gimple_call_set_lhs (g, simtrec);
4488 gimple_seq_add_stmt (ilist, g);
4489 }
4490 if (sctx.lane)
4491 {
355fe088 4492 gimple *g
74bf76ed 4493 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
6943af07 4494 gimple_call_set_lhs (g, sctx.lane);
74bf76ed
JJ
4495 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4496 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6943af07 4497 g = gimple_build_assign (sctx.lane, INTEGER_CST,
0d0e4a03 4498 build_int_cst (unsigned_type_node, 0));
74bf76ed 4499 gimple_seq_add_stmt (ilist, g);
9669b00b
AM
4500 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4501 if (llist[2])
4502 {
4503 tree simt_vf = create_tmp_var (unsigned_type_node);
4504 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4505 gimple_call_set_lhs (g, simt_vf);
4506 gimple_seq_add_stmt (dlist, g);
4507
4508 tree t = build_int_cst (unsigned_type_node, 1);
4509 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4510 gimple_seq_add_stmt (dlist, g);
4511
4512 t = build_int_cst (unsigned_type_node, 0);
6943af07 4513 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
9669b00b
AM
4514 gimple_seq_add_stmt (dlist, g);
4515
4516 tree body = create_artificial_label (UNKNOWN_LOCATION);
4517 tree header = create_artificial_label (UNKNOWN_LOCATION);
4518 tree end = create_artificial_label (UNKNOWN_LOCATION);
4519 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4520 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4521
4522 gimple_seq_add_seq (dlist, llist[2]);
4523
4524 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4525 gimple_seq_add_stmt (dlist, g);
4526
4527 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4528 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4529 gimple_seq_add_stmt (dlist, g);
4530
4531 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4532 }
74bf76ed
JJ
4533 for (int i = 0; i < 2; i++)
4534 if (llist[i])
4535 {
b731b390 4536 tree vf = create_tmp_var (unsigned_type_node);
74bf76ed
JJ
4537 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4538 gimple_call_set_lhs (g, vf);
4539 gimple_seq *seq = i == 0 ? ilist : dlist;
4540 gimple_seq_add_stmt (seq, g);
4541 tree t = build_int_cst (unsigned_type_node, 0);
6943af07 4542 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
74bf76ed
JJ
4543 gimple_seq_add_stmt (seq, g);
4544 tree body = create_artificial_label (UNKNOWN_LOCATION);
4545 tree header = create_artificial_label (UNKNOWN_LOCATION);
4546 tree end = create_artificial_label (UNKNOWN_LOCATION);
4547 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4548 gimple_seq_add_stmt (seq, gimple_build_label (body));
4549 gimple_seq_add_seq (seq, llist[i]);
4550 t = build_int_cst (unsigned_type_node, 1);
6943af07 4551 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
74bf76ed
JJ
4552 gimple_seq_add_stmt (seq, g);
4553 gimple_seq_add_stmt (seq, gimple_build_label (header));
6943af07 4554 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
74bf76ed
JJ
4555 gimple_seq_add_stmt (seq, g);
4556 gimple_seq_add_stmt (seq, gimple_build_label (end));
4557 }
4558 }
0c6b03b5
AM
4559 if (sctx.is_simt)
4560 {
4561 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4562 gimple *g
4563 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4564 gimple_seq_add_stmt (dlist, g);
4565 }
74bf76ed 4566
953ff289
DN
4567 /* The copyin sequence is not to be executed by the main thread, since
4568 that would result in self-copies. Perhaps not visible to scalars,
4569 but it certainly is to C++ operator=. */
4570 if (copyin_seq)
4571 {
e79983f4
MM
4572 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4573 0);
953ff289
DN
4574 x = build2 (NE_EXPR, boolean_type_node, x,
4575 build_int_cst (TREE_TYPE (x), 0));
4576 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4577 gimplify_and_add (x, ilist);
4578 }
4579
4580 /* If any copyin variable is passed by reference, we must ensure the
4581 master thread doesn't modify it before it is copied over in all
8ca5b2a2
JJ
4582 threads. Similarly for variables in both firstprivate and
4583 lastprivate clauses we need to ensure the lastprivate copying
acf0174b
JJ
4584 happens after firstprivate copying in all threads. And similarly
4585 for UDRs if initializer expression refers to omp_orig. */
4586 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
74bf76ed
JJ
4587 {
4588 /* Don't add any barrier for #pragma omp simd or
4589 #pragma omp distribute. */
4590 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
e2110f8f 4591 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
629b3d75 4592 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
74bf76ed
JJ
4593 }
4594
4595 /* If max_vf is non-zero, then we can use only a vectorization factor
4596 up to the max_vf we chose. So stick it into the safelen clause. */
9d2f08ab 4597 if (maybe_ne (sctx.max_vf, 0U))
74bf76ed 4598 {
629b3d75 4599 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
74bf76ed 4600 OMP_CLAUSE_SAFELEN);
9d2f08ab 4601 poly_uint64 safe_len;
74bf76ed 4602 if (c == NULL_TREE
9d2f08ab
RS
4603 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4604 && maybe_gt (safe_len, sctx.max_vf)))
74bf76ed
JJ
4605 {
4606 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4607 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6943af07 4608 sctx.max_vf);
74bf76ed
JJ
4609 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4610 gimple_omp_for_set_clauses (ctx->stmt, c);
4611 }
4612 }
953ff289
DN
4613}
4614
50674e96 4615
953ff289
DN
4616/* Generate code to implement the LASTPRIVATE clauses. This is used for
4617 both parallel and workshare constructs. PREDICATE may be NULL if it's
4618 always true. */
4619
4620static void
726a989a 4621lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
acf0174b 4622 omp_context *ctx)
953ff289 4623{
74bf76ed 4624 tree x, c, label = NULL, orig_clauses = clauses;
a68ab351 4625 bool par_clauses = false;
9669b00b 4626 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
953ff289 4627
74bf76ed
JJ
4628 /* Early exit if there are no lastprivate or linear clauses. */
4629 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4630 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4631 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4632 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4633 break;
953ff289
DN
4634 if (clauses == NULL)
4635 {
4636 /* If this was a workshare clause, see if it had been combined
4637 with its parallel. In that case, look for the clauses on the
4638 parallel statement itself. */
4639 if (is_parallel_ctx (ctx))
4640 return;
4641
4642 ctx = ctx->outer;
4643 if (ctx == NULL || !is_parallel_ctx (ctx))
4644 return;
4645
629b3d75 4646 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
953ff289
DN
4647 OMP_CLAUSE_LASTPRIVATE);
4648 if (clauses == NULL)
4649 return;
a68ab351 4650 par_clauses = true;
953ff289
DN
4651 }
4652
9669b00b
AM
4653 bool maybe_simt = false;
4654 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4655 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4656 {
629b3d75
MJ
4657 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4658 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
9669b00b
AM
4659 if (simduid)
4660 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4661 }
4662
726a989a
RB
4663 if (predicate)
4664 {
538dd0b7 4665 gcond *stmt;
726a989a 4666 tree label_true, arm1, arm2;
56b1c60e 4667 enum tree_code pred_code = TREE_CODE (predicate);
726a989a 4668
c2255bc4
AH
4669 label = create_artificial_label (UNKNOWN_LOCATION);
4670 label_true = create_artificial_label (UNKNOWN_LOCATION);
56b1c60e
MJ
4671 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4672 {
4673 arm1 = TREE_OPERAND (predicate, 0);
4674 arm2 = TREE_OPERAND (predicate, 1);
4675 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4676 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4677 }
4678 else
4679 {
4680 arm1 = predicate;
4681 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4682 arm2 = boolean_false_node;
4683 pred_code = NE_EXPR;
4684 }
9669b00b
AM
4685 if (maybe_simt)
4686 {
56b1c60e 4687 c = build2 (pred_code, boolean_type_node, arm1, arm2);
9669b00b
AM
4688 c = fold_convert (integer_type_node, c);
4689 simtcond = create_tmp_var (integer_type_node);
4690 gimplify_assign (simtcond, c, stmt_list);
4691 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4692 1, simtcond);
4693 c = create_tmp_var (integer_type_node);
4694 gimple_call_set_lhs (g, c);
4695 gimple_seq_add_stmt (stmt_list, g);
4696 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4697 label_true, label);
4698 }
4699 else
56b1c60e 4700 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
726a989a
RB
4701 gimple_seq_add_stmt (stmt_list, stmt);
4702 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4703 }
953ff289 4704
a68ab351 4705 for (c = clauses; c ;)
953ff289
DN
4706 {
4707 tree var, new_var;
db3927fb 4708 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 4709
74bf76ed
JJ
4710 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4711 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4712 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
a68ab351
JJ
4713 {
4714 var = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
4715 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4716 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4717 && is_taskloop_ctx (ctx))
4718 {
4719 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4720 new_var = lookup_decl (var, ctx->outer);
4721 }
4722 else
2187f2a2
JJ
4723 {
4724 new_var = lookup_decl (var, ctx);
4725 /* Avoid uninitialized warnings for lastprivate and
4726 for linear iterators. */
4727 if (predicate
4728 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4729 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4730 TREE_NO_WARNING (new_var) = 1;
4731 }
953ff289 4732
2260d19d 4733 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
74bf76ed
JJ
4734 {
4735 tree val = DECL_VALUE_EXPR (new_var);
2260d19d 4736 if (TREE_CODE (val) == ARRAY_REF
74bf76ed
JJ
4737 && VAR_P (TREE_OPERAND (val, 0))
4738 && lookup_attribute ("omp simd array",
4739 DECL_ATTRIBUTES (TREE_OPERAND (val,
4740 0))))
4741 {
4742 if (lastlane == NULL)
4743 {
b731b390 4744 lastlane = create_tmp_var (unsigned_type_node);
538dd0b7 4745 gcall *g
74bf76ed
JJ
4746 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4747 2, simduid,
4748 TREE_OPERAND (val, 1));
4749 gimple_call_set_lhs (g, lastlane);
4750 gimple_seq_add_stmt (stmt_list, g);
4751 }
4752 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4753 TREE_OPERAND (val, 0), lastlane,
4754 NULL_TREE, NULL_TREE);
0c6b03b5 4755 }
2260d19d
AM
4756 }
4757 else if (maybe_simt)
4758 {
4759 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4760 ? DECL_VALUE_EXPR (new_var)
4761 : new_var);
4762 if (simtlast == NULL)
0c6b03b5 4763 {
2260d19d
AM
4764 simtlast = create_tmp_var (unsigned_type_node);
4765 gcall *g = gimple_build_call_internal
4766 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4767 gimple_call_set_lhs (g, simtlast);
4768 gimple_seq_add_stmt (stmt_list, g);
74bf76ed 4769 }
2260d19d
AM
4770 x = build_call_expr_internal_loc
4771 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4772 TREE_TYPE (val), 2, val, simtlast);
4773 new_var = unshare_expr (new_var);
4774 gimplify_assign (new_var, x, stmt_list);
4775 new_var = unshare_expr (new_var);
74bf76ed
JJ
4776 }
4777
4778 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4779 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
726a989a 4780 {
355a7673 4781 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
726a989a
RB
4782 gimple_seq_add_seq (stmt_list,
4783 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
74bf76ed 4784 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
726a989a 4785 }
f7468577
JJ
4786 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4787 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4788 {
4789 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4790 gimple_seq_add_seq (stmt_list,
4791 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4792 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4793 }
953ff289 4794
d9a6bd32
JJ
4795 x = NULL_TREE;
4796 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4797 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4798 {
4799 gcc_checking_assert (is_taskloop_ctx (ctx));
4800 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4801 ctx->outer->outer);
4802 if (is_global_var (ovar))
4803 x = ovar;
4804 }
4805 if (!x)
c39dad64 4806 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
629b3d75 4807 if (omp_is_reference (var))
70f34814 4808 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
a68ab351 4809 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
726a989a 4810 gimplify_and_add (x, stmt_list);
a68ab351
JJ
4811 }
4812 c = OMP_CLAUSE_CHAIN (c);
4813 if (c == NULL && !par_clauses)
4814 {
4815 /* If this was a workshare clause, see if it had been combined
4816 with its parallel. In that case, continue looking for the
4817 clauses also on the parallel statement itself. */
4818 if (is_parallel_ctx (ctx))
4819 break;
4820
4821 ctx = ctx->outer;
4822 if (ctx == NULL || !is_parallel_ctx (ctx))
4823 break;
4824
629b3d75 4825 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
a68ab351
JJ
4826 OMP_CLAUSE_LASTPRIVATE);
4827 par_clauses = true;
4828 }
953ff289
DN
4829 }
4830
726a989a
RB
4831 if (label)
4832 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
953ff289
DN
4833}
4834
e5014671
NS
4835/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4836 (which might be a placeholder). INNER is true if this is an inner
4837 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4838 join markers. Generate the before-loop forking sequence in
4839 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4840 general form of these sequences is
4841
4842 GOACC_REDUCTION_SETUP
4843 GOACC_FORK
4844 GOACC_REDUCTION_INIT
4845 ...
4846 GOACC_REDUCTION_FINI
4847 GOACC_JOIN
4848 GOACC_REDUCTION_TEARDOWN. */
4849
41dbbb37 4850static void
e5014671
NS
4851lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4852 gcall *fork, gcall *join, gimple_seq *fork_seq,
4853 gimple_seq *join_seq, omp_context *ctx)
41dbbb37 4854{
e5014671
NS
4855 gimple_seq before_fork = NULL;
4856 gimple_seq after_fork = NULL;
4857 gimple_seq before_join = NULL;
4858 gimple_seq after_join = NULL;
4859 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4860 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4861 unsigned offset = 0;
4862
4863 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4864 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4865 {
4866 tree orig = OMP_CLAUSE_DECL (c);
4867 tree var = maybe_lookup_decl (orig, ctx);
4868 tree ref_to_res = NULL_TREE;
c42cfb5c
CP
4869 tree incoming, outgoing, v1, v2, v3;
4870 bool is_private = false;
e5014671
NS
4871
4872 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4873 if (rcode == MINUS_EXPR)
4874 rcode = PLUS_EXPR;
4875 else if (rcode == TRUTH_ANDIF_EXPR)
4876 rcode = BIT_AND_EXPR;
4877 else if (rcode == TRUTH_ORIF_EXPR)
4878 rcode = BIT_IOR_EXPR;
4879 tree op = build_int_cst (unsigned_type_node, rcode);
4880
4881 if (!var)
4882 var = orig;
e5014671
NS
4883
4884 incoming = outgoing = var;
01914336 4885
e5014671
NS
4886 if (!inner)
4887 {
4888 /* See if an outer construct also reduces this variable. */
4889 omp_context *outer = ctx;
41dbbb37 4890
e5014671
NS
4891 while (omp_context *probe = outer->outer)
4892 {
4893 enum gimple_code type = gimple_code (probe->stmt);
4894 tree cls;
41dbbb37 4895
e5014671
NS
4896 switch (type)
4897 {
4898 case GIMPLE_OMP_FOR:
4899 cls = gimple_omp_for_clauses (probe->stmt);
4900 break;
41dbbb37 4901
e5014671
NS
4902 case GIMPLE_OMP_TARGET:
4903 if (gimple_omp_target_kind (probe->stmt)
4904 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4905 goto do_lookup;
41dbbb37 4906
e5014671
NS
4907 cls = gimple_omp_target_clauses (probe->stmt);
4908 break;
41dbbb37 4909
e5014671
NS
4910 default:
4911 goto do_lookup;
4912 }
01914336 4913
e5014671
NS
4914 outer = probe;
4915 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4916 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4917 && orig == OMP_CLAUSE_DECL (cls))
c42cfb5c
CP
4918 {
4919 incoming = outgoing = lookup_decl (orig, probe);
4920 goto has_outer_reduction;
4921 }
4922 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4923 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4924 && orig == OMP_CLAUSE_DECL (cls))
4925 {
4926 is_private = true;
4927 goto do_lookup;
4928 }
e5014671 4929 }
41dbbb37 4930
e5014671
NS
4931 do_lookup:
4932 /* This is the outermost construct with this reduction,
4933 see if there's a mapping for it. */
4934 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
c42cfb5c 4935 && maybe_lookup_field (orig, outer) && !is_private)
e5014671
NS
4936 {
4937 ref_to_res = build_receiver_ref (orig, false, outer);
629b3d75 4938 if (omp_is_reference (orig))
e5014671 4939 ref_to_res = build_simple_mem_ref (ref_to_res);
41dbbb37 4940
c42cfb5c
CP
4941 tree type = TREE_TYPE (var);
4942 if (POINTER_TYPE_P (type))
4943 type = TREE_TYPE (type);
4944
e5014671 4945 outgoing = var;
c42cfb5c 4946 incoming = omp_reduction_init_op (loc, rcode, type);
e5014671
NS
4947 }
4948 else
11c4c4ba
CLT
4949 {
4950 /* Try to look at enclosing contexts for reduction var,
4951 use original if no mapping found. */
4952 tree t = NULL_TREE;
4953 omp_context *c = ctx->outer;
4954 while (c && !t)
4955 {
4956 t = maybe_lookup_decl (orig, c);
4957 c = c->outer;
4958 }
4959 incoming = outgoing = (t ? t : orig);
4960 }
01914336 4961
e5014671
NS
4962 has_outer_reduction:;
4963 }
41dbbb37 4964
e5014671
NS
4965 if (!ref_to_res)
4966 ref_to_res = integer_zero_node;
41dbbb37 4967
01914336 4968 if (omp_is_reference (orig))
c42cfb5c
CP
4969 {
4970 tree type = TREE_TYPE (var);
4971 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4972
4973 if (!inner)
4974 {
4975 tree x = create_tmp_var (TREE_TYPE (type), id);
4976 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4977 }
4978
4979 v1 = create_tmp_var (type, id);
4980 v2 = create_tmp_var (type, id);
4981 v3 = create_tmp_var (type, id);
4982
4983 gimplify_assign (v1, var, fork_seq);
4984 gimplify_assign (v2, var, fork_seq);
4985 gimplify_assign (v3, var, fork_seq);
4986
4987 var = build_simple_mem_ref (var);
4988 v1 = build_simple_mem_ref (v1);
4989 v2 = build_simple_mem_ref (v2);
4990 v3 = build_simple_mem_ref (v3);
4991 outgoing = build_simple_mem_ref (outgoing);
4992
e387fc64 4993 if (!TREE_CONSTANT (incoming))
c42cfb5c
CP
4994 incoming = build_simple_mem_ref (incoming);
4995 }
4996 else
4997 v1 = v2 = v3 = var;
4998
e5014671 4999 /* Determine position in reduction buffer, which may be used
ef1d3b57
RS
5000 by target. The parser has ensured that this is not a
5001 variable-sized type. */
5002 fixed_size_mode mode
5003 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
e5014671
NS
5004 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5005 offset = (offset + align - 1) & ~(align - 1);
5006 tree off = build_int_cst (sizetype, offset);
5007 offset += GET_MODE_SIZE (mode);
41dbbb37 5008
e5014671
NS
5009 if (!init_code)
5010 {
5011 init_code = build_int_cst (integer_type_node,
5012 IFN_GOACC_REDUCTION_INIT);
5013 fini_code = build_int_cst (integer_type_node,
5014 IFN_GOACC_REDUCTION_FINI);
5015 setup_code = build_int_cst (integer_type_node,
5016 IFN_GOACC_REDUCTION_SETUP);
5017 teardown_code = build_int_cst (integer_type_node,
5018 IFN_GOACC_REDUCTION_TEARDOWN);
5019 }
5020
5021 tree setup_call
5022 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5023 TREE_TYPE (var), 6, setup_code,
5024 unshare_expr (ref_to_res),
5025 incoming, level, op, off);
5026 tree init_call
5027 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5028 TREE_TYPE (var), 6, init_code,
5029 unshare_expr (ref_to_res),
c42cfb5c 5030 v1, level, op, off);
e5014671
NS
5031 tree fini_call
5032 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5033 TREE_TYPE (var), 6, fini_code,
5034 unshare_expr (ref_to_res),
c42cfb5c 5035 v2, level, op, off);
e5014671
NS
5036 tree teardown_call
5037 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5038 TREE_TYPE (var), 6, teardown_code,
c42cfb5c 5039 ref_to_res, v3, level, op, off);
e5014671 5040
c42cfb5c
CP
5041 gimplify_assign (v1, setup_call, &before_fork);
5042 gimplify_assign (v2, init_call, &after_fork);
5043 gimplify_assign (v3, fini_call, &before_join);
e5014671
NS
5044 gimplify_assign (outgoing, teardown_call, &after_join);
5045 }
5046
5047 /* Now stitch things together. */
5048 gimple_seq_add_seq (fork_seq, before_fork);
5049 if (fork)
5050 gimple_seq_add_stmt (fork_seq, fork);
5051 gimple_seq_add_seq (fork_seq, after_fork);
5052
5053 gimple_seq_add_seq (join_seq, before_join);
5054 if (join)
5055 gimple_seq_add_stmt (join_seq, join);
5056 gimple_seq_add_seq (join_seq, after_join);
41dbbb37 5057}
50674e96 5058
953ff289
DN
5059/* Generate code to implement the REDUCTION clauses. */
5060
5061static void
726a989a 5062lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
953ff289 5063{
726a989a 5064 gimple_seq sub_seq = NULL;
355fe088 5065 gimple *stmt;
374d0225 5066 tree x, c;
953ff289
DN
5067 int count = 0;
5068
e5014671
NS
5069 /* OpenACC loop reductions are handled elsewhere. */
5070 if (is_gimple_omp_oacc (ctx->stmt))
5071 return;
5072
74bf76ed
JJ
5073 /* SIMD reductions are handled in lower_rec_input_clauses. */
5074 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 5075 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
74bf76ed
JJ
5076 return;
5077
953ff289
DN
5078 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5079 update in that case, otherwise use a lock. */
5080 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
aaf46ef9 5081 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
953ff289 5082 {
d9a6bd32
JJ
5083 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5084 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
953ff289 5085 {
acf0174b 5086 /* Never use OMP_ATOMIC for array reductions or UDRs. */
953ff289
DN
5087 count = -1;
5088 break;
5089 }
5090 count++;
5091 }
5092
5093 if (count == 0)
5094 return;
5095
5096 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5097 {
d9a6bd32 5098 tree var, ref, new_var, orig_var;
953ff289 5099 enum tree_code code;
db3927fb 5100 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5101
aaf46ef9 5102 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
953ff289
DN
5103 continue;
5104
c24783c4 5105 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
d9a6bd32
JJ
5106 orig_var = var = OMP_CLAUSE_DECL (c);
5107 if (TREE_CODE (var) == MEM_REF)
5108 {
5109 var = TREE_OPERAND (var, 0);
e01d41e5
JJ
5110 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5111 var = TREE_OPERAND (var, 0);
c24783c4 5112 if (TREE_CODE (var) == ADDR_EXPR)
d9a6bd32 5113 var = TREE_OPERAND (var, 0);
c24783c4
JJ
5114 else
5115 {
5116 /* If this is a pointer or referenced based array
5117 section, the var could be private in the outer
5118 context e.g. on orphaned loop construct. Pretend this
5119 is private variable's outer reference. */
5120 ccode = OMP_CLAUSE_PRIVATE;
5121 if (TREE_CODE (var) == INDIRECT_REF)
5122 var = TREE_OPERAND (var, 0);
5123 }
d9a6bd32
JJ
5124 orig_var = var;
5125 if (is_variable_sized (var))
5126 {
5127 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5128 var = DECL_VALUE_EXPR (var);
5129 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5130 var = TREE_OPERAND (var, 0);
5131 gcc_assert (DECL_P (var));
5132 }
5133 }
953ff289 5134 new_var = lookup_decl (var, ctx);
629b3d75 5135 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
70f34814 5136 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
c24783c4 5137 ref = build_outer_var_ref (var, ctx, ccode);
953ff289 5138 code = OMP_CLAUSE_REDUCTION_CODE (c);
50674e96
DN
5139
5140 /* reduction(-:var) sums up the partial results, so it acts
5141 identically to reduction(+:var). */
953ff289
DN
5142 if (code == MINUS_EXPR)
5143 code = PLUS_EXPR;
5144
e5014671 5145 if (count == 1)
953ff289 5146 {
db3927fb 5147 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
5148
5149 addr = save_expr (addr);
5150 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
db3927fb 5151 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
953ff289 5152 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
726a989a 5153 gimplify_and_add (x, stmt_seqp);
953ff289
DN
5154 return;
5155 }
d9a6bd32
JJ
5156 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5157 {
5158 tree d = OMP_CLAUSE_DECL (c);
5159 tree type = TREE_TYPE (d);
5160 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5161 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5162 tree ptype = build_pointer_type (TREE_TYPE (type));
e01d41e5
JJ
5163 tree bias = TREE_OPERAND (d, 1);
5164 d = TREE_OPERAND (d, 0);
5165 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5166 {
5167 tree b = TREE_OPERAND (d, 1);
5168 b = maybe_lookup_decl (b, ctx);
5169 if (b == NULL)
5170 {
5171 b = TREE_OPERAND (d, 1);
5172 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5173 }
5174 if (integer_zerop (bias))
5175 bias = b;
5176 else
5177 {
5178 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5179 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5180 TREE_TYPE (b), b, bias);
5181 }
5182 d = TREE_OPERAND (d, 0);
5183 }
d9a6bd32
JJ
5184 /* For ref build_outer_var_ref already performs this, so
5185 only new_var needs a dereference. */
e01d41e5 5186 if (TREE_CODE (d) == INDIRECT_REF)
d9a6bd32
JJ
5187 {
5188 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
629b3d75 5189 gcc_assert (omp_is_reference (var) && var == orig_var);
d9a6bd32 5190 }
e01d41e5 5191 else if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32
JJ
5192 {
5193 if (orig_var == var)
5194 {
5195 new_var = build_fold_addr_expr (new_var);
5196 ref = build_fold_addr_expr (ref);
5197 }
5198 }
5199 else
5200 {
5201 gcc_assert (orig_var == var);
629b3d75 5202 if (omp_is_reference (var))
d9a6bd32
JJ
5203 ref = build_fold_addr_expr (ref);
5204 }
5205 if (DECL_P (v))
5206 {
5207 tree t = maybe_lookup_decl (v, ctx);
5208 if (t)
5209 v = t;
5210 else
5211 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5212 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5213 }
e01d41e5
JJ
5214 if (!integer_zerop (bias))
5215 {
5216 bias = fold_convert_loc (clause_loc, sizetype, bias);
5217 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5218 TREE_TYPE (new_var), new_var,
5219 unshare_expr (bias));
5220 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5221 TREE_TYPE (ref), ref, bias);
5222 }
d9a6bd32
JJ
5223 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5224 ref = fold_convert_loc (clause_loc, ptype, ref);
5225 tree m = create_tmp_var (ptype, NULL);
5226 gimplify_assign (m, new_var, stmt_seqp);
5227 new_var = m;
5228 m = create_tmp_var (ptype, NULL);
5229 gimplify_assign (m, ref, stmt_seqp);
5230 ref = m;
5231 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5232 tree body = create_artificial_label (UNKNOWN_LOCATION);
5233 tree end = create_artificial_label (UNKNOWN_LOCATION);
5234 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5235 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5236 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5237 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5238 {
5239 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5240 tree decl_placeholder
5241 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5242 SET_DECL_VALUE_EXPR (placeholder, out);
5243 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5244 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5245 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5246 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5247 gimple_seq_add_seq (&sub_seq,
5248 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5249 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5250 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5251 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5252 }
5253 else
5254 {
5255 x = build2 (code, TREE_TYPE (out), out, priv);
5256 out = unshare_expr (out);
5257 gimplify_assign (out, x, &sub_seq);
5258 }
5259 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5260 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5261 gimple_seq_add_stmt (&sub_seq, g);
5262 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5263 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5264 gimple_seq_add_stmt (&sub_seq, g);
5265 g = gimple_build_assign (i, PLUS_EXPR, i,
5266 build_int_cst (TREE_TYPE (i), 1));
5267 gimple_seq_add_stmt (&sub_seq, g);
5268 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5269 gimple_seq_add_stmt (&sub_seq, g);
5270 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5271 }
41dbbb37 5272 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
953ff289
DN
5273 {
5274 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5275
629b3d75 5276 if (omp_is_reference (var)
acf0174b
JJ
5277 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5278 TREE_TYPE (ref)))
db3927fb 5279 ref = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
5280 SET_DECL_VALUE_EXPR (placeholder, ref);
5281 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
355a7673 5282 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
726a989a
RB
5283 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5284 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
953ff289
DN
5285 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5286 }
5287 else
5288 {
5289 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5290 ref = build_outer_var_ref (var, ctx);
726a989a 5291 gimplify_assign (ref, x, &sub_seq);
953ff289
DN
5292 }
5293 }
5294
e79983f4
MM
5295 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5296 0);
726a989a 5297 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289 5298
726a989a 5299 gimple_seq_add_seq (stmt_seqp, sub_seq);
953ff289 5300
e79983f4
MM
5301 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5302 0);
726a989a 5303 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289
DN
5304}
5305
50674e96 5306
953ff289
DN
5307/* Generate code to implement the COPYPRIVATE clauses. */
5308
5309static void
726a989a 5310lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
953ff289
DN
5311 omp_context *ctx)
5312{
5313 tree c;
5314
5315 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5316 {
78db7d92 5317 tree var, new_var, ref, x;
953ff289 5318 bool by_ref;
db3927fb 5319 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5320
aaf46ef9 5321 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
953ff289
DN
5322 continue;
5323
5324 var = OMP_CLAUSE_DECL (c);
7c8f7639 5325 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
5326
5327 ref = build_sender_ref (var, ctx);
78db7d92
JJ
5328 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5329 if (by_ref)
5330 {
5331 x = build_fold_addr_expr_loc (clause_loc, new_var);
5332 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5333 }
726a989a 5334 gimplify_assign (ref, x, slist);
953ff289 5335
78db7d92
JJ
5336 ref = build_receiver_ref (var, false, ctx);
5337 if (by_ref)
5338 {
5339 ref = fold_convert_loc (clause_loc,
5340 build_pointer_type (TREE_TYPE (new_var)),
5341 ref);
5342 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5343 }
629b3d75 5344 if (omp_is_reference (var))
953ff289 5345 {
78db7d92 5346 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
70f34814
RG
5347 ref = build_simple_mem_ref_loc (clause_loc, ref);
5348 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289 5349 }
78db7d92 5350 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
953ff289
DN
5351 gimplify_and_add (x, rlist);
5352 }
5353}
5354
50674e96 5355
953ff289
DN
5356/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5357 and REDUCTION from the sender (aka parent) side. */
5358
5359static void
726a989a
RB
5360lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5361 omp_context *ctx)
953ff289 5362{
d9a6bd32
JJ
5363 tree c, t;
5364 int ignored_looptemp = 0;
5365 bool is_taskloop = false;
5366
5367 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5368 by GOMP_taskloop. */
5369 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5370 {
5371 ignored_looptemp = 2;
5372 is_taskloop = true;
5373 }
953ff289
DN
5374
5375 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5376 {
50674e96 5377 tree val, ref, x, var;
953ff289 5378 bool by_ref, do_in = false, do_out = false;
db3927fb 5379 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5380
aaf46ef9 5381 switch (OMP_CLAUSE_CODE (c))
953ff289 5382 {
a68ab351
JJ
5383 case OMP_CLAUSE_PRIVATE:
5384 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5385 break;
5386 continue;
953ff289
DN
5387 case OMP_CLAUSE_FIRSTPRIVATE:
5388 case OMP_CLAUSE_COPYIN:
5389 case OMP_CLAUSE_LASTPRIVATE:
5390 case OMP_CLAUSE_REDUCTION:
d9a6bd32
JJ
5391 break;
5392 case OMP_CLAUSE_SHARED:
5393 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5394 break;
5395 continue;
acf0174b 5396 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32
JJ
5397 if (ignored_looptemp)
5398 {
5399 ignored_looptemp--;
5400 continue;
5401 }
953ff289
DN
5402 break;
5403 default:
5404 continue;
5405 }
5406
d2dda7fe 5407 val = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
5408 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5409 && TREE_CODE (val) == MEM_REF)
5410 {
5411 val = TREE_OPERAND (val, 0);
e01d41e5
JJ
5412 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5413 val = TREE_OPERAND (val, 0);
d9a6bd32
JJ
5414 if (TREE_CODE (val) == INDIRECT_REF
5415 || TREE_CODE (val) == ADDR_EXPR)
5416 val = TREE_OPERAND (val, 0);
5417 if (is_variable_sized (val))
5418 continue;
5419 }
5420
5421 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5422 outer taskloop region. */
5423 omp_context *ctx_for_o = ctx;
5424 if (is_taskloop
5425 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5426 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5427 ctx_for_o = ctx->outer;
5428
5429 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
50674e96 5430
8ca5b2a2
JJ
5431 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5432 && is_global_var (var))
5433 continue;
d9a6bd32
JJ
5434
5435 t = omp_member_access_dummy_var (var);
5436 if (t)
5437 {
5438 var = DECL_VALUE_EXPR (var);
5439 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5440 if (o != t)
5441 var = unshare_and_remap (var, t, o);
5442 else
5443 var = unshare_expr (var);
5444 }
5445
5446 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5447 {
5448 /* Handle taskloop firstprivate/lastprivate, where the
5449 lastprivate on GIMPLE_OMP_TASK is represented as
5450 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5451 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5452 x = omp_build_component_ref (ctx->sender_decl, f);
5453 if (use_pointer_for_field (val, ctx))
5454 var = build_fold_addr_expr (var);
5455 gimplify_assign (x, var, ilist);
5456 DECL_ABSTRACT_ORIGIN (f) = NULL;
5457 continue;
5458 }
5459
5460 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5461 || val == OMP_CLAUSE_DECL (c))
5462 && is_variable_sized (val))
953ff289 5463 continue;
7c8f7639 5464 by_ref = use_pointer_for_field (val, NULL);
953ff289 5465
aaf46ef9 5466 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
5467 {
5468 case OMP_CLAUSE_FIRSTPRIVATE:
ec35ea45
JJ
5469 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5470 && !by_ref
5471 && is_task_ctx (ctx))
5472 TREE_NO_WARNING (var) = 1;
5473 do_in = true;
5474 break;
5475
5476 case OMP_CLAUSE_PRIVATE:
953ff289 5477 case OMP_CLAUSE_COPYIN:
acf0174b 5478 case OMP_CLAUSE__LOOPTEMP_:
953ff289
DN
5479 do_in = true;
5480 break;
5481
5482 case OMP_CLAUSE_LASTPRIVATE:
629b3d75 5483 if (by_ref || omp_is_reference (val))
953ff289
DN
5484 {
5485 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5486 continue;
5487 do_in = true;
5488 }
5489 else
a68ab351
JJ
5490 {
5491 do_out = true;
5492 if (lang_hooks.decls.omp_private_outer_ref (val))
5493 do_in = true;
5494 }
953ff289
DN
5495 break;
5496
5497 case OMP_CLAUSE_REDUCTION:
5498 do_in = true;
d9a6bd32 5499 if (val == OMP_CLAUSE_DECL (c))
629b3d75 5500 do_out = !(by_ref || omp_is_reference (val));
d9a6bd32
JJ
5501 else
5502 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
953ff289
DN
5503 break;
5504
5505 default:
5506 gcc_unreachable ();
5507 }
5508
5509 if (do_in)
5510 {
5511 ref = build_sender_ref (val, ctx);
db3927fb 5512 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
726a989a 5513 gimplify_assign (ref, x, ilist);
a68ab351
JJ
5514 if (is_task_ctx (ctx))
5515 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
953ff289 5516 }
50674e96 5517
953ff289
DN
5518 if (do_out)
5519 {
5520 ref = build_sender_ref (val, ctx);
726a989a 5521 gimplify_assign (var, ref, olist);
953ff289
DN
5522 }
5523 }
5524}
5525
726a989a
RB
5526/* Generate code to implement SHARED from the sender (aka parent)
5527 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5528 list things that got automatically shared. */
953ff289
DN
5529
5530static void
726a989a 5531lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
953ff289 5532{
d9a6bd32 5533 tree var, ovar, nvar, t, f, x, record_type;
953ff289
DN
5534
5535 if (ctx->record_type == NULL)
5536 return;
50674e96 5537
a68ab351 5538 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
910ad8de 5539 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
953ff289
DN
5540 {
5541 ovar = DECL_ABSTRACT_ORIGIN (f);
d9a6bd32
JJ
5542 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5543 continue;
5544
953ff289
DN
5545 nvar = maybe_lookup_decl (ovar, ctx);
5546 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5547 continue;
5548
50674e96
DN
5549 /* If CTX is a nested parallel directive. Find the immediately
5550 enclosing parallel or workshare construct that contains a
5551 mapping for OVAR. */
d2dda7fe 5552 var = lookup_decl_in_outer_ctx (ovar, ctx);
50674e96 5553
d9a6bd32
JJ
5554 t = omp_member_access_dummy_var (var);
5555 if (t)
5556 {
5557 var = DECL_VALUE_EXPR (var);
5558 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5559 if (o != t)
5560 var = unshare_and_remap (var, t, o);
5561 else
5562 var = unshare_expr (var);
5563 }
5564
7c8f7639 5565 if (use_pointer_for_field (ovar, ctx))
953ff289
DN
5566 {
5567 x = build_sender_ref (ovar, ctx);
50674e96 5568 var = build_fold_addr_expr (var);
726a989a 5569 gimplify_assign (x, var, ilist);
953ff289
DN
5570 }
5571 else
5572 {
5573 x = build_sender_ref (ovar, ctx);
726a989a 5574 gimplify_assign (x, var, ilist);
953ff289 5575
14e5b285
RG
5576 if (!TREE_READONLY (var)
5577 /* We don't need to receive a new reference to a result
5578 or parm decl. In fact we may not store to it as we will
5579 invalidate any pending RSO and generate wrong gimple
5580 during inlining. */
5581 && !((TREE_CODE (var) == RESULT_DECL
5582 || TREE_CODE (var) == PARM_DECL)
5583 && DECL_BY_REFERENCE (var)))
a68ab351
JJ
5584 {
5585 x = build_sender_ref (ovar, ctx);
726a989a 5586 gimplify_assign (var, x, olist);
a68ab351 5587 }
953ff289
DN
5588 }
5589 }
5590}
5591
e4834818
NS
5592/* Emit an OpenACC head marker call, encapulating the partitioning and
5593 other information that must be processed by the target compiler.
5594 Return the maximum number of dimensions the associated loop might
5595 be partitioned over. */
5596
5597static unsigned
5598lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5599 gimple_seq *seq, omp_context *ctx)
5600{
5601 unsigned levels = 0;
5602 unsigned tag = 0;
5603 tree gang_static = NULL_TREE;
5604 auto_vec<tree, 5> args;
5605
5606 args.quick_push (build_int_cst
5607 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5608 args.quick_push (ddvar);
5609 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5610 {
5611 switch (OMP_CLAUSE_CODE (c))
5612 {
5613 case OMP_CLAUSE_GANG:
5614 tag |= OLF_DIM_GANG;
5615 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5616 /* static:* is represented by -1, and we can ignore it, as
5617 scheduling is always static. */
5618 if (gang_static && integer_minus_onep (gang_static))
5619 gang_static = NULL_TREE;
5620 levels++;
5621 break;
5622
5623 case OMP_CLAUSE_WORKER:
5624 tag |= OLF_DIM_WORKER;
5625 levels++;
5626 break;
5627
5628 case OMP_CLAUSE_VECTOR:
5629 tag |= OLF_DIM_VECTOR;
5630 levels++;
5631 break;
5632
5633 case OMP_CLAUSE_SEQ:
5634 tag |= OLF_SEQ;
5635 break;
5636
5637 case OMP_CLAUSE_AUTO:
5638 tag |= OLF_AUTO;
5639 break;
5640
5641 case OMP_CLAUSE_INDEPENDENT:
5642 tag |= OLF_INDEPENDENT;
5643 break;
5644
02889d23
CLT
5645 case OMP_CLAUSE_TILE:
5646 tag |= OLF_TILE;
5647 break;
5648
e4834818
NS
5649 default:
5650 continue;
5651 }
5652 }
5653
5654 if (gang_static)
5655 {
5656 if (DECL_P (gang_static))
5657 gang_static = build_outer_var_ref (gang_static, ctx);
5658 tag |= OLF_GANG_STATIC;
5659 }
5660
5661 /* In a parallel region, loops are implicitly INDEPENDENT. */
5662 omp_context *tgt = enclosing_target_ctx (ctx);
5663 if (!tgt || is_oacc_parallel (tgt))
5664 tag |= OLF_INDEPENDENT;
5665
02889d23
CLT
5666 if (tag & OLF_TILE)
5667 /* Tiling could use all 3 levels. */
5668 levels = 3;
5669 else
5670 {
5671 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5672 Ensure at least one level, or 2 for possible auto
5673 partitioning */
5674 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5675 << OLF_DIM_BASE) | OLF_SEQ));
5676
5677 if (levels < 1u + maybe_auto)
5678 levels = 1u + maybe_auto;
5679 }
e4834818
NS
5680
5681 args.quick_push (build_int_cst (integer_type_node, levels));
5682 args.quick_push (build_int_cst (integer_type_node, tag));
5683 if (gang_static)
5684 args.quick_push (gang_static);
5685
5686 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5687 gimple_set_location (call, loc);
5688 gimple_set_lhs (call, ddvar);
5689 gimple_seq_add_stmt (seq, call);
5690
5691 return levels;
5692}
5693
5694/* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
5695 partitioning level of the enclosed region. */
5696
5697static void
5698lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5699 tree tofollow, gimple_seq *seq)
5700{
5701 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5702 : IFN_UNIQUE_OACC_TAIL_MARK);
5703 tree marker = build_int_cst (integer_type_node, marker_kind);
5704 int nargs = 2 + (tofollow != NULL_TREE);
5705 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5706 marker, ddvar, tofollow);
5707 gimple_set_location (call, loc);
5708 gimple_set_lhs (call, ddvar);
5709 gimple_seq_add_stmt (seq, call);
5710}
5711
5712/* Generate the before and after OpenACC loop sequences. CLAUSES are
5713 the loop clauses, from which we extract reductions. Initialize
5714 HEAD and TAIL. */
5715
5716static void
5717lower_oacc_head_tail (location_t loc, tree clauses,
5718 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5719{
5720 bool inner = false;
5721 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5722 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5723
5724 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
e4834818
NS
5725 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5726 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5727
4877b5a4 5728 gcc_assert (count);
e4834818
NS
5729 for (unsigned done = 1; count; count--, done++)
5730 {
5731 gimple_seq fork_seq = NULL;
5732 gimple_seq join_seq = NULL;
5733
5734 tree place = build_int_cst (integer_type_node, -1);
5735 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5736 fork_kind, ddvar, place);
5737 gimple_set_location (fork, loc);
5738 gimple_set_lhs (fork, ddvar);
5739
5740 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5741 join_kind, ddvar, place);
5742 gimple_set_location (join, loc);
5743 gimple_set_lhs (join, ddvar);
5744
5745 /* Mark the beginning of this level sequence. */
5746 if (inner)
5747 lower_oacc_loop_marker (loc, ddvar, true,
5748 build_int_cst (integer_type_node, count),
5749 &fork_seq);
5750 lower_oacc_loop_marker (loc, ddvar, false,
5751 build_int_cst (integer_type_node, done),
5752 &join_seq);
5753
e5014671
NS
5754 lower_oacc_reductions (loc, clauses, place, inner,
5755 fork, join, &fork_seq, &join_seq, ctx);
e4834818
NS
5756
5757 /* Append this level to head. */
5758 gimple_seq_add_seq (head, fork_seq);
5759 /* Prepend it to tail. */
5760 gimple_seq_add_seq (&join_seq, *tail);
5761 *tail = join_seq;
5762
5763 inner = true;
5764 }
5765
5766 /* Mark the end of the sequence. */
5767 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5768 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5769}
726a989a 5770
629b3d75
MJ
5771/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5772 catch handler and return it. This prevents programs from violating the
5773 structured block semantics with throws. */
726a989a 5774
629b3d75
MJ
5775static gimple_seq
5776maybe_catch_exception (gimple_seq body)
726a989a 5777{
629b3d75
MJ
5778 gimple *g;
5779 tree decl;
b2b40051 5780
629b3d75
MJ
5781 if (!flag_exceptions)
5782 return body;
b2b40051 5783
629b3d75
MJ
5784 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5785 decl = lang_hooks.eh_protect_cleanup_actions ();
5786 else
5787 decl = builtin_decl_explicit (BUILT_IN_TRAP);
b2b40051 5788
629b3d75
MJ
5789 g = gimple_build_eh_must_not_throw (decl);
5790 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5791 GIMPLE_TRY_CATCH);
b2b40051 5792
629b3d75 5793 return gimple_seq_alloc_with_stmt (g);
b2b40051
MJ
5794}
5795
629b3d75
MJ
5796\f
5797/* Routines to lower OMP directives into OMP-GIMPLE. */
726a989a 5798
629b3d75
MJ
5799/* If ctx is a worksharing context inside of a cancellable parallel
5800 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5801 and conditional branch to parallel's cancel_label to handle
5802 cancellation in the implicit barrier. */
953ff289
DN
5803
5804static void
629b3d75 5805maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
953ff289 5806{
629b3d75
MJ
5807 gimple *omp_return = gimple_seq_last_stmt (*body);
5808 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5809 if (gimple_omp_return_nowait_p (omp_return))
5810 return;
5811 if (ctx->outer
5812 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5813 && ctx->outer->cancellable)
50674e96 5814 {
629b3d75
MJ
5815 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5816 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5817 tree lhs = create_tmp_var (c_bool_type);
5818 gimple_omp_return_set_lhs (omp_return, lhs);
5819 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5820 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5821 fold_convert (c_bool_type,
5822 boolean_false_node),
5823 ctx->outer->cancel_label, fallthru_label);
5824 gimple_seq_add_stmt (body, g);
5825 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
50674e96 5826 }
629b3d75 5827}
953ff289 5828
629b3d75
MJ
5829/* Lower the OpenMP sections directive in the current statement in GSI_P.
5830 CTX is the enclosing OMP context for the current statement. */
953ff289 5831
629b3d75
MJ
5832static void
5833lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5834{
5835 tree block, control;
5836 gimple_stmt_iterator tgsi;
5837 gomp_sections *stmt;
5838 gimple *t;
5839 gbind *new_stmt, *bind;
5840 gimple_seq ilist, dlist, olist, new_body;
953ff289 5841
629b3d75 5842 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
953ff289 5843
629b3d75 5844 push_gimplify_context ();
acf0174b 5845
629b3d75
MJ
5846 dlist = NULL;
5847 ilist = NULL;
5848 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5849 &ilist, &dlist, ctx, NULL);
953ff289 5850
629b3d75
MJ
5851 new_body = gimple_omp_body (stmt);
5852 gimple_omp_set_body (stmt, NULL);
5853 tgsi = gsi_start (new_body);
5854 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
953ff289 5855 {
629b3d75
MJ
5856 omp_context *sctx;
5857 gimple *sec_start;
50674e96 5858
629b3d75
MJ
5859 sec_start = gsi_stmt (tgsi);
5860 sctx = maybe_lookup_ctx (sec_start);
5861 gcc_assert (sctx);
5862
5863 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5864 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5865 GSI_CONTINUE_LINKING);
5866 gimple_omp_set_body (sec_start, NULL);
5867
5868 if (gsi_one_before_end_p (tgsi))
50674e96 5869 {
629b3d75
MJ
5870 gimple_seq l = NULL;
5871 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5872 &l, ctx);
5873 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5874 gimple_omp_section_set_last (sec_start);
5875 }
917948d3 5876
629b3d75
MJ
5877 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5878 GSI_CONTINUE_LINKING);
5879 }
50674e96 5880
629b3d75
MJ
5881 block = make_node (BLOCK);
5882 bind = gimple_build_bind (NULL, new_body, block);
50674e96 5883
629b3d75
MJ
5884 olist = NULL;
5885 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
50674e96 5886
629b3d75
MJ
5887 block = make_node (BLOCK);
5888 new_stmt = gimple_build_bind (NULL, NULL, block);
5889 gsi_replace (gsi_p, new_stmt, true);
50674e96 5890
629b3d75
MJ
5891 pop_gimplify_context (new_stmt);
5892 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5893 BLOCK_VARS (block) = gimple_bind_vars (bind);
5894 if (BLOCK_VARS (block))
5895 TREE_USED (block) = 1;
50674e96 5896
629b3d75
MJ
5897 new_body = NULL;
5898 gimple_seq_add_seq (&new_body, ilist);
5899 gimple_seq_add_stmt (&new_body, stmt);
5900 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5901 gimple_seq_add_stmt (&new_body, bind);
50674e96 5902
629b3d75
MJ
5903 control = create_tmp_var (unsigned_type_node, ".section");
5904 t = gimple_build_omp_continue (control, control);
5905 gimple_omp_sections_set_control (stmt, control);
5906 gimple_seq_add_stmt (&new_body, t);
50674e96 5907
629b3d75
MJ
5908 gimple_seq_add_seq (&new_body, olist);
5909 if (ctx->cancellable)
5910 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5911 gimple_seq_add_seq (&new_body, dlist);
917948d3 5912
629b3d75 5913 new_body = maybe_catch_exception (new_body);
50674e96 5914
01914336
MJ
5915 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5916 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5917 t = gimple_build_omp_return (nowait);
629b3d75
MJ
5918 gimple_seq_add_stmt (&new_body, t);
5919 maybe_add_implicit_barrier_cancel (ctx, &new_body);
953ff289 5920
629b3d75 5921 gimple_bind_set_body (new_stmt, new_body);
953ff289
DN
5922}
5923
9a771876 5924
629b3d75
MJ
5925/* A subroutine of lower_omp_single. Expand the simple form of
5926 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
9a771876 5927
629b3d75
MJ
5928 if (GOMP_single_start ())
5929 BODY;
5930 [ GOMP_barrier (); ] -> unless 'nowait' is present.
9a771876 5931
629b3d75
MJ
5932 FIXME. It may be better to delay expanding the logic of this until
5933 pass_expand_omp. The expanded logic may make the job more difficult
5934 to a synchronization analysis pass. */
a68ab351
JJ
5935
5936static void
629b3d75 5937lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
a68ab351 5938{
629b3d75
MJ
5939 location_t loc = gimple_location (single_stmt);
5940 tree tlabel = create_artificial_label (loc);
5941 tree flabel = create_artificial_label (loc);
5942 gimple *call, *cond;
5943 tree lhs, decl;
20906c66 5944
629b3d75
MJ
5945 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5946 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5947 call = gimple_build_call (decl, 0);
5948 gimple_call_set_lhs (call, lhs);
5949 gimple_seq_add_stmt (pre_p, call);
a68ab351 5950
629b3d75
MJ
5951 cond = gimple_build_cond (EQ_EXPR, lhs,
5952 fold_convert_loc (loc, TREE_TYPE (lhs),
5953 boolean_true_node),
5954 tlabel, flabel);
5955 gimple_seq_add_stmt (pre_p, cond);
5956 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5957 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5958 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
a68ab351
JJ
5959}
5960
5961
629b3d75
MJ
5962/* A subroutine of lower_omp_single. Expand the simple form of
5963 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
953ff289 5964
629b3d75 5965 #pragma omp single copyprivate (a, b, c)
953ff289 5966
629b3d75 5967 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
953ff289 5968
629b3d75
MJ
5969 {
5970 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5971 {
5972 BODY;
5973 copyout.a = a;
5974 copyout.b = b;
5975 copyout.c = c;
5976 GOMP_single_copy_end (&copyout);
5977 }
5978 else
5979 {
5980 a = copyout_p->a;
5981 b = copyout_p->b;
5982 c = copyout_p->c;
5983 }
5984 GOMP_barrier ();
5985 }
726a989a 5986
629b3d75
MJ
5987 FIXME. It may be better to delay expanding the logic of this until
5988 pass_expand_omp. The expanded logic may make the job more difficult
5989 to a synchronization analysis pass. */
953ff289 5990
629b3d75
MJ
5991static void
5992lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
5993 omp_context *ctx)
5994{
5995 tree ptr_type, t, l0, l1, l2, bfn_decl;
5996 gimple_seq copyin_seq;
5997 location_t loc = gimple_location (single_stmt);
953ff289 5998
629b3d75 5999 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
953ff289 6000
629b3d75
MJ
6001 ptr_type = build_pointer_type (ctx->record_type);
6002 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
953ff289 6003
629b3d75
MJ
6004 l0 = create_artificial_label (loc);
6005 l1 = create_artificial_label (loc);
6006 l2 = create_artificial_label (loc);
953ff289 6007
629b3d75
MJ
6008 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6009 t = build_call_expr_loc (loc, bfn_decl, 0);
6010 t = fold_convert_loc (loc, ptr_type, t);
6011 gimplify_assign (ctx->receiver_decl, t, pre_p);
953ff289 6012
629b3d75
MJ
6013 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6014 build_int_cst (ptr_type, 0));
6015 t = build3 (COND_EXPR, void_type_node, t,
6016 build_and_jump (&l0), build_and_jump (&l1));
6017 gimplify_and_add (t, pre_p);
953ff289 6018
629b3d75 6019 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
953ff289 6020
629b3d75 6021 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
953ff289 6022
629b3d75
MJ
6023 copyin_seq = NULL;
6024 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6025 &copyin_seq, ctx);
953ff289 6026
629b3d75
MJ
6027 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6028 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6029 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6030 gimplify_and_add (t, pre_p);
2aee3e57 6031
629b3d75
MJ
6032 t = build_and_jump (&l2);
6033 gimplify_and_add (t, pre_p);
953ff289 6034
629b3d75 6035 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
953ff289 6036
629b3d75 6037 gimple_seq_add_seq (pre_p, copyin_seq);
777f7f9a 6038
629b3d75 6039 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
777f7f9a 6040}
50674e96 6041
629b3d75
MJ
6042
6043/* Expand code for an OpenMP single directive. */
2b4cf991
JJ
6044
6045static void
629b3d75 6046lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
2b4cf991 6047{
629b3d75 6048 tree block;
629b3d75
MJ
6049 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6050 gbind *bind;
6051 gimple_seq bind_body, bind_body_tail = NULL, dlist;
2b4cf991 6052
629b3d75 6053 push_gimplify_context ();
2b4cf991 6054
629b3d75
MJ
6055 block = make_node (BLOCK);
6056 bind = gimple_build_bind (NULL, NULL, block);
6057 gsi_replace (gsi_p, bind, true);
6058 bind_body = NULL;
6059 dlist = NULL;
6060 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6061 &bind_body, &dlist, ctx, NULL);
6062 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
2b4cf991 6063
629b3d75 6064 gimple_seq_add_stmt (&bind_body, single_stmt);
2b4cf991 6065
629b3d75
MJ
6066 if (ctx->record_type)
6067 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6068 else
6069 lower_omp_single_simple (single_stmt, &bind_body);
2b4cf991 6070
629b3d75 6071 gimple_omp_set_body (single_stmt, NULL);
2b4cf991 6072
629b3d75 6073 gimple_seq_add_seq (&bind_body, dlist);
5a0f4dd3 6074
629b3d75 6075 bind_body = maybe_catch_exception (bind_body);
5a0f4dd3 6076
01914336
MJ
6077 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6078 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6079 gimple *g = gimple_build_omp_return (nowait);
6080 gimple_seq_add_stmt (&bind_body_tail, g);
629b3d75
MJ
6081 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6082 if (ctx->record_type)
6083 {
6084 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6085 tree clobber = build_constructor (ctx->record_type, NULL);
6086 TREE_THIS_VOLATILE (clobber) = 1;
6087 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6088 clobber), GSI_SAME_STMT);
6089 }
6090 gimple_seq_add_seq (&bind_body, bind_body_tail);
6091 gimple_bind_set_body (bind, bind_body);
5a0f4dd3 6092
629b3d75 6093 pop_gimplify_context (bind);
5a0f4dd3 6094
629b3d75
MJ
6095 gimple_bind_append_vars (bind, ctx->block_vars);
6096 BLOCK_VARS (block) = ctx->block_vars;
6097 if (BLOCK_VARS (block))
6098 TREE_USED (block) = 1;
5a0f4dd3
JJ
6099}
6100
74bf76ed 6101
629b3d75 6102/* Expand code for an OpenMP master directive. */
953ff289
DN
6103
6104static void
629b3d75 6105lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 6106{
629b3d75
MJ
6107 tree block, lab = NULL, x, bfn_decl;
6108 gimple *stmt = gsi_stmt (*gsi_p);
6109 gbind *bind;
6110 location_t loc = gimple_location (stmt);
6111 gimple_seq tseq;
50674e96 6112
629b3d75 6113 push_gimplify_context ();
50674e96 6114
629b3d75
MJ
6115 block = make_node (BLOCK);
6116 bind = gimple_build_bind (NULL, NULL, block);
6117 gsi_replace (gsi_p, bind, true);
6118 gimple_bind_add_stmt (bind, stmt);
50674e96 6119
629b3d75
MJ
6120 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6121 x = build_call_expr_loc (loc, bfn_decl, 0);
6122 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6123 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6124 tseq = NULL;
6125 gimplify_and_add (x, &tseq);
6126 gimple_bind_add_seq (bind, tseq);
9a771876 6127
629b3d75
MJ
6128 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6129 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6130 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6131 gimple_omp_set_body (stmt, NULL);
b357f682 6132
629b3d75 6133 gimple_bind_add_stmt (bind, gimple_build_label (lab));
99819c63 6134
629b3d75 6135 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
e01d41e5 6136
629b3d75 6137 pop_gimplify_context (bind);
b8698a0f 6138
629b3d75
MJ
6139 gimple_bind_append_vars (bind, ctx->block_vars);
6140 BLOCK_VARS (block) = ctx->block_vars;
953ff289
DN
6141}
6142
e4834818 6143
629b3d75 6144/* Expand code for an OpenMP taskgroup directive. */
e4834818 6145
629b3d75
MJ
6146static void
6147lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
e4834818 6148{
629b3d75
MJ
6149 gimple *stmt = gsi_stmt (*gsi_p);
6150 gcall *x;
6151 gbind *bind;
6152 tree block = make_node (BLOCK);
e4834818 6153
629b3d75
MJ
6154 bind = gimple_build_bind (NULL, NULL, block);
6155 gsi_replace (gsi_p, bind, true);
6156 gimple_bind_add_stmt (bind, stmt);
e4834818 6157
629b3d75
MJ
6158 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6159 0);
6160 gimple_bind_add_stmt (bind, x);
e4834818 6161
629b3d75
MJ
6162 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6163 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6164 gimple_omp_set_body (stmt, NULL);
e4834818 6165
629b3d75 6166 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
e4834818 6167
629b3d75
MJ
6168 gimple_bind_append_vars (bind, ctx->block_vars);
6169 BLOCK_VARS (block) = ctx->block_vars;
e4834818
NS
6170}
6171
50674e96 6172
629b3d75 6173/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
74bf76ed
JJ
6174
6175static void
629b3d75
MJ
6176lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6177 omp_context *ctx)
74bf76ed 6178{
629b3d75
MJ
6179 struct omp_for_data fd;
6180 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6181 return;
74bf76ed 6182
629b3d75
MJ
6183 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6184 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6185 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6186 if (!fd.ordered)
6187 return;
acf0174b 6188
629b3d75
MJ
6189 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6190 tree c = gimple_omp_ordered_clauses (ord_stmt);
6191 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6192 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
74bf76ed 6193 {
629b3d75
MJ
6194 /* Merge depend clauses from multiple adjacent
6195 #pragma omp ordered depend(sink:...) constructs
6196 into one #pragma omp ordered depend(sink:...), so that
6197 we can optimize them together. */
6198 gimple_stmt_iterator gsi = *gsi_p;
6199 gsi_next (&gsi);
6200 while (!gsi_end_p (gsi))
74bf76ed 6201 {
629b3d75
MJ
6202 gimple *stmt = gsi_stmt (gsi);
6203 if (is_gimple_debug (stmt)
6204 || gimple_code (stmt) == GIMPLE_NOP)
74bf76ed 6205 {
629b3d75
MJ
6206 gsi_next (&gsi);
6207 continue;
74bf76ed 6208 }
629b3d75
MJ
6209 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6210 break;
6211 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6212 c = gimple_omp_ordered_clauses (ord_stmt2);
6213 if (c == NULL_TREE
6214 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6215 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6216 break;
6217 while (*list_p)
6218 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6219 *list_p = c;
6220 gsi_remove (&gsi, true);
74bf76ed
JJ
6221 }
6222 }
74bf76ed 6223
629b3d75
MJ
6224 /* Canonicalize sink dependence clauses into one folded clause if
6225 possible.
74bf76ed 6226
629b3d75
MJ
6227 The basic algorithm is to create a sink vector whose first
6228 element is the GCD of all the first elements, and whose remaining
6229 elements are the minimum of the subsequent columns.
74bf76ed 6230
629b3d75
MJ
6231 We ignore dependence vectors whose first element is zero because
6232 such dependencies are known to be executed by the same thread.
acf0174b 6233
629b3d75
MJ
6234 We take into account the direction of the loop, so a minimum
6235 becomes a maximum if the loop is iterating forwards. We also
6236 ignore sink clauses where the loop direction is unknown, or where
6237 the offsets are clearly invalid because they are not a multiple
6238 of the loop increment.
6239
6240 For example:
6241
6242 #pragma omp for ordered(2)
6243 for (i=0; i < N; ++i)
6244 for (j=0; j < M; ++j)
acf0174b 6245 {
629b3d75
MJ
6246 #pragma omp ordered \
6247 depend(sink:i-8,j-2) \
6248 depend(sink:i,j-1) \ // Completely ignored because i+0.
6249 depend(sink:i-4,j-3) \
6250 depend(sink:i-6,j-4)
6251 #pragma omp ordered depend(source)
acf0174b 6252 }
acf0174b 6253
629b3d75 6254 Folded clause is:
74bf76ed 6255
629b3d75
MJ
6256 depend(sink:-gcd(8,4,6),-min(2,3,4))
6257 -or-
6258 depend(sink:-2,-2)
6259 */
74bf76ed 6260
629b3d75
MJ
6261 /* FIXME: Computing GCD's where the first element is zero is
6262 non-trivial in the presence of collapsed loops. Do this later. */
6263 if (fd.collapse > 1)
6264 return;
74bf76ed 6265
629b3d75 6266 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
c3684b7b
MS
6267
6268 /* wide_int is not a POD so it must be default-constructed. */
6269 for (unsigned i = 0; i != 2 * len - 1; ++i)
6270 new (static_cast<void*>(folded_deps + i)) wide_int ();
6271
629b3d75
MJ
6272 tree folded_dep = NULL_TREE;
6273 /* TRUE if the first dimension's offset is negative. */
6274 bool neg_offset_p = false;
74bf76ed 6275
629b3d75
MJ
6276 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6277 unsigned int i;
6278 while ((c = *list_p) != NULL)
74bf76ed 6279 {
629b3d75 6280 bool remove = false;
74bf76ed 6281
629b3d75
MJ
6282 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6283 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6284 goto next_ordered_clause;
74bf76ed 6285
629b3d75
MJ
6286 tree vec;
6287 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6288 vec && TREE_CODE (vec) == TREE_LIST;
6289 vec = TREE_CHAIN (vec), ++i)
74bf76ed 6290 {
629b3d75 6291 gcc_assert (i < len);
74bf76ed 6292
629b3d75
MJ
6293 /* omp_extract_for_data has canonicalized the condition. */
6294 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6295 || fd.loops[i].cond_code == GT_EXPR);
6296 bool forward = fd.loops[i].cond_code == LT_EXPR;
6297 bool maybe_lexically_later = true;
953ff289 6298
629b3d75
MJ
6299 /* While the committee makes up its mind, bail if we have any
6300 non-constant steps. */
6301 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6302 goto lower_omp_ordered_ret;
953ff289 6303
629b3d75
MJ
6304 tree itype = TREE_TYPE (TREE_VALUE (vec));
6305 if (POINTER_TYPE_P (itype))
6306 itype = sizetype;
8e6cdc90 6307 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
629b3d75
MJ
6308 TYPE_PRECISION (itype),
6309 TYPE_SIGN (itype));
a68ab351 6310
629b3d75 6311 /* Ignore invalid offsets that are not multiples of the step. */
8e6cdc90
RS
6312 if (!wi::multiple_of_p (wi::abs (offset),
6313 wi::abs (wi::to_wide (fd.loops[i].step)),
6314 UNSIGNED))
b4c3a85b 6315 {
629b3d75
MJ
6316 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6317 "ignoring sink clause with offset that is not "
6318 "a multiple of the loop step");
6319 remove = true;
6320 goto next_ordered_clause;
b4c3a85b 6321 }
d9a6bd32 6322
629b3d75
MJ
6323 /* Calculate the first dimension. The first dimension of
6324 the folded dependency vector is the GCD of the first
6325 elements, while ignoring any first elements whose offset
6326 is 0. */
6327 if (i == 0)
b4c3a85b 6328 {
629b3d75
MJ
6329 /* Ignore dependence vectors whose first dimension is 0. */
6330 if (offset == 0)
b4c3a85b 6331 {
629b3d75
MJ
6332 remove = true;
6333 goto next_ordered_clause;
b4c3a85b 6334 }
d9a6bd32 6335 else
629b3d75
MJ
6336 {
6337 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6338 {
6339 error_at (OMP_CLAUSE_LOCATION (c),
6340 "first offset must be in opposite direction "
6341 "of loop iterations");
6342 goto lower_omp_ordered_ret;
6343 }
6344 if (forward)
6345 offset = -offset;
6346 neg_offset_p = forward;
6347 /* Initialize the first time around. */
6348 if (folded_dep == NULL_TREE)
6349 {
6350 folded_dep = c;
6351 folded_deps[0] = offset;
6352 }
6353 else
6354 folded_deps[0] = wi::gcd (folded_deps[0],
6355 offset, UNSIGNED);
6356 }
d9a6bd32 6357 }
629b3d75 6358 /* Calculate minimum for the remaining dimensions. */
d9a6bd32 6359 else
d9a6bd32 6360 {
629b3d75
MJ
6361 folded_deps[len + i - 1] = offset;
6362 if (folded_dep == c)
6363 folded_deps[i] = offset;
6364 else if (maybe_lexically_later
6365 && !wi::eq_p (folded_deps[i], offset))
6366 {
6367 if (forward ^ wi::gts_p (folded_deps[i], offset))
6368 {
6369 unsigned int j;
6370 folded_dep = c;
6371 for (j = 1; j <= i; j++)
6372 folded_deps[j] = folded_deps[len + j - 1];
6373 }
6374 else
6375 maybe_lexically_later = false;
6376 }
d9a6bd32 6377 }
d9a6bd32 6378 }
629b3d75 6379 gcc_assert (i == len);
d9a6bd32 6380
629b3d75
MJ
6381 remove = true;
6382
6383 next_ordered_clause:
6384 if (remove)
6385 *list_p = OMP_CLAUSE_CHAIN (c);
d9a6bd32 6386 else
629b3d75 6387 list_p = &OMP_CLAUSE_CHAIN (c);
d9a6bd32 6388 }
d9a6bd32 6389
629b3d75 6390 if (folded_dep)
d9a6bd32 6391 {
629b3d75
MJ
6392 if (neg_offset_p)
6393 folded_deps[0] = -folded_deps[0];
d9a6bd32 6394
629b3d75
MJ
6395 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6396 if (POINTER_TYPE_P (itype))
6397 itype = sizetype;
6398
6399 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6400 = wide_int_to_tree (itype, folded_deps[0]);
6401 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6402 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
d9a6bd32
JJ
6403 }
6404
629b3d75 6405 lower_omp_ordered_ret:
d9a6bd32 6406
629b3d75
MJ
6407 /* Ordered without clauses is #pragma omp threads, while we want
6408 a nop instead if we remove all clauses. */
6409 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6410 gsi_replace (gsi_p, gimple_build_nop (), true);
d9a6bd32
JJ
6411}
6412
6413
629b3d75 6414/* Expand code for an OpenMP ordered directive. */
953ff289 6415
777f7f9a 6416static void
629b3d75 6417lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 6418{
629b3d75
MJ
6419 tree block;
6420 gimple *stmt = gsi_stmt (*gsi_p), *g;
6421 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6422 gcall *x;
6423 gbind *bind;
6424 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6425 OMP_CLAUSE_SIMD);
6426 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6427 loop. */
6428 bool maybe_simt
6429 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6430 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6431 OMP_CLAUSE_THREADS);
d9a6bd32 6432
629b3d75
MJ
6433 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6434 OMP_CLAUSE_DEPEND))
d9a6bd32 6435 {
629b3d75
MJ
6436 /* FIXME: This is needs to be moved to the expansion to verify various
6437 conditions only testable on cfg with dominators computed, and also
6438 all the depend clauses to be merged still might need to be available
6439 for the runtime checks. */
6440 if (0)
6441 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6442 return;
a68ab351 6443 }
d9a6bd32 6444
629b3d75
MJ
6445 push_gimplify_context ();
6446
6447 block = make_node (BLOCK);
6448 bind = gimple_build_bind (NULL, NULL, block);
6449 gsi_replace (gsi_p, bind, true);
6450 gimple_bind_add_stmt (bind, stmt);
d9a6bd32 6451
629b3d75 6452 if (simd)
917948d3 6453 {
629b3d75
MJ
6454 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6455 build_int_cst (NULL_TREE, threads));
6456 cfun->has_simduid_loops = true;
917948d3
ZD
6457 }
6458 else
629b3d75
MJ
6459 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6460 0);
6461 gimple_bind_add_stmt (bind, x);
6462
6463 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6464 if (maybe_simt)
953ff289 6465 {
629b3d75
MJ
6466 counter = create_tmp_var (integer_type_node);
6467 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6468 gimple_call_set_lhs (g, counter);
6469 gimple_bind_add_stmt (bind, g);
d9a6bd32 6470
629b3d75
MJ
6471 body = create_artificial_label (UNKNOWN_LOCATION);
6472 test = create_artificial_label (UNKNOWN_LOCATION);
6473 gimple_bind_add_stmt (bind, gimple_build_label (body));
953ff289 6474
629b3d75
MJ
6475 tree simt_pred = create_tmp_var (integer_type_node);
6476 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6477 gimple_call_set_lhs (g, simt_pred);
6478 gimple_bind_add_stmt (bind, g);
d9a6bd32 6479
629b3d75
MJ
6480 tree t = create_artificial_label (UNKNOWN_LOCATION);
6481 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6482 gimple_bind_add_stmt (bind, g);
74bf76ed 6483
629b3d75 6484 gimple_bind_add_stmt (bind, gimple_build_label (t));
acf0174b 6485 }
629b3d75
MJ
6486 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6487 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6488 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6489 gimple_omp_set_body (stmt, NULL);
acf0174b 6490
629b3d75 6491 if (maybe_simt)
d9a6bd32 6492 {
629b3d75
MJ
6493 gimple_bind_add_stmt (bind, gimple_build_label (test));
6494 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6495 gimple_bind_add_stmt (bind, g);
50674e96 6496
629b3d75
MJ
6497 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6498 tree nonneg = create_tmp_var (integer_type_node);
6499 gimple_seq tseq = NULL;
6500 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6501 gimple_bind_add_seq (bind, tseq);
d9a6bd32 6502
629b3d75
MJ
6503 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6504 gimple_call_set_lhs (g, nonneg);
6505 gimple_bind_add_stmt (bind, g);
d9a6bd32 6506
629b3d75
MJ
6507 tree end = create_artificial_label (UNKNOWN_LOCATION);
6508 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6509 gimple_bind_add_stmt (bind, g);
50674e96 6510
629b3d75 6511 gimple_bind_add_stmt (bind, gimple_build_label (end));
e5c95afe 6512 }
629b3d75
MJ
6513 if (simd)
6514 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6515 build_int_cst (NULL_TREE, threads));
777f7f9a 6516 else
629b3d75
MJ
6517 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6518 0);
6519 gimple_bind_add_stmt (bind, x);
917948d3 6520
629b3d75 6521 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
917948d3 6522
629b3d75 6523 pop_gimplify_context (bind);
917948d3 6524
629b3d75
MJ
6525 gimple_bind_append_vars (bind, ctx->block_vars);
6526 BLOCK_VARS (block) = gimple_bind_vars (bind);
6527}
56102c7f 6528
56102c7f 6529
629b3d75
MJ
6530/* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6531 substitution of a couple of function calls. But in the NAMED case,
6532 requires that languages coordinate a symbol name. It is therefore
6533 best put here in common code. */
56102c7f 6534
629b3d75 6535static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
56102c7f 6536
629b3d75
MJ
6537static void
6538lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6539{
6540 tree block;
6541 tree name, lock, unlock;
6542 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6543 gbind *bind;
6544 location_t loc = gimple_location (stmt);
6545 gimple_seq tbody;
56102c7f 6546
629b3d75
MJ
6547 name = gimple_omp_critical_name (stmt);
6548 if (name)
6549 {
6550 tree decl;
56102c7f 6551
629b3d75
MJ
6552 if (!critical_name_mutexes)
6553 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
56102c7f 6554
629b3d75
MJ
6555 tree *n = critical_name_mutexes->get (name);
6556 if (n == NULL)
74bf76ed 6557 {
629b3d75 6558 char *new_str;
953ff289 6559
629b3d75 6560 decl = create_tmp_var_raw (ptr_type_node);
953ff289 6561
629b3d75
MJ
6562 new_str = ACONCAT ((".gomp_critical_user_",
6563 IDENTIFIER_POINTER (name), NULL));
6564 DECL_NAME (decl) = get_identifier (new_str);
6565 TREE_PUBLIC (decl) = 1;
6566 TREE_STATIC (decl) = 1;
6567 DECL_COMMON (decl) = 1;
6568 DECL_ARTIFICIAL (decl) = 1;
6569 DECL_IGNORED_P (decl) = 1;
953ff289 6570
629b3d75 6571 varpool_node::finalize_decl (decl);
953ff289 6572
629b3d75
MJ
6573 critical_name_mutexes->put (name, decl);
6574 }
6575 else
6576 decl = *n;
953ff289 6577
629b3d75
MJ
6578 /* If '#pragma omp critical' is inside offloaded region or
6579 inside function marked as offloadable, the symbol must be
6580 marked as offloadable too. */
6581 omp_context *octx;
6582 if (cgraph_node::get (current_function_decl)->offloadable)
6583 varpool_node::get_create (decl)->offloadable = 1;
6584 else
6585 for (octx = ctx->outer; octx; octx = octx->outer)
6586 if (is_gimple_omp_offloaded (octx->stmt))
6587 {
6588 varpool_node::get_create (decl)->offloadable = 1;
6589 break;
6590 }
777f7f9a 6591
629b3d75 6592 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
01914336
MJ
6593 lock = build_call_expr_loc (loc, lock, 1,
6594 build_fold_addr_expr_loc (loc, decl));
777f7f9a 6595
629b3d75
MJ
6596 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6597 unlock = build_call_expr_loc (loc, unlock, 1,
6598 build_fold_addr_expr_loc (loc, decl));
acf0174b 6599 }
acf0174b 6600 else
5a0f4dd3 6601 {
629b3d75
MJ
6602 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6603 lock = build_call_expr_loc (loc, lock, 0);
5a0f4dd3 6604
629b3d75
MJ
6605 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6606 unlock = build_call_expr_loc (loc, unlock, 0);
acf0174b 6607 }
953ff289 6608
629b3d75 6609 push_gimplify_context ();
fb79f500 6610
629b3d75
MJ
6611 block = make_node (BLOCK);
6612 bind = gimple_build_bind (NULL, NULL, block);
6613 gsi_replace (gsi_p, bind, true);
6614 gimple_bind_add_stmt (bind, stmt);
fb79f500 6615
629b3d75
MJ
6616 tbody = gimple_bind_body (bind);
6617 gimplify_and_add (lock, &tbody);
6618 gimple_bind_set_body (bind, tbody);
fb79f500 6619
629b3d75
MJ
6620 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6621 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6622 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6623 gimple_omp_set_body (stmt, NULL);
953ff289 6624
629b3d75
MJ
6625 tbody = gimple_bind_body (bind);
6626 gimplify_and_add (unlock, &tbody);
6627 gimple_bind_set_body (bind, tbody);
953ff289 6628
629b3d75 6629 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
917948d3 6630
629b3d75
MJ
6631 pop_gimplify_context (bind);
6632 gimple_bind_append_vars (bind, ctx->block_vars);
6633 BLOCK_VARS (block) = gimple_bind_vars (bind);
6634}
50674e96 6635
629b3d75
MJ
6636/* A subroutine of lower_omp_for. Generate code to emit the predicate
6637 for a lastprivate clause. Given a loop control predicate of (V
6638 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6639 is appended to *DLIST, iterator initialization is appended to
6640 *BODY_P. */
50674e96 6641
629b3d75
MJ
6642static void
6643lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6644 gimple_seq *dlist, struct omp_context *ctx)
6645{
6646 tree clauses, cond, vinit;
6647 enum tree_code cond_code;
6648 gimple_seq stmts;
953ff289 6649
629b3d75
MJ
6650 cond_code = fd->loop.cond_code;
6651 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
acf0174b 6652
629b3d75
MJ
6653 /* When possible, use a strict equality expression. This can let VRP
6654 type optimizations deduce the value and remove a copy. */
6655 if (tree_fits_shwi_p (fd->loop.step))
acf0174b 6656 {
629b3d75
MJ
6657 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6658 if (step == 1 || step == -1)
6659 cond_code = EQ_EXPR;
acf0174b 6660 }
629b3d75
MJ
6661
6662 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6663 || gimple_omp_for_grid_phony (fd->for_stmt))
6664 cond = omp_grid_lastprivate_predicate (fd);
a68ab351 6665 else
acf0174b 6666 {
629b3d75
MJ
6667 tree n2 = fd->loop.n2;
6668 if (fd->collapse > 1
6669 && TREE_CODE (n2) != INTEGER_CST
6670 && gimple_omp_for_combined_into_p (fd->for_stmt))
d9a6bd32 6671 {
629b3d75
MJ
6672 struct omp_context *taskreg_ctx = NULL;
6673 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
d9a6bd32 6674 {
629b3d75
MJ
6675 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6676 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6677 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
d9a6bd32 6678 {
629b3d75
MJ
6679 if (gimple_omp_for_combined_into_p (gfor))
6680 {
6681 gcc_assert (ctx->outer->outer
6682 && is_parallel_ctx (ctx->outer->outer));
6683 taskreg_ctx = ctx->outer->outer;
6684 }
6685 else
6686 {
6687 struct omp_for_data outer_fd;
6688 omp_extract_for_data (gfor, &outer_fd, NULL);
6689 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6690 }
d9a6bd32 6691 }
629b3d75
MJ
6692 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6693 taskreg_ctx = ctx->outer->outer;
6694 }
6695 else if (is_taskreg_ctx (ctx->outer))
6696 taskreg_ctx = ctx->outer;
6697 if (taskreg_ctx)
6698 {
6699 int i;
6700 tree taskreg_clauses
6701 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6702 tree innerc = omp_find_clause (taskreg_clauses,
6703 OMP_CLAUSE__LOOPTEMP_);
6704 gcc_assert (innerc);
6705 for (i = 0; i < fd->collapse; i++)
6706 {
6707 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6708 OMP_CLAUSE__LOOPTEMP_);
6709 gcc_assert (innerc);
6710 }
6711 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6712 OMP_CLAUSE__LOOPTEMP_);
6713 if (innerc)
6714 n2 = fold_convert (TREE_TYPE (n2),
6715 lookup_decl (OMP_CLAUSE_DECL (innerc),
6716 taskreg_ctx));
d9a6bd32 6717 }
acf0174b 6718 }
629b3d75 6719 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
acf0174b 6720 }
50674e96 6721
629b3d75
MJ
6722 clauses = gimple_omp_for_clauses (fd->for_stmt);
6723 stmts = NULL;
6724 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6725 if (!gimple_seq_empty_p (stmts))
acf0174b 6726 {
629b3d75
MJ
6727 gimple_seq_add_seq (&stmts, *dlist);
6728 *dlist = stmts;
6093bc06 6729
629b3d75
MJ
6730 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6731 vinit = fd->loop.n1;
6732 if (cond_code == EQ_EXPR
6733 && tree_fits_shwi_p (fd->loop.n2)
6734 && ! integer_zerop (fd->loop.n2))
6735 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6736 else
6737 vinit = unshare_expr (vinit);
e67d7a1e 6738
629b3d75
MJ
6739 /* Initialize the iterator variable, so that threads that don't execute
6740 any iterations don't execute the lastprivate clauses by accident. */
6741 gimplify_assign (fd->loop.v, vinit, body_p);
acf0174b 6742 }
953ff289
DN
6743}
6744
1b96e9a4 6745
629b3d75 6746/* Lower code for an OMP loop directive. */
50674e96 6747
629b3d75
MJ
6748static void
6749lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6750{
6751 tree *rhs_p, block;
6752 struct omp_for_data fd, *fdp = NULL;
6753 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6754 gbind *new_stmt;
6755 gimple_seq omp_for_body, body, dlist;
6756 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6757 size_t i;
953ff289 6758
629b3d75 6759 push_gimplify_context ();
953ff289 6760
629b3d75 6761 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
953ff289 6762
629b3d75
MJ
6763 block = make_node (BLOCK);
6764 new_stmt = gimple_build_bind (NULL, NULL, block);
6765 /* Replace at gsi right away, so that 'stmt' is no member
6766 of a sequence anymore as we're going to add to a different
6767 one below. */
6768 gsi_replace (gsi_p, new_stmt, true);
953ff289 6769
629b3d75
MJ
6770 /* Move declaration of temporaries in the loop body before we make
6771 it go away. */
6772 omp_for_body = gimple_omp_body (stmt);
6773 if (!gimple_seq_empty_p (omp_for_body)
6774 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
acf0174b 6775 {
629b3d75
MJ
6776 gbind *inner_bind
6777 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6778 tree vars = gimple_bind_vars (inner_bind);
6779 gimple_bind_append_vars (new_stmt, vars);
6780 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6781 keep them on the inner_bind and it's block. */
6782 gimple_bind_set_vars (inner_bind, NULL_TREE);
6783 if (gimple_bind_block (inner_bind))
6784 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
acf0174b 6785 }
50674e96 6786
629b3d75 6787 if (gimple_omp_for_combined_into_p (stmt))
5a0f4dd3 6788 {
629b3d75
MJ
6789 omp_extract_for_data (stmt, &fd, NULL);
6790 fdp = &fd;
6791
6792 /* We need two temporaries with fd.loop.v type (istart/iend)
6793 and then (fd.collapse - 1) temporaries with the same
6794 type for count2 ... countN-1 vars if not constant. */
6795 size_t count = 2;
6796 tree type = fd.iter_type;
6797 if (fd.collapse > 1
6798 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6799 count += fd.collapse - 1;
6800 bool taskreg_for
6801 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6802 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6803 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6e6cf7b0 6804 tree simtc = NULL;
629b3d75
MJ
6805 tree clauses = *pc;
6806 if (taskreg_for)
6807 outerc
6808 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6809 OMP_CLAUSE__LOOPTEMP_);
6e6cf7b0
JJ
6810 if (ctx->simt_stmt)
6811 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6812 OMP_CLAUSE__LOOPTEMP_);
629b3d75 6813 for (i = 0; i < count; i++)
5a0f4dd3 6814 {
629b3d75
MJ
6815 tree temp;
6816 if (taskreg_for)
6817 {
6818 gcc_assert (outerc);
6819 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6820 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6821 OMP_CLAUSE__LOOPTEMP_);
6822 }
6823 else
5a0f4dd3 6824 {
6e6cf7b0
JJ
6825 /* If there are 2 adjacent SIMD stmts, one with _simt_
6826 clause, another without, make sure they have the same
6827 decls in _looptemp_ clauses, because the outer stmt
6828 they are combined into will look up just one inner_stmt. */
6829 if (ctx->simt_stmt)
6830 temp = OMP_CLAUSE_DECL (simtc);
6831 else
6832 temp = create_tmp_var (type);
629b3d75 6833 insert_decl_map (&ctx->outer->cb, temp, temp);
5a0f4dd3 6834 }
629b3d75
MJ
6835 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6836 OMP_CLAUSE_DECL (*pc) = temp;
6837 pc = &OMP_CLAUSE_CHAIN (*pc);
6e6cf7b0
JJ
6838 if (ctx->simt_stmt)
6839 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6840 OMP_CLAUSE__LOOPTEMP_);
5a0f4dd3 6841 }
629b3d75 6842 *pc = clauses;
5a0f4dd3
JJ
6843 }
6844
629b3d75
MJ
6845 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6846 dlist = NULL;
6847 body = NULL;
6848 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6849 fdp);
6850 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
917948d3 6851
629b3d75 6852 lower_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289 6853
629b3d75
MJ
6854 /* Lower the header expressions. At this point, we can assume that
6855 the header is of the form:
50674e96 6856
629b3d75 6857 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
917948d3 6858
629b3d75
MJ
6859 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6860 using the .omp_data_s mapping, if needed. */
6861 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6862 {
6863 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6864 if (!is_gimple_min_invariant (*rhs_p))
6865 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
0fe4bc78
JJ
6866 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6867 recompute_tree_invariant_for_addr_expr (*rhs_p);
50674e96 6868
629b3d75
MJ
6869 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6870 if (!is_gimple_min_invariant (*rhs_p))
6871 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
0fe4bc78
JJ
6872 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6873 recompute_tree_invariant_for_addr_expr (*rhs_p);
d9a6bd32 6874
629b3d75
MJ
6875 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6876 if (!is_gimple_min_invariant (*rhs_p))
6877 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6878 }
953ff289 6879
629b3d75
MJ
6880 /* Once lowered, extract the bounds and clauses. */
6881 omp_extract_for_data (stmt, &fd, NULL);
953ff289 6882
629b3d75
MJ
6883 if (is_gimple_omp_oacc (ctx->stmt)
6884 && !ctx_in_oacc_kernels_region (ctx))
6885 lower_oacc_head_tail (gimple_location (stmt),
6886 gimple_omp_for_clauses (stmt),
6887 &oacc_head, &oacc_tail, ctx);
953ff289 6888
01914336 6889 /* Add OpenACC partitioning and reduction markers just before the loop. */
629b3d75
MJ
6890 if (oacc_head)
6891 gimple_seq_add_seq (&body, oacc_head);
01914336 6892
629b3d75 6893 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
acf0174b 6894
629b3d75
MJ
6895 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6896 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
d9a6bd32
JJ
6897 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6898 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6899 {
629b3d75
MJ
6900 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6901 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6902 OMP_CLAUSE_LINEAR_STEP (c)
6903 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6904 ctx);
d9a6bd32 6905 }
acf0174b 6906
629b3d75
MJ
6907 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6908 && gimple_omp_for_grid_phony (stmt));
6909 if (!phony_loop)
6910 gimple_seq_add_stmt (&body, stmt);
6911 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6912
6913 if (!phony_loop)
6914 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6915 fd.loop.v));
917948d3 6916
629b3d75
MJ
6917 /* After the loop, add exit clauses. */
6918 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
b8698a0f 6919
629b3d75
MJ
6920 if (ctx->cancellable)
6921 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
50674e96 6922
629b3d75 6923 gimple_seq_add_seq (&body, dlist);
953ff289 6924
629b3d75 6925 body = maybe_catch_exception (body);
953ff289 6926
629b3d75 6927 if (!phony_loop)
acf0174b 6928 {
629b3d75
MJ
6929 /* Region exit marker goes at the end of the loop body. */
6930 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6931 maybe_add_implicit_barrier_cancel (ctx, &body);
acf0174b 6932 }
953ff289 6933
629b3d75
MJ
6934 /* Add OpenACC joining and reduction markers just after the loop. */
6935 if (oacc_tail)
6936 gimple_seq_add_seq (&body, oacc_tail);
917948d3 6937
629b3d75 6938 pop_gimplify_context (new_stmt);
917948d3 6939
629b3d75 6940 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6724f8a6 6941 maybe_remove_omp_member_access_dummy_vars (new_stmt);
629b3d75
MJ
6942 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6943 if (BLOCK_VARS (block))
6944 TREE_USED (block) = 1;
917948d3 6945
629b3d75
MJ
6946 gimple_bind_set_body (new_stmt, body);
6947 gimple_omp_set_body (stmt, NULL);
6948 gimple_omp_for_set_pre_body (stmt, NULL);
6949}
17720e84 6950
629b3d75
MJ
6951/* Callback for walk_stmts. Check if the current statement only contains
6952 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
917948d3 6953
629b3d75
MJ
6954static tree
6955check_combined_parallel (gimple_stmt_iterator *gsi_p,
6956 bool *handled_ops_p,
6957 struct walk_stmt_info *wi)
6958{
6959 int *info = (int *) wi->info;
6960 gimple *stmt = gsi_stmt (*gsi_p);
917948d3 6961
629b3d75
MJ
6962 *handled_ops_p = true;
6963 switch (gimple_code (stmt))
acf0174b 6964 {
629b3d75 6965 WALK_SUBSTMTS;
8cba6b95 6966
65f4b875
AO
6967 case GIMPLE_DEBUG:
6968 break;
629b3d75
MJ
6969 case GIMPLE_OMP_FOR:
6970 case GIMPLE_OMP_SECTIONS:
6971 *info = *info == 0 ? 1 : -1;
6972 break;
6973 default:
6974 *info = -1;
6975 break;
acf0174b 6976 }
629b3d75 6977 return NULL;
953ff289
DN
6978}
6979
629b3d75
MJ
6980struct omp_taskcopy_context
6981{
6982 /* This field must be at the beginning, as we do "inheritance": Some
6983 callback functions for tree-inline.c (e.g., omp_copy_decl)
6984 receive a copy_body_data pointer that is up-casted to an
6985 omp_context pointer. */
6986 copy_body_data cb;
6987 omp_context *ctx;
6988};
9a771876 6989
629b3d75
MJ
6990static tree
6991task_copyfn_copy_decl (tree var, copy_body_data *cb)
6992{
6993 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
9a771876 6994
629b3d75
MJ
6995 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6996 return create_tmp_var (TREE_TYPE (var));
9a771876 6997
629b3d75
MJ
6998 return var;
6999}
9a771876 7000
629b3d75
MJ
7001static tree
7002task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
9a771876 7003{
629b3d75 7004 tree name, new_fields = NULL, type, f;
9a771876 7005
629b3d75
MJ
7006 type = lang_hooks.types.make_type (RECORD_TYPE);
7007 name = DECL_NAME (TYPE_NAME (orig_type));
7008 name = build_decl (gimple_location (tcctx->ctx->stmt),
7009 TYPE_DECL, name, type);
7010 TYPE_NAME (type) = name;
9a771876 7011
629b3d75 7012 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
9a771876 7013 {
629b3d75
MJ
7014 tree new_f = copy_node (f);
7015 DECL_CONTEXT (new_f) = type;
7016 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7017 TREE_CHAIN (new_f) = new_fields;
7018 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7019 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7020 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7021 &tcctx->cb, NULL);
7022 new_fields = new_f;
7023 tcctx->cb.decl_map->put (f, new_f);
9a771876 7024 }
629b3d75
MJ
7025 TYPE_FIELDS (type) = nreverse (new_fields);
7026 layout_type (type);
7027 return type;
7028}
9a771876 7029
629b3d75 7030/* Create task copyfn. */
9a771876 7031
629b3d75
MJ
7032static void
7033create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7034{
7035 struct function *child_cfun;
7036 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7037 tree record_type, srecord_type, bind, list;
7038 bool record_needs_remap = false, srecord_needs_remap = false;
7039 splay_tree_node n;
7040 struct omp_taskcopy_context tcctx;
7041 location_t loc = gimple_location (task_stmt);
a3bccfa1 7042 size_t looptempno = 0;
9a771876 7043
629b3d75
MJ
7044 child_fn = gimple_omp_task_copy_fn (task_stmt);
7045 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7046 gcc_assert (child_cfun->cfg == NULL);
7047 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
9a771876 7048
629b3d75
MJ
7049 /* Reset DECL_CONTEXT on function arguments. */
7050 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7051 DECL_CONTEXT (t) = child_fn;
9a771876 7052
629b3d75
MJ
7053 /* Populate the function. */
7054 push_gimplify_context ();
7055 push_cfun (child_cfun);
9a771876 7056
629b3d75
MJ
7057 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7058 TREE_SIDE_EFFECTS (bind) = 1;
7059 list = NULL;
7060 DECL_SAVED_TREE (child_fn) = bind;
7061 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
9a771876 7062
629b3d75
MJ
7063 /* Remap src and dst argument types if needed. */
7064 record_type = ctx->record_type;
7065 srecord_type = ctx->srecord_type;
7066 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7067 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7068 {
7069 record_needs_remap = true;
7070 break;
7071 }
7072 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7073 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7074 {
7075 srecord_needs_remap = true;
7076 break;
7077 }
9a771876 7078
629b3d75 7079 if (record_needs_remap || srecord_needs_remap)
9a771876 7080 {
629b3d75
MJ
7081 memset (&tcctx, '\0', sizeof (tcctx));
7082 tcctx.cb.src_fn = ctx->cb.src_fn;
7083 tcctx.cb.dst_fn = child_fn;
7084 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7085 gcc_checking_assert (tcctx.cb.src_node);
7086 tcctx.cb.dst_node = tcctx.cb.src_node;
7087 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7088 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7089 tcctx.cb.eh_lp_nr = 0;
7090 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7091 tcctx.cb.decl_map = new hash_map<tree, tree>;
7092 tcctx.ctx = ctx;
9a771876 7093
629b3d75
MJ
7094 if (record_needs_remap)
7095 record_type = task_copyfn_remap_type (&tcctx, record_type);
7096 if (srecord_needs_remap)
7097 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
9a771876
JJ
7098 }
7099 else
629b3d75 7100 tcctx.cb.decl_map = NULL;
9a771876 7101
629b3d75
MJ
7102 arg = DECL_ARGUMENTS (child_fn);
7103 TREE_TYPE (arg) = build_pointer_type (record_type);
7104 sarg = DECL_CHAIN (arg);
7105 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
9a771876 7106
629b3d75
MJ
7107 /* First pass: initialize temporaries used in record_type and srecord_type
7108 sizes and field offsets. */
7109 if (tcctx.cb.decl_map)
7110 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7111 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7112 {
7113 tree *p;
9a771876 7114
629b3d75
MJ
7115 decl = OMP_CLAUSE_DECL (c);
7116 p = tcctx.cb.decl_map->get (decl);
7117 if (p == NULL)
7118 continue;
7119 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7120 sf = (tree) n->value;
7121 sf = *tcctx.cb.decl_map->get (sf);
7122 src = build_simple_mem_ref_loc (loc, sarg);
7123 src = omp_build_component_ref (src, sf);
7124 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7125 append_to_statement_list (t, &list);
7126 }
9a771876 7127
629b3d75
MJ
7128 /* Second pass: copy shared var pointers and copy construct non-VLA
7129 firstprivate vars. */
7130 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7131 switch (OMP_CLAUSE_CODE (c))
7132 {
7133 splay_tree_key key;
7134 case OMP_CLAUSE_SHARED:
7135 decl = OMP_CLAUSE_DECL (c);
7136 key = (splay_tree_key) decl;
7137 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7138 key = (splay_tree_key) &DECL_UID (decl);
7139 n = splay_tree_lookup (ctx->field_map, key);
7140 if (n == NULL)
7141 break;
7142 f = (tree) n->value;
7143 if (tcctx.cb.decl_map)
7144 f = *tcctx.cb.decl_map->get (f);
7145 n = splay_tree_lookup (ctx->sfield_map, key);
7146 sf = (tree) n->value;
7147 if (tcctx.cb.decl_map)
7148 sf = *tcctx.cb.decl_map->get (sf);
7149 src = build_simple_mem_ref_loc (loc, sarg);
7150 src = omp_build_component_ref (src, sf);
7151 dst = build_simple_mem_ref_loc (loc, arg);
7152 dst = omp_build_component_ref (dst, f);
7153 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7154 append_to_statement_list (t, &list);
7155 break;
a3bccfa1
JJ
7156 case OMP_CLAUSE__LOOPTEMP_:
7157 /* Fields for first two _looptemp_ clauses are initialized by
7158 GOMP_taskloop*, the rest are handled like firstprivate. */
7159 if (looptempno < 2)
7160 {
7161 looptempno++;
7162 break;
7163 }
7164 /* FALLTHRU */
629b3d75
MJ
7165 case OMP_CLAUSE_FIRSTPRIVATE:
7166 decl = OMP_CLAUSE_DECL (c);
7167 if (is_variable_sized (decl))
7168 break;
7169 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7170 if (n == NULL)
7171 break;
7172 f = (tree) n->value;
7173 if (tcctx.cb.decl_map)
7174 f = *tcctx.cb.decl_map->get (f);
7175 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7176 if (n != NULL)
7177 {
7178 sf = (tree) n->value;
7179 if (tcctx.cb.decl_map)
7180 sf = *tcctx.cb.decl_map->get (sf);
7181 src = build_simple_mem_ref_loc (loc, sarg);
7182 src = omp_build_component_ref (src, sf);
7183 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7184 src = build_simple_mem_ref_loc (loc, src);
7185 }
7186 else
7187 src = decl;
7188 dst = build_simple_mem_ref_loc (loc, arg);
7189 dst = omp_build_component_ref (dst, f);
a3bccfa1
JJ
7190 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__LOOPTEMP_)
7191 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7192 else
7193 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
629b3d75
MJ
7194 append_to_statement_list (t, &list);
7195 break;
7196 case OMP_CLAUSE_PRIVATE:
7197 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7198 break;
7199 decl = OMP_CLAUSE_DECL (c);
7200 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7201 f = (tree) n->value;
7202 if (tcctx.cb.decl_map)
7203 f = *tcctx.cb.decl_map->get (f);
7204 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7205 if (n != NULL)
7206 {
7207 sf = (tree) n->value;
7208 if (tcctx.cb.decl_map)
7209 sf = *tcctx.cb.decl_map->get (sf);
7210 src = build_simple_mem_ref_loc (loc, sarg);
7211 src = omp_build_component_ref (src, sf);
7212 if (use_pointer_for_field (decl, NULL))
7213 src = build_simple_mem_ref_loc (loc, src);
7214 }
7215 else
7216 src = decl;
7217 dst = build_simple_mem_ref_loc (loc, arg);
7218 dst = omp_build_component_ref (dst, f);
7219 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7220 append_to_statement_list (t, &list);
7221 break;
7222 default:
7223 break;
7224 }
74bf76ed 7225
629b3d75
MJ
7226 /* Last pass: handle VLA firstprivates. */
7227 if (tcctx.cb.decl_map)
7228 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7229 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7230 {
7231 tree ind, ptr, df;
74bf76ed 7232
629b3d75
MJ
7233 decl = OMP_CLAUSE_DECL (c);
7234 if (!is_variable_sized (decl))
7235 continue;
7236 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7237 if (n == NULL)
7238 continue;
7239 f = (tree) n->value;
7240 f = *tcctx.cb.decl_map->get (f);
7241 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7242 ind = DECL_VALUE_EXPR (decl);
7243 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7244 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7245 n = splay_tree_lookup (ctx->sfield_map,
7246 (splay_tree_key) TREE_OPERAND (ind, 0));
7247 sf = (tree) n->value;
7248 sf = *tcctx.cb.decl_map->get (sf);
7249 src = build_simple_mem_ref_loc (loc, sarg);
7250 src = omp_build_component_ref (src, sf);
7251 src = build_simple_mem_ref_loc (loc, src);
7252 dst = build_simple_mem_ref_loc (loc, arg);
7253 dst = omp_build_component_ref (dst, f);
7254 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7255 append_to_statement_list (t, &list);
7256 n = splay_tree_lookup (ctx->field_map,
7257 (splay_tree_key) TREE_OPERAND (ind, 0));
7258 df = (tree) n->value;
7259 df = *tcctx.cb.decl_map->get (df);
7260 ptr = build_simple_mem_ref_loc (loc, arg);
7261 ptr = omp_build_component_ref (ptr, df);
7262 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7263 build_fold_addr_expr_loc (loc, dst));
7264 append_to_statement_list (t, &list);
7265 }
74bf76ed 7266
629b3d75
MJ
7267 t = build1 (RETURN_EXPR, void_type_node, NULL);
7268 append_to_statement_list (t, &list);
74bf76ed 7269
629b3d75
MJ
7270 if (tcctx.cb.decl_map)
7271 delete tcctx.cb.decl_map;
7272 pop_gimplify_context (NULL);
7273 BIND_EXPR_BODY (bind) = list;
7274 pop_cfun ();
7275}
74bf76ed
JJ
7276
7277static void
629b3d75 7278lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
74bf76ed 7279{
629b3d75
MJ
7280 tree c, clauses;
7281 gimple *g;
7282 size_t n_in = 0, n_out = 0, idx = 2, i;
7283
7284 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7285 gcc_assert (clauses);
7286 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7287 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7288 switch (OMP_CLAUSE_DEPEND_KIND (c))
7289 {
7290 case OMP_CLAUSE_DEPEND_IN:
7291 n_in++;
7292 break;
7293 case OMP_CLAUSE_DEPEND_OUT:
7294 case OMP_CLAUSE_DEPEND_INOUT:
7295 n_out++;
7296 break;
7297 case OMP_CLAUSE_DEPEND_SOURCE:
7298 case OMP_CLAUSE_DEPEND_SINK:
7299 /* FALLTHRU */
7300 default:
7301 gcc_unreachable ();
7302 }
7303 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7304 tree array = create_tmp_var (type);
7305 TREE_ADDRESSABLE (array) = 1;
7306 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7307 NULL_TREE);
7308 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7309 gimple_seq_add_stmt (iseq, g);
7310 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7311 NULL_TREE);
7312 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7313 gimple_seq_add_stmt (iseq, g);
7314 for (i = 0; i < 2; i++)
74bf76ed 7315 {
629b3d75
MJ
7316 if ((i ? n_in : n_out) == 0)
7317 continue;
7318 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7319 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7320 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7321 {
7322 tree t = OMP_CLAUSE_DECL (c);
7323 t = fold_convert (ptr_type_node, t);
7324 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7325 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7326 NULL_TREE, NULL_TREE);
7327 g = gimple_build_assign (r, t);
7328 gimple_seq_add_stmt (iseq, g);
7329 }
74bf76ed 7330 }
629b3d75
MJ
7331 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7332 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7333 OMP_CLAUSE_CHAIN (c) = *pclauses;
7334 *pclauses = c;
7335 tree clobber = build_constructor (type, NULL);
7336 TREE_THIS_VOLATILE (clobber) = 1;
7337 g = gimple_build_assign (array, clobber);
7338 gimple_seq_add_stmt (oseq, g);
7339}
7340
7341/* Lower the OpenMP parallel or task directive in the current statement
7342 in GSI_P. CTX holds context information for the directive. */
74bf76ed 7343
629b3d75
MJ
7344static void
7345lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7346{
7347 tree clauses;
7348 tree child_fn, t;
7349 gimple *stmt = gsi_stmt (*gsi_p);
7350 gbind *par_bind, *bind, *dep_bind = NULL;
7351 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7352 location_t loc = gimple_location (stmt);
74bf76ed 7353
629b3d75
MJ
7354 clauses = gimple_omp_taskreg_clauses (stmt);
7355 par_bind
7356 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7357 par_body = gimple_bind_body (par_bind);
7358 child_fn = ctx->cb.dst_fn;
7359 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7360 && !gimple_omp_parallel_combined_p (stmt))
74bf76ed 7361 {
629b3d75
MJ
7362 struct walk_stmt_info wi;
7363 int ws_num = 0;
74bf76ed 7364
629b3d75
MJ
7365 memset (&wi, 0, sizeof (wi));
7366 wi.info = &ws_num;
7367 wi.val_only = true;
7368 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7369 if (ws_num == 1)
7370 gimple_omp_parallel_set_combined_p (stmt, true);
74bf76ed 7371 }
629b3d75
MJ
7372 gimple_seq dep_ilist = NULL;
7373 gimple_seq dep_olist = NULL;
7374 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7375 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
acf0174b 7376 {
629b3d75
MJ
7377 push_gimplify_context ();
7378 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7379 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7380 &dep_ilist, &dep_olist);
9669b00b 7381 }
9669b00b 7382
629b3d75
MJ
7383 if (ctx->srecord_type)
7384 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
9669b00b 7385
629b3d75 7386 push_gimplify_context ();
74bf76ed 7387
629b3d75
MJ
7388 par_olist = NULL;
7389 par_ilist = NULL;
7390 par_rlist = NULL;
7391 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7392 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7393 if (phony_construct && ctx->record_type)
9669b00b 7394 {
629b3d75
MJ
7395 gcc_checking_assert (!ctx->receiver_decl);
7396 ctx->receiver_decl = create_tmp_var
7397 (build_reference_type (ctx->record_type), ".omp_rec");
9669b00b 7398 }
629b3d75
MJ
7399 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7400 lower_omp (&par_body, ctx);
7401 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7402 lower_reduction_clauses (clauses, &par_rlist, ctx);
9669b00b 7403
629b3d75
MJ
7404 /* Declare all the variables created by mapping and the variables
7405 declared in the scope of the parallel body. */
7406 record_vars_into (ctx->block_vars, child_fn);
6724f8a6 7407 maybe_remove_omp_member_access_dummy_vars (par_bind);
629b3d75 7408 record_vars_into (gimple_bind_vars (par_bind), child_fn);
74bf76ed 7409
629b3d75 7410 if (ctx->record_type)
74bf76ed 7411 {
629b3d75
MJ
7412 ctx->sender_decl
7413 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7414 : ctx->record_type, ".omp_data_o");
7415 DECL_NAMELESS (ctx->sender_decl) = 1;
7416 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7417 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
74bf76ed 7418 }
74bf76ed 7419
629b3d75
MJ
7420 olist = NULL;
7421 ilist = NULL;
7422 lower_send_clauses (clauses, &ilist, &olist, ctx);
7423 lower_send_shared_vars (&ilist, &olist, ctx);
9669b00b 7424
629b3d75 7425 if (ctx->record_type)
74bf76ed 7426 {
629b3d75
MJ
7427 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7428 TREE_THIS_VOLATILE (clobber) = 1;
7429 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7430 clobber));
d9a6bd32 7431 }
d9a6bd32 7432
629b3d75
MJ
7433 /* Once all the expansions are done, sequence all the different
7434 fragments inside gimple_omp_body. */
d9a6bd32 7435
629b3d75 7436 new_body = NULL;
d9a6bd32 7437
629b3d75 7438 if (ctx->record_type)
d9a6bd32 7439 {
629b3d75
MJ
7440 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7441 /* fixup_child_record_type might have changed receiver_decl's type. */
7442 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7443 gimple_seq_add_stmt (&new_body,
7444 gimple_build_assign (ctx->receiver_decl, t));
d9a6bd32
JJ
7445 }
7446
629b3d75
MJ
7447 gimple_seq_add_seq (&new_body, par_ilist);
7448 gimple_seq_add_seq (&new_body, par_body);
7449 gimple_seq_add_seq (&new_body, par_rlist);
7450 if (ctx->cancellable)
7451 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7452 gimple_seq_add_seq (&new_body, par_olist);
7453 new_body = maybe_catch_exception (new_body);
7454 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7455 gimple_seq_add_stmt (&new_body,
7456 gimple_build_omp_continue (integer_zero_node,
7457 integer_zero_node));
7458 if (!phony_construct)
d9a6bd32 7459 {
629b3d75
MJ
7460 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7461 gimple_omp_set_body (stmt, new_body);
d9a6bd32
JJ
7462 }
7463
629b3d75
MJ
7464 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7465 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7466 gimple_bind_add_seq (bind, ilist);
7467 if (!phony_construct)
7468 gimple_bind_add_stmt (bind, stmt);
d9a6bd32 7469 else
629b3d75
MJ
7470 gimple_bind_add_seq (bind, new_body);
7471 gimple_bind_add_seq (bind, olist);
d9a6bd32 7472
629b3d75
MJ
7473 pop_gimplify_context (NULL);
7474
7475 if (dep_bind)
d9a6bd32 7476 {
629b3d75
MJ
7477 gimple_bind_add_seq (dep_bind, dep_ilist);
7478 gimple_bind_add_stmt (dep_bind, bind);
7479 gimple_bind_add_seq (dep_bind, dep_olist);
7480 pop_gimplify_context (dep_bind);
d9a6bd32 7481 }
d9a6bd32
JJ
7482}
7483
629b3d75
MJ
7484/* Lower the GIMPLE_OMP_TARGET in the current statement
7485 in GSI_P. CTX holds context information for the directive. */
d9a6bd32
JJ
7486
7487static void
629b3d75 7488lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
d9a6bd32 7489{
629b3d75
MJ
7490 tree clauses;
7491 tree child_fn, t, c;
7492 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7493 gbind *tgt_bind, *bind, *dep_bind = NULL;
7494 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7495 location_t loc = gimple_location (stmt);
7496 bool offloaded, data_region;
7497 unsigned int map_cnt = 0;
d9a6bd32 7498
629b3d75
MJ
7499 offloaded = is_gimple_omp_offloaded (stmt);
7500 switch (gimple_omp_target_kind (stmt))
d9a6bd32 7501 {
629b3d75
MJ
7502 case GF_OMP_TARGET_KIND_REGION:
7503 case GF_OMP_TARGET_KIND_UPDATE:
7504 case GF_OMP_TARGET_KIND_ENTER_DATA:
7505 case GF_OMP_TARGET_KIND_EXIT_DATA:
7506 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7507 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7508 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7509 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7510 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7511 data_region = false;
7512 break;
7513 case GF_OMP_TARGET_KIND_DATA:
7514 case GF_OMP_TARGET_KIND_OACC_DATA:
7515 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7516 data_region = true;
7517 break;
7518 default:
7519 gcc_unreachable ();
74bf76ed 7520 }
74bf76ed 7521
629b3d75 7522 clauses = gimple_omp_target_clauses (stmt);
d9a6bd32 7523
629b3d75
MJ
7524 gimple_seq dep_ilist = NULL;
7525 gimple_seq dep_olist = NULL;
7526 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
d9a6bd32 7527 {
629b3d75
MJ
7528 push_gimplify_context ();
7529 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7530 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7531 &dep_ilist, &dep_olist);
d9a6bd32 7532 }
953ff289 7533
629b3d75
MJ
7534 tgt_bind = NULL;
7535 tgt_body = NULL;
7536 if (offloaded)
e4834818 7537 {
629b3d75
MJ
7538 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7539 tgt_body = gimple_bind_body (tgt_bind);
e4834818 7540 }
629b3d75
MJ
7541 else if (data_region)
7542 tgt_body = gimple_omp_body (stmt);
7543 child_fn = ctx->cb.dst_fn;
e4834818 7544
629b3d75
MJ
7545 push_gimplify_context ();
7546 fplist = NULL;
e4834818 7547
629b3d75
MJ
7548 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7549 switch (OMP_CLAUSE_CODE (c))
7550 {
7551 tree var, x;
e4834818 7552
629b3d75
MJ
7553 default:
7554 break;
7555 case OMP_CLAUSE_MAP:
7556#if CHECKING_P
7557 /* First check what we're prepared to handle in the following. */
7558 switch (OMP_CLAUSE_MAP_KIND (c))
7559 {
7560 case GOMP_MAP_ALLOC:
7561 case GOMP_MAP_TO:
7562 case GOMP_MAP_FROM:
7563 case GOMP_MAP_TOFROM:
7564 case GOMP_MAP_POINTER:
7565 case GOMP_MAP_TO_PSET:
7566 case GOMP_MAP_DELETE:
7567 case GOMP_MAP_RELEASE:
7568 case GOMP_MAP_ALWAYS_TO:
7569 case GOMP_MAP_ALWAYS_FROM:
7570 case GOMP_MAP_ALWAYS_TOFROM:
7571 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7572 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7573 case GOMP_MAP_STRUCT:
7574 case GOMP_MAP_ALWAYS_POINTER:
7575 break;
7576 case GOMP_MAP_FORCE_ALLOC:
7577 case GOMP_MAP_FORCE_TO:
7578 case GOMP_MAP_FORCE_FROM:
7579 case GOMP_MAP_FORCE_TOFROM:
7580 case GOMP_MAP_FORCE_PRESENT:
7581 case GOMP_MAP_FORCE_DEVICEPTR:
7582 case GOMP_MAP_DEVICE_RESIDENT:
7583 case GOMP_MAP_LINK:
7584 gcc_assert (is_gimple_omp_oacc (stmt));
7585 break;
7586 default:
7587 gcc_unreachable ();
7588 }
7589#endif
7590 /* FALLTHRU */
7591 case OMP_CLAUSE_TO:
7592 case OMP_CLAUSE_FROM:
7593 oacc_firstprivate:
7594 var = OMP_CLAUSE_DECL (c);
7595 if (!DECL_P (var))
7596 {
7597 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7598 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7599 && (OMP_CLAUSE_MAP_KIND (c)
7600 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7601 map_cnt++;
7602 continue;
7603 }
e4834818 7604
629b3d75
MJ
7605 if (DECL_SIZE (var)
7606 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7607 {
7608 tree var2 = DECL_VALUE_EXPR (var);
7609 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7610 var2 = TREE_OPERAND (var2, 0);
7611 gcc_assert (DECL_P (var2));
7612 var = var2;
7613 }
e4834818 7614
629b3d75
MJ
7615 if (offloaded
7616 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7617 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7618 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7619 {
7620 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7621 {
7622 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7623 && varpool_node::get_create (var)->offloadable)
7624 continue;
e4834818 7625
629b3d75
MJ
7626 tree type = build_pointer_type (TREE_TYPE (var));
7627 tree new_var = lookup_decl (var, ctx);
7628 x = create_tmp_var_raw (type, get_name (new_var));
7629 gimple_add_tmp_var (x);
7630 x = build_simple_mem_ref (x);
7631 SET_DECL_VALUE_EXPR (new_var, x);
7632 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7633 }
7634 continue;
7635 }
e4834818 7636
629b3d75
MJ
7637 if (!maybe_lookup_field (var, ctx))
7638 continue;
e4834818 7639
629b3d75
MJ
7640 /* Don't remap oacc parallel reduction variables, because the
7641 intermediate result must be local to each gang. */
7642 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7643 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7644 {
7645 x = build_receiver_ref (var, true, ctx);
7646 tree new_var = lookup_decl (var, ctx);
e4834818 7647
629b3d75
MJ
7648 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7649 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7650 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7651 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7652 x = build_simple_mem_ref (x);
7653 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7654 {
7655 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
bd1cab35
CLT
7656 if (omp_is_reference (new_var)
7657 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
629b3d75
MJ
7658 {
7659 /* Create a local object to hold the instance
7660 value. */
7661 tree type = TREE_TYPE (TREE_TYPE (new_var));
7662 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7663 tree inst = create_tmp_var (type, id);
7664 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7665 x = build_fold_addr_expr (inst);
7666 }
7667 gimplify_assign (new_var, x, &fplist);
7668 }
7669 else if (DECL_P (new_var))
7670 {
7671 SET_DECL_VALUE_EXPR (new_var, x);
7672 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7673 }
7674 else
7675 gcc_unreachable ();
7676 }
7677 map_cnt++;
7678 break;
e4834818 7679
629b3d75
MJ
7680 case OMP_CLAUSE_FIRSTPRIVATE:
7681 if (is_oacc_parallel (ctx))
7682 goto oacc_firstprivate;
7683 map_cnt++;
7684 var = OMP_CLAUSE_DECL (c);
7685 if (!omp_is_reference (var)
7686 && !is_gimple_reg_type (TREE_TYPE (var)))
7687 {
7688 tree new_var = lookup_decl (var, ctx);
7689 if (is_variable_sized (var))
7690 {
7691 tree pvar = DECL_VALUE_EXPR (var);
7692 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7693 pvar = TREE_OPERAND (pvar, 0);
7694 gcc_assert (DECL_P (pvar));
7695 tree new_pvar = lookup_decl (pvar, ctx);
7696 x = build_fold_indirect_ref (new_pvar);
7697 TREE_THIS_NOTRAP (x) = 1;
7698 }
7699 else
7700 x = build_receiver_ref (var, true, ctx);
7701 SET_DECL_VALUE_EXPR (new_var, x);
7702 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7703 }
7704 break;
e4834818 7705
629b3d75
MJ
7706 case OMP_CLAUSE_PRIVATE:
7707 if (is_gimple_omp_oacc (ctx->stmt))
7708 break;
7709 var = OMP_CLAUSE_DECL (c);
7710 if (is_variable_sized (var))
7711 {
7712 tree new_var = lookup_decl (var, ctx);
7713 tree pvar = DECL_VALUE_EXPR (var);
7714 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7715 pvar = TREE_OPERAND (pvar, 0);
7716 gcc_assert (DECL_P (pvar));
7717 tree new_pvar = lookup_decl (pvar, ctx);
7718 x = build_fold_indirect_ref (new_pvar);
7719 TREE_THIS_NOTRAP (x) = 1;
7720 SET_DECL_VALUE_EXPR (new_var, x);
7721 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7722 }
7723 break;
e4834818 7724
629b3d75
MJ
7725 case OMP_CLAUSE_USE_DEVICE_PTR:
7726 case OMP_CLAUSE_IS_DEVICE_PTR:
7727 var = OMP_CLAUSE_DECL (c);
7728 map_cnt++;
7729 if (is_variable_sized (var))
7730 {
7731 tree new_var = lookup_decl (var, ctx);
7732 tree pvar = DECL_VALUE_EXPR (var);
7733 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7734 pvar = TREE_OPERAND (pvar, 0);
7735 gcc_assert (DECL_P (pvar));
7736 tree new_pvar = lookup_decl (pvar, ctx);
7737 x = build_fold_indirect_ref (new_pvar);
7738 TREE_THIS_NOTRAP (x) = 1;
7739 SET_DECL_VALUE_EXPR (new_var, x);
7740 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7741 }
7742 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7743 {
7744 tree new_var = lookup_decl (var, ctx);
7745 tree type = build_pointer_type (TREE_TYPE (var));
7746 x = create_tmp_var_raw (type, get_name (new_var));
7747 gimple_add_tmp_var (x);
7748 x = build_simple_mem_ref (x);
7749 SET_DECL_VALUE_EXPR (new_var, x);
7750 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7751 }
7752 else
7753 {
7754 tree new_var = lookup_decl (var, ctx);
7755 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7756 gimple_add_tmp_var (x);
7757 SET_DECL_VALUE_EXPR (new_var, x);
7758 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7759 }
7760 break;
7761 }
e4834818 7762
629b3d75 7763 if (offloaded)
e4834818 7764 {
629b3d75
MJ
7765 target_nesting_level++;
7766 lower_omp (&tgt_body, ctx);
7767 target_nesting_level--;
e4834818 7768 }
629b3d75
MJ
7769 else if (data_region)
7770 lower_omp (&tgt_body, ctx);
e4834818 7771
629b3d75 7772 if (offloaded)
e4834818 7773 {
629b3d75
MJ
7774 /* Declare all the variables created by mapping and the variables
7775 declared in the scope of the target body. */
7776 record_vars_into (ctx->block_vars, child_fn);
6724f8a6 7777 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
629b3d75 7778 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
e4834818
NS
7779 }
7780
629b3d75
MJ
7781 olist = NULL;
7782 ilist = NULL;
7783 if (ctx->record_type)
e4834818 7784 {
629b3d75
MJ
7785 ctx->sender_decl
7786 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7787 DECL_NAMELESS (ctx->sender_decl) = 1;
7788 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7789 t = make_tree_vec (3);
7790 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7791 TREE_VEC_ELT (t, 1)
7792 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7793 ".omp_data_sizes");
7794 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7795 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7796 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7797 tree tkind_type = short_unsigned_type_node;
7798 int talign_shift = 8;
7799 TREE_VEC_ELT (t, 2)
7800 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7801 ".omp_data_kinds");
7802 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7803 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7804 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7805 gimple_omp_target_set_data_arg (stmt, t);
953ff289 7806
629b3d75
MJ
7807 vec<constructor_elt, va_gc> *vsize;
7808 vec<constructor_elt, va_gc> *vkind;
7809 vec_alloc (vsize, map_cnt);
7810 vec_alloc (vkind, map_cnt);
7811 unsigned int map_idx = 0;
953ff289 7812
629b3d75
MJ
7813 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7814 switch (OMP_CLAUSE_CODE (c))
953ff289 7815 {
629b3d75
MJ
7816 tree ovar, nc, s, purpose, var, x, type;
7817 unsigned int talign;
953ff289 7818
629b3d75
MJ
7819 default:
7820 break;
953ff289 7821
629b3d75
MJ
7822 case OMP_CLAUSE_MAP:
7823 case OMP_CLAUSE_TO:
7824 case OMP_CLAUSE_FROM:
7825 oacc_firstprivate_map:
7826 nc = c;
7827 ovar = OMP_CLAUSE_DECL (c);
7828 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7829 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7830 || (OMP_CLAUSE_MAP_KIND (c)
7831 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7832 break;
7833 if (!DECL_P (ovar))
c34938a8 7834 {
629b3d75
MJ
7835 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7836 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7837 {
7838 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7839 == get_base_address (ovar));
7840 nc = OMP_CLAUSE_CHAIN (c);
7841 ovar = OMP_CLAUSE_DECL (nc);
7842 }
7843 else
7844 {
7845 tree x = build_sender_ref (ovar, ctx);
7846 tree v
7847 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7848 gimplify_assign (x, v, &ilist);
7849 nc = NULL_TREE;
7850 }
7851 }
7852 else
7853 {
7854 if (DECL_SIZE (ovar)
7855 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7856 {
7857 tree ovar2 = DECL_VALUE_EXPR (ovar);
7858 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7859 ovar2 = TREE_OPERAND (ovar2, 0);
7860 gcc_assert (DECL_P (ovar2));
7861 ovar = ovar2;
7862 }
7863 if (!maybe_lookup_field (ovar, ctx))
7864 continue;
c34938a8 7865 }
777f7f9a 7866
629b3d75
MJ
7867 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7868 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7869 talign = DECL_ALIGN_UNIT (ovar);
7870 if (nc)
7871 {
7872 var = lookup_decl_in_outer_ctx (ovar, ctx);
7873 x = build_sender_ref (ovar, ctx);
777f7f9a 7874
629b3d75
MJ
7875 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7876 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7877 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7878 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7879 {
7880 gcc_assert (offloaded);
7881 tree avar
7882 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7883 mark_addressable (avar);
7884 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7885 talign = DECL_ALIGN_UNIT (avar);
7886 avar = build_fold_addr_expr (avar);
7887 gimplify_assign (x, avar, &ilist);
7888 }
7889 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7890 {
7891 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7892 if (!omp_is_reference (var))
7893 {
7894 if (is_gimple_reg (var)
7895 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7896 TREE_NO_WARNING (var) = 1;
7897 var = build_fold_addr_expr (var);
7898 }
7899 else
7900 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7901 gimplify_assign (x, var, &ilist);
7902 }
7903 else if (is_gimple_reg (var))
7904 {
7905 gcc_assert (offloaded);
7906 tree avar = create_tmp_var (TREE_TYPE (var));
7907 mark_addressable (avar);
7908 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7909 if (GOMP_MAP_COPY_TO_P (map_kind)
7910 || map_kind == GOMP_MAP_POINTER
7911 || map_kind == GOMP_MAP_TO_PSET
7912 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7913 {
7914 /* If we need to initialize a temporary
7915 with VAR because it is not addressable, and
7916 the variable hasn't been initialized yet, then
7917 we'll get a warning for the store to avar.
7918 Don't warn in that case, the mapping might
7919 be implicit. */
7920 TREE_NO_WARNING (var) = 1;
7921 gimplify_assign (avar, var, &ilist);
7922 }
7923 avar = build_fold_addr_expr (avar);
7924 gimplify_assign (x, avar, &ilist);
7925 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7926 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7927 && !TYPE_READONLY (TREE_TYPE (var)))
7928 {
7929 x = unshare_expr (x);
7930 x = build_simple_mem_ref (x);
7931 gimplify_assign (var, x, &olist);
7932 }
7933 }
7934 else
7935 {
7936 var = build_fold_addr_expr (var);
7937 gimplify_assign (x, var, &ilist);
7938 }
7939 }
7940 s = NULL_TREE;
7941 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7942 {
7943 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7944 s = TREE_TYPE (ovar);
7945 if (TREE_CODE (s) == REFERENCE_TYPE)
7946 s = TREE_TYPE (s);
7947 s = TYPE_SIZE_UNIT (s);
7948 }
7949 else
7950 s = OMP_CLAUSE_SIZE (c);
7951 if (s == NULL_TREE)
7952 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7953 s = fold_convert (size_type_node, s);
7954 purpose = size_int (map_idx++);
7955 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7956 if (TREE_CODE (s) != INTEGER_CST)
7957 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
777f7f9a 7958
629b3d75
MJ
7959 unsigned HOST_WIDE_INT tkind, tkind_zero;
7960 switch (OMP_CLAUSE_CODE (c))
7961 {
7962 case OMP_CLAUSE_MAP:
7963 tkind = OMP_CLAUSE_MAP_KIND (c);
7964 tkind_zero = tkind;
7965 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7966 switch (tkind)
7967 {
7968 case GOMP_MAP_ALLOC:
7969 case GOMP_MAP_TO:
7970 case GOMP_MAP_FROM:
7971 case GOMP_MAP_TOFROM:
7972 case GOMP_MAP_ALWAYS_TO:
7973 case GOMP_MAP_ALWAYS_FROM:
7974 case GOMP_MAP_ALWAYS_TOFROM:
7975 case GOMP_MAP_RELEASE:
7976 case GOMP_MAP_FORCE_TO:
7977 case GOMP_MAP_FORCE_FROM:
7978 case GOMP_MAP_FORCE_TOFROM:
7979 case GOMP_MAP_FORCE_PRESENT:
7980 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7981 break;
7982 case GOMP_MAP_DELETE:
7983 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7984 default:
7985 break;
7986 }
7987 if (tkind_zero != tkind)
7988 {
7989 if (integer_zerop (s))
7990 tkind = tkind_zero;
7991 else if (integer_nonzerop (s))
7992 tkind_zero = tkind;
7993 }
7994 break;
7995 case OMP_CLAUSE_FIRSTPRIVATE:
7996 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7997 tkind = GOMP_MAP_TO;
7998 tkind_zero = tkind;
7999 break;
8000 case OMP_CLAUSE_TO:
8001 tkind = GOMP_MAP_TO;
8002 tkind_zero = tkind;
8003 break;
8004 case OMP_CLAUSE_FROM:
8005 tkind = GOMP_MAP_FROM;
8006 tkind_zero = tkind;
8007 break;
8008 default:
8009 gcc_unreachable ();
8010 }
8011 gcc_checking_assert (tkind
8012 < (HOST_WIDE_INT_C (1U) << talign_shift));
8013 gcc_checking_assert (tkind_zero
8014 < (HOST_WIDE_INT_C (1U) << talign_shift));
8015 talign = ceil_log2 (talign);
8016 tkind |= talign << talign_shift;
8017 tkind_zero |= talign << talign_shift;
8018 gcc_checking_assert (tkind
8019 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8020 gcc_checking_assert (tkind_zero
8021 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8022 if (tkind == tkind_zero)
8023 x = build_int_cstu (tkind_type, tkind);
8024 else
8025 {
8026 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8027 x = build3 (COND_EXPR, tkind_type,
8028 fold_build2 (EQ_EXPR, boolean_type_node,
8029 unshare_expr (s), size_zero_node),
8030 build_int_cstu (tkind_type, tkind_zero),
8031 build_int_cstu (tkind_type, tkind));
8032 }
8033 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8034 if (nc && nc != c)
8035 c = nc;
8036 break;
05409788 8037
629b3d75
MJ
8038 case OMP_CLAUSE_FIRSTPRIVATE:
8039 if (is_oacc_parallel (ctx))
8040 goto oacc_firstprivate_map;
8041 ovar = OMP_CLAUSE_DECL (c);
8042 if (omp_is_reference (ovar))
8043 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8044 else
8045 talign = DECL_ALIGN_UNIT (ovar);
8046 var = lookup_decl_in_outer_ctx (ovar, ctx);
8047 x = build_sender_ref (ovar, ctx);
8048 tkind = GOMP_MAP_FIRSTPRIVATE;
8049 type = TREE_TYPE (ovar);
8050 if (omp_is_reference (ovar))
8051 type = TREE_TYPE (type);
8052 if ((INTEGRAL_TYPE_P (type)
8053 && TYPE_PRECISION (type) <= POINTER_SIZE)
8054 || TREE_CODE (type) == POINTER_TYPE)
8055 {
8056 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8057 tree t = var;
8058 if (omp_is_reference (var))
8059 t = build_simple_mem_ref (var);
8060 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8061 TREE_NO_WARNING (var) = 1;
8062 if (TREE_CODE (type) != POINTER_TYPE)
8063 t = fold_convert (pointer_sized_int_node, t);
8064 t = fold_convert (TREE_TYPE (x), t);
8065 gimplify_assign (x, t, &ilist);
8066 }
8067 else if (omp_is_reference (var))
8068 gimplify_assign (x, var, &ilist);
8069 else if (is_gimple_reg (var))
8070 {
8071 tree avar = create_tmp_var (TREE_TYPE (var));
8072 mark_addressable (avar);
8073 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8074 TREE_NO_WARNING (var) = 1;
8075 gimplify_assign (avar, var, &ilist);
8076 avar = build_fold_addr_expr (avar);
8077 gimplify_assign (x, avar, &ilist);
8078 }
8079 else
8080 {
8081 var = build_fold_addr_expr (var);
8082 gimplify_assign (x, var, &ilist);
8083 }
8084 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8085 s = size_int (0);
8086 else if (omp_is_reference (ovar))
8087 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8088 else
8089 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8090 s = fold_convert (size_type_node, s);
8091 purpose = size_int (map_idx++);
8092 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8093 if (TREE_CODE (s) != INTEGER_CST)
8094 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
05409788 8095
629b3d75
MJ
8096 gcc_checking_assert (tkind
8097 < (HOST_WIDE_INT_C (1U) << talign_shift));
8098 talign = ceil_log2 (talign);
8099 tkind |= talign << talign_shift;
8100 gcc_checking_assert (tkind
8101 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8102 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8103 build_int_cstu (tkind_type, tkind));
8104 break;
05409788 8105
629b3d75
MJ
8106 case OMP_CLAUSE_USE_DEVICE_PTR:
8107 case OMP_CLAUSE_IS_DEVICE_PTR:
8108 ovar = OMP_CLAUSE_DECL (c);
8109 var = lookup_decl_in_outer_ctx (ovar, ctx);
8110 x = build_sender_ref (ovar, ctx);
8111 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8112 tkind = GOMP_MAP_USE_DEVICE_PTR;
8113 else
8114 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8115 type = TREE_TYPE (ovar);
8116 if (TREE_CODE (type) == ARRAY_TYPE)
8117 var = build_fold_addr_expr (var);
8118 else
8119 {
8120 if (omp_is_reference (ovar))
8121 {
8122 type = TREE_TYPE (type);
8123 if (TREE_CODE (type) != ARRAY_TYPE)
8124 var = build_simple_mem_ref (var);
8125 var = fold_convert (TREE_TYPE (x), var);
8126 }
8127 }
8128 gimplify_assign (x, var, &ilist);
8129 s = size_int (0);
8130 purpose = size_int (map_idx++);
8131 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8132 gcc_checking_assert (tkind
8133 < (HOST_WIDE_INT_C (1U) << talign_shift));
8134 gcc_checking_assert (tkind
8135 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8136 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8137 build_int_cstu (tkind_type, tkind));
8138 break;
8139 }
05409788 8140
629b3d75 8141 gcc_assert (map_idx == map_cnt);
20906c66 8142
629b3d75
MJ
8143 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8144 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8145 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8146 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8147 for (int i = 1; i <= 2; i++)
8148 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8149 {
8150 gimple_seq initlist = NULL;
8151 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8152 TREE_VEC_ELT (t, i)),
8153 &initlist, true, NULL_TREE);
8154 gimple_seq_add_seq (&ilist, initlist);
20906c66 8155
629b3d75
MJ
8156 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8157 NULL);
8158 TREE_THIS_VOLATILE (clobber) = 1;
8159 gimple_seq_add_stmt (&olist,
8160 gimple_build_assign (TREE_VEC_ELT (t, i),
8161 clobber));
8162 }
05409788 8163
629b3d75
MJ
8164 tree clobber = build_constructor (ctx->record_type, NULL);
8165 TREE_THIS_VOLATILE (clobber) = 1;
8166 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8167 clobber));
8168 }
05409788 8169
629b3d75
MJ
8170 /* Once all the expansions are done, sequence all the different
8171 fragments inside gimple_omp_body. */
05409788 8172
629b3d75 8173 new_body = NULL;
05409788 8174
629b3d75
MJ
8175 if (offloaded
8176 && ctx->record_type)
05409788 8177 {
629b3d75
MJ
8178 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8179 /* fixup_child_record_type might have changed receiver_decl's type. */
8180 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8181 gimple_seq_add_stmt (&new_body,
8182 gimple_build_assign (ctx->receiver_decl, t));
05409788 8183 }
629b3d75 8184 gimple_seq_add_seq (&new_body, fplist);
05409788 8185
629b3d75 8186 if (offloaded || data_region)
0645c1a2 8187 {
629b3d75
MJ
8188 tree prev = NULL_TREE;
8189 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8190 switch (OMP_CLAUSE_CODE (c))
0645c1a2 8191 {
629b3d75
MJ
8192 tree var, x;
8193 default:
8194 break;
8195 case OMP_CLAUSE_FIRSTPRIVATE:
8196 if (is_gimple_omp_oacc (ctx->stmt))
8197 break;
8198 var = OMP_CLAUSE_DECL (c);
8199 if (omp_is_reference (var)
8200 || is_gimple_reg_type (TREE_TYPE (var)))
0645c1a2 8201 {
629b3d75
MJ
8202 tree new_var = lookup_decl (var, ctx);
8203 tree type;
8204 type = TREE_TYPE (var);
8205 if (omp_is_reference (var))
8206 type = TREE_TYPE (type);
8207 if ((INTEGRAL_TYPE_P (type)
8208 && TYPE_PRECISION (type) <= POINTER_SIZE)
8209 || TREE_CODE (type) == POINTER_TYPE)
8210 {
8211 x = build_receiver_ref (var, false, ctx);
8212 if (TREE_CODE (type) != POINTER_TYPE)
8213 x = fold_convert (pointer_sized_int_node, x);
8214 x = fold_convert (type, x);
8215 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8216 fb_rvalue);
8217 if (omp_is_reference (var))
8218 {
8219 tree v = create_tmp_var_raw (type, get_name (var));
8220 gimple_add_tmp_var (v);
8221 TREE_ADDRESSABLE (v) = 1;
8222 gimple_seq_add_stmt (&new_body,
8223 gimple_build_assign (v, x));
8224 x = build_fold_addr_expr (v);
8225 }
8226 gimple_seq_add_stmt (&new_body,
8227 gimple_build_assign (new_var, x));
8228 }
8229 else
8230 {
8231 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8232 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8233 fb_rvalue);
8234 gimple_seq_add_stmt (&new_body,
8235 gimple_build_assign (new_var, x));
8236 }
8237 }
8238 else if (is_variable_sized (var))
8239 {
8240 tree pvar = DECL_VALUE_EXPR (var);
8241 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8242 pvar = TREE_OPERAND (pvar, 0);
8243 gcc_assert (DECL_P (pvar));
8244 tree new_var = lookup_decl (pvar, ctx);
8245 x = build_receiver_ref (var, false, ctx);
8246 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8247 gimple_seq_add_stmt (&new_body,
8248 gimple_build_assign (new_var, x));
8249 }
8250 break;
8251 case OMP_CLAUSE_PRIVATE:
8252 if (is_gimple_omp_oacc (ctx->stmt))
8253 break;
8254 var = OMP_CLAUSE_DECL (c);
8255 if (omp_is_reference (var))
8256 {
8257 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8258 tree new_var = lookup_decl (var, ctx);
8259 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8260 if (TREE_CONSTANT (x))
8261 {
8262 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8263 get_name (var));
8264 gimple_add_tmp_var (x);
8265 TREE_ADDRESSABLE (x) = 1;
8266 x = build_fold_addr_expr_loc (clause_loc, x);
8267 }
8268 else
8269 break;
9bd46bc9 8270
629b3d75
MJ
8271 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8272 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8273 gimple_seq_add_stmt (&new_body,
8274 gimple_build_assign (new_var, x));
8275 }
8276 break;
8277 case OMP_CLAUSE_USE_DEVICE_PTR:
8278 case OMP_CLAUSE_IS_DEVICE_PTR:
8279 var = OMP_CLAUSE_DECL (c);
8280 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8281 x = build_sender_ref (var, ctx);
8282 else
8283 x = build_receiver_ref (var, false, ctx);
8284 if (is_variable_sized (var))
8285 {
8286 tree pvar = DECL_VALUE_EXPR (var);
8287 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8288 pvar = TREE_OPERAND (pvar, 0);
8289 gcc_assert (DECL_P (pvar));
8290 tree new_var = lookup_decl (pvar, ctx);
8291 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8292 gimple_seq_add_stmt (&new_body,
8293 gimple_build_assign (new_var, x));
8294 }
8295 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8296 {
8297 tree new_var = lookup_decl (var, ctx);
8298 new_var = DECL_VALUE_EXPR (new_var);
8299 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8300 new_var = TREE_OPERAND (new_var, 0);
8301 gcc_assert (DECL_P (new_var));
8302 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8303 gimple_seq_add_stmt (&new_body,
8304 gimple_build_assign (new_var, x));
8305 }
9bd46bc9 8306 else
629b3d75
MJ
8307 {
8308 tree type = TREE_TYPE (var);
8309 tree new_var = lookup_decl (var, ctx);
8310 if (omp_is_reference (var))
8311 {
8312 type = TREE_TYPE (type);
8313 if (TREE_CODE (type) != ARRAY_TYPE)
8314 {
8315 tree v = create_tmp_var_raw (type, get_name (var));
8316 gimple_add_tmp_var (v);
8317 TREE_ADDRESSABLE (v) = 1;
8318 x = fold_convert (type, x);
8319 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8320 fb_rvalue);
8321 gimple_seq_add_stmt (&new_body,
8322 gimple_build_assign (v, x));
8323 x = build_fold_addr_expr (v);
8324 }
8325 }
8326 new_var = DECL_VALUE_EXPR (new_var);
8327 x = fold_convert (TREE_TYPE (new_var), x);
8328 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8329 gimple_seq_add_stmt (&new_body,
8330 gimple_build_assign (new_var, x));
8331 }
8332 break;
9bd46bc9 8333 }
629b3d75
MJ
8334 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
8335 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
8336 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8337 or references to VLAs. */
8338 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8339 switch (OMP_CLAUSE_CODE (c))
8340 {
8341 tree var;
8342 default:
8343 break;
8344 case OMP_CLAUSE_MAP:
8345 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8346 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8347 {
8348 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
a90c8804 8349 poly_int64 offset = 0;
629b3d75
MJ
8350 gcc_assert (prev);
8351 var = OMP_CLAUSE_DECL (c);
8352 if (DECL_P (var)
8353 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8354 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8355 ctx))
8356 && varpool_node::get_create (var)->offloadable)
8357 break;
8358 if (TREE_CODE (var) == INDIRECT_REF
8359 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8360 var = TREE_OPERAND (var, 0);
8361 if (TREE_CODE (var) == COMPONENT_REF)
8362 {
8363 var = get_addr_base_and_unit_offset (var, &offset);
8364 gcc_assert (var != NULL_TREE && DECL_P (var));
8365 }
8366 else if (DECL_SIZE (var)
8367 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8368 {
8369 tree var2 = DECL_VALUE_EXPR (var);
8370 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8371 var2 = TREE_OPERAND (var2, 0);
8372 gcc_assert (DECL_P (var2));
8373 var = var2;
8374 }
8375 tree new_var = lookup_decl (var, ctx), x;
8376 tree type = TREE_TYPE (new_var);
8377 bool is_ref;
8378 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8379 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8380 == COMPONENT_REF))
8381 {
8382 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8383 is_ref = true;
8384 new_var = build2 (MEM_REF, type,
8385 build_fold_addr_expr (new_var),
8386 build_int_cst (build_pointer_type (type),
8387 offset));
8388 }
8389 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8390 {
8391 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8392 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8393 new_var = build2 (MEM_REF, type,
8394 build_fold_addr_expr (new_var),
8395 build_int_cst (build_pointer_type (type),
8396 offset));
8397 }
8398 else
8399 is_ref = omp_is_reference (var);
8400 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8401 is_ref = false;
8402 bool ref_to_array = false;
8403 if (is_ref)
8404 {
8405 type = TREE_TYPE (type);
8406 if (TREE_CODE (type) == ARRAY_TYPE)
8407 {
8408 type = build_pointer_type (type);
8409 ref_to_array = true;
8410 }
8411 }
8412 else if (TREE_CODE (type) == ARRAY_TYPE)
8413 {
8414 tree decl2 = DECL_VALUE_EXPR (new_var);
8415 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8416 decl2 = TREE_OPERAND (decl2, 0);
8417 gcc_assert (DECL_P (decl2));
8418 new_var = decl2;
8419 type = TREE_TYPE (new_var);
8420 }
8421 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8422 x = fold_convert_loc (clause_loc, type, x);
8423 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8424 {
8425 tree bias = OMP_CLAUSE_SIZE (c);
8426 if (DECL_P (bias))
8427 bias = lookup_decl (bias, ctx);
8428 bias = fold_convert_loc (clause_loc, sizetype, bias);
8429 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8430 bias);
8431 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8432 TREE_TYPE (x), x, bias);
8433 }
8434 if (ref_to_array)
8435 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8436 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8437 if (is_ref && !ref_to_array)
8438 {
8439 tree t = create_tmp_var_raw (type, get_name (var));
8440 gimple_add_tmp_var (t);
8441 TREE_ADDRESSABLE (t) = 1;
8442 gimple_seq_add_stmt (&new_body,
8443 gimple_build_assign (t, x));
8444 x = build_fold_addr_expr_loc (clause_loc, t);
8445 }
8446 gimple_seq_add_stmt (&new_body,
8447 gimple_build_assign (new_var, x));
8448 prev = NULL_TREE;
8449 }
8450 else if (OMP_CLAUSE_CHAIN (c)
8451 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8452 == OMP_CLAUSE_MAP
8453 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8454 == GOMP_MAP_FIRSTPRIVATE_POINTER
8455 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8456 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8457 prev = c;
8458 break;
8459 case OMP_CLAUSE_PRIVATE:
8460 var = OMP_CLAUSE_DECL (c);
8461 if (is_variable_sized (var))
8462 {
8463 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8464 tree new_var = lookup_decl (var, ctx);
8465 tree pvar = DECL_VALUE_EXPR (var);
8466 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8467 pvar = TREE_OPERAND (pvar, 0);
8468 gcc_assert (DECL_P (pvar));
8469 tree new_pvar = lookup_decl (pvar, ctx);
8470 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8471 tree al = size_int (DECL_ALIGN (var));
8472 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8473 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8474 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8475 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8476 gimple_seq_add_stmt (&new_body,
8477 gimple_build_assign (new_pvar, x));
8478 }
8479 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8480 {
8481 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8482 tree new_var = lookup_decl (var, ctx);
8483 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8484 if (TREE_CONSTANT (x))
8485 break;
8486 else
8487 {
8488 tree atmp
8489 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8490 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8491 tree al = size_int (TYPE_ALIGN (rtype));
8492 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8493 }
9bd46bc9 8494
629b3d75
MJ
8495 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8496 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8497 gimple_seq_add_stmt (&new_body,
8498 gimple_build_assign (new_var, x));
8499 }
8500 break;
8501 }
9bd46bc9 8502
629b3d75
MJ
8503 gimple_seq fork_seq = NULL;
8504 gimple_seq join_seq = NULL;
9bd46bc9 8505
629b3d75 8506 if (is_oacc_parallel (ctx))
9bd46bc9 8507 {
629b3d75
MJ
8508 /* If there are reductions on the offloaded region itself, treat
8509 them as a dummy GANG loop. */
8510 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
9bd46bc9 8511
629b3d75
MJ
8512 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8513 false, NULL, NULL, &fork_seq, &join_seq, ctx);
9bd46bc9 8514 }
9bd46bc9 8515
629b3d75
MJ
8516 gimple_seq_add_seq (&new_body, fork_seq);
8517 gimple_seq_add_seq (&new_body, tgt_body);
8518 gimple_seq_add_seq (&new_body, join_seq);
9bd46bc9 8519
629b3d75
MJ
8520 if (offloaded)
8521 new_body = maybe_catch_exception (new_body);
9bd46bc9 8522
629b3d75
MJ
8523 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8524 gimple_omp_set_body (stmt, new_body);
9bd46bc9
NS
8525 }
8526
629b3d75
MJ
8527 bind = gimple_build_bind (NULL, NULL,
8528 tgt_bind ? gimple_bind_block (tgt_bind)
8529 : NULL_TREE);
8530 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8531 gimple_bind_add_seq (bind, ilist);
8532 gimple_bind_add_stmt (bind, stmt);
8533 gimple_bind_add_seq (bind, olist);
9bd46bc9
NS
8534
8535 pop_gimplify_context (NULL);
8536
629b3d75 8537 if (dep_bind)
b6adbb9f 8538 {
629b3d75
MJ
8539 gimple_bind_add_seq (dep_bind, dep_ilist);
8540 gimple_bind_add_stmt (dep_bind, bind);
8541 gimple_bind_add_seq (dep_bind, dep_olist);
8542 pop_gimplify_context (dep_bind);
b6adbb9f 8543 }
b6adbb9f
NS
8544}
8545
629b3d75 8546/* Expand code for an OpenMP teams directive. */
94829f87 8547
f8393eb0 8548static void
629b3d75 8549lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
94829f87 8550{
629b3d75
MJ
8551 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8552 push_gimplify_context ();
94829f87 8553
629b3d75
MJ
8554 tree block = make_node (BLOCK);
8555 gbind *bind = gimple_build_bind (NULL, NULL, block);
8556 gsi_replace (gsi_p, bind, true);
8557 gimple_seq bind_body = NULL;
8558 gimple_seq dlist = NULL;
8559 gimple_seq olist = NULL;
94829f87 8560
629b3d75
MJ
8561 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8562 OMP_CLAUSE_NUM_TEAMS);
8563 if (num_teams == NULL_TREE)
8564 num_teams = build_int_cst (unsigned_type_node, 0);
8565 else
94829f87 8566 {
629b3d75
MJ
8567 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8568 num_teams = fold_convert (unsigned_type_node, num_teams);
8569 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
94829f87 8570 }
629b3d75
MJ
8571 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8572 OMP_CLAUSE_THREAD_LIMIT);
8573 if (thread_limit == NULL_TREE)
8574 thread_limit = build_int_cst (unsigned_type_node, 0);
8575 else
94829f87 8576 {
629b3d75
MJ
8577 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8578 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8579 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8580 fb_rvalue);
94829f87 8581 }
9bd46bc9 8582
629b3d75
MJ
8583 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8584 &bind_body, &dlist, ctx, NULL);
8585 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8586 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8587 if (!gimple_omp_teams_grid_phony (teams_stmt))
9bd46bc9 8588 {
629b3d75
MJ
8589 gimple_seq_add_stmt (&bind_body, teams_stmt);
8590 location_t loc = gimple_location (teams_stmt);
8591 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8592 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8593 gimple_set_location (call, loc);
8594 gimple_seq_add_stmt (&bind_body, call);
9bd46bc9
NS
8595 }
8596
629b3d75
MJ
8597 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8598 gimple_omp_set_body (teams_stmt, NULL);
8599 gimple_seq_add_seq (&bind_body, olist);
8600 gimple_seq_add_seq (&bind_body, dlist);
8601 if (!gimple_omp_teams_grid_phony (teams_stmt))
8602 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8603 gimple_bind_set_body (bind, bind_body);
9bd46bc9 8604
629b3d75 8605 pop_gimplify_context (bind);
9bd46bc9 8606
629b3d75
MJ
8607 gimple_bind_append_vars (bind, ctx->block_vars);
8608 BLOCK_VARS (block) = ctx->block_vars;
8609 if (BLOCK_VARS (block))
8610 TREE_USED (block) = 1;
9bd46bc9
NS
8611}
8612
629b3d75 8613/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
9bd46bc9 8614
629b3d75
MJ
8615static void
8616lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9bd46bc9 8617{
629b3d75
MJ
8618 gimple *stmt = gsi_stmt (*gsi_p);
8619 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8620 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8621 gimple_build_omp_return (false));
9bd46bc9
NS
8622}
8623
9bd46bc9 8624
629b3d75
MJ
8625/* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8626 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8627 of OMP context, but with task_shared_vars set. */
9bd46bc9 8628
629b3d75
MJ
8629static tree
8630lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8631 void *data)
9bd46bc9 8632{
629b3d75 8633 tree t = *tp;
9bd46bc9 8634
629b3d75
MJ
8635 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8636 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8637 return t;
9bd46bc9 8638
629b3d75
MJ
8639 if (task_shared_vars
8640 && DECL_P (t)
8641 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8642 return t;
9bd46bc9 8643
629b3d75
MJ
8644 /* If a global variable has been privatized, TREE_CONSTANT on
8645 ADDR_EXPR might be wrong. */
8646 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8647 recompute_tree_invariant_for_addr_expr (t);
9bd46bc9 8648
629b3d75
MJ
8649 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8650 return NULL_TREE;
9bd46bc9
NS
8651}
8652
629b3d75
MJ
8653/* Data to be communicated between lower_omp_regimplify_operands and
8654 lower_omp_regimplify_operands_p. */
9bd46bc9 8655
629b3d75 8656struct lower_omp_regimplify_operands_data
9bd46bc9 8657{
629b3d75
MJ
8658 omp_context *ctx;
8659 vec<tree> *decls;
8660};
9bd46bc9 8661
629b3d75
MJ
8662/* Helper function for lower_omp_regimplify_operands. Find
8663 omp_member_access_dummy_var vars and adjust temporarily their
8664 DECL_VALUE_EXPRs if needed. */
9bd46bc9 8665
629b3d75
MJ
8666static tree
8667lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8668 void *data)
9bd46bc9 8669{
629b3d75
MJ
8670 tree t = omp_member_access_dummy_var (*tp);
8671 if (t)
9bd46bc9 8672 {
629b3d75
MJ
8673 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8674 lower_omp_regimplify_operands_data *ldata
8675 = (lower_omp_regimplify_operands_data *) wi->info;
8676 tree o = maybe_lookup_decl (t, ldata->ctx);
8677 if (o != t)
9bd46bc9 8678 {
629b3d75
MJ
8679 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8680 ldata->decls->safe_push (*tp);
8681 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8682 SET_DECL_VALUE_EXPR (*tp, v);
9bd46bc9 8683 }
9bd46bc9 8684 }
629b3d75
MJ
8685 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8686 return NULL_TREE;
9bd46bc9
NS
8687}
8688
629b3d75
MJ
8689/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8690 of omp_member_access_dummy_var vars during regimplification. */
9bd46bc9
NS
8691
8692static void
629b3d75
MJ
8693lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8694 gimple_stmt_iterator *gsi_p)
9bd46bc9 8695{
629b3d75
MJ
8696 auto_vec<tree, 10> decls;
8697 if (ctx)
8698 {
8699 struct walk_stmt_info wi;
8700 memset (&wi, '\0', sizeof (wi));
8701 struct lower_omp_regimplify_operands_data data;
8702 data.ctx = ctx;
8703 data.decls = &decls;
8704 wi.info = &data;
8705 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8706 }
8707 gimple_regimplify_operands (stmt, gsi_p);
8708 while (!decls.is_empty ())
8709 {
8710 tree t = decls.pop ();
8711 tree v = decls.pop ();
8712 SET_DECL_VALUE_EXPR (t, v);
8713 }
9bd46bc9
NS
8714}
8715
9bd46bc9 8716static void
629b3d75 8717lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9bd46bc9 8718{
629b3d75
MJ
8719 gimple *stmt = gsi_stmt (*gsi_p);
8720 struct walk_stmt_info wi;
8721 gcall *call_stmt;
9bd46bc9 8722
629b3d75
MJ
8723 if (gimple_has_location (stmt))
8724 input_location = gimple_location (stmt);
9bd46bc9 8725
629b3d75
MJ
8726 if (task_shared_vars)
8727 memset (&wi, '\0', sizeof (wi));
9bd46bc9 8728
629b3d75
MJ
8729 /* If we have issued syntax errors, avoid doing any heavy lifting.
8730 Just replace the OMP directives with a NOP to avoid
8731 confusing RTL expansion. */
8732 if (seen_error () && is_gimple_omp (stmt))
9bd46bc9 8733 {
629b3d75
MJ
8734 gsi_replace (gsi_p, gimple_build_nop (), true);
8735 return;
8736 }
9bd46bc9 8737
629b3d75
MJ
8738 switch (gimple_code (stmt))
8739 {
8740 case GIMPLE_COND:
8741 {
8742 gcond *cond_stmt = as_a <gcond *> (stmt);
8743 if ((ctx || task_shared_vars)
8744 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8745 lower_omp_regimplify_p,
8746 ctx ? NULL : &wi, NULL)
8747 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8748 lower_omp_regimplify_p,
8749 ctx ? NULL : &wi, NULL)))
8750 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8751 }
8752 break;
8753 case GIMPLE_CATCH:
8754 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8755 break;
8756 case GIMPLE_EH_FILTER:
8757 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8758 break;
8759 case GIMPLE_TRY:
8760 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8761 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8762 break;
8763 case GIMPLE_TRANSACTION:
01914336 8764 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
629b3d75
MJ
8765 ctx);
8766 break;
8767 case GIMPLE_BIND:
8768 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
6724f8a6 8769 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
629b3d75
MJ
8770 break;
8771 case GIMPLE_OMP_PARALLEL:
8772 case GIMPLE_OMP_TASK:
8773 ctx = maybe_lookup_ctx (stmt);
8774 gcc_assert (ctx);
8775 if (ctx->cancellable)
8776 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8777 lower_omp_taskreg (gsi_p, ctx);
8778 break;
8779 case GIMPLE_OMP_FOR:
8780 ctx = maybe_lookup_ctx (stmt);
8781 gcc_assert (ctx);
8782 if (ctx->cancellable)
8783 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8784 lower_omp_for (gsi_p, ctx);
8785 break;
8786 case GIMPLE_OMP_SECTIONS:
8787 ctx = maybe_lookup_ctx (stmt);
8788 gcc_assert (ctx);
8789 if (ctx->cancellable)
8790 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8791 lower_omp_sections (gsi_p, ctx);
8792 break;
8793 case GIMPLE_OMP_SINGLE:
8794 ctx = maybe_lookup_ctx (stmt);
8795 gcc_assert (ctx);
8796 lower_omp_single (gsi_p, ctx);
8797 break;
8798 case GIMPLE_OMP_MASTER:
8799 ctx = maybe_lookup_ctx (stmt);
8800 gcc_assert (ctx);
8801 lower_omp_master (gsi_p, ctx);
8802 break;
8803 case GIMPLE_OMP_TASKGROUP:
8804 ctx = maybe_lookup_ctx (stmt);
8805 gcc_assert (ctx);
8806 lower_omp_taskgroup (gsi_p, ctx);
8807 break;
8808 case GIMPLE_OMP_ORDERED:
8809 ctx = maybe_lookup_ctx (stmt);
8810 gcc_assert (ctx);
8811 lower_omp_ordered (gsi_p, ctx);
8812 break;
8813 case GIMPLE_OMP_CRITICAL:
8814 ctx = maybe_lookup_ctx (stmt);
8815 gcc_assert (ctx);
8816 lower_omp_critical (gsi_p, ctx);
8817 break;
8818 case GIMPLE_OMP_ATOMIC_LOAD:
8819 if ((ctx || task_shared_vars)
8820 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8821 as_a <gomp_atomic_load *> (stmt)),
8822 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8823 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8824 break;
8825 case GIMPLE_OMP_TARGET:
8826 ctx = maybe_lookup_ctx (stmt);
8827 gcc_assert (ctx);
8828 lower_omp_target (gsi_p, ctx);
8829 break;
8830 case GIMPLE_OMP_TEAMS:
8831 ctx = maybe_lookup_ctx (stmt);
8832 gcc_assert (ctx);
8833 lower_omp_teams (gsi_p, ctx);
8834 break;
8835 case GIMPLE_OMP_GRID_BODY:
8836 ctx = maybe_lookup_ctx (stmt);
8837 gcc_assert (ctx);
8838 lower_omp_grid_body (gsi_p, ctx);
8839 break;
8840 case GIMPLE_CALL:
8841 tree fndecl;
8842 call_stmt = as_a <gcall *> (stmt);
8843 fndecl = gimple_call_fndecl (call_stmt);
8844 if (fndecl
3d78e008 8845 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
629b3d75 8846 switch (DECL_FUNCTION_CODE (fndecl))
9bd46bc9 8847 {
629b3d75
MJ
8848 case BUILT_IN_GOMP_BARRIER:
8849 if (ctx == NULL)
8850 break;
8851 /* FALLTHRU */
8852 case BUILT_IN_GOMP_CANCEL:
8853 case BUILT_IN_GOMP_CANCELLATION_POINT:
8854 omp_context *cctx;
8855 cctx = ctx;
8856 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8857 cctx = cctx->outer;
8858 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8859 if (!cctx->cancellable)
8860 {
8861 if (DECL_FUNCTION_CODE (fndecl)
8862 == BUILT_IN_GOMP_CANCELLATION_POINT)
8863 {
8864 stmt = gimple_build_nop ();
8865 gsi_replace (gsi_p, stmt, false);
8866 }
8867 break;
8868 }
8869 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8870 {
8871 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8872 gimple_call_set_fndecl (call_stmt, fndecl);
8873 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8874 }
8875 tree lhs;
8876 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8877 gimple_call_set_lhs (call_stmt, lhs);
8878 tree fallthru_label;
8879 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8880 gimple *g;
8881 g = gimple_build_label (fallthru_label);
8882 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8883 g = gimple_build_cond (NE_EXPR, lhs,
8884 fold_convert (TREE_TYPE (lhs),
8885 boolean_false_node),
8886 cctx->cancel_label, fallthru_label);
8887 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8888 break;
8889 default:
8890 break;
9bd46bc9 8891 }
629b3d75
MJ
8892 /* FALLTHRU */
8893 default:
8894 if ((ctx || task_shared_vars)
8895 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8896 ctx ? NULL : &wi))
9bd46bc9 8897 {
629b3d75
MJ
8898 /* Just remove clobbers, this should happen only if we have
8899 "privatized" local addressable variables in SIMD regions,
8900 the clobber isn't needed in that case and gimplifying address
8901 of the ARRAY_REF into a pointer and creating MEM_REF based
8902 clobber would create worse code than we get with the clobber
8903 dropped. */
8904 if (gimple_clobber_p (stmt))
4ae13300 8905 {
629b3d75
MJ
8906 gsi_replace (gsi_p, gimple_build_nop (), true);
8907 break;
9bd46bc9 8908 }
629b3d75 8909 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
9bd46bc9 8910 }
629b3d75 8911 break;
9bd46bc9 8912 }
9bd46bc9
NS
8913}
8914
9bd46bc9 8915static void
629b3d75 8916lower_omp (gimple_seq *body, omp_context *ctx)
9bd46bc9 8917{
629b3d75
MJ
8918 location_t saved_location = input_location;
8919 gimple_stmt_iterator gsi;
8920 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8921 lower_omp_1 (&gsi, ctx);
8922 /* During gimplification, we haven't folded statments inside offloading
8923 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8924 if (target_nesting_level || taskreg_nesting_level)
8925 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8926 fold_stmt (&gsi);
8927 input_location = saved_location;
9bd46bc9
NS
8928}
8929
629b3d75 8930/* Main entry point. */
9bd46bc9 8931
629b3d75
MJ
8932static unsigned int
8933execute_lower_omp (void)
9bd46bc9 8934{
629b3d75
MJ
8935 gimple_seq body;
8936 int i;
8937 omp_context *ctx;
9bd46bc9 8938
629b3d75
MJ
8939 /* This pass always runs, to provide PROP_gimple_lomp.
8940 But often, there is nothing to do. */
5e9d6aa4 8941 if (flag_openacc == 0 && flag_openmp == 0
629b3d75
MJ
8942 && flag_openmp_simd == 0)
8943 return 0;
9bd46bc9 8944
629b3d75
MJ
8945 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8946 delete_omp_context);
9bd46bc9 8947
629b3d75 8948 body = gimple_body (current_function_decl);
9bd46bc9 8949
629b3d75
MJ
8950 if (hsa_gen_requested_p ())
8951 omp_grid_gridify_all_targets (&body);
8952
8953 scan_omp (&body, NULL);
8954 gcc_assert (taskreg_nesting_level == 0);
8955 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8956 finish_taskreg_scan (ctx);
8957 taskreg_contexts.release ();
9bd46bc9 8958
629b3d75
MJ
8959 if (all_contexts->root)
8960 {
8961 if (task_shared_vars)
8962 push_gimplify_context ();
8963 lower_omp (&body, NULL);
8964 if (task_shared_vars)
8965 pop_gimplify_context (NULL);
8966 }
8967
8968 if (all_contexts)
8969 {
8970 splay_tree_delete (all_contexts);
8971 all_contexts = NULL;
9bd46bc9 8972 }
629b3d75 8973 BITMAP_FREE (task_shared_vars);
6724f8a6
JJ
8974
8975 /* If current function is a method, remove artificial dummy VAR_DECL created
8976 for non-static data member privatization, they aren't needed for
8977 debuginfo nor anything else, have been already replaced everywhere in the
8978 IL and cause problems with LTO. */
8979 if (DECL_ARGUMENTS (current_function_decl)
8980 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
8981 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
8982 == POINTER_TYPE))
8983 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
629b3d75 8984 return 0;
9bd46bc9
NS
8985}
8986
629b3d75 8987namespace {
9bd46bc9 8988
629b3d75 8989const pass_data pass_data_lower_omp =
9bd46bc9 8990{
629b3d75
MJ
8991 GIMPLE_PASS, /* type */
8992 "omplower", /* name */
fd2b8c8b 8993 OPTGROUP_OMP, /* optinfo_flags */
629b3d75
MJ
8994 TV_NONE, /* tv_id */
8995 PROP_gimple_any, /* properties_required */
8996 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8997 0, /* properties_destroyed */
8998 0, /* todo_flags_start */
8999 0, /* todo_flags_finish */
9000};
9bd46bc9 9001
629b3d75
MJ
9002class pass_lower_omp : public gimple_opt_pass
9003{
9004public:
9005 pass_lower_omp (gcc::context *ctxt)
9006 : gimple_opt_pass (pass_data_lower_omp, ctxt)
9007 {}
9bd46bc9 9008
629b3d75
MJ
9009 /* opt_pass methods: */
9010 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9bd46bc9 9011
629b3d75 9012}; // class pass_lower_omp
9bd46bc9 9013
629b3d75 9014} // anon namespace
9bd46bc9 9015
629b3d75
MJ
9016gimple_opt_pass *
9017make_pass_lower_omp (gcc::context *ctxt)
9018{
9019 return new pass_lower_omp (ctxt);
9bd46bc9 9020}
629b3d75
MJ
9021\f
9022/* The following is a utility to diagnose structured block violations.
9023 It is not part of the "omplower" pass, as that's invoked too late. It
9024 should be invoked by the respective front ends after gimplification. */
9bd46bc9 9025
629b3d75 9026static splay_tree all_labels;
9bd46bc9 9027
629b3d75
MJ
9028/* Check for mismatched contexts and generate an error if needed. Return
9029 true if an error is detected. */
9bd46bc9 9030
629b3d75
MJ
9031static bool
9032diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9033 gimple *branch_ctx, gimple *label_ctx)
9034{
9035 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9036 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9bd46bc9 9037
629b3d75
MJ
9038 if (label_ctx == branch_ctx)
9039 return false;
9bd46bc9 9040
629b3d75 9041 const char* kind = NULL;
9bd46bc9 9042
629b3d75 9043 if (flag_openacc)
9bd46bc9 9044 {
629b3d75
MJ
9045 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9046 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9bd46bc9 9047 {
629b3d75
MJ
9048 gcc_checking_assert (kind == NULL);
9049 kind = "OpenACC";
9bd46bc9
NS
9050 }
9051 }
629b3d75 9052 if (kind == NULL)
5b37e866 9053 {
0a734553 9054 gcc_checking_assert (flag_openmp || flag_openmp_simd);
629b3d75 9055 kind = "OpenMP";
5b37e866 9056 }
9bd46bc9 9057
01914336 9058 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
629b3d75
MJ
9059 so we could traverse it and issue a correct "exit" or "enter" error
9060 message upon a structured block violation.
c5a64cfe 9061
629b3d75
MJ
9062 We built the context by building a list with tree_cons'ing, but there is
9063 no easy counterpart in gimple tuples. It seems like far too much work
9064 for issuing exit/enter error messages. If someone really misses the
01914336 9065 distinct error message... patches welcome. */
c5a64cfe 9066
629b3d75
MJ
9067#if 0
9068 /* Try to avoid confusing the user by producing and error message
9069 with correct "exit" or "enter" verbiage. We prefer "exit"
9070 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9071 if (branch_ctx == NULL)
9072 exit_p = false;
9073 else
5b37e866 9074 {
629b3d75
MJ
9075 while (label_ctx)
9076 {
9077 if (TREE_VALUE (label_ctx) == branch_ctx)
9078 {
9079 exit_p = false;
9080 break;
9081 }
9082 label_ctx = TREE_CHAIN (label_ctx);
9083 }
5b37e866
NS
9084 }
9085
629b3d75
MJ
9086 if (exit_p)
9087 error ("invalid exit from %s structured block", kind);
9088 else
9089 error ("invalid entry to %s structured block", kind);
9090#endif
5b37e866 9091
629b3d75
MJ
9092 /* If it's obvious we have an invalid entry, be specific about the error. */
9093 if (branch_ctx == NULL)
9094 error ("invalid entry to %s structured block", kind);
9095 else
c5a64cfe 9096 {
629b3d75
MJ
9097 /* Otherwise, be vague and lazy, but efficient. */
9098 error ("invalid branch to/from %s structured block", kind);
c5a64cfe 9099 }
5b37e866 9100
629b3d75
MJ
9101 gsi_replace (gsi_p, gimple_build_nop (), false);
9102 return true;
c5a64cfe
NS
9103}
9104
629b3d75
MJ
9105/* Pass 1: Create a minimal tree of structured blocks, and record
9106 where each label is found. */
9bd46bc9 9107
629b3d75
MJ
9108static tree
9109diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9110 struct walk_stmt_info *wi)
9bd46bc9 9111{
629b3d75
MJ
9112 gimple *context = (gimple *) wi->info;
9113 gimple *inner_context;
9114 gimple *stmt = gsi_stmt (*gsi_p);
9bd46bc9 9115
629b3d75 9116 *handled_ops_p = true;
6e91acf8 9117
629b3d75
MJ
9118 switch (gimple_code (stmt))
9119 {
9120 WALK_SUBSTMTS;
6e91acf8 9121
629b3d75
MJ
9122 case GIMPLE_OMP_PARALLEL:
9123 case GIMPLE_OMP_TASK:
9124 case GIMPLE_OMP_SECTIONS:
9125 case GIMPLE_OMP_SINGLE:
9126 case GIMPLE_OMP_SECTION:
9127 case GIMPLE_OMP_MASTER:
9128 case GIMPLE_OMP_ORDERED:
9129 case GIMPLE_OMP_CRITICAL:
9130 case GIMPLE_OMP_TARGET:
9131 case GIMPLE_OMP_TEAMS:
9132 case GIMPLE_OMP_TASKGROUP:
9133 /* The minimal context here is just the current OMP construct. */
9134 inner_context = stmt;
9135 wi->info = inner_context;
9136 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9137 wi->info = context;
9138 break;
e5014671 9139
629b3d75
MJ
9140 case GIMPLE_OMP_FOR:
9141 inner_context = stmt;
9142 wi->info = inner_context;
9143 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9144 walk them. */
9145 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9146 diagnose_sb_1, NULL, wi);
9147 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9148 wi->info = context;
9149 break;
e5014671 9150
629b3d75
MJ
9151 case GIMPLE_LABEL:
9152 splay_tree_insert (all_labels,
9153 (splay_tree_key) gimple_label_label (
9154 as_a <glabel *> (stmt)),
9155 (splay_tree_value) context);
9156 break;
e5014671 9157
629b3d75
MJ
9158 default:
9159 break;
e5014671
NS
9160 }
9161
629b3d75 9162 return NULL_TREE;
e5014671
NS
9163}
9164
629b3d75
MJ
9165/* Pass 2: Check each branch and see if its context differs from that of
9166 the destination label's context. */
94829f87 9167
629b3d75
MJ
9168static tree
9169diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9170 struct walk_stmt_info *wi)
94829f87 9171{
629b3d75
MJ
9172 gimple *context = (gimple *) wi->info;
9173 splay_tree_node n;
9174 gimple *stmt = gsi_stmt (*gsi_p);
f8393eb0 9175
629b3d75 9176 *handled_ops_p = true;
f8393eb0 9177
629b3d75 9178 switch (gimple_code (stmt))
9bd46bc9 9179 {
629b3d75 9180 WALK_SUBSTMTS;
9bd46bc9 9181
629b3d75
MJ
9182 case GIMPLE_OMP_PARALLEL:
9183 case GIMPLE_OMP_TASK:
9184 case GIMPLE_OMP_SECTIONS:
9185 case GIMPLE_OMP_SINGLE:
9186 case GIMPLE_OMP_SECTION:
9187 case GIMPLE_OMP_MASTER:
9188 case GIMPLE_OMP_ORDERED:
9189 case GIMPLE_OMP_CRITICAL:
9190 case GIMPLE_OMP_TARGET:
9191 case GIMPLE_OMP_TEAMS:
9192 case GIMPLE_OMP_TASKGROUP:
9193 wi->info = stmt;
9194 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9195 wi->info = context;
9196 break;
e5014671 9197
629b3d75
MJ
9198 case GIMPLE_OMP_FOR:
9199 wi->info = stmt;
9200 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9201 walk them. */
9202 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9203 diagnose_sb_2, NULL, wi);
9204 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9205 wi->info = context;
9206 break;
e5014671 9207
629b3d75
MJ
9208 case GIMPLE_COND:
9209 {
9210 gcond *cond_stmt = as_a <gcond *> (stmt);
9211 tree lab = gimple_cond_true_label (cond_stmt);
9212 if (lab)
9bd46bc9 9213 {
629b3d75
MJ
9214 n = splay_tree_lookup (all_labels,
9215 (splay_tree_key) lab);
9216 diagnose_sb_0 (gsi_p, context,
9217 n ? (gimple *) n->value : NULL);
9bd46bc9 9218 }
629b3d75
MJ
9219 lab = gimple_cond_false_label (cond_stmt);
9220 if (lab)
9221 {
9222 n = splay_tree_lookup (all_labels,
9223 (splay_tree_key) lab);
9224 diagnose_sb_0 (gsi_p, context,
9225 n ? (gimple *) n->value : NULL);
9226 }
9227 }
9228 break;
9bd46bc9 9229
629b3d75
MJ
9230 case GIMPLE_GOTO:
9231 {
9232 tree lab = gimple_goto_dest (stmt);
9233 if (TREE_CODE (lab) != LABEL_DECL)
9234 break;
9235
9236 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9237 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9238 }
9239 break;
9bd46bc9 9240
629b3d75
MJ
9241 case GIMPLE_SWITCH:
9242 {
9243 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9244 unsigned int i;
9245 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9bd46bc9 9246 {
629b3d75
MJ
9247 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9248 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9249 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9250 break;
9bd46bc9 9251 }
9bd46bc9 9252 }
629b3d75 9253 break;
9bd46bc9 9254
629b3d75
MJ
9255 case GIMPLE_RETURN:
9256 diagnose_sb_0 (gsi_p, context, NULL);
9257 break;
94829f87 9258
629b3d75
MJ
9259 default:
9260 break;
94829f87
NS
9261 }
9262
629b3d75 9263 return NULL_TREE;
bd751975
NS
9264}
9265
629b3d75
MJ
9266static unsigned int
9267diagnose_omp_structured_block_errors (void)
94829f87 9268{
629b3d75
MJ
9269 struct walk_stmt_info wi;
9270 gimple_seq body = gimple_body (current_function_decl);
346a966e 9271
629b3d75 9272 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
94829f87 9273
629b3d75
MJ
9274 memset (&wi, 0, sizeof (wi));
9275 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
94829f87 9276
629b3d75
MJ
9277 memset (&wi, 0, sizeof (wi));
9278 wi.want_locations = true;
9279 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
94829f87 9280
629b3d75 9281 gimple_set_body (current_function_decl, body);
9669b00b 9282
629b3d75
MJ
9283 splay_tree_delete (all_labels);
9284 all_labels = NULL;
9669b00b 9285
9669b00b
AM
9286 return 0;
9287}
9288
9289namespace {
9290
629b3d75 9291const pass_data pass_data_diagnose_omp_blocks =
9669b00b
AM
9292{
9293 GIMPLE_PASS, /* type */
629b3d75 9294 "*diagnose_omp_blocks", /* name */
fd2b8c8b 9295 OPTGROUP_OMP, /* optinfo_flags */
9669b00b 9296 TV_NONE, /* tv_id */
629b3d75
MJ
9297 PROP_gimple_any, /* properties_required */
9298 0, /* properties_provided */
9669b00b
AM
9299 0, /* properties_destroyed */
9300 0, /* todo_flags_start */
629b3d75 9301 0, /* todo_flags_finish */
9669b00b
AM
9302};
9303
629b3d75 9304class pass_diagnose_omp_blocks : public gimple_opt_pass
9669b00b
AM
9305{
9306public:
629b3d75
MJ
9307 pass_diagnose_omp_blocks (gcc::context *ctxt)
9308 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9669b00b
AM
9309 {}
9310
9311 /* opt_pass methods: */
629b3d75
MJ
9312 virtual bool gate (function *)
9313 {
5e9d6aa4 9314 return flag_openacc || flag_openmp || flag_openmp_simd;
629b3d75 9315 }
9669b00b
AM
9316 virtual unsigned int execute (function *)
9317 {
629b3d75 9318 return diagnose_omp_structured_block_errors ();
4a38b02b
IV
9319 }
9320
629b3d75 9321}; // class pass_diagnose_omp_blocks
4a38b02b
IV
9322
9323} // anon namespace
9324
9325gimple_opt_pass *
629b3d75 9326make_pass_diagnose_omp_blocks (gcc::context *ctxt)
4a38b02b 9327{
629b3d75 9328 return new pass_diagnose_omp_blocks (ctxt);
4a38b02b 9329}
629b3d75 9330\f
4a38b02b 9331
953ff289 9332#include "gt-omp-low.h"