]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/omp-low.c
Remove enum before machine_mode
[thirdparty/gcc.git] / gcc / omp-low.c
CommitLineData
41dbbb37
TS
1/* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
953ff289
DN
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
cbe34bb5 7 Copyright (C) 2005-2017 Free Software Foundation, Inc.
953ff289
DN
8
9This file is part of GCC.
10
11GCC is free software; you can redistribute it and/or modify it under
12the terms of the GNU General Public License as published by the Free
9dcd6f09 13Software Foundation; either version 3, or (at your option) any later
953ff289
DN
14version.
15
16GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17WARRANTY; without even the implied warranty of MERCHANTABILITY or
18FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19for more details.
20
21You should have received a copy of the GNU General Public License
9dcd6f09
NC
22along with GCC; see the file COPYING3. If not see
23<http://www.gnu.org/licenses/>. */
953ff289
DN
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
c7131fb2 28#include "backend.h"
957060b5 29#include "target.h"
953ff289 30#include "tree.h"
c7131fb2 31#include "gimple.h"
957060b5 32#include "tree-pass.h"
c7131fb2 33#include "ssa.h"
957060b5
AM
34#include "cgraph.h"
35#include "pretty-print.h"
36#include "diagnostic-core.h"
40e23961 37#include "fold-const.h"
d8a2d370 38#include "stor-layout.h"
2fb9a547
AM
39#include "internal-fn.h"
40#include "gimple-fold.h"
45b0be94 41#include "gimplify.h"
5be5c238 42#include "gimple-iterator.h"
18f429e2 43#include "gimplify-me.h"
5be5c238 44#include "gimple-walk.h"
726a989a 45#include "tree-iterator.h"
953ff289
DN
46#include "tree-inline.h"
47#include "langhooks.h"
442b4905 48#include "tree-dfa.h"
7a300452 49#include "tree-ssa.h"
6be42dd4 50#include "splay-tree.h"
629b3d75 51#include "omp-general.h"
0645c1a2 52#include "omp-low.h"
629b3d75 53#include "omp-grid.h"
4484a35a 54#include "gimple-low.h"
dd912cb8 55#include "symbol-summary.h"
1fe37220 56#include "tree-nested.h"
1f6be682 57#include "context.h"
41dbbb37 58#include "gomp-constants.h"
9bd46bc9 59#include "gimple-pretty-print.h"
13293add 60#include "hsa-common.h"
953ff289 61
41dbbb37 62/* Lowering of OMP parallel and workshare constructs proceeds in two
953ff289
DN
63 phases. The first phase scans the function looking for OMP statements
64 and then for variables that must be replaced to satisfy data sharing
65 clauses. The second phase expands code for the constructs, as well as
c0220ea4 66 re-gimplifying things when variables have been replaced with complex
953ff289
DN
67 expressions.
68
7ebaeab5 69 Final code generation is done by pass_expand_omp. The flowgraph is
41dbbb37
TS
70 scanned for regions which are then moved to a new
71 function, to be invoked by the thread library, or offloaded. */
953ff289
DN
72
73/* Context structure. Used to store information about each parallel
74 directive in the code. */
75
a79683d5 76struct omp_context
953ff289
DN
77{
78 /* This field must be at the beginning, as we do "inheritance": Some
79 callback functions for tree-inline.c (e.g., omp_copy_decl)
80 receive a copy_body_data pointer that is up-casted to an
81 omp_context pointer. */
82 copy_body_data cb;
83
84 /* The tree of contexts corresponding to the encountered constructs. */
85 struct omp_context *outer;
355fe088 86 gimple *stmt;
953ff289 87
b8698a0f 88 /* Map variables to fields in a structure that allows communication
953ff289
DN
89 between sending and receiving threads. */
90 splay_tree field_map;
91 tree record_type;
92 tree sender_decl;
93 tree receiver_decl;
94
a68ab351
JJ
95 /* These are used just by task contexts, if task firstprivate fn is
96 needed. srecord_type is used to communicate from the thread
97 that encountered the task construct to task firstprivate fn,
98 record_type is allocated by GOMP_task, initialized by task firstprivate
99 fn and passed to the task body fn. */
100 splay_tree sfield_map;
101 tree srecord_type;
102
953ff289
DN
103 /* A chain of variables to add to the top-level block surrounding the
104 construct. In the case of a parallel, this is in the child function. */
105 tree block_vars;
106
acf0174b
JJ
107 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
108 barriers should jump to during omplower pass. */
109 tree cancel_label;
110
6e6cf7b0
JJ
111 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
112 otherwise. */
113 gimple *simt_stmt;
114
953ff289
DN
115 /* Nesting depth of this context. Used to beautify error messages re
116 invalid gotos. The outermost ctx is depth 1, with depth 0 being
117 reserved for the main body of the function. */
118 int depth;
119
953ff289
DN
120 /* True if this parallel directive is nested within another. */
121 bool is_nested;
acf0174b
JJ
122
123 /* True if this construct can be cancelled. */
124 bool cancellable;
a79683d5 125};
953ff289 126
953ff289 127static splay_tree all_contexts;
a68ab351 128static int taskreg_nesting_level;
acf0174b 129static int target_nesting_level;
a68ab351 130static bitmap task_shared_vars;
5771c391 131static vec<omp_context *> taskreg_contexts;
953ff289 132
26127932 133static void scan_omp (gimple_seq *, omp_context *);
726a989a
RB
134static tree scan_omp_1_op (tree *, int *, void *);
135
136#define WALK_SUBSTMTS \
137 case GIMPLE_BIND: \
138 case GIMPLE_TRY: \
139 case GIMPLE_CATCH: \
140 case GIMPLE_EH_FILTER: \
0a35513e 141 case GIMPLE_TRANSACTION: \
726a989a
RB
142 /* The sub-statements for these should be walked. */ \
143 *handled_ops_p = false; \
144 break;
145
e4834818
NS
146/* Return true if CTX corresponds to an oacc parallel region. */
147
148static bool
149is_oacc_parallel (omp_context *ctx)
150{
151 enum gimple_code outer_type = gimple_code (ctx->stmt);
152 return ((outer_type == GIMPLE_OMP_TARGET)
153 && (gimple_omp_target_kind (ctx->stmt)
154 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
155}
156
157/* Return true if CTX corresponds to an oacc kernels region. */
158
159static bool
160is_oacc_kernels (omp_context *ctx)
161{
162 enum gimple_code outer_type = gimple_code (ctx->stmt);
163 return ((outer_type == GIMPLE_OMP_TARGET)
164 && (gimple_omp_target_kind (ctx->stmt)
165 == GF_OMP_TARGET_KIND_OACC_KERNELS));
166}
167
d9a6bd32
JJ
168/* If DECL is the artificial dummy VAR_DECL created for non-static
169 data member privatization, return the underlying "this" parameter,
170 otherwise return NULL. */
171
172tree
173omp_member_access_dummy_var (tree decl)
174{
175 if (!VAR_P (decl)
176 || !DECL_ARTIFICIAL (decl)
177 || !DECL_IGNORED_P (decl)
178 || !DECL_HAS_VALUE_EXPR_P (decl)
179 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
180 return NULL_TREE;
181
182 tree v = DECL_VALUE_EXPR (decl);
183 if (TREE_CODE (v) != COMPONENT_REF)
184 return NULL_TREE;
185
186 while (1)
187 switch (TREE_CODE (v))
188 {
189 case COMPONENT_REF:
190 case MEM_REF:
191 case INDIRECT_REF:
192 CASE_CONVERT:
193 case POINTER_PLUS_EXPR:
194 v = TREE_OPERAND (v, 0);
195 continue;
196 case PARM_DECL:
197 if (DECL_CONTEXT (v) == current_function_decl
198 && DECL_ARTIFICIAL (v)
199 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
200 return v;
201 return NULL_TREE;
202 default:
203 return NULL_TREE;
204 }
205}
206
207/* Helper for unshare_and_remap, called through walk_tree. */
208
209static tree
210unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
211{
212 tree *pair = (tree *) data;
213 if (*tp == pair[0])
214 {
215 *tp = unshare_expr (pair[1]);
216 *walk_subtrees = 0;
217 }
218 else if (IS_TYPE_OR_DECL_P (*tp))
219 *walk_subtrees = 0;
220 return NULL_TREE;
221}
222
223/* Return unshare_expr (X) with all occurrences of FROM
224 replaced with TO. */
225
226static tree
227unshare_and_remap (tree x, tree from, tree to)
228{
229 tree pair[2] = { from, to };
230 x = unshare_expr (x);
231 walk_tree (&x, unshare_and_remap_1, pair, NULL);
232 return x;
233}
234
726a989a
RB
235/* Convenience function for calling scan_omp_1_op on tree operands. */
236
237static inline tree
238scan_omp_op (tree *tp, omp_context *ctx)
239{
240 struct walk_stmt_info wi;
241
242 memset (&wi, 0, sizeof (wi));
243 wi.info = ctx;
244 wi.want_locations = true;
245
246 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
247}
248
355a7673 249static void lower_omp (gimple_seq *, omp_context *);
8ca5b2a2
JJ
250static tree lookup_decl_in_outer_ctx (tree, omp_context *);
251static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
953ff289 252
953ff289
DN
253/* Return true if CTX is for an omp parallel. */
254
255static inline bool
256is_parallel_ctx (omp_context *ctx)
257{
726a989a 258 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
953ff289
DN
259}
260
50674e96 261
a68ab351
JJ
262/* Return true if CTX is for an omp task. */
263
264static inline bool
265is_task_ctx (omp_context *ctx)
266{
726a989a 267 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
a68ab351
JJ
268}
269
270
d9a6bd32
JJ
271/* Return true if CTX is for an omp taskloop. */
272
273static inline bool
274is_taskloop_ctx (omp_context *ctx)
275{
276 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
277 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
278}
279
280
a68ab351
JJ
281/* Return true if CTX is for an omp parallel or omp task. */
282
283static inline bool
284is_taskreg_ctx (omp_context *ctx)
285{
d9a6bd32 286 return is_parallel_ctx (ctx) || is_task_ctx (ctx);
a68ab351
JJ
287}
288
953ff289
DN
289/* Return true if EXPR is variable sized. */
290
291static inline bool
22ea9ec0 292is_variable_sized (const_tree expr)
953ff289
DN
293{
294 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
295}
296
41dbbb37 297/* Lookup variables. The "maybe" form
953ff289
DN
298 allows for the variable form to not have been entered, otherwise we
299 assert that the variable must have been entered. */
300
301static inline tree
302lookup_decl (tree var, omp_context *ctx)
303{
b787e7a2 304 tree *n = ctx->cb.decl_map->get (var);
6be42dd4 305 return *n;
953ff289
DN
306}
307
308static inline tree
7c8f7639 309maybe_lookup_decl (const_tree var, omp_context *ctx)
953ff289 310{
b787e7a2 311 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
6be42dd4 312 return n ? *n : NULL_TREE;
953ff289
DN
313}
314
315static inline tree
316lookup_field (tree var, omp_context *ctx)
317{
318 splay_tree_node n;
319 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
320 return (tree) n->value;
321}
322
a68ab351 323static inline tree
d9a6bd32 324lookup_sfield (splay_tree_key key, omp_context *ctx)
a68ab351
JJ
325{
326 splay_tree_node n;
327 n = splay_tree_lookup (ctx->sfield_map
d9a6bd32 328 ? ctx->sfield_map : ctx->field_map, key);
a68ab351
JJ
329 return (tree) n->value;
330}
331
953ff289 332static inline tree
d9a6bd32
JJ
333lookup_sfield (tree var, omp_context *ctx)
334{
335 return lookup_sfield ((splay_tree_key) var, ctx);
336}
337
338static inline tree
339maybe_lookup_field (splay_tree_key key, omp_context *ctx)
953ff289
DN
340{
341 splay_tree_node n;
d9a6bd32 342 n = splay_tree_lookup (ctx->field_map, key);
953ff289
DN
343 return n ? (tree) n->value : NULL_TREE;
344}
345
d9a6bd32
JJ
346static inline tree
347maybe_lookup_field (tree var, omp_context *ctx)
348{
349 return maybe_lookup_field ((splay_tree_key) var, ctx);
350}
351
7c8f7639
JJ
352/* Return true if DECL should be copied by pointer. SHARED_CTX is
353 the parallel context if DECL is to be shared. */
953ff289
DN
354
355static bool
a68ab351 356use_pointer_for_field (tree decl, omp_context *shared_ctx)
953ff289 357{
9dc5773f
JJ
358 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
359 || TYPE_ATOMIC (TREE_TYPE (decl)))
953ff289
DN
360 return true;
361
6fc0bb99 362 /* We can only use copy-in/copy-out semantics for shared variables
953ff289 363 when we know the value is not accessible from an outer scope. */
7c8f7639 364 if (shared_ctx)
953ff289 365 {
41dbbb37
TS
366 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
367
953ff289
DN
368 /* ??? Trivially accessible from anywhere. But why would we even
369 be passing an address in this case? Should we simply assert
370 this to be false, or should we have a cleanup pass that removes
371 these from the list of mappings? */
372 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
373 return true;
374
375 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
376 without analyzing the expression whether or not its location
377 is accessible to anyone else. In the case of nested parallel
378 regions it certainly may be. */
077b0dfb 379 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
953ff289
DN
380 return true;
381
382 /* Do not use copy-in/copy-out for variables that have their
383 address taken. */
384 if (TREE_ADDRESSABLE (decl))
385 return true;
7c8f7639 386
6d840d99
JJ
387 /* lower_send_shared_vars only uses copy-in, but not copy-out
388 for these. */
389 if (TREE_READONLY (decl)
390 || ((TREE_CODE (decl) == RESULT_DECL
391 || TREE_CODE (decl) == PARM_DECL)
392 && DECL_BY_REFERENCE (decl)))
393 return false;
394
7c8f7639
JJ
395 /* Disallow copy-in/out in nested parallel if
396 decl is shared in outer parallel, otherwise
397 each thread could store the shared variable
398 in its own copy-in location, making the
399 variable no longer really shared. */
6d840d99 400 if (shared_ctx->is_nested)
7c8f7639
JJ
401 {
402 omp_context *up;
403
404 for (up = shared_ctx->outer; up; up = up->outer)
d9c194cb 405 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
7c8f7639
JJ
406 break;
407
d9c194cb 408 if (up)
7c8f7639
JJ
409 {
410 tree c;
411
726a989a 412 for (c = gimple_omp_taskreg_clauses (up->stmt);
7c8f7639
JJ
413 c; c = OMP_CLAUSE_CHAIN (c))
414 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
415 && OMP_CLAUSE_DECL (c) == decl)
416 break;
417
418 if (c)
25142650 419 goto maybe_mark_addressable_and_ret;
7c8f7639
JJ
420 }
421 }
a68ab351 422
6d840d99 423 /* For tasks avoid using copy-in/out. As tasks can be
a68ab351
JJ
424 deferred or executed in different thread, when GOMP_task
425 returns, the task hasn't necessarily terminated. */
6d840d99 426 if (is_task_ctx (shared_ctx))
a68ab351 427 {
25142650
JJ
428 tree outer;
429 maybe_mark_addressable_and_ret:
430 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
d9a6bd32 431 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
a68ab351
JJ
432 {
433 /* Taking address of OUTER in lower_send_shared_vars
434 might need regimplification of everything that uses the
435 variable. */
436 if (!task_shared_vars)
437 task_shared_vars = BITMAP_ALLOC (NULL);
438 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
439 TREE_ADDRESSABLE (outer) = 1;
440 }
441 return true;
442 }
953ff289
DN
443 }
444
445 return false;
446}
447
917948d3
ZD
448/* Construct a new automatic decl similar to VAR. */
449
450static tree
451omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
452{
453 tree copy = copy_var_decl (var, name, type);
454
455 DECL_CONTEXT (copy) = current_function_decl;
910ad8de 456 DECL_CHAIN (copy) = ctx->block_vars;
d9a6bd32
JJ
457 /* If VAR is listed in task_shared_vars, it means it wasn't
458 originally addressable and is just because task needs to take
459 it's address. But we don't need to take address of privatizations
460 from that var. */
461 if (TREE_ADDRESSABLE (var)
462 && task_shared_vars
463 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
464 TREE_ADDRESSABLE (copy) = 0;
953ff289
DN
465 ctx->block_vars = copy;
466
467 return copy;
468}
469
470static tree
471omp_copy_decl_1 (tree var, omp_context *ctx)
472{
473 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
474}
475
a9a58711
JJ
476/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
477 as appropriate. */
478static tree
479omp_build_component_ref (tree obj, tree field)
480{
481 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
482 if (TREE_THIS_VOLATILE (field))
483 TREE_THIS_VOLATILE (ret) |= 1;
484 if (TREE_READONLY (field))
485 TREE_READONLY (ret) |= 1;
486 return ret;
487}
488
953ff289
DN
489/* Build tree nodes to access the field for VAR on the receiver side. */
490
491static tree
492build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
493{
494 tree x, field = lookup_field (var, ctx);
495
496 /* If the receiver record type was remapped in the child function,
497 remap the field into the new record type. */
498 x = maybe_lookup_field (field, ctx);
499 if (x != NULL)
500 field = x;
501
70f34814 502 x = build_simple_mem_ref (ctx->receiver_decl);
f1b9b669 503 TREE_THIS_NOTRAP (x) = 1;
a9a58711 504 x = omp_build_component_ref (x, field);
953ff289 505 if (by_ref)
096b85f4
TV
506 {
507 x = build_simple_mem_ref (x);
508 TREE_THIS_NOTRAP (x) = 1;
509 }
953ff289
DN
510
511 return x;
512}
513
514/* Build tree nodes to access VAR in the scope outer to CTX. In the case
515 of a parallel, this is a component reference; for workshare constructs
516 this is some variable. */
517
518static tree
c39dad64
JJ
519build_outer_var_ref (tree var, omp_context *ctx,
520 enum omp_clause_code code = OMP_CLAUSE_ERROR)
953ff289
DN
521{
522 tree x;
523
8ca5b2a2 524 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
953ff289
DN
525 x = var;
526 else if (is_variable_sized (var))
527 {
528 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
c39dad64 529 x = build_outer_var_ref (x, ctx, code);
70f34814 530 x = build_simple_mem_ref (x);
953ff289 531 }
a68ab351 532 else if (is_taskreg_ctx (ctx))
953ff289 533 {
7c8f7639 534 bool by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
535 x = build_receiver_ref (var, by_ref, ctx);
536 }
c39dad64
JJ
537 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
538 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
539 || (code == OMP_CLAUSE_PRIVATE
540 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
541 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
542 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
74bf76ed 543 {
c39dad64
JJ
544 /* #pragma omp simd isn't a worksharing construct, and can reference
545 even private vars in its linear etc. clauses.
546 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
547 to private vars in all worksharing constructs. */
74bf76ed
JJ
548 x = NULL_TREE;
549 if (ctx->outer && is_taskreg_ctx (ctx))
550 x = lookup_decl (var, ctx->outer);
551 else if (ctx->outer)
f3b331d1 552 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
74bf76ed
JJ
553 if (x == NULL_TREE)
554 x = var;
555 }
c39dad64 556 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
d9a6bd32
JJ
557 {
558 gcc_assert (ctx->outer);
559 splay_tree_node n
560 = splay_tree_lookup (ctx->outer->field_map,
561 (splay_tree_key) &DECL_UID (var));
562 if (n == NULL)
563 {
564 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
565 x = var;
566 else
567 x = lookup_decl (var, ctx->outer);
568 }
569 else
570 {
571 tree field = (tree) n->value;
572 /* If the receiver record type was remapped in the child function,
573 remap the field into the new record type. */
574 x = maybe_lookup_field (field, ctx->outer);
575 if (x != NULL)
576 field = x;
577
578 x = build_simple_mem_ref (ctx->outer->receiver_decl);
579 x = omp_build_component_ref (x, field);
580 if (use_pointer_for_field (var, ctx->outer))
581 x = build_simple_mem_ref (x);
582 }
583 }
953ff289 584 else if (ctx->outer)
b2b40051
MJ
585 {
586 omp_context *outer = ctx->outer;
587 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
588 {
589 outer = outer->outer;
590 gcc_assert (outer
591 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
592 }
c39dad64 593 x = lookup_decl (var, outer);
b2b40051 594 }
629b3d75 595 else if (omp_is_reference (var))
eeb1d9e0
JJ
596 /* This can happen with orphaned constructs. If var is reference, it is
597 possible it is shared and as such valid. */
598 x = var;
d9a6bd32
JJ
599 else if (omp_member_access_dummy_var (var))
600 x = var;
953ff289
DN
601 else
602 gcc_unreachable ();
603
d9a6bd32
JJ
604 if (x == var)
605 {
606 tree t = omp_member_access_dummy_var (var);
607 if (t)
608 {
609 x = DECL_VALUE_EXPR (var);
610 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
611 if (o != t)
612 x = unshare_and_remap (x, t, o);
613 else
614 x = unshare_expr (x);
615 }
616 }
617
629b3d75 618 if (omp_is_reference (var))
70f34814 619 x = build_simple_mem_ref (x);
953ff289
DN
620
621 return x;
622}
623
624/* Build tree nodes to access the field for VAR on the sender side. */
625
626static tree
d9a6bd32 627build_sender_ref (splay_tree_key key, omp_context *ctx)
953ff289 628{
d9a6bd32 629 tree field = lookup_sfield (key, ctx);
a9a58711 630 return omp_build_component_ref (ctx->sender_decl, field);
953ff289
DN
631}
632
d9a6bd32
JJ
633static tree
634build_sender_ref (tree var, omp_context *ctx)
635{
636 return build_sender_ref ((splay_tree_key) var, ctx);
637}
638
86938de6
TV
639/* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
640 BASE_POINTERS_RESTRICT, declare the field with restrict. */
953ff289
DN
641
642static void
86938de6
TV
643install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
644 bool base_pointers_restrict = false)
953ff289 645{
a68ab351 646 tree field, type, sfield = NULL_TREE;
d9a6bd32 647 splay_tree_key key = (splay_tree_key) var;
953ff289 648
d9a6bd32
JJ
649 if ((mask & 8) != 0)
650 {
651 key = (splay_tree_key) &DECL_UID (var);
652 gcc_checking_assert (key != (splay_tree_key) var);
653 }
a68ab351 654 gcc_assert ((mask & 1) == 0
d9a6bd32 655 || !splay_tree_lookup (ctx->field_map, key));
a68ab351 656 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
d9a6bd32 657 || !splay_tree_lookup (ctx->sfield_map, key));
41dbbb37
TS
658 gcc_assert ((mask & 3) == 3
659 || !is_gimple_omp_oacc (ctx->stmt));
953ff289
DN
660
661 type = TREE_TYPE (var);
8498c16b
TV
662 /* Prevent redeclaring the var in the split-off function with a restrict
663 pointer type. Note that we only clear type itself, restrict qualifiers in
664 the pointed-to type will be ignored by points-to analysis. */
665 if (POINTER_TYPE_P (type)
666 && TYPE_RESTRICT (type))
667 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
668
acf0174b
JJ
669 if (mask & 4)
670 {
671 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
672 type = build_pointer_type (build_pointer_type (type));
673 }
674 else if (by_ref)
86938de6
TV
675 {
676 type = build_pointer_type (type);
677 if (base_pointers_restrict)
678 type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
679 }
629b3d75 680 else if ((mask & 3) == 1 && omp_is_reference (var))
a68ab351 681 type = TREE_TYPE (type);
953ff289 682
c2255bc4
AH
683 field = build_decl (DECL_SOURCE_LOCATION (var),
684 FIELD_DECL, DECL_NAME (var), type);
953ff289
DN
685
686 /* Remember what variable this field was created for. This does have a
687 side effect of making dwarf2out ignore this member, so for helpful
688 debugging we clear it later in delete_omp_context. */
689 DECL_ABSTRACT_ORIGIN (field) = var;
a68ab351
JJ
690 if (type == TREE_TYPE (var))
691 {
fe37c7af 692 SET_DECL_ALIGN (field, DECL_ALIGN (var));
a68ab351
JJ
693 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
694 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
695 }
696 else
fe37c7af 697 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
953ff289 698
a68ab351
JJ
699 if ((mask & 3) == 3)
700 {
701 insert_field_into_struct (ctx->record_type, field);
702 if (ctx->srecord_type)
703 {
c2255bc4
AH
704 sfield = build_decl (DECL_SOURCE_LOCATION (var),
705 FIELD_DECL, DECL_NAME (var), type);
a68ab351 706 DECL_ABSTRACT_ORIGIN (sfield) = var;
fe37c7af 707 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
a68ab351
JJ
708 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
709 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
710 insert_field_into_struct (ctx->srecord_type, sfield);
711 }
712 }
713 else
714 {
715 if (ctx->srecord_type == NULL_TREE)
716 {
717 tree t;
718
719 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
720 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
721 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
722 {
d9a6bd32 723 sfield = build_decl (DECL_SOURCE_LOCATION (t),
c2255bc4 724 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
a68ab351
JJ
725 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
726 insert_field_into_struct (ctx->srecord_type, sfield);
727 splay_tree_insert (ctx->sfield_map,
728 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
729 (splay_tree_value) sfield);
730 }
731 }
732 sfield = field;
733 insert_field_into_struct ((mask & 1) ? ctx->record_type
734 : ctx->srecord_type, field);
735 }
953ff289 736
a68ab351 737 if (mask & 1)
d9a6bd32 738 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
a68ab351 739 if ((mask & 2) && ctx->sfield_map)
d9a6bd32 740 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
953ff289
DN
741}
742
743static tree
744install_var_local (tree var, omp_context *ctx)
745{
746 tree new_var = omp_copy_decl_1 (var, ctx);
747 insert_decl_map (&ctx->cb, var, new_var);
748 return new_var;
749}
750
751/* Adjust the replacement for DECL in CTX for the new context. This means
752 copying the DECL_VALUE_EXPR, and fixing up the type. */
753
754static void
755fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
756{
757 tree new_decl, size;
758
759 new_decl = lookup_decl (decl, ctx);
760
761 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
762
763 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
764 && DECL_HAS_VALUE_EXPR_P (decl))
765 {
766 tree ve = DECL_VALUE_EXPR (decl);
726a989a 767 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
953ff289
DN
768 SET_DECL_VALUE_EXPR (new_decl, ve);
769 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
770 }
771
772 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
773 {
774 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
775 if (size == error_mark_node)
776 size = TYPE_SIZE (TREE_TYPE (new_decl));
777 DECL_SIZE (new_decl) = size;
778
779 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
780 if (size == error_mark_node)
781 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
782 DECL_SIZE_UNIT (new_decl) = size;
783 }
784}
785
786/* The callback for remap_decl. Search all containing contexts for a
787 mapping of the variable; this avoids having to duplicate the splay
788 tree ahead of time. We know a mapping doesn't already exist in the
789 given context. Create new mappings to implement default semantics. */
790
791static tree
792omp_copy_decl (tree var, copy_body_data *cb)
793{
794 omp_context *ctx = (omp_context *) cb;
795 tree new_var;
796
953ff289
DN
797 if (TREE_CODE (var) == LABEL_DECL)
798 {
c2255bc4 799 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
50674e96 800 DECL_CONTEXT (new_var) = current_function_decl;
953ff289
DN
801 insert_decl_map (&ctx->cb, var, new_var);
802 return new_var;
803 }
804
a68ab351 805 while (!is_taskreg_ctx (ctx))
953ff289
DN
806 {
807 ctx = ctx->outer;
808 if (ctx == NULL)
809 return var;
810 new_var = maybe_lookup_decl (var, ctx);
811 if (new_var)
812 return new_var;
813 }
814
8ca5b2a2
JJ
815 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
816 return var;
817
953ff289
DN
818 return error_mark_node;
819}
820
629b3d75 821/* Create a new context, with OUTER_CTX being the surrounding context. */
50674e96 822
629b3d75
MJ
823static omp_context *
824new_omp_context (gimple *stmt, omp_context *outer_ctx)
50674e96 825{
629b3d75 826 omp_context *ctx = XCNEW (omp_context);
50674e96 827
629b3d75
MJ
828 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
829 (splay_tree_value) ctx);
830 ctx->stmt = stmt;
50674e96 831
629b3d75 832 if (outer_ctx)
777f7f9a 833 {
629b3d75
MJ
834 ctx->outer = outer_ctx;
835 ctx->cb = outer_ctx->cb;
836 ctx->cb.block = NULL;
837 ctx->depth = outer_ctx->depth + 1;
953ff289
DN
838 }
839 else
840 {
841 ctx->cb.src_fn = current_function_decl;
842 ctx->cb.dst_fn = current_function_decl;
d52f5295 843 ctx->cb.src_node = cgraph_node::get (current_function_decl);
fe660d7b 844 gcc_checking_assert (ctx->cb.src_node);
953ff289
DN
845 ctx->cb.dst_node = ctx->cb.src_node;
846 ctx->cb.src_cfun = cfun;
847 ctx->cb.copy_decl = omp_copy_decl;
1d65f45c 848 ctx->cb.eh_lp_nr = 0;
953ff289
DN
849 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
850 ctx->depth = 1;
851 }
852
b787e7a2 853 ctx->cb.decl_map = new hash_map<tree, tree>;
953ff289
DN
854
855 return ctx;
856}
857
726a989a 858static gimple_seq maybe_catch_exception (gimple_seq);
2368a460
JJ
859
860/* Finalize task copyfn. */
861
862static void
538dd0b7 863finalize_task_copyfn (gomp_task *task_stmt)
2368a460
JJ
864{
865 struct function *child_cfun;
af16bc76 866 tree child_fn;
355a7673 867 gimple_seq seq = NULL, new_seq;
538dd0b7 868 gbind *bind;
2368a460 869
726a989a 870 child_fn = gimple_omp_task_copy_fn (task_stmt);
2368a460
JJ
871 if (child_fn == NULL_TREE)
872 return;
873
874 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
d7ed20db 875 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
2368a460 876
2368a460 877 push_cfun (child_cfun);
3ad065ef 878 bind = gimplify_body (child_fn, false);
726a989a
RB
879 gimple_seq_add_stmt (&seq, bind);
880 new_seq = maybe_catch_exception (seq);
881 if (new_seq != seq)
882 {
883 bind = gimple_build_bind (NULL, new_seq, NULL);
355a7673 884 seq = NULL;
726a989a
RB
885 gimple_seq_add_stmt (&seq, bind);
886 }
887 gimple_set_body (child_fn, seq);
2368a460 888 pop_cfun ();
2368a460 889
d7ed20db 890 /* Inform the callgraph about the new function. */
edafad14
TV
891 cgraph_node *node = cgraph_node::get_create (child_fn);
892 node->parallelized_function = 1;
d52f5295 893 cgraph_node::add_new_function (child_fn, false);
2368a460
JJ
894}
895
953ff289
DN
896/* Destroy a omp_context data structures. Called through the splay tree
897 value delete callback. */
898
899static void
900delete_omp_context (splay_tree_value value)
901{
902 omp_context *ctx = (omp_context *) value;
903
b787e7a2 904 delete ctx->cb.decl_map;
953ff289
DN
905
906 if (ctx->field_map)
907 splay_tree_delete (ctx->field_map);
a68ab351
JJ
908 if (ctx->sfield_map)
909 splay_tree_delete (ctx->sfield_map);
953ff289
DN
910
911 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
912 it produces corrupt debug information. */
913 if (ctx->record_type)
914 {
915 tree t;
910ad8de 916 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
953ff289
DN
917 DECL_ABSTRACT_ORIGIN (t) = NULL;
918 }
a68ab351
JJ
919 if (ctx->srecord_type)
920 {
921 tree t;
910ad8de 922 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
a68ab351
JJ
923 DECL_ABSTRACT_ORIGIN (t) = NULL;
924 }
953ff289 925
2368a460 926 if (is_task_ctx (ctx))
538dd0b7 927 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
2368a460 928
953ff289
DN
929 XDELETE (ctx);
930}
931
932/* Fix up RECEIVER_DECL with a type that has been remapped to the child
933 context. */
934
935static void
936fixup_child_record_type (omp_context *ctx)
937{
938 tree f, type = ctx->record_type;
939
b2b40051
MJ
940 if (!ctx->receiver_decl)
941 return;
953ff289
DN
942 /* ??? It isn't sufficient to just call remap_type here, because
943 variably_modified_type_p doesn't work the way we expect for
944 record types. Testing each field for whether it needs remapping
945 and creating a new record by hand works, however. */
910ad8de 946 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
953ff289
DN
947 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
948 break;
949 if (f)
950 {
951 tree name, new_fields = NULL;
952
953 type = lang_hooks.types.make_type (RECORD_TYPE);
954 name = DECL_NAME (TYPE_NAME (ctx->record_type));
c2255bc4
AH
955 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
956 TYPE_DECL, name, type);
953ff289
DN
957 TYPE_NAME (type) = name;
958
910ad8de 959 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
953ff289
DN
960 {
961 tree new_f = copy_node (f);
962 DECL_CONTEXT (new_f) = type;
963 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
910ad8de 964 DECL_CHAIN (new_f) = new_fields;
726a989a
RB
965 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
966 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
967 &ctx->cb, NULL);
968 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
969 &ctx->cb, NULL);
953ff289
DN
970 new_fields = new_f;
971
972 /* Arrange to be able to look up the receiver field
973 given the sender field. */
974 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
975 (splay_tree_value) new_f);
976 }
977 TYPE_FIELDS (type) = nreverse (new_fields);
978 layout_type (type);
979 }
980
d9a6bd32
JJ
981 /* In a target region we never modify any of the pointers in *.omp_data_i,
982 so attempt to help the optimizers. */
983 if (is_gimple_omp_offloaded (ctx->stmt))
984 type = build_qualified_type (type, TYPE_QUAL_CONST);
985
a2a2fe4b
RB
986 TREE_TYPE (ctx->receiver_decl)
987 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
953ff289
DN
988}
989
990/* Instantiate decls as necessary in CTX to satisfy the data sharing
86938de6
TV
991 specified by CLAUSES. If BASE_POINTERS_RESTRICT, install var field with
992 restrict. */
953ff289
DN
993
994static void
86938de6
TV
995scan_sharing_clauses (tree clauses, omp_context *ctx,
996 bool base_pointers_restrict = false)
953ff289
DN
997{
998 tree c, decl;
999 bool scan_array_reductions = false;
1000
1001 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1002 {
1003 bool by_ref;
1004
aaf46ef9 1005 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
1006 {
1007 case OMP_CLAUSE_PRIVATE:
1008 decl = OMP_CLAUSE_DECL (c);
a68ab351
JJ
1009 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1010 goto do_private;
1011 else if (!is_variable_sized (decl))
953ff289
DN
1012 install_var_local (decl, ctx);
1013 break;
1014
1015 case OMP_CLAUSE_SHARED:
9cf32741 1016 decl = OMP_CLAUSE_DECL (c);
acf0174b
JJ
1017 /* Ignore shared directives in teams construct. */
1018 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
9cf32741
JJ
1019 {
1020 /* Global variables don't need to be copied,
1021 the receiver side will use them directly. */
1022 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1023 if (is_global_var (odecl))
1024 break;
1025 insert_decl_map (&ctx->cb, decl, odecl);
1026 break;
1027 }
a68ab351 1028 gcc_assert (is_taskreg_ctx (ctx));
5da250fc
JJ
1029 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1030 || !is_variable_sized (decl));
8ca5b2a2
JJ
1031 /* Global variables don't need to be copied,
1032 the receiver side will use them directly. */
1033 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1034 break;
d9a6bd32 1035 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1a80d6b8
JJ
1036 {
1037 use_pointer_for_field (decl, ctx);
1038 break;
1039 }
1040 by_ref = use_pointer_for_field (decl, NULL);
1041 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
953ff289
DN
1042 || TREE_ADDRESSABLE (decl)
1043 || by_ref
629b3d75 1044 || omp_is_reference (decl))
953ff289 1045 {
1a80d6b8 1046 by_ref = use_pointer_for_field (decl, ctx);
a68ab351 1047 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1048 install_var_local (decl, ctx);
1049 break;
1050 }
1051 /* We don't need to copy const scalar vars back. */
aaf46ef9 1052 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
953ff289
DN
1053 goto do_private;
1054
d9a6bd32
JJ
1055 case OMP_CLAUSE_REDUCTION:
1056 decl = OMP_CLAUSE_DECL (c);
1057 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1058 && TREE_CODE (decl) == MEM_REF)
1059 {
1060 tree t = TREE_OPERAND (decl, 0);
e01d41e5
JJ
1061 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1062 t = TREE_OPERAND (t, 0);
d9a6bd32
JJ
1063 if (TREE_CODE (t) == INDIRECT_REF
1064 || TREE_CODE (t) == ADDR_EXPR)
1065 t = TREE_OPERAND (t, 0);
1066 install_var_local (t, ctx);
1067 if (is_taskreg_ctx (ctx)
1068 && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1069 && !is_variable_sized (t))
1070 {
1071 by_ref = use_pointer_for_field (t, ctx);
1072 install_var_field (t, by_ref, 3, ctx);
1073 }
1074 break;
1075 }
1076 goto do_private;
1077
953ff289
DN
1078 case OMP_CLAUSE_LASTPRIVATE:
1079 /* Let the corresponding firstprivate clause create
1080 the variable. */
1081 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1082 break;
1083 /* FALLTHRU */
1084
1085 case OMP_CLAUSE_FIRSTPRIVATE:
74bf76ed 1086 case OMP_CLAUSE_LINEAR:
953ff289
DN
1087 decl = OMP_CLAUSE_DECL (c);
1088 do_private:
d9a6bd32
JJ
1089 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1090 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1091 && is_gimple_omp_offloaded (ctx->stmt))
1092 {
1093 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
629b3d75 1094 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
d9a6bd32
JJ
1095 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1096 install_var_field (decl, true, 3, ctx);
1097 else
1098 install_var_field (decl, false, 3, ctx);
1099 }
953ff289 1100 if (is_variable_sized (decl))
953ff289 1101 {
a68ab351
JJ
1102 if (is_task_ctx (ctx))
1103 install_var_field (decl, false, 1, ctx);
1104 break;
1105 }
1106 else if (is_taskreg_ctx (ctx))
1107 {
1108 bool global
1109 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
7c8f7639 1110 by_ref = use_pointer_for_field (decl, NULL);
a68ab351
JJ
1111
1112 if (is_task_ctx (ctx)
629b3d75 1113 && (global || by_ref || omp_is_reference (decl)))
a68ab351
JJ
1114 {
1115 install_var_field (decl, false, 1, ctx);
1116 if (!global)
1117 install_var_field (decl, by_ref, 2, ctx);
1118 }
1119 else if (!global)
1120 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1121 }
1122 install_var_local (decl, ctx);
1123 break;
1124
d9a6bd32
JJ
1125 case OMP_CLAUSE_USE_DEVICE_PTR:
1126 decl = OMP_CLAUSE_DECL (c);
1127 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1128 install_var_field (decl, true, 3, ctx);
1129 else
1130 install_var_field (decl, false, 3, ctx);
1131 if (DECL_SIZE (decl)
1132 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1133 {
1134 tree decl2 = DECL_VALUE_EXPR (decl);
1135 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1136 decl2 = TREE_OPERAND (decl2, 0);
1137 gcc_assert (DECL_P (decl2));
1138 install_var_local (decl2, ctx);
1139 }
1140 install_var_local (decl, ctx);
1141 break;
1142
1143 case OMP_CLAUSE_IS_DEVICE_PTR:
1144 decl = OMP_CLAUSE_DECL (c);
1145 goto do_private;
1146
acf0174b 1147 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32 1148 gcc_assert (is_taskreg_ctx (ctx));
acf0174b
JJ
1149 decl = OMP_CLAUSE_DECL (c);
1150 install_var_field (decl, false, 3, ctx);
1151 install_var_local (decl, ctx);
1152 break;
1153
953ff289 1154 case OMP_CLAUSE_COPYPRIVATE:
953ff289
DN
1155 case OMP_CLAUSE_COPYIN:
1156 decl = OMP_CLAUSE_DECL (c);
7c8f7639 1157 by_ref = use_pointer_for_field (decl, NULL);
a68ab351 1158 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1159 break;
1160
20906c66 1161 case OMP_CLAUSE_FINAL:
953ff289
DN
1162 case OMP_CLAUSE_IF:
1163 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
1164 case OMP_CLAUSE_NUM_TEAMS:
1165 case OMP_CLAUSE_THREAD_LIMIT:
1166 case OMP_CLAUSE_DEVICE:
953ff289 1167 case OMP_CLAUSE_SCHEDULE:
acf0174b
JJ
1168 case OMP_CLAUSE_DIST_SCHEDULE:
1169 case OMP_CLAUSE_DEPEND:
d9a6bd32
JJ
1170 case OMP_CLAUSE_PRIORITY:
1171 case OMP_CLAUSE_GRAINSIZE:
1172 case OMP_CLAUSE_NUM_TASKS:
9a771876 1173 case OMP_CLAUSE__CILK_FOR_COUNT_:
41dbbb37
TS
1174 case OMP_CLAUSE_NUM_GANGS:
1175 case OMP_CLAUSE_NUM_WORKERS:
1176 case OMP_CLAUSE_VECTOR_LENGTH:
953ff289 1177 if (ctx->outer)
726a989a 1178 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
953ff289
DN
1179 break;
1180
acf0174b
JJ
1181 case OMP_CLAUSE_TO:
1182 case OMP_CLAUSE_FROM:
1183 case OMP_CLAUSE_MAP:
1184 if (ctx->outer)
1185 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1186 decl = OMP_CLAUSE_DECL (c);
1187 /* Global variables with "omp declare target" attribute
1188 don't need to be copied, the receiver side will use them
4a38b02b
IV
1189 directly. However, global variables with "omp declare target link"
1190 attribute need to be copied. */
acf0174b
JJ
1191 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1192 && DECL_P (decl)
e01d41e5
JJ
1193 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1194 && (OMP_CLAUSE_MAP_KIND (c)
1195 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32 1196 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
acf0174b 1197 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
4a38b02b
IV
1198 && varpool_node::get_create (decl)->offloadable
1199 && !lookup_attribute ("omp declare target link",
1200 DECL_ATTRIBUTES (decl)))
acf0174b
JJ
1201 break;
1202 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
41dbbb37 1203 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
acf0174b 1204 {
41dbbb37
TS
1205 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1206 not offloaded; there is nothing to map for those. */
1207 if (!is_gimple_omp_offloaded (ctx->stmt)
b8910447
JJ
1208 && !POINTER_TYPE_P (TREE_TYPE (decl))
1209 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
acf0174b
JJ
1210 break;
1211 }
d9a6bd32 1212 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
e01d41e5
JJ
1213 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1214 || (OMP_CLAUSE_MAP_KIND (c)
1215 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
d9a6bd32
JJ
1216 {
1217 if (TREE_CODE (decl) == COMPONENT_REF
1218 || (TREE_CODE (decl) == INDIRECT_REF
1219 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1220 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1221 == REFERENCE_TYPE)))
1222 break;
1223 if (DECL_SIZE (decl)
1224 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1225 {
1226 tree decl2 = DECL_VALUE_EXPR (decl);
1227 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1228 decl2 = TREE_OPERAND (decl2, 0);
1229 gcc_assert (DECL_P (decl2));
1230 install_var_local (decl2, ctx);
1231 }
1232 install_var_local (decl, ctx);
1233 break;
1234 }
acf0174b
JJ
1235 if (DECL_P (decl))
1236 {
1237 if (DECL_SIZE (decl)
1238 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1239 {
1240 tree decl2 = DECL_VALUE_EXPR (decl);
1241 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1242 decl2 = TREE_OPERAND (decl2, 0);
1243 gcc_assert (DECL_P (decl2));
e01d41e5 1244 install_var_field (decl2, true, 3, ctx);
acf0174b
JJ
1245 install_var_local (decl2, ctx);
1246 install_var_local (decl, ctx);
1247 }
1248 else
1249 {
1250 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
41dbbb37 1251 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
acf0174b
JJ
1252 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1253 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1254 install_var_field (decl, true, 7, ctx);
1255 else
86938de6
TV
1256 install_var_field (decl, true, 3, ctx,
1257 base_pointers_restrict);
c42cfb5c
CP
1258 if (is_gimple_omp_offloaded (ctx->stmt)
1259 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
acf0174b
JJ
1260 install_var_local (decl, ctx);
1261 }
1262 }
1263 else
1264 {
1265 tree base = get_base_address (decl);
1266 tree nc = OMP_CLAUSE_CHAIN (c);
1267 if (DECL_P (base)
1268 && nc != NULL_TREE
1269 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1270 && OMP_CLAUSE_DECL (nc) == base
41dbbb37 1271 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
acf0174b
JJ
1272 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1273 {
1274 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1275 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1276 }
1277 else
1278 {
f014c653
JJ
1279 if (ctx->outer)
1280 {
1281 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1282 decl = OMP_CLAUSE_DECL (c);
1283 }
acf0174b
JJ
1284 gcc_assert (!splay_tree_lookup (ctx->field_map,
1285 (splay_tree_key) decl));
1286 tree field
1287 = build_decl (OMP_CLAUSE_LOCATION (c),
1288 FIELD_DECL, NULL_TREE, ptr_type_node);
fe37c7af 1289 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
acf0174b
JJ
1290 insert_field_into_struct (ctx->record_type, field);
1291 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1292 (splay_tree_value) field);
1293 }
1294 }
1295 break;
1296
b2b40051
MJ
1297 case OMP_CLAUSE__GRIDDIM_:
1298 if (ctx->outer)
1299 {
1300 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1301 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1302 }
1303 break;
1304
953ff289
DN
1305 case OMP_CLAUSE_NOWAIT:
1306 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
1307 case OMP_CLAUSE_COLLAPSE:
1308 case OMP_CLAUSE_UNTIED:
20906c66 1309 case OMP_CLAUSE_MERGEABLE:
acf0174b 1310 case OMP_CLAUSE_PROC_BIND:
74bf76ed 1311 case OMP_CLAUSE_SAFELEN:
d9a6bd32
JJ
1312 case OMP_CLAUSE_SIMDLEN:
1313 case OMP_CLAUSE_THREADS:
1314 case OMP_CLAUSE_SIMD:
1315 case OMP_CLAUSE_NOGROUP:
1316 case OMP_CLAUSE_DEFAULTMAP:
41dbbb37
TS
1317 case OMP_CLAUSE_ASYNC:
1318 case OMP_CLAUSE_WAIT:
1319 case OMP_CLAUSE_GANG:
1320 case OMP_CLAUSE_WORKER:
1321 case OMP_CLAUSE_VECTOR:
c5a64cfe
NS
1322 case OMP_CLAUSE_INDEPENDENT:
1323 case OMP_CLAUSE_AUTO:
1324 case OMP_CLAUSE_SEQ:
02889d23 1325 case OMP_CLAUSE_TILE:
6c7509bc 1326 case OMP_CLAUSE__SIMT_:
8a4674bb 1327 case OMP_CLAUSE_DEFAULT:
953ff289
DN
1328 break;
1329
acf0174b
JJ
1330 case OMP_CLAUSE_ALIGNED:
1331 decl = OMP_CLAUSE_DECL (c);
1332 if (is_global_var (decl)
1333 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1334 install_var_local (decl, ctx);
1335 break;
1336
41dbbb37 1337 case OMP_CLAUSE__CACHE_:
953ff289
DN
1338 default:
1339 gcc_unreachable ();
1340 }
1341 }
1342
1343 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1344 {
aaf46ef9 1345 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
1346 {
1347 case OMP_CLAUSE_LASTPRIVATE:
1348 /* Let the corresponding firstprivate clause create
1349 the variable. */
726a989a 1350 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
a68ab351 1351 scan_array_reductions = true;
953ff289
DN
1352 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1353 break;
1354 /* FALLTHRU */
1355
953ff289 1356 case OMP_CLAUSE_FIRSTPRIVATE:
41dbbb37 1357 case OMP_CLAUSE_PRIVATE:
74bf76ed 1358 case OMP_CLAUSE_LINEAR:
d9a6bd32 1359 case OMP_CLAUSE_IS_DEVICE_PTR:
953ff289
DN
1360 decl = OMP_CLAUSE_DECL (c);
1361 if (is_variable_sized (decl))
d9a6bd32
JJ
1362 {
1363 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1364 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1365 && is_gimple_omp_offloaded (ctx->stmt))
1366 {
1367 tree decl2 = DECL_VALUE_EXPR (decl);
1368 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1369 decl2 = TREE_OPERAND (decl2, 0);
1370 gcc_assert (DECL_P (decl2));
1371 install_var_local (decl2, ctx);
1372 fixup_remapped_decl (decl2, ctx, false);
1373 }
1374 install_var_local (decl, ctx);
1375 }
953ff289 1376 fixup_remapped_decl (decl, ctx,
aaf46ef9 1377 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
953ff289 1378 && OMP_CLAUSE_PRIVATE_DEBUG (c));
d9a6bd32
JJ
1379 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1380 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
953ff289 1381 scan_array_reductions = true;
d9a6bd32
JJ
1382 break;
1383
1384 case OMP_CLAUSE_REDUCTION:
1385 decl = OMP_CLAUSE_DECL (c);
1386 if (TREE_CODE (decl) != MEM_REF)
1387 {
1388 if (is_variable_sized (decl))
1389 install_var_local (decl, ctx);
1390 fixup_remapped_decl (decl, ctx, false);
1391 }
1392 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
f7468577 1393 scan_array_reductions = true;
953ff289
DN
1394 break;
1395
1396 case OMP_CLAUSE_SHARED:
acf0174b
JJ
1397 /* Ignore shared directives in teams construct. */
1398 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1399 break;
953ff289 1400 decl = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
1401 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1402 break;
1403 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1404 {
1405 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1406 ctx->outer)))
1407 break;
1408 bool by_ref = use_pointer_for_field (decl, ctx);
1409 install_var_field (decl, by_ref, 11, ctx);
1410 break;
1411 }
1412 fixup_remapped_decl (decl, ctx, false);
953ff289
DN
1413 break;
1414
acf0174b 1415 case OMP_CLAUSE_MAP:
41dbbb37 1416 if (!is_gimple_omp_offloaded (ctx->stmt))
acf0174b
JJ
1417 break;
1418 decl = OMP_CLAUSE_DECL (c);
1419 if (DECL_P (decl)
e01d41e5
JJ
1420 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1421 && (OMP_CLAUSE_MAP_KIND (c)
1422 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32 1423 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
acf0174b 1424 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1f6be682 1425 && varpool_node::get_create (decl)->offloadable)
acf0174b
JJ
1426 break;
1427 if (DECL_P (decl))
1428 {
d9a6bd32
JJ
1429 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1430 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
acf0174b
JJ
1431 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1432 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1433 {
1434 tree new_decl = lookup_decl (decl, ctx);
1435 TREE_TYPE (new_decl)
1436 = remap_type (TREE_TYPE (decl), &ctx->cb);
1437 }
1438 else if (DECL_SIZE (decl)
1439 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1440 {
1441 tree decl2 = DECL_VALUE_EXPR (decl);
1442 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1443 decl2 = TREE_OPERAND (decl2, 0);
1444 gcc_assert (DECL_P (decl2));
1445 fixup_remapped_decl (decl2, ctx, false);
1446 fixup_remapped_decl (decl, ctx, true);
1447 }
1448 else
1449 fixup_remapped_decl (decl, ctx, false);
1450 }
1451 break;
1452
953ff289
DN
1453 case OMP_CLAUSE_COPYPRIVATE:
1454 case OMP_CLAUSE_COPYIN:
1455 case OMP_CLAUSE_DEFAULT:
1456 case OMP_CLAUSE_IF:
1457 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
1458 case OMP_CLAUSE_NUM_TEAMS:
1459 case OMP_CLAUSE_THREAD_LIMIT:
1460 case OMP_CLAUSE_DEVICE:
953ff289 1461 case OMP_CLAUSE_SCHEDULE:
acf0174b 1462 case OMP_CLAUSE_DIST_SCHEDULE:
953ff289
DN
1463 case OMP_CLAUSE_NOWAIT:
1464 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
1465 case OMP_CLAUSE_COLLAPSE:
1466 case OMP_CLAUSE_UNTIED:
20906c66
JJ
1467 case OMP_CLAUSE_FINAL:
1468 case OMP_CLAUSE_MERGEABLE:
acf0174b 1469 case OMP_CLAUSE_PROC_BIND:
74bf76ed 1470 case OMP_CLAUSE_SAFELEN:
d9a6bd32 1471 case OMP_CLAUSE_SIMDLEN:
acf0174b
JJ
1472 case OMP_CLAUSE_ALIGNED:
1473 case OMP_CLAUSE_DEPEND:
1474 case OMP_CLAUSE__LOOPTEMP_:
1475 case OMP_CLAUSE_TO:
1476 case OMP_CLAUSE_FROM:
d9a6bd32
JJ
1477 case OMP_CLAUSE_PRIORITY:
1478 case OMP_CLAUSE_GRAINSIZE:
1479 case OMP_CLAUSE_NUM_TASKS:
1480 case OMP_CLAUSE_THREADS:
1481 case OMP_CLAUSE_SIMD:
1482 case OMP_CLAUSE_NOGROUP:
1483 case OMP_CLAUSE_DEFAULTMAP:
1484 case OMP_CLAUSE_USE_DEVICE_PTR:
9a771876 1485 case OMP_CLAUSE__CILK_FOR_COUNT_:
41dbbb37
TS
1486 case OMP_CLAUSE_ASYNC:
1487 case OMP_CLAUSE_WAIT:
1488 case OMP_CLAUSE_NUM_GANGS:
1489 case OMP_CLAUSE_NUM_WORKERS:
1490 case OMP_CLAUSE_VECTOR_LENGTH:
1491 case OMP_CLAUSE_GANG:
1492 case OMP_CLAUSE_WORKER:
1493 case OMP_CLAUSE_VECTOR:
c5a64cfe
NS
1494 case OMP_CLAUSE_INDEPENDENT:
1495 case OMP_CLAUSE_AUTO:
1496 case OMP_CLAUSE_SEQ:
02889d23 1497 case OMP_CLAUSE_TILE:
b2b40051 1498 case OMP_CLAUSE__GRIDDIM_:
6c7509bc 1499 case OMP_CLAUSE__SIMT_:
41dbbb37
TS
1500 break;
1501
41dbbb37 1502 case OMP_CLAUSE__CACHE_:
953ff289
DN
1503 default:
1504 gcc_unreachable ();
1505 }
1506 }
1507
41dbbb37
TS
1508 gcc_checking_assert (!scan_array_reductions
1509 || !is_gimple_omp_oacc (ctx->stmt));
953ff289 1510 if (scan_array_reductions)
6b37bdaf
PP
1511 {
1512 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1513 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1514 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1515 {
1516 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1517 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1518 }
1519 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1520 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1521 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1522 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1523 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1524 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1525 }
953ff289
DN
1526}
1527
9a771876
JJ
1528/* Create a new name for omp child function. Returns an identifier. If
1529 IS_CILK_FOR is true then the suffix for the child function is
1530 "_cilk_for_fn." */
953ff289 1531
953ff289 1532static tree
9a771876 1533create_omp_child_function_name (bool task_copy, bool is_cilk_for)
953ff289 1534{
9a771876
JJ
1535 if (is_cilk_for)
1536 return clone_function_name (current_function_decl, "_cilk_for_fn");
1537 return clone_function_name (current_function_decl,
1538 task_copy ? "_omp_cpyfn" : "_omp_fn");
1539}
1540
1541/* Returns the type of the induction variable for the child function for
1542 _Cilk_for and the types for _high and _low variables based on TYPE. */
1543
1544static tree
1545cilk_for_check_loop_diff_type (tree type)
1546{
1547 if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
1548 {
1549 if (TYPE_UNSIGNED (type))
1550 return uint32_type_node;
1551 else
1552 return integer_type_node;
1553 }
1554 else
1555 {
1556 if (TYPE_UNSIGNED (type))
1557 return uint64_type_node;
1558 else
1559 return long_long_integer_type_node;
1560 }
953ff289
DN
1561}
1562
9669b00b
AM
1563/* Return true if CTX may belong to offloaded code: either if current function
1564 is offloaded, or any enclosing context corresponds to a target region. */
1565
1566static bool
1567omp_maybe_offloaded_ctx (omp_context *ctx)
1568{
1569 if (cgraph_node::get (current_function_decl)->offloadable)
1570 return true;
1571 for (; ctx; ctx = ctx->outer)
1572 if (is_gimple_omp_offloaded (ctx->stmt))
1573 return true;
1574 return false;
1575}
1576
953ff289
DN
1577/* Build a decl for the omp child function. It'll not contain a body
1578 yet, just the bare decl. */
1579
1580static void
a68ab351 1581create_omp_child_function (omp_context *ctx, bool task_copy)
953ff289
DN
1582{
1583 tree decl, type, name, t;
1584
9a771876
JJ
1585 tree cilk_for_count
1586 = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
629b3d75 1587 ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9a771876
JJ
1588 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
1589 tree cilk_var_type = NULL_TREE;
1590
1591 name = create_omp_child_function_name (task_copy,
1592 cilk_for_count != NULL_TREE);
a68ab351
JJ
1593 if (task_copy)
1594 type = build_function_type_list (void_type_node, ptr_type_node,
1595 ptr_type_node, NULL_TREE);
9a771876
JJ
1596 else if (cilk_for_count)
1597 {
1598 type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
1599 cilk_var_type = cilk_for_check_loop_diff_type (type);
1600 type = build_function_type_list (void_type_node, ptr_type_node,
1601 cilk_var_type, cilk_var_type, NULL_TREE);
1602 }
a68ab351
JJ
1603 else
1604 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
953ff289 1605
9a771876 1606 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
953ff289 1607
41dbbb37
TS
1608 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1609 || !task_copy);
a68ab351
JJ
1610 if (!task_copy)
1611 ctx->cb.dst_fn = decl;
1612 else
726a989a 1613 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
953ff289
DN
1614
1615 TREE_STATIC (decl) = 1;
1616 TREE_USED (decl) = 1;
1617 DECL_ARTIFICIAL (decl) = 1;
1618 DECL_IGNORED_P (decl) = 0;
1619 TREE_PUBLIC (decl) = 0;
1620 DECL_UNINLINABLE (decl) = 1;
1621 DECL_EXTERNAL (decl) = 0;
1622 DECL_CONTEXT (decl) = NULL_TREE;
50674e96 1623 DECL_INITIAL (decl) = make_node (BLOCK);
01771d43 1624 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
9669b00b 1625 if (omp_maybe_offloaded_ctx (ctx))
acf0174b 1626 {
9669b00b
AM
1627 cgraph_node::get_create (decl)->offloadable = 1;
1628 if (ENABLE_OFFLOADING)
1629 g->have_offload = true;
acf0174b 1630 }
953ff289 1631
d7823208
BS
1632 if (cgraph_node::get_create (decl)->offloadable
1633 && !lookup_attribute ("omp declare target",
1634 DECL_ATTRIBUTES (current_function_decl)))
9669b00b
AM
1635 {
1636 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1637 ? "omp target entrypoint"
1638 : "omp declare target");
1639 DECL_ATTRIBUTES (decl)
1640 = tree_cons (get_identifier (target_attr),
1641 NULL_TREE, DECL_ATTRIBUTES (decl));
1642 }
d7823208 1643
c2255bc4
AH
1644 t = build_decl (DECL_SOURCE_LOCATION (decl),
1645 RESULT_DECL, NULL_TREE, void_type_node);
953ff289
DN
1646 DECL_ARTIFICIAL (t) = 1;
1647 DECL_IGNORED_P (t) = 1;
07485407 1648 DECL_CONTEXT (t) = decl;
953ff289
DN
1649 DECL_RESULT (decl) = t;
1650
9a771876
JJ
1651 /* _Cilk_for's child function requires two extra parameters called
1652 __low and __high that are set the by Cilk runtime when it calls this
1653 function. */
1654 if (cilk_for_count)
1655 {
1656 t = build_decl (DECL_SOURCE_LOCATION (decl),
1657 PARM_DECL, get_identifier ("__high"), cilk_var_type);
1658 DECL_ARTIFICIAL (t) = 1;
1659 DECL_NAMELESS (t) = 1;
1660 DECL_ARG_TYPE (t) = ptr_type_node;
1661 DECL_CONTEXT (t) = current_function_decl;
1662 TREE_USED (t) = 1;
1663 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1664 DECL_ARGUMENTS (decl) = t;
1665
1666 t = build_decl (DECL_SOURCE_LOCATION (decl),
1667 PARM_DECL, get_identifier ("__low"), cilk_var_type);
1668 DECL_ARTIFICIAL (t) = 1;
1669 DECL_NAMELESS (t) = 1;
1670 DECL_ARG_TYPE (t) = ptr_type_node;
1671 DECL_CONTEXT (t) = current_function_decl;
1672 TREE_USED (t) = 1;
1673 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1674 DECL_ARGUMENTS (decl) = t;
1675 }
1676
1677 tree data_name = get_identifier (".omp_data_i");
1678 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1679 ptr_type_node);
953ff289 1680 DECL_ARTIFICIAL (t) = 1;
cd3f04c8 1681 DECL_NAMELESS (t) = 1;
953ff289 1682 DECL_ARG_TYPE (t) = ptr_type_node;
50674e96 1683 DECL_CONTEXT (t) = current_function_decl;
953ff289 1684 TREE_USED (t) = 1;
d9a6bd32 1685 TREE_READONLY (t) = 1;
9a771876
JJ
1686 if (cilk_for_count)
1687 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
953ff289 1688 DECL_ARGUMENTS (decl) = t;
a68ab351
JJ
1689 if (!task_copy)
1690 ctx->receiver_decl = t;
1691 else
1692 {
c2255bc4
AH
1693 t = build_decl (DECL_SOURCE_LOCATION (decl),
1694 PARM_DECL, get_identifier (".omp_data_o"),
a68ab351
JJ
1695 ptr_type_node);
1696 DECL_ARTIFICIAL (t) = 1;
cd3f04c8 1697 DECL_NAMELESS (t) = 1;
a68ab351
JJ
1698 DECL_ARG_TYPE (t) = ptr_type_node;
1699 DECL_CONTEXT (t) = current_function_decl;
1700 TREE_USED (t) = 1;
628c189e 1701 TREE_ADDRESSABLE (t) = 1;
910ad8de 1702 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
a68ab351
JJ
1703 DECL_ARGUMENTS (decl) = t;
1704 }
953ff289 1705
b8698a0f 1706 /* Allocate memory for the function structure. The call to
50674e96 1707 allocate_struct_function clobbers CFUN, so we need to restore
953ff289 1708 it afterward. */
db2960f4 1709 push_struct_function (decl);
726a989a 1710 cfun->function_end_locus = gimple_location (ctx->stmt);
381cdae4 1711 init_tree_ssa (cfun);
db2960f4 1712 pop_cfun ();
953ff289
DN
1713}
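
/* Illustrative sketch, not part of the upstream comments: for a parallel
   region in a function foo, the decl built above corresponds roughly to

       static void foo._omp_fn.0 (void *.omp_data_i);

   an artificial, uninlinable sibling of foo whose single argument later
   becomes a pointer to the .omp_data_s record (task copy functions get an
   extra .omp_data_o argument, and _Cilk_for child functions additionally
   receive the __low/__high bounds added above).  The exact mangled name is
   an assumption used for illustration only.  */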
1714
acf0174b
JJ
1715/* Callback for walk_gimple_seq.  Check whether a combined parallel
1716 contains an OMP_FOR for which gimple_omp_for_combined_into_p is true.  */
1717
629b3d75
MJ
1718tree
1719omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1720 bool *handled_ops_p,
1721 struct walk_stmt_info *wi)
acf0174b 1722{
355fe088 1723 gimple *stmt = gsi_stmt (*gsi_p);
acf0174b
JJ
1724
1725 *handled_ops_p = true;
1726 switch (gimple_code (stmt))
1727 {
1728 WALK_SUBSTMTS;
1729
1730 case GIMPLE_OMP_FOR:
1731 if (gimple_omp_for_combined_into_p (stmt)
d9a6bd32
JJ
1732 && gimple_omp_for_kind (stmt)
1733 == *(const enum gf_mask *) (wi->info))
acf0174b
JJ
1734 {
1735 wi->info = stmt;
1736 return integer_zero_node;
1737 }
1738 break;
1739 default:
1740 break;
1741 }
1742 return NULL;
1743}
1744
d9a6bd32
JJ
1745/* Add _LOOPTEMP_ clauses on OpenMP parallel or task. */
1746
1747static void
1748add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1749 omp_context *outer_ctx)
1750{
1751 struct walk_stmt_info wi;
1752
1753 memset (&wi, 0, sizeof (wi));
1754 wi.val_only = true;
1755 wi.info = (void *) &msk;
629b3d75 1756 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
d9a6bd32
JJ
1757 if (wi.info != (void *) &msk)
1758 {
1759 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1760 struct omp_for_data fd;
629b3d75 1761 omp_extract_for_data (for_stmt, &fd, NULL);
d9a6bd32
JJ
1762 /* We need two temporaries with fd.loop.v type (istart/iend)
1763 and then (fd.collapse - 1) temporaries with the same
1764 type for count2 ... countN-1 vars if not constant. */
1765 size_t count = 2, i;
1766 tree type = fd.iter_type;
1767 if (fd.collapse > 1
1768 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1769 {
1770 count += fd.collapse - 1;
e01d41e5 1771 /* If there are lastprivate clauses on the inner
d9a6bd32
JJ
1772 GIMPLE_OMP_FOR, add one more temporary for the total number
1773 of iterations (product of count1 ... countN-1). */
629b3d75 1774 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
e01d41e5
JJ
1775 OMP_CLAUSE_LASTPRIVATE))
1776 count++;
1777 else if (msk == GF_OMP_FOR_KIND_FOR
629b3d75 1778 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
e01d41e5 1779 OMP_CLAUSE_LASTPRIVATE))
d9a6bd32
JJ
1780 count++;
1781 }
1782 for (i = 0; i < count; i++)
1783 {
1784 tree temp = create_tmp_var (type);
1785 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1786 insert_decl_map (&outer_ctx->cb, temp, temp);
1787 OMP_CLAUSE_DECL (c) = temp;
1788 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1789 gimple_omp_taskreg_set_clauses (stmt, c);
1790 }
1791 }
1792}
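
/* Illustrative sketch (an assumption, not upstream documentation): for a
   combined construct such as

       #pragma omp parallel for collapse(2)
       for (i = 0; i < n; i++)
         for (j = 0; j < m; j++)
           ;

   with a non-constant inner bound, add_taskreg_looptemp_clauses creates
   2 + (collapse - 1) = 3 _looptemp_ temporaries on the enclosing parallel:
   istart, iend and the count2 iteration count, plus one more when a
   lastprivate clause requires the total iteration count.  */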
1793
953ff289
DN
1794/* Scan an OpenMP parallel directive. */
1795
1796static void
726a989a 1797scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
953ff289
DN
1798{
1799 omp_context *ctx;
1800 tree name;
538dd0b7 1801 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
953ff289
DN
1802
1803 /* Ignore parallel directives with empty bodies, unless there
1804 are copyin clauses. */
1805 if (optimize > 0
726a989a 1806 && empty_body_p (gimple_omp_body (stmt))
629b3d75 1807 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
726a989a 1808 OMP_CLAUSE_COPYIN) == NULL)
953ff289 1809 {
726a989a 1810 gsi_replace (gsi, gimple_build_nop (), false);
953ff289
DN
1811 return;
1812 }
1813
acf0174b 1814 if (gimple_omp_parallel_combined_p (stmt))
d9a6bd32 1815 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
acf0174b 1816
726a989a 1817 ctx = new_omp_context (stmt, outer_ctx);
5771c391 1818 taskreg_contexts.safe_push (ctx);
a68ab351 1819 if (taskreg_nesting_level > 1)
50674e96 1820 ctx->is_nested = true;
953ff289 1821 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
953ff289 1822 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
953ff289 1823 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
1824 name = build_decl (gimple_location (stmt),
1825 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
1826 DECL_ARTIFICIAL (name) = 1;
1827 DECL_NAMELESS (name) = 1;
953ff289 1828 TYPE_NAME (ctx->record_type) = name;
f7484978 1829 TYPE_ARTIFICIAL (ctx->record_type) = 1;
b2b40051
MJ
1830 if (!gimple_omp_parallel_grid_phony (stmt))
1831 {
1832 create_omp_child_function (ctx, false);
1833 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1834 }
953ff289 1835
726a989a 1836 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
26127932 1837 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
1838
1839 if (TYPE_FIELDS (ctx->record_type) == NULL)
1840 ctx->record_type = ctx->receiver_decl = NULL;
953ff289
DN
1841}
1842
a68ab351
JJ
1843/* Scan an OpenMP task directive. */
1844
1845static void
726a989a 1846scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
a68ab351
JJ
1847{
1848 omp_context *ctx;
726a989a 1849 tree name, t;
538dd0b7 1850 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
a68ab351 1851
fbc698e0
JJ
1852 /* Ignore task directives with empty bodies, unless they have a depend
1853 clause. */
a68ab351 1854 if (optimize > 0
fbc698e0
JJ
1855 && empty_body_p (gimple_omp_body (stmt))
1856 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
a68ab351 1857 {
726a989a 1858 gsi_replace (gsi, gimple_build_nop (), false);
a68ab351
JJ
1859 return;
1860 }
1861
d9a6bd32
JJ
1862 if (gimple_omp_task_taskloop_p (stmt))
1863 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1864
726a989a 1865 ctx = new_omp_context (stmt, outer_ctx);
5771c391 1866 taskreg_contexts.safe_push (ctx);
a68ab351
JJ
1867 if (taskreg_nesting_level > 1)
1868 ctx->is_nested = true;
1869 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
a68ab351
JJ
1870 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1871 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
1872 name = build_decl (gimple_location (stmt),
1873 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
1874 DECL_ARTIFICIAL (name) = 1;
1875 DECL_NAMELESS (name) = 1;
a68ab351 1876 TYPE_NAME (ctx->record_type) = name;
f7484978 1877 TYPE_ARTIFICIAL (ctx->record_type) = 1;
a68ab351 1878 create_omp_child_function (ctx, false);
726a989a 1879 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
a68ab351 1880
726a989a 1881 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
a68ab351
JJ
1882
1883 if (ctx->srecord_type)
1884 {
1885 name = create_tmp_var_name (".omp_data_a");
c2255bc4
AH
1886 name = build_decl (gimple_location (stmt),
1887 TYPE_DECL, name, ctx->srecord_type);
cd3f04c8
JJ
1888 DECL_ARTIFICIAL (name) = 1;
1889 DECL_NAMELESS (name) = 1;
a68ab351 1890 TYPE_NAME (ctx->srecord_type) = name;
f7484978 1891 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
a68ab351
JJ
1892 create_omp_child_function (ctx, true);
1893 }
1894
26127932 1895 scan_omp (gimple_omp_body_ptr (stmt), ctx);
a68ab351
JJ
1896
1897 if (TYPE_FIELDS (ctx->record_type) == NULL)
1898 {
1899 ctx->record_type = ctx->receiver_decl = NULL;
726a989a
RB
1900 t = build_int_cst (long_integer_type_node, 0);
1901 gimple_omp_task_set_arg_size (stmt, t);
1902 t = build_int_cst (long_integer_type_node, 1);
1903 gimple_omp_task_set_arg_align (stmt, t);
a68ab351 1904 }
5771c391
JJ
1905}
1906
655e5265
JJ
1907/* Helper function for finish_taskreg_scan, called through walk_tree.
1908 If maybe_lookup_decl_in_outer_ctx returns a different decl for some
1909 VAR_DECL, replace it in the expression. */
1910
1911static tree
1912finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1913{
1914 if (VAR_P (*tp))
1915 {
1916 omp_context *ctx = (omp_context *) data;
1917 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1918 if (t != *tp)
1919 {
1920 if (DECL_HAS_VALUE_EXPR_P (t))
1921 t = unshare_expr (DECL_VALUE_EXPR (t));
1922 *tp = t;
1923 }
1924 *walk_subtrees = 0;
1925 }
1926 else if (IS_TYPE_OR_DECL_P (*tp))
1927 *walk_subtrees = 0;
1928 return NULL_TREE;
1929}
5771c391
JJ
1930
1931/* If any decls have been made addressable during scan_omp,
1932 adjust their fields if needed, and lay out the record types
1933 of parallel/task constructs. */
1934
1935static void
1936finish_taskreg_scan (omp_context *ctx)
1937{
1938 if (ctx->record_type == NULL_TREE)
1939 return;
1940
1941 /* If any task_shared_vars were needed, check for all
1942 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1943 statements whether use_pointer_for_field has changed
1944 because of that, and if it did, update the field types now. */
1945 if (task_shared_vars)
1946 {
1947 tree c;
1948
1949 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1950 c; c = OMP_CLAUSE_CHAIN (c))
d9a6bd32
JJ
1951 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1952 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5771c391
JJ
1953 {
1954 tree decl = OMP_CLAUSE_DECL (c);
1955
1956 /* Global variables don't need to be copied,
1957 the receiver side will use them directly. */
1958 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1959 continue;
1960 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1961 || !use_pointer_for_field (decl, ctx))
1962 continue;
1963 tree field = lookup_field (decl, ctx);
1964 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1965 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1966 continue;
1967 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1968 TREE_THIS_VOLATILE (field) = 0;
1969 DECL_USER_ALIGN (field) = 0;
fe37c7af 1970 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
5771c391 1971 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
fe37c7af 1972 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
5771c391
JJ
1973 if (ctx->srecord_type)
1974 {
1975 tree sfield = lookup_sfield (decl, ctx);
1976 TREE_TYPE (sfield) = TREE_TYPE (field);
1977 TREE_THIS_VOLATILE (sfield) = 0;
1978 DECL_USER_ALIGN (sfield) = 0;
fe37c7af 1979 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
5771c391 1980 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
fe37c7af 1981 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
5771c391
JJ
1982 }
1983 }
1984 }
1985
1986 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1987 {
1988 layout_type (ctx->record_type);
1989 fixup_child_record_type (ctx);
1990 }
a68ab351
JJ
1991 else
1992 {
5771c391 1993 location_t loc = gimple_location (ctx->stmt);
a68ab351
JJ
1994 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1995 /* Move VLA fields to the end. */
1996 p = &TYPE_FIELDS (ctx->record_type);
1997 while (*p)
1998 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1999 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2000 {
2001 *q = *p;
2002 *p = TREE_CHAIN (*p);
2003 TREE_CHAIN (*q) = NULL_TREE;
2004 q = &TREE_CHAIN (*q);
2005 }
2006 else
910ad8de 2007 p = &DECL_CHAIN (*p);
a68ab351 2008 *p = vla_fields;
d9a6bd32
JJ
2009 if (gimple_omp_task_taskloop_p (ctx->stmt))
2010 {
2011 /* Move the fields corresponding to the first and second _looptemp_
2012 clauses first. These are filled by GOMP_taskloop
2013 and thus need to be in specific positions. */
2014 tree c1 = gimple_omp_task_clauses (ctx->stmt);
629b3d75
MJ
2015 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
2016 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
d9a6bd32
JJ
2017 OMP_CLAUSE__LOOPTEMP_);
2018 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2019 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2020 p = &TYPE_FIELDS (ctx->record_type);
2021 while (*p)
2022 if (*p == f1 || *p == f2)
2023 *p = DECL_CHAIN (*p);
2024 else
2025 p = &DECL_CHAIN (*p);
2026 DECL_CHAIN (f1) = f2;
2027 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2028 TYPE_FIELDS (ctx->record_type) = f1;
2029 if (ctx->srecord_type)
2030 {
2031 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2032 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2033 p = &TYPE_FIELDS (ctx->srecord_type);
2034 while (*p)
2035 if (*p == f1 || *p == f2)
2036 *p = DECL_CHAIN (*p);
2037 else
2038 p = &DECL_CHAIN (*p);
2039 DECL_CHAIN (f1) = f2;
2040 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2041 TYPE_FIELDS (ctx->srecord_type) = f1;
2042 }
2043 }
a68ab351
JJ
2044 layout_type (ctx->record_type);
2045 fixup_child_record_type (ctx);
2046 if (ctx->srecord_type)
2047 layout_type (ctx->srecord_type);
5771c391
JJ
2048 tree t = fold_convert_loc (loc, long_integer_type_node,
2049 TYPE_SIZE_UNIT (ctx->record_type));
655e5265
JJ
2050 if (TREE_CODE (t) != INTEGER_CST)
2051 {
2052 t = unshare_expr (t);
2053 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2054 }
5771c391 2055 gimple_omp_task_set_arg_size (ctx->stmt, t);
726a989a 2056 t = build_int_cst (long_integer_type_node,
a68ab351 2057 TYPE_ALIGN_UNIT (ctx->record_type));
5771c391 2058 gimple_omp_task_set_arg_align (ctx->stmt, t);
a68ab351
JJ
2059 }
2060}
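
/* Illustrative sketch (an assumption): if a variable listed in a shared
   clause was made addressable while scanning, e.g.

       int x = 0;
       #pragma omp parallel shared (x)
       bar (&x);

   use_pointer_for_field may now answer differently than when the record
   was first built, so the loop above retypes the field for x in
   .omp_data_s from 'int' to 'int *' and re-aligns the record (and its
   sender-side twin) before the types are finally laid out.  */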
2061
e4834818 2062/* Find the enclosing offload context. */
953ff289 2063
41dbbb37
TS
2064static omp_context *
2065enclosing_target_ctx (omp_context *ctx)
2066{
e4834818
NS
2067 for (; ctx; ctx = ctx->outer)
2068 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2069 break;
2070
41dbbb37
TS
2071 return ctx;
2072}
2073
e4834818
NS
2074/* Return true if ctx is part of an oacc kernels region. */
2075
41dbbb37 2076static bool
e4834818 2077ctx_in_oacc_kernels_region (omp_context *ctx)
41dbbb37 2078{
e4834818
NS
2079 for (; ctx != NULL; ctx = ctx->outer)
2080 {
2081 gimple *stmt = ctx->stmt;
2082 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2083 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2084 return true;
2085 }
2086
2087 return false;
2088}
2089
2090/* Check the parallelism clauses inside a kernels region.
2091 Until kernels handling moves to use the same loop indirection
2092 scheme as parallel, we need to do this checking early. */
2093
2094static unsigned
2095check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2096{
2097 bool checking = true;
2098 unsigned outer_mask = 0;
2099 unsigned this_mask = 0;
2100 bool has_seq = false, has_auto = false;
2101
2102 if (ctx->outer)
2103 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2104 if (!stmt)
2105 {
2106 checking = false;
2107 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2108 return outer_mask;
2109 stmt = as_a <gomp_for *> (ctx->stmt);
2110 }
2111
2112 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2113 {
2114 switch (OMP_CLAUSE_CODE (c))
2115 {
2116 case OMP_CLAUSE_GANG:
2117 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2118 break;
2119 case OMP_CLAUSE_WORKER:
2120 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2121 break;
2122 case OMP_CLAUSE_VECTOR:
2123 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2124 break;
2125 case OMP_CLAUSE_SEQ:
2126 has_seq = true;
2127 break;
2128 case OMP_CLAUSE_AUTO:
2129 has_auto = true;
2130 break;
2131 default:
2132 break;
2133 }
2134 }
2135
2136 if (checking)
2137 {
2138 if (has_seq && (this_mask || has_auto))
2139 error_at (gimple_location (stmt), "%<seq%> overrides other"
2140 " OpenACC loop specifiers");
2141 else if (has_auto && this_mask)
2142 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2143 " OpenACC loop specifiers");
2144
2145 if (this_mask & outer_mask)
2146 error_at (gimple_location (stmt), "inner loop uses same"
2147 " OpenACC parallelism as containing loop");
2148 }
2149
2150 return outer_mask | this_mask;
41dbbb37
TS
2151}
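
/* Illustrative sketch (an assumption): inside a kernels region the check
   above rejects, among other things, reusing a level of parallelism:

       #pragma acc kernels
       #pragma acc loop gang
       for (i = 0; i < n; i++)
         {
         #pragma acc loop gang
           for (j = 0; j < m; j++)   // error: inner loop uses same
             ;                       // OpenACC parallelism as containing loop
         }
   */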
2152
2153/* Scan a GIMPLE_OMP_FOR. */
953ff289 2154
6e6cf7b0 2155static omp_context *
538dd0b7 2156scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
953ff289 2157{
50674e96 2158 omp_context *ctx;
726a989a 2159 size_t i;
41dbbb37
TS
2160 tree clauses = gimple_omp_for_clauses (stmt);
2161
50674e96 2162 ctx = new_omp_context (stmt, outer_ctx);
953ff289 2163
41dbbb37
TS
2164 if (is_gimple_omp_oacc (stmt))
2165 {
e4834818
NS
2166 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2167
2168 if (!tgt || is_oacc_parallel (tgt))
2169 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2170 {
2171 char const *check = NULL;
2172
2173 switch (OMP_CLAUSE_CODE (c))
2174 {
2175 case OMP_CLAUSE_GANG:
2176 check = "gang";
2177 break;
2178
2179 case OMP_CLAUSE_WORKER:
2180 check = "worker";
2181 break;
2182
2183 case OMP_CLAUSE_VECTOR:
2184 check = "vector";
2185 break;
2186
2187 default:
2188 break;
2189 }
2190
2191 if (check && OMP_CLAUSE_OPERAND (c, 0))
2192 error_at (gimple_location (stmt),
2193 "argument not permitted on %qs clause in"
2194 " OpenACC %<parallel%>", check);
2195 }
2196
2197 if (tgt && is_oacc_kernels (tgt))
2198 {
2199 /* Strip out reductions, as they are not handled yet. */
2200 tree *prev_ptr = &clauses;
2201
2202 while (tree probe = *prev_ptr)
41dbbb37 2203 {
e4834818
NS
2204 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2205
2206 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2207 *prev_ptr = *next_ptr;
2208 else
2209 prev_ptr = next_ptr;
41dbbb37 2210 }
e4834818
NS
2211
2212 gimple_omp_for_set_clauses (stmt, clauses);
2213 check_oacc_kernel_gwv (stmt, ctx);
41dbbb37
TS
2214 }
2215 }
2216
2217 scan_sharing_clauses (clauses, ctx);
953ff289 2218
26127932 2219 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
726a989a 2220 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
a68ab351 2221 {
726a989a
RB
2222 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2223 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2224 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2225 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
a68ab351 2226 }
26127932 2227 scan_omp (gimple_omp_body_ptr (stmt), ctx);
6e6cf7b0 2228 return ctx;
953ff289
DN
2229}
2230
6c7509bc
JJ
2231/* Duplicate #pragma omp simd: one copy for SIMT, another one for SIMD. */
2232
2233static void
2234scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2235 omp_context *outer_ctx)
2236{
2237 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2238 gsi_replace (gsi, bind, false);
2239 gimple_seq seq = NULL;
2240 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2241 tree cond = create_tmp_var_raw (integer_type_node);
2242 DECL_CONTEXT (cond) = current_function_decl;
2243 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2244 gimple_bind_set_vars (bind, cond);
2245 gimple_call_set_lhs (g, cond);
2246 gimple_seq_add_stmt (&seq, g);
2247 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2248 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2249 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2250 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2251 gimple_seq_add_stmt (&seq, g);
2252 g = gimple_build_label (lab1);
2253 gimple_seq_add_stmt (&seq, g);
2254 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2255 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2256 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2257 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2258 gimple_omp_for_set_clauses (new_stmt, clause);
2259 gimple_seq_add_stmt (&seq, new_stmt);
2260 g = gimple_build_goto (lab3);
2261 gimple_seq_add_stmt (&seq, g);
2262 g = gimple_build_label (lab2);
2263 gimple_seq_add_stmt (&seq, g);
2264 gimple_seq_add_stmt (&seq, stmt);
2265 g = gimple_build_label (lab3);
2266 gimple_seq_add_stmt (&seq, g);
2267 gimple_bind_set_body (bind, seq);
2268 update_stmt (bind);
2269 scan_omp_for (new_stmt, outer_ctx);
6e6cf7b0 2270 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
6c7509bc
JJ
2271}
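
/* Illustrative sketch of the emitted shape (derived from the code above,
   the names are artificial):

       cond = GOMP_USE_SIMT ();
       if (cond != 0) goto lab1; else goto lab2;
       lab1:
         <copy of the simd loop with an added _simt_ clause>
         goto lab3;
       lab2:
         <original simd loop>
       lab3:

   so later passes keep whichever version matches the offload target.  */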
2272
953ff289
DN
2273/* Scan an OpenMP sections directive. */
2274
2275static void
538dd0b7 2276scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
953ff289 2277{
953ff289
DN
2278 omp_context *ctx;
2279
2280 ctx = new_omp_context (stmt, outer_ctx);
726a989a 2281 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
26127932 2282 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2283}
2284
2285/* Scan an OpenMP single directive. */
2286
2287static void
538dd0b7 2288scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
953ff289 2289{
953ff289
DN
2290 omp_context *ctx;
2291 tree name;
2292
2293 ctx = new_omp_context (stmt, outer_ctx);
2294 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2295 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2296 name = create_tmp_var_name (".omp_copy_s");
c2255bc4
AH
2297 name = build_decl (gimple_location (stmt),
2298 TYPE_DECL, name, ctx->record_type);
953ff289
DN
2299 TYPE_NAME (ctx->record_type) = name;
2300
726a989a 2301 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
26127932 2302 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2303
2304 if (TYPE_FIELDS (ctx->record_type) == NULL)
2305 ctx->record_type = NULL;
2306 else
2307 layout_type (ctx->record_type);
2308}
2309
86938de6
TV
2310/* Return true if the CLAUSES of an omp target guarantee that the base pointers
2311 used in the corresponding offloaded function are restrict. */
2312
2313static bool
2314omp_target_base_pointers_restrict_p (tree clauses)
2315{
2316 /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2317 used by OpenACC. */
2318 if (flag_openacc == 0)
2319 return false;
2320
2321 /* I. Basic example:
2322
2323 void foo (void)
2324 {
2325 unsigned int a[2], b[2];
2326
2327 #pragma acc kernels \
2328 copyout (a) \
2329 copyout (b)
2330 {
2331 a[0] = 0;
2332 b[0] = 1;
2333 }
2334 }
2335
2336 After gimplification, we have:
2337
2338 #pragma omp target oacc_kernels \
2339 map(force_from:a [len: 8]) \
2340 map(force_from:b [len: 8])
2341 {
2342 a[0] = 0;
2343 b[0] = 1;
2344 }
2345
2346 Because both mappings have the force prefix, we know that they will be
2347 allocated when calling the corresponding offloaded function, which means we
2348 can mark the base pointers for a and b in the offloaded function as
2349 restrict. */
2350
2351 tree c;
2352 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2353 {
2354 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2355 return false;
2356
2357 switch (OMP_CLAUSE_MAP_KIND (c))
2358 {
2359 case GOMP_MAP_FORCE_ALLOC:
2360 case GOMP_MAP_FORCE_TO:
2361 case GOMP_MAP_FORCE_FROM:
2362 case GOMP_MAP_FORCE_TOFROM:
2363 break;
2364 default:
2365 return false;
2366 }
2367 }
2368
2369 return true;
2370}
2371
41dbbb37 2372/* Scan a GIMPLE_OMP_TARGET. */
acf0174b
JJ
2373
2374static void
538dd0b7 2375scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
acf0174b
JJ
2376{
2377 omp_context *ctx;
2378 tree name;
41dbbb37
TS
2379 bool offloaded = is_gimple_omp_offloaded (stmt);
2380 tree clauses = gimple_omp_target_clauses (stmt);
acf0174b
JJ
2381
2382 ctx = new_omp_context (stmt, outer_ctx);
2383 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
acf0174b
JJ
2384 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2385 name = create_tmp_var_name (".omp_data_t");
2386 name = build_decl (gimple_location (stmt),
2387 TYPE_DECL, name, ctx->record_type);
2388 DECL_ARTIFICIAL (name) = 1;
2389 DECL_NAMELESS (name) = 1;
2390 TYPE_NAME (ctx->record_type) = name;
f7484978 2391 TYPE_ARTIFICIAL (ctx->record_type) = 1;
86938de6
TV
2392
2393 bool base_pointers_restrict = false;
41dbbb37 2394 if (offloaded)
acf0174b
JJ
2395 {
2396 create_omp_child_function (ctx, false);
2397 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
86938de6
TV
2398
2399 base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2400 if (base_pointers_restrict
2401 && dump_file && (dump_flags & TDF_DETAILS))
2402 fprintf (dump_file,
2403 "Base pointers in offloaded function are restrict\n");
acf0174b
JJ
2404 }
2405
86938de6 2406 scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
acf0174b
JJ
2407 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2408
2409 if (TYPE_FIELDS (ctx->record_type) == NULL)
2410 ctx->record_type = ctx->receiver_decl = NULL;
2411 else
2412 {
2413 TYPE_FIELDS (ctx->record_type)
2414 = nreverse (TYPE_FIELDS (ctx->record_type));
b2b29377
MM
2415 if (flag_checking)
2416 {
2417 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2418 for (tree field = TYPE_FIELDS (ctx->record_type);
2419 field;
2420 field = DECL_CHAIN (field))
2421 gcc_assert (DECL_ALIGN (field) == align);
2422 }
acf0174b 2423 layout_type (ctx->record_type);
41dbbb37 2424 if (offloaded)
acf0174b
JJ
2425 fixup_child_record_type (ctx);
2426 }
2427}
2428
2429/* Scan an OpenMP teams directive. */
2430
2431static void
538dd0b7 2432scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
acf0174b
JJ
2433{
2434 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2435 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2436 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2437}
953ff289 2438
41dbbb37 2439/* Check nesting restrictions. */
26127932 2440static bool
355fe088 2441check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
a6fc8e21 2442{
d9a6bd32
JJ
2443 tree c;
2444
b2b40051
MJ
2445 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2446 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2447 the original copy of its contents. */
2448 return true;
2449
41dbbb37
TS
2450 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2451 inside an OpenACC CTX. */
2452 if (!(is_gimple_omp (stmt)
640b7e74
TV
2453 && is_gimple_omp_oacc (stmt))
2454 /* Except for atomic codes that we share with OpenMP. */
2455 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2456 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2457 {
629b3d75 2458 if (oacc_get_fn_attrib (cfun->decl) != NULL)
640b7e74
TV
2459 {
2460 error_at (gimple_location (stmt),
2461 "non-OpenACC construct inside of OpenACC routine");
2462 return false;
2463 }
2464 else
2465 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2466 if (is_gimple_omp (octx->stmt)
2467 && is_gimple_omp_oacc (octx->stmt))
2468 {
2469 error_at (gimple_location (stmt),
2470 "non-OpenACC construct inside of OpenACC region");
2471 return false;
2472 }
41dbbb37
TS
2473 }
2474
74bf76ed
JJ
2475 if (ctx != NULL)
2476 {
2477 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 2478 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
74bf76ed 2479 {
d9a6bd32
JJ
2480 c = NULL_TREE;
2481 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2482 {
2483 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
629b3d75 2484 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
d9f4ea18 2485 {
629b3d75 2486 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
d9f4ea18
JJ
2487 && (ctx->outer == NULL
2488 || !gimple_omp_for_combined_into_p (ctx->stmt)
2489 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2490 || (gimple_omp_for_kind (ctx->outer->stmt)
2491 != GF_OMP_FOR_KIND_FOR)
2492 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2493 {
2494 error_at (gimple_location (stmt),
2495 "%<ordered simd threads%> must be closely "
2496 "nested inside of %<for simd%> region");
2497 return false;
2498 }
2499 return true;
2500 }
d9a6bd32 2501 }
74bf76ed 2502 error_at (gimple_location (stmt),
d9a6bd32 2503 "OpenMP constructs other than %<#pragma omp ordered simd%>"
d9f4ea18 2504 " may not be nested inside %<simd%> region");
74bf76ed
JJ
2505 return false;
2506 }
acf0174b
JJ
2507 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2508 {
2509 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
56b1c60e
MJ
2510 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2511 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
acf0174b
JJ
2512 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2513 {
2514 error_at (gimple_location (stmt),
d9f4ea18
JJ
2515 "only %<distribute%> or %<parallel%> regions are "
2516 "allowed to be strictly nested inside %<teams%> "
2517 "region");
acf0174b
JJ
2518 return false;
2519 }
2520 }
74bf76ed 2521 }
726a989a 2522 switch (gimple_code (stmt))
a6fc8e21 2523 {
726a989a 2524 case GIMPLE_OMP_FOR:
0aadce73 2525 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
74bf76ed 2526 return true;
acf0174b
JJ
2527 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2528 {
2529 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2530 {
2531 error_at (gimple_location (stmt),
d9f4ea18
JJ
2532 "%<distribute%> region must be strictly nested "
2533 "inside %<teams%> construct");
acf0174b
JJ
2534 return false;
2535 }
2536 return true;
2537 }
d9a6bd32
JJ
2538 /* We split a taskloop into a task with a nested taskloop inside it. */
2539 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2540 return true;
68d58afb
NS
2541 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2542 {
2543 bool ok = false;
01914336 2544
68d58afb
NS
2545 if (ctx)
2546 switch (gimple_code (ctx->stmt))
2547 {
2548 case GIMPLE_OMP_FOR:
2549 ok = (gimple_omp_for_kind (ctx->stmt)
2550 == GF_OMP_FOR_KIND_OACC_LOOP);
2551 break;
2552
2553 case GIMPLE_OMP_TARGET:
2554 switch (gimple_omp_target_kind (ctx->stmt))
2555 {
2556 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2557 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2558 ok = true;
2559 break;
2560
2561 default:
2562 break;
2563 }
2564
2565 default:
2566 break;
2567 }
629b3d75 2568 else if (oacc_get_fn_attrib (current_function_decl))
68d58afb
NS
2569 ok = true;
2570 if (!ok)
2571 {
2572 error_at (gimple_location (stmt),
2573 "OpenACC loop directive must be associated with"
2574 " an OpenACC compute region");
2575 return false;
2576 }
2577 }
acf0174b
JJ
2578 /* FALLTHRU */
2579 case GIMPLE_CALL:
2580 if (is_gimple_call (stmt)
2581 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2582 == BUILT_IN_GOMP_CANCEL
2583 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2584 == BUILT_IN_GOMP_CANCELLATION_POINT))
2585 {
2586 const char *bad = NULL;
2587 const char *kind = NULL;
d9f4ea18
JJ
2588 const char *construct
2589 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2590 == BUILT_IN_GOMP_CANCEL)
2591 ? "#pragma omp cancel"
2592 : "#pragma omp cancellation point";
acf0174b
JJ
2593 if (ctx == NULL)
2594 {
2595 error_at (gimple_location (stmt), "orphaned %qs construct",
d9f4ea18 2596 construct);
acf0174b
JJ
2597 return false;
2598 }
9541ffee 2599 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
9439e9a1 2600 ? tree_to_shwi (gimple_call_arg (stmt, 0))
acf0174b
JJ
2601 : 0)
2602 {
2603 case 1:
2604 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2605 bad = "#pragma omp parallel";
2606 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2607 == BUILT_IN_GOMP_CANCEL
2608 && !integer_zerop (gimple_call_arg (stmt, 1)))
2609 ctx->cancellable = true;
2610 kind = "parallel";
2611 break;
2612 case 2:
2613 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2614 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2615 bad = "#pragma omp for";
2616 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2617 == BUILT_IN_GOMP_CANCEL
2618 && !integer_zerop (gimple_call_arg (stmt, 1)))
2619 {
2620 ctx->cancellable = true;
629b3d75 2621 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
acf0174b
JJ
2622 OMP_CLAUSE_NOWAIT))
2623 warning_at (gimple_location (stmt), 0,
2624 "%<#pragma omp cancel for%> inside "
2625 "%<nowait%> for construct");
629b3d75 2626 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
acf0174b
JJ
2627 OMP_CLAUSE_ORDERED))
2628 warning_at (gimple_location (stmt), 0,
2629 "%<#pragma omp cancel for%> inside "
2630 "%<ordered%> for construct");
2631 }
2632 kind = "for";
2633 break;
2634 case 4:
2635 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2636 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2637 bad = "#pragma omp sections";
2638 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2639 == BUILT_IN_GOMP_CANCEL
2640 && !integer_zerop (gimple_call_arg (stmt, 1)))
2641 {
2642 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2643 {
2644 ctx->cancellable = true;
629b3d75 2645 if (omp_find_clause (gimple_omp_sections_clauses
acf0174b
JJ
2646 (ctx->stmt),
2647 OMP_CLAUSE_NOWAIT))
2648 warning_at (gimple_location (stmt), 0,
2649 "%<#pragma omp cancel sections%> inside "
2650 "%<nowait%> sections construct");
2651 }
2652 else
2653 {
2654 gcc_assert (ctx->outer
2655 && gimple_code (ctx->outer->stmt)
2656 == GIMPLE_OMP_SECTIONS);
2657 ctx->outer->cancellable = true;
629b3d75 2658 if (omp_find_clause (gimple_omp_sections_clauses
acf0174b
JJ
2659 (ctx->outer->stmt),
2660 OMP_CLAUSE_NOWAIT))
2661 warning_at (gimple_location (stmt), 0,
2662 "%<#pragma omp cancel sections%> inside "
2663 "%<nowait%> sections construct");
2664 }
2665 }
2666 kind = "sections";
2667 break;
2668 case 8:
2669 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2670 bad = "#pragma omp task";
2671 else
d9f4ea18
JJ
2672 {
2673 for (omp_context *octx = ctx->outer;
2674 octx; octx = octx->outer)
2675 {
2676 switch (gimple_code (octx->stmt))
2677 {
2678 case GIMPLE_OMP_TASKGROUP:
2679 break;
2680 case GIMPLE_OMP_TARGET:
2681 if (gimple_omp_target_kind (octx->stmt)
2682 != GF_OMP_TARGET_KIND_REGION)
2683 continue;
2684 /* FALLTHRU */
2685 case GIMPLE_OMP_PARALLEL:
2686 case GIMPLE_OMP_TEAMS:
2687 error_at (gimple_location (stmt),
2688 "%<%s taskgroup%> construct not closely "
2689 "nested inside of %<taskgroup%> region",
2690 construct);
2691 return false;
2692 default:
2693 continue;
2694 }
2695 break;
2696 }
2697 ctx->cancellable = true;
2698 }
acf0174b
JJ
2699 kind = "taskgroup";
2700 break;
2701 default:
2702 error_at (gimple_location (stmt), "invalid arguments");
2703 return false;
2704 }
2705 if (bad)
2706 {
2707 error_at (gimple_location (stmt),
2708 "%<%s %s%> construct not closely nested inside of %qs",
d9f4ea18 2709 construct, kind, bad);
acf0174b
JJ
2710 return false;
2711 }
2712 }
74bf76ed 2713 /* FALLTHRU */
726a989a
RB
2714 case GIMPLE_OMP_SECTIONS:
2715 case GIMPLE_OMP_SINGLE:
a6fc8e21 2716 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2717 switch (gimple_code (ctx->stmt))
a6fc8e21 2718 {
726a989a 2719 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2720 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2721 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2722 break;
2723 /* FALLTHRU */
726a989a
RB
2724 case GIMPLE_OMP_SECTIONS:
2725 case GIMPLE_OMP_SINGLE:
2726 case GIMPLE_OMP_ORDERED:
2727 case GIMPLE_OMP_MASTER:
2728 case GIMPLE_OMP_TASK:
acf0174b 2729 case GIMPLE_OMP_CRITICAL:
726a989a 2730 if (is_gimple_call (stmt))
a68ab351 2731 {
acf0174b
JJ
2732 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2733 != BUILT_IN_GOMP_BARRIER)
2734 return true;
26127932
JJ
2735 error_at (gimple_location (stmt),
2736 "barrier region may not be closely nested inside "
d9f4ea18
JJ
2737 "of work-sharing, %<critical%>, %<ordered%>, "
2738 "%<master%>, explicit %<task%> or %<taskloop%> "
2739 "region");
26127932 2740 return false;
a68ab351 2741 }
26127932
JJ
2742 error_at (gimple_location (stmt),
2743 "work-sharing region may not be closely nested inside "
d9f4ea18
JJ
2744 "of work-sharing, %<critical%>, %<ordered%>, "
2745 "%<master%>, explicit %<task%> or %<taskloop%> region");
26127932 2746 return false;
726a989a 2747 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2748 case GIMPLE_OMP_TEAMS:
26127932 2749 return true;
d9f4ea18
JJ
2750 case GIMPLE_OMP_TARGET:
2751 if (gimple_omp_target_kind (ctx->stmt)
2752 == GF_OMP_TARGET_KIND_REGION)
2753 return true;
2754 break;
a6fc8e21
JJ
2755 default:
2756 break;
2757 }
2758 break;
726a989a 2759 case GIMPLE_OMP_MASTER:
a6fc8e21 2760 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2761 switch (gimple_code (ctx->stmt))
a6fc8e21 2762 {
726a989a 2763 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2764 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2765 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2766 break;
2767 /* FALLTHRU */
726a989a
RB
2768 case GIMPLE_OMP_SECTIONS:
2769 case GIMPLE_OMP_SINGLE:
2770 case GIMPLE_OMP_TASK:
26127932 2771 error_at (gimple_location (stmt),
d9f4ea18
JJ
2772 "%<master%> region may not be closely nested inside "
2773 "of work-sharing, explicit %<task%> or %<taskloop%> "
2774 "region");
26127932 2775 return false;
726a989a 2776 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2777 case GIMPLE_OMP_TEAMS:
26127932 2778 return true;
d9f4ea18
JJ
2779 case GIMPLE_OMP_TARGET:
2780 if (gimple_omp_target_kind (ctx->stmt)
2781 == GF_OMP_TARGET_KIND_REGION)
2782 return true;
2783 break;
a6fc8e21
JJ
2784 default:
2785 break;
2786 }
2787 break;
d9a6bd32
JJ
2788 case GIMPLE_OMP_TASK:
2789 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2790 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2791 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2792 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2793 {
2794 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2795 error_at (OMP_CLAUSE_LOCATION (c),
2796 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2797 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2798 return false;
2799 }
2800 break;
726a989a 2801 case GIMPLE_OMP_ORDERED:
d9a6bd32
JJ
2802 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2803 c; c = OMP_CLAUSE_CHAIN (c))
2804 {
2805 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2806 {
2807 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
d9f4ea18 2808 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
d9a6bd32
JJ
2809 continue;
2810 }
2811 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2812 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2813 || kind == OMP_CLAUSE_DEPEND_SINK)
2814 {
2815 tree oclause;
2816 /* Look for containing ordered(N) loop. */
2817 if (ctx == NULL
2818 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2819 || (oclause
629b3d75 2820 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
d9a6bd32
JJ
2821 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2822 {
2823 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2824 "%<ordered%> construct with %<depend%> clause "
2825 "must be closely nested inside an %<ordered%> "
2826 "loop");
d9a6bd32
JJ
2827 return false;
2828 }
2829 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2830 {
2831 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2832 "%<ordered%> construct with %<depend%> clause "
2833 "must be closely nested inside a loop with "
2834 "%<ordered%> clause with a parameter");
d9a6bd32
JJ
2835 return false;
2836 }
2837 }
2838 else
2839 {
2840 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2841 "invalid depend kind in omp %<ordered%> %<depend%>");
2842 return false;
2843 }
2844 }
2845 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
629b3d75 2846 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
d9f4ea18
JJ
2847 {
2848 /* ordered simd must be closely nested inside of simd region,
2849 and simd region must not encounter constructs other than
2850 ordered simd, therefore ordered simd may be either orphaned,
2851 or ctx->stmt must be simd. The latter case is already handled
2852 earlier. */
2853 if (ctx != NULL)
2854 {
2855 error_at (gimple_location (stmt),
2856 "%<ordered%> %<simd%> must be closely nested inside "
2857 "%<simd%> region");
d9a6bd32
JJ
2858 return false;
2859 }
2860 }
a6fc8e21 2861 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2862 switch (gimple_code (ctx->stmt))
a6fc8e21 2863 {
726a989a
RB
2864 case GIMPLE_OMP_CRITICAL:
2865 case GIMPLE_OMP_TASK:
d9f4ea18
JJ
2866 case GIMPLE_OMP_ORDERED:
2867 ordered_in_taskloop:
26127932 2868 error_at (gimple_location (stmt),
d9f4ea18
JJ
2869 "%<ordered%> region may not be closely nested inside "
2870 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2871 "%<taskloop%> region");
26127932 2872 return false;
726a989a 2873 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2874 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2875 goto ordered_in_taskloop;
629b3d75 2876 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
a6fc8e21 2877 OMP_CLAUSE_ORDERED) == NULL)
26127932
JJ
2878 {
2879 error_at (gimple_location (stmt),
d9f4ea18
JJ
2880 "%<ordered%> region must be closely nested inside "
2881 "a loop region with an %<ordered%> clause");
26127932
JJ
2882 return false;
2883 }
2884 return true;
d9f4ea18
JJ
2885 case GIMPLE_OMP_TARGET:
2886 if (gimple_omp_target_kind (ctx->stmt)
2887 != GF_OMP_TARGET_KIND_REGION)
2888 break;
2889 /* FALLTHRU */
726a989a 2890 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2891 case GIMPLE_OMP_TEAMS:
acf0174b 2892 error_at (gimple_location (stmt),
d9f4ea18
JJ
2893 "%<ordered%> region must be closely nested inside "
2894 "a loop region with an %<ordered%> clause");
acf0174b 2895 return false;
a6fc8e21
JJ
2896 default:
2897 break;
2898 }
2899 break;
726a989a 2900 case GIMPLE_OMP_CRITICAL:
538dd0b7
DM
2901 {
2902 tree this_stmt_name
2903 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2904 for (; ctx != NULL; ctx = ctx->outer)
2905 if (gomp_critical *other_crit
2906 = dyn_cast <gomp_critical *> (ctx->stmt))
2907 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2908 {
2909 error_at (gimple_location (stmt),
d9f4ea18
JJ
2910 "%<critical%> region may not be nested inside "
2911 "a %<critical%> region with the same name");
538dd0b7
DM
2912 return false;
2913 }
2914 }
a6fc8e21 2915 break;
acf0174b
JJ
2916 case GIMPLE_OMP_TEAMS:
2917 if (ctx == NULL
2918 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2919 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2920 {
2921 error_at (gimple_location (stmt),
d9f4ea18
JJ
2922 "%<teams%> construct not closely nested inside of "
2923 "%<target%> construct");
acf0174b
JJ
2924 return false;
2925 }
2926 break;
f014c653 2927 case GIMPLE_OMP_TARGET:
d9a6bd32
JJ
2928 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2929 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2930 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2931 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2932 {
2933 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2934 error_at (OMP_CLAUSE_LOCATION (c),
2935 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2936 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2937 return false;
2938 }
640b7e74 2939 if (is_gimple_omp_offloaded (stmt)
629b3d75 2940 && oacc_get_fn_attrib (cfun->decl) != NULL)
640b7e74
TV
2941 {
2942 error_at (gimple_location (stmt),
2943 "OpenACC region inside of OpenACC routine, nested "
2944 "parallelism not supported yet");
2945 return false;
2946 }
f014c653 2947 for (; ctx != NULL; ctx = ctx->outer)
41dbbb37
TS
2948 {
2949 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2950 {
2951 if (is_gimple_omp (stmt)
2952 && is_gimple_omp_oacc (stmt)
2953 && is_gimple_omp (ctx->stmt))
2954 {
2955 error_at (gimple_location (stmt),
2956 "OpenACC construct inside of non-OpenACC region");
2957 return false;
2958 }
2959 continue;
2960 }
2961
2962 const char *stmt_name, *ctx_stmt_name;
2963 switch (gimple_omp_target_kind (stmt))
2964 {
2965 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2966 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2967 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
d9a6bd32
JJ
2968 case GF_OMP_TARGET_KIND_ENTER_DATA:
2969 stmt_name = "target enter data"; break;
2970 case GF_OMP_TARGET_KIND_EXIT_DATA:
2971 stmt_name = "target exit data"; break;
41dbbb37
TS
2972 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2973 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2974 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2975 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
d9a6bd32
JJ
2976 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2977 stmt_name = "enter/exit data"; break;
37d5ad46
JB
2978 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2979 break;
41dbbb37
TS
2980 default: gcc_unreachable ();
2981 }
2982 switch (gimple_omp_target_kind (ctx->stmt))
2983 {
2984 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2985 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
d9a6bd32
JJ
2986 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2987 ctx_stmt_name = "parallel"; break;
2988 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2989 ctx_stmt_name = "kernels"; break;
41dbbb37 2990 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
37d5ad46
JB
2991 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2992 ctx_stmt_name = "host_data"; break;
41dbbb37
TS
2993 default: gcc_unreachable ();
2994 }
2995
2996 /* OpenACC/OpenMP mismatch? */
2997 if (is_gimple_omp_oacc (stmt)
2998 != is_gimple_omp_oacc (ctx->stmt))
2999 {
3000 error_at (gimple_location (stmt),
d9f4ea18 3001 "%s %qs construct inside of %s %qs region",
41dbbb37
TS
3002 (is_gimple_omp_oacc (stmt)
3003 ? "OpenACC" : "OpenMP"), stmt_name,
3004 (is_gimple_omp_oacc (ctx->stmt)
3005 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3006 return false;
3007 }
3008 if (is_gimple_omp_offloaded (ctx->stmt))
3009 {
3010 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3011 if (is_gimple_omp_oacc (ctx->stmt))
3012 {
3013 error_at (gimple_location (stmt),
d9f4ea18 3014 "%qs construct inside of %qs region",
41dbbb37
TS
3015 stmt_name, ctx_stmt_name);
3016 return false;
3017 }
3018 else
3019 {
41dbbb37 3020 warning_at (gimple_location (stmt), 0,
d9f4ea18 3021 "%qs construct inside of %qs region",
41dbbb37
TS
3022 stmt_name, ctx_stmt_name);
3023 }
3024 }
3025 }
f014c653 3026 break;
a6fc8e21
JJ
3027 default:
3028 break;
3029 }
26127932 3030 return true;
a6fc8e21
JJ
3031}
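
/* Illustrative sketch (an assumption): a typical violation diagnosed above
   is a barrier closely nested in a critical region:

       #pragma omp parallel
       #pragma omp critical
       {
         foo ();
       #pragma omp barrier   // "barrier region may not be closely nested
       }                     //  inside of ... critical ... region"
   */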
3032
3033
726a989a
RB
3034/* Helper function scan_omp.
3035
3036 Callback for walk_tree or operators in walk_gimple_stmt used to
41dbbb37 3037 scan for OMP directives in TP. */
953ff289
DN
3038
3039static tree
726a989a 3040scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
953ff289 3041{
d3bfe4de
KG
3042 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3043 omp_context *ctx = (omp_context *) wi->info;
953ff289
DN
3044 tree t = *tp;
3045
726a989a
RB
3046 switch (TREE_CODE (t))
3047 {
3048 case VAR_DECL:
3049 case PARM_DECL:
3050 case LABEL_DECL:
3051 case RESULT_DECL:
3052 if (ctx)
b2b40051
MJ
3053 {
3054 tree repl = remap_decl (t, &ctx->cb);
3055 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3056 *tp = repl;
3057 }
726a989a
RB
3058 break;
3059
3060 default:
3061 if (ctx && TYPE_P (t))
3062 *tp = remap_type (t, &ctx->cb);
3063 else if (!DECL_P (t))
a900ae6b
JJ
3064 {
3065 *walk_subtrees = 1;
3066 if (ctx)
70f34814
RG
3067 {
3068 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3069 if (tem != TREE_TYPE (t))
3070 {
3071 if (TREE_CODE (t) == INTEGER_CST)
807e902e 3072 *tp = wide_int_to_tree (tem, t);
70f34814
RG
3073 else
3074 TREE_TYPE (t) = tem;
3075 }
3076 }
a900ae6b 3077 }
726a989a
RB
3078 break;
3079 }
3080
3081 return NULL_TREE;
3082}
3083
c02065fc
AH
3084/* Return true if FNDECL is a setjmp or a longjmp. */
3085
3086static bool
3087setjmp_or_longjmp_p (const_tree fndecl)
3088{
3089 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3090 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3091 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3092 return true;
3093
3094 tree declname = DECL_NAME (fndecl);
3095 if (!declname)
3096 return false;
3097 const char *name = IDENTIFIER_POINTER (declname);
3098 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3099}
3100
726a989a
RB
3101
3102/* Helper function for scan_omp.
3103
41dbbb37 3104 Callback for walk_gimple_stmt used to scan for OMP directives in
726a989a
RB
3105 the current statement in GSI. */
3106
3107static tree
3108scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3109 struct walk_stmt_info *wi)
3110{
355fe088 3111 gimple *stmt = gsi_stmt (*gsi);
726a989a
RB
3112 omp_context *ctx = (omp_context *) wi->info;
3113
3114 if (gimple_has_location (stmt))
3115 input_location = gimple_location (stmt);
953ff289 3116
41dbbb37 3117 /* Check the nesting restrictions. */
acf0174b
JJ
3118 bool remove = false;
3119 if (is_gimple_omp (stmt))
3120 remove = !check_omp_nesting_restrictions (stmt, ctx);
3121 else if (is_gimple_call (stmt))
3122 {
3123 tree fndecl = gimple_call_fndecl (stmt);
c02065fc
AH
3124 if (fndecl)
3125 {
3126 if (setjmp_or_longjmp_p (fndecl)
3127 && ctx
3128 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 3129 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
c02065fc
AH
3130 {
3131 remove = true;
3132 error_at (gimple_location (stmt),
3133 "setjmp/longjmp inside simd construct");
3134 }
3135 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3136 switch (DECL_FUNCTION_CODE (fndecl))
3137 {
3138 case BUILT_IN_GOMP_BARRIER:
3139 case BUILT_IN_GOMP_CANCEL:
3140 case BUILT_IN_GOMP_CANCELLATION_POINT:
3141 case BUILT_IN_GOMP_TASKYIELD:
3142 case BUILT_IN_GOMP_TASKWAIT:
3143 case BUILT_IN_GOMP_TASKGROUP_START:
3144 case BUILT_IN_GOMP_TASKGROUP_END:
3145 remove = !check_omp_nesting_restrictions (stmt, ctx);
3146 break;
3147 default:
3148 break;
3149 }
3150 }
acf0174b
JJ
3151 }
3152 if (remove)
3153 {
3154 stmt = gimple_build_nop ();
3155 gsi_replace (gsi, stmt, false);
a68ab351 3156 }
a6fc8e21 3157
726a989a
RB
3158 *handled_ops_p = true;
3159
3160 switch (gimple_code (stmt))
953ff289 3161 {
726a989a 3162 case GIMPLE_OMP_PARALLEL:
a68ab351 3163 taskreg_nesting_level++;
726a989a 3164 scan_omp_parallel (gsi, ctx);
a68ab351
JJ
3165 taskreg_nesting_level--;
3166 break;
3167
726a989a 3168 case GIMPLE_OMP_TASK:
a68ab351 3169 taskreg_nesting_level++;
726a989a 3170 scan_omp_task (gsi, ctx);
a68ab351 3171 taskreg_nesting_level--;
953ff289
DN
3172 break;
3173
726a989a 3174 case GIMPLE_OMP_FOR:
6c7509bc
JJ
3175 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3176 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3177 && omp_maybe_offloaded_ctx (ctx)
3178 && omp_max_simt_vf ())
3179 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3180 else
3181 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
953ff289
DN
3182 break;
3183
726a989a 3184 case GIMPLE_OMP_SECTIONS:
538dd0b7 3185 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
953ff289
DN
3186 break;
3187
726a989a 3188 case GIMPLE_OMP_SINGLE:
538dd0b7 3189 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
953ff289
DN
3190 break;
3191
726a989a
RB
3192 case GIMPLE_OMP_SECTION:
3193 case GIMPLE_OMP_MASTER:
acf0174b 3194 case GIMPLE_OMP_TASKGROUP:
726a989a
RB
3195 case GIMPLE_OMP_ORDERED:
3196 case GIMPLE_OMP_CRITICAL:
b2b40051 3197 case GIMPLE_OMP_GRID_BODY:
726a989a 3198 ctx = new_omp_context (stmt, ctx);
26127932 3199 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
3200 break;
3201
acf0174b 3202 case GIMPLE_OMP_TARGET:
538dd0b7 3203 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
acf0174b
JJ
3204 break;
3205
3206 case GIMPLE_OMP_TEAMS:
538dd0b7 3207 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
acf0174b
JJ
3208 break;
3209
726a989a 3210 case GIMPLE_BIND:
953ff289
DN
3211 {
3212 tree var;
953ff289 3213
726a989a
RB
3214 *handled_ops_p = false;
3215 if (ctx)
538dd0b7
DM
3216 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3217 var ;
3218 var = DECL_CHAIN (var))
726a989a 3219 insert_decl_map (&ctx->cb, var, var);
953ff289
DN
3220 }
3221 break;
953ff289 3222 default:
726a989a 3223 *handled_ops_p = false;
953ff289
DN
3224 break;
3225 }
3226
3227 return NULL_TREE;
3228}
3229
3230
726a989a 3231/* Scan all the statements starting at the current statement. CTX
41dbbb37 3232 contains context information about the OMP directives and
726a989a 3233 clauses found during the scan. */
953ff289
DN
3234
3235static void
26127932 3236scan_omp (gimple_seq *body_p, omp_context *ctx)
953ff289
DN
3237{
3238 location_t saved_location;
3239 struct walk_stmt_info wi;
3240
3241 memset (&wi, 0, sizeof (wi));
953ff289 3242 wi.info = ctx;
953ff289
DN
3243 wi.want_locations = true;
3244
3245 saved_location = input_location;
26127932 3246 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
953ff289
DN
3247 input_location = saved_location;
3248}
3249\f
3250/* Re-gimplification and code generation routines. */
3251
953ff289
DN
3252/* If a context was created for STMT when it was scanned, return it. */
3253
3254static omp_context *
355fe088 3255maybe_lookup_ctx (gimple *stmt)
953ff289
DN
3256{
3257 splay_tree_node n;
3258 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3259 return n ? (omp_context *) n->value : NULL;
3260}
3261
50674e96
DN
3262
3263/* Find the mapping for DECL in CTX or the immediately enclosing
3264 context that has a mapping for DECL.
3265
3266 If CTX is a nested parallel directive, we may have to use the decl
3267 mappings created in CTX's parent context. Suppose that we have the
3268 following parallel nesting (variable UIDs showed for clarity):
3269
3270 iD.1562 = 0;
3271 #omp parallel shared(iD.1562) -> outer parallel
3272 iD.1562 = iD.1562 + 1;
3273
3274 #omp parallel shared (iD.1562) -> inner parallel
3275 iD.1562 = iD.1562 - 1;
3276
3277 Each parallel structure will create a distinct .omp_data_s structure
3278 for copying iD.1562 in/out of the directive:
3279
3280 outer parallel .omp_data_s.1.i -> iD.1562
3281 inner parallel .omp_data_s.2.i -> iD.1562
3282
3283 A shared variable mapping will produce a copy-out operation before
3284 the parallel directive and a copy-in operation after it. So, in
3285 this case we would have:
3286
3287 iD.1562 = 0;
3288 .omp_data_o.1.i = iD.1562;
3289 #omp parallel shared(iD.1562) -> outer parallel
3290 .omp_data_i.1 = &.omp_data_o.1
3291 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3292
3293 .omp_data_o.2.i = iD.1562; -> **
3294 #omp parallel shared(iD.1562) -> inner parallel
3295 .omp_data_i.2 = &.omp_data_o.2
3296 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3297
3298
3299 ** This is a problem. The symbol iD.1562 cannot be referenced
3300 inside the body of the outer parallel region. But since we are
3301 emitting this copy operation while expanding the inner parallel
3302 directive, we need to access the CTX structure of the outer
3303 parallel directive to get the correct mapping:
3304
3305 .omp_data_o.2.i = .omp_data_i.1->i
3306
3307 Since there may be other workshare or parallel directives enclosing
3308 the parallel directive, it may be necessary to walk up the context
3309 parent chain. This is not a problem in general because nested
3310 parallelism happens only rarely. */
3311
3312static tree
3313lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3314{
3315 tree t;
3316 omp_context *up;
3317
50674e96
DN
3318 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3319 t = maybe_lookup_decl (decl, up);
3320
d2dda7fe 3321 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
50674e96 3322
64964499 3323 return t ? t : decl;
50674e96
DN
3324}
3325
3326
8ca5b2a2
JJ
3327/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3328 in outer contexts. */
3329
3330static tree
3331maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3332{
3333 tree t = NULL;
3334 omp_context *up;
3335
d2dda7fe
JJ
3336 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3337 t = maybe_lookup_decl (decl, up);
8ca5b2a2
JJ
3338
3339 return t ? t : decl;
3340}
3341
3342
f2c9f71d 3343/* Construct the initialization value for reduction operation OP. */
953ff289
DN
3344
3345tree
f2c9f71d 3346omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
953ff289 3347{
f2c9f71d 3348 switch (op)
953ff289
DN
3349 {
3350 case PLUS_EXPR:
3351 case MINUS_EXPR:
3352 case BIT_IOR_EXPR:
3353 case BIT_XOR_EXPR:
3354 case TRUTH_OR_EXPR:
3355 case TRUTH_ORIF_EXPR:
3356 case TRUTH_XOR_EXPR:
3357 case NE_EXPR:
e8160c9a 3358 return build_zero_cst (type);
953ff289
DN
3359
3360 case MULT_EXPR:
3361 case TRUTH_AND_EXPR:
3362 case TRUTH_ANDIF_EXPR:
3363 case EQ_EXPR:
db3927fb 3364 return fold_convert_loc (loc, type, integer_one_node);
953ff289
DN
3365
3366 case BIT_AND_EXPR:
db3927fb 3367 return fold_convert_loc (loc, type, integer_minus_one_node);
953ff289
DN
3368
3369 case MAX_EXPR:
3370 if (SCALAR_FLOAT_TYPE_P (type))
3371 {
3372 REAL_VALUE_TYPE max, min;
3d3dbadd 3373 if (HONOR_INFINITIES (type))
953ff289
DN
3374 {
3375 real_inf (&max);
3376 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3377 }
3378 else
3379 real_maxval (&min, 1, TYPE_MODE (type));
3380 return build_real (type, min);
3381 }
3ff2d74e
TV
3382 else if (POINTER_TYPE_P (type))
3383 {
3384 wide_int min
3385 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3386 return wide_int_to_tree (type, min);
3387 }
953ff289
DN
3388 else
3389 {
3390 gcc_assert (INTEGRAL_TYPE_P (type));
3391 return TYPE_MIN_VALUE (type);
3392 }
3393
3394 case MIN_EXPR:
3395 if (SCALAR_FLOAT_TYPE_P (type))
3396 {
3397 REAL_VALUE_TYPE max;
3d3dbadd 3398 if (HONOR_INFINITIES (type))
953ff289
DN
3399 real_inf (&max);
3400 else
3401 real_maxval (&max, 0, TYPE_MODE (type));
3402 return build_real (type, max);
3403 }
3ff2d74e
TV
3404 else if (POINTER_TYPE_P (type))
3405 {
3406 wide_int max
3407 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3408 return wide_int_to_tree (type, max);
3409 }
953ff289
DN
3410 else
3411 {
3412 gcc_assert (INTEGRAL_TYPE_P (type));
3413 return TYPE_MAX_VALUE (type);
3414 }
3415
3416 default:
3417 gcc_unreachable ();
3418 }
3419}
3420
f2c9f71d
TS
3421/* Construct the initialization value for reduction CLAUSE. */
3422
3423tree
3424omp_reduction_init (tree clause, tree type)
3425{
3426 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3427 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3428}
3429
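/* Worked example (added for exposition): for an "int x", the identity values
   produced by omp_reduction_init for common reduction operators are

     reduction(+:x)  or  reduction(-:x)   ->  0
     reduction(*:x)                       ->  1
     reduction(&:x)                       ->  ~0  (all bits set)
     reduction(|:x)  or  reduction(^:x)   ->  0
     reduction(max:x)                     ->  INT_MIN
     reduction(min:x)                     ->  INT_MAX

   matching the switch in omp_reduction_init_op above.  */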
acf0174b
JJ
3430/* Return the alignment to be assumed for the variable in CLAUSE, which should be
3431 OMP_CLAUSE_ALIGNED. */
3432
3433static tree
3434omp_clause_aligned_alignment (tree clause)
3435{
3436 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3437 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3438
3439 /* Otherwise return implementation defined alignment. */
3440 unsigned int al = 1;
ef4bddc2 3441 machine_mode mode, vmode;
acf0174b
JJ
3442 int vs = targetm.vectorize.autovectorize_vector_sizes ();
3443 if (vs)
3444 vs = 1 << floor_log2 (vs);
3445 static enum mode_class classes[]
3446 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3447 for (int i = 0; i < 4; i += 2)
3448 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
3449 mode != VOIDmode;
3450 mode = GET_MODE_WIDER_MODE (mode))
3451 {
3452 vmode = targetm.vectorize.preferred_simd_mode (mode);
3453 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3454 continue;
3455 while (vs
3456 && GET_MODE_SIZE (vmode) < vs
3457 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
3458 vmode = GET_MODE_2XWIDER_MODE (vmode);
01914336 3459
acf0174b
JJ
3460 tree type = lang_hooks.types.type_for_mode (mode, 1);
3461 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3462 continue;
3463 type = build_vector_type (type, GET_MODE_SIZE (vmode)
3464 / GET_MODE_SIZE (mode));
3465 if (TYPE_MODE (type) != vmode)
3466 continue;
3467 if (TYPE_ALIGN_UNIT (type) > al)
3468 al = TYPE_ALIGN_UNIT (type);
3469 }
3470 return build_int_cst (integer_type_node, al);
3471}
3472
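/* Sketch (added for exposition): on the receiver side an aligned clause is
   honored by funneling the pointer through __builtin_assume_aligned, roughly

     #pragma omp simd aligned(p : 32)
     ...
     p = (T *) __builtin_assume_aligned (p, 32);

   where T stands for the pointee type.  When the clause carries no explicit
   alignment, the implementation-defined value computed by
   omp_clause_aligned_alignment above is used.  This is what the
   OMP_CLAUSE_ALIGNED handling in lower_rec_input_clauses below emits.  */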
6943af07
AM
3473
3474/* This structure is part of the interface between lower_rec_simd_input_clauses
3475 and lower_rec_input_clauses. */
3476
3477struct omplow_simd_context {
3478 tree idx;
3479 tree lane;
0c6b03b5
AM
3480 vec<tree, va_heap> simt_eargs;
3481 gimple_seq simt_dlist;
6943af07
AM
3482 int max_vf;
3483 bool is_simt;
3484};
3485
74bf76ed
JJ
3486/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3487 privatization. */
3488
3489static bool
6943af07
AM
3490lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3491 omplow_simd_context *sctx, tree &ivar, tree &lvar)
74bf76ed 3492{
6943af07 3493 if (sctx->max_vf == 0)
74bf76ed 3494 {
6943af07
AM
3495 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3496 if (sctx->max_vf > 1)
74bf76ed 3497 {
629b3d75 3498 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
74bf76ed 3499 OMP_CLAUSE_SAFELEN);
8ebc1e0f
JJ
3500 if (c
3501 && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3502 || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
6943af07 3503 sctx->max_vf = 1;
b46ebd6c 3504 else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
6943af07
AM
3505 sctx->max_vf) == -1)
3506 sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
74bf76ed 3507 }
6943af07 3508 if (sctx->max_vf > 1)
74bf76ed 3509 {
6943af07
AM
3510 sctx->idx = create_tmp_var (unsigned_type_node);
3511 sctx->lane = create_tmp_var (unsigned_type_node);
74bf76ed
JJ
3512 }
3513 }
6943af07 3514 if (sctx->max_vf == 1)
74bf76ed
JJ
3515 return false;
3516
0c6b03b5
AM
3517 if (sctx->is_simt)
3518 {
3519 if (is_gimple_reg (new_var))
3520 {
3521 ivar = lvar = new_var;
3522 return true;
3523 }
3524 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3525 ivar = lvar = create_tmp_var (type);
3526 TREE_ADDRESSABLE (ivar) = 1;
3527 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3528 NULL, DECL_ATTRIBUTES (ivar));
3529 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3530 tree clobber = build_constructor (type, NULL);
3531 TREE_THIS_VOLATILE (clobber) = 1;
3532 gimple *g = gimple_build_assign (ivar, clobber);
3533 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3534 }
3535 else
3536 {
3537 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3538 tree avar = create_tmp_var_raw (atype);
3539 if (TREE_ADDRESSABLE (new_var))
3540 TREE_ADDRESSABLE (avar) = 1;
3541 DECL_ATTRIBUTES (avar)
3542 = tree_cons (get_identifier ("omp simd array"), NULL,
3543 DECL_ATTRIBUTES (avar));
3544 gimple_add_tmp_var (avar);
3545 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3546 NULL_TREE, NULL_TREE);
3547 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3548 NULL_TREE, NULL_TREE);
3549 }
acf0174b
JJ
3550 if (DECL_P (new_var))
3551 {
3552 SET_DECL_VALUE_EXPR (new_var, lvar);
3553 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3554 }
74bf76ed
JJ
3555 return true;
3556}
3557
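/* Conceptual sketch (added for exposition; the names D.x, idx and lane below
   are placeholders for the temporaries created above): when a privatized
   variable cannot simply stay a scalar, lower_rec_simd_input_clauses replaces
   it with an "omp simd array" of max_vf elements:

     float x;			-> private in "#pragma omp simd"
   becomes
     float D.x[max_vf];		-> DECL_ATTRIBUTES: "omp simd array"
     ... D.x[idx] ...		-> uses inside the vectorizable body
     ... D.x[lane] ...		-> uses outside it (via DECL_VALUE_EXPR)

   On SIMT targets the variable is instead kept per-lane and its address is
   added to the IFN_GOMP_SIMT_ENTER arguments collected in simt_eargs.  */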
decaaec8
JJ
3558/* Helper function of lower_rec_input_clauses.  For a reference used in a
3559 simd reduction, add an underlying variable that it will reference. */
3560
3561static void
3562handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3563{
3564 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3565 if (TREE_CONSTANT (z))
3566 {
d9a6bd32
JJ
3567 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3568 get_name (new_vard));
decaaec8
JJ
3569 gimple_add_tmp_var (z);
3570 TREE_ADDRESSABLE (z) = 1;
3571 z = build_fold_addr_expr_loc (loc, z);
3572 gimplify_assign (new_vard, z, ilist);
3573 }
3574}
3575
953ff289
DN
3576/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3577 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3578 private variables. Initialization statements go in ILIST, while calls
3579 to destructors go in DLIST. */
3580
3581static void
726a989a 3582lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
acf0174b 3583 omp_context *ctx, struct omp_for_data *fd)
953ff289 3584{
5039610b 3585 tree c, dtor, copyin_seq, x, ptr;
953ff289 3586 bool copyin_by_ref = false;
8ca5b2a2 3587 bool lastprivate_firstprivate = false;
acf0174b 3588 bool reduction_omp_orig_ref = false;
953ff289 3589 int pass;
74bf76ed 3590 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 3591 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
6943af07 3592 omplow_simd_context sctx = omplow_simd_context ();
0c6b03b5
AM
3593 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3594 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
9669b00b 3595 gimple_seq llist[3] = { };
953ff289 3596
953ff289 3597 copyin_seq = NULL;
6943af07 3598 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
953ff289 3599
74bf76ed
JJ
3600 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3601 with data sharing clauses referencing variable sized vars. That
3602 is unnecessarily hard to support and very unlikely to result in
3603 vectorized code anyway. */
3604 if (is_simd)
3605 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3606 switch (OMP_CLAUSE_CODE (c))
3607 {
da6f124d
JJ
3608 case OMP_CLAUSE_LINEAR:
3609 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6943af07 3610 sctx.max_vf = 1;
da6f124d 3611 /* FALLTHRU */
74bf76ed
JJ
3612 case OMP_CLAUSE_PRIVATE:
3613 case OMP_CLAUSE_FIRSTPRIVATE:
3614 case OMP_CLAUSE_LASTPRIVATE:
74bf76ed 3615 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
6943af07 3616 sctx.max_vf = 1;
74bf76ed 3617 break;
d9a6bd32
JJ
3618 case OMP_CLAUSE_REDUCTION:
3619 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3620 || is_variable_sized (OMP_CLAUSE_DECL (c)))
6943af07 3621 sctx.max_vf = 1;
d9a6bd32 3622 break;
74bf76ed
JJ
3623 default:
3624 continue;
3625 }
3626
0c6b03b5
AM
3627 /* Add a placeholder for simduid. */
3628 if (sctx.is_simt && sctx.max_vf != 1)
3629 sctx.simt_eargs.safe_push (NULL_TREE);
3630
953ff289
DN
3631 /* Do all the fixed sized types in the first pass, and the variable sized
3632 types in the second pass. This makes sure that the scalar arguments to
b8698a0f 3633 the variable sized types are processed before we use them in the
953ff289
DN
3634 variable sized operations. */
3635 for (pass = 0; pass < 2; ++pass)
3636 {
3637 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3638 {
aaf46ef9 3639 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
953ff289
DN
3640 tree var, new_var;
3641 bool by_ref;
db3927fb 3642 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289
DN
3643
3644 switch (c_kind)
3645 {
3646 case OMP_CLAUSE_PRIVATE:
3647 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3648 continue;
3649 break;
3650 case OMP_CLAUSE_SHARED:
acf0174b
JJ
3651 /* Ignore shared directives in teams construct. */
3652 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3653 continue;
8ca5b2a2
JJ
3654 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3655 {
d9a6bd32
JJ
3656 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3657 || is_global_var (OMP_CLAUSE_DECL (c)));
8ca5b2a2
JJ
3658 continue;
3659 }
953ff289 3660 case OMP_CLAUSE_FIRSTPRIVATE:
953ff289 3661 case OMP_CLAUSE_COPYIN:
d9a6bd32 3662 break;
acf0174b 3663 case OMP_CLAUSE_LINEAR:
d9a6bd32
JJ
3664 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3665 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3666 lastprivate_firstprivate = true;
acf0174b 3667 break;
953ff289 3668 case OMP_CLAUSE_REDUCTION:
acf0174b
JJ
3669 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3670 reduction_omp_orig_ref = true;
953ff289 3671 break;
acf0174b 3672 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32 3673 /* Handle _looptemp_ clauses only on parallel/task. */
acf0174b
JJ
3674 if (fd)
3675 continue;
74bf76ed 3676 break;
077b0dfb 3677 case OMP_CLAUSE_LASTPRIVATE:
8ca5b2a2
JJ
3678 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3679 {
3680 lastprivate_firstprivate = true;
d9a6bd32 3681 if (pass != 0 || is_taskloop_ctx (ctx))
8ca5b2a2
JJ
3682 continue;
3683 }
92d28cbb
JJ
3684 /* Even without corresponding firstprivate, if
3685 decl is Fortran allocatable, it needs outer var
3686 reference. */
3687 else if (pass == 0
3688 && lang_hooks.decls.omp_private_outer_ref
3689 (OMP_CLAUSE_DECL (c)))
3690 lastprivate_firstprivate = true;
077b0dfb 3691 break;
acf0174b
JJ
3692 case OMP_CLAUSE_ALIGNED:
3693 if (pass == 0)
3694 continue;
3695 var = OMP_CLAUSE_DECL (c);
3696 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3697 && !is_global_var (var))
3698 {
3699 new_var = maybe_lookup_decl (var, ctx);
3700 if (new_var == NULL_TREE)
3701 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3702 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
37e373c2
JJ
3703 tree alarg = omp_clause_aligned_alignment (c);
3704 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3705 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
acf0174b
JJ
3706 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3707 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3708 gimplify_and_add (x, ilist);
3709 }
3710 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3711 && is_global_var (var))
3712 {
3713 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3714 new_var = lookup_decl (var, ctx);
3715 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3716 t = build_fold_addr_expr_loc (clause_loc, t);
3717 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
37e373c2
JJ
3718 tree alarg = omp_clause_aligned_alignment (c);
3719 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3720 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
acf0174b 3721 t = fold_convert_loc (clause_loc, ptype, t);
b731b390 3722 x = create_tmp_var (ptype);
acf0174b
JJ
3723 t = build2 (MODIFY_EXPR, ptype, x, t);
3724 gimplify_and_add (t, ilist);
3725 t = build_simple_mem_ref_loc (clause_loc, x);
3726 SET_DECL_VALUE_EXPR (new_var, t);
3727 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3728 }
3729 continue;
953ff289
DN
3730 default:
3731 continue;
3732 }
3733
3734 new_var = var = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
3735 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3736 {
3737 var = TREE_OPERAND (var, 0);
e01d41e5
JJ
3738 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3739 var = TREE_OPERAND (var, 0);
d9a6bd32
JJ
3740 if (TREE_CODE (var) == INDIRECT_REF
3741 || TREE_CODE (var) == ADDR_EXPR)
3742 var = TREE_OPERAND (var, 0);
3743 if (is_variable_sized (var))
3744 {
3745 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3746 var = DECL_VALUE_EXPR (var);
3747 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3748 var = TREE_OPERAND (var, 0);
3749 gcc_assert (DECL_P (var));
3750 }
3751 new_var = var;
3752 }
953ff289
DN
3753 if (c_kind != OMP_CLAUSE_COPYIN)
3754 new_var = lookup_decl (var, ctx);
3755
3756 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3757 {
3758 if (pass != 0)
3759 continue;
3760 }
d9a6bd32
JJ
3761 /* C/C++ array section reductions. */
3762 else if (c_kind == OMP_CLAUSE_REDUCTION
3763 && var != OMP_CLAUSE_DECL (c))
953ff289
DN
3764 {
3765 if (pass == 0)
3766 continue;
3767
e01d41e5 3768 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
d9a6bd32 3769 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
e01d41e5
JJ
3770 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3771 {
3772 tree b = TREE_OPERAND (orig_var, 1);
3773 b = maybe_lookup_decl (b, ctx);
3774 if (b == NULL)
3775 {
3776 b = TREE_OPERAND (orig_var, 1);
3777 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3778 }
3779 if (integer_zerop (bias))
3780 bias = b;
3781 else
3782 {
3783 bias = fold_convert_loc (clause_loc,
3784 TREE_TYPE (b), bias);
3785 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3786 TREE_TYPE (b), b, bias);
3787 }
3788 orig_var = TREE_OPERAND (orig_var, 0);
3789 }
d9a6bd32
JJ
3790 if (TREE_CODE (orig_var) == INDIRECT_REF
3791 || TREE_CODE (orig_var) == ADDR_EXPR)
3792 orig_var = TREE_OPERAND (orig_var, 0);
3793 tree d = OMP_CLAUSE_DECL (c);
3794 tree type = TREE_TYPE (d);
3795 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3796 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3797 const char *name = get_name (orig_var);
3798 if (TREE_CONSTANT (v))
a68ab351 3799 {
d9a6bd32
JJ
3800 x = create_tmp_var_raw (type, name);
3801 gimple_add_tmp_var (x);
3802 TREE_ADDRESSABLE (x) = 1;
3803 x = build_fold_addr_expr_loc (clause_loc, x);
3804 }
3805 else
3806 {
3807 tree atmp
3808 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3809 tree t = maybe_lookup_decl (v, ctx);
3810 if (t)
3811 v = t;
3812 else
3813 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3814 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3815 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3816 TREE_TYPE (v), v,
3817 build_int_cst (TREE_TYPE (v), 1));
3818 t = fold_build2_loc (clause_loc, MULT_EXPR,
3819 TREE_TYPE (v), t,
3820 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3821 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3822 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3823 }
3824
3825 tree ptype = build_pointer_type (TREE_TYPE (type));
3826 x = fold_convert_loc (clause_loc, ptype, x);
3827 tree y = create_tmp_var (ptype, name);
3828 gimplify_assign (y, x, ilist);
3829 x = y;
e01d41e5
JJ
3830 tree yb = y;
3831
3832 if (!integer_zerop (bias))
3833 {
48a78aee
JJ
3834 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3835 bias);
3836 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3837 x);
3838 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3839 pointer_sized_int_node, yb, bias);
3840 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
e01d41e5
JJ
3841 yb = create_tmp_var (ptype, name);
3842 gimplify_assign (yb, x, ilist);
3843 x = yb;
3844 }
3845
3846 d = TREE_OPERAND (d, 0);
3847 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3848 d = TREE_OPERAND (d, 0);
3849 if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32
JJ
3850 {
3851 if (orig_var != var)
3852 {
3853 gcc_assert (is_variable_sized (orig_var));
3854 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3855 x);
3856 gimplify_assign (new_var, x, ilist);
3857 tree new_orig_var = lookup_decl (orig_var, ctx);
3858 tree t = build_fold_indirect_ref (new_var);
3859 DECL_IGNORED_P (new_var) = 0;
3860 TREE_THIS_NOTRAP (t);
3861 SET_DECL_VALUE_EXPR (new_orig_var, t);
3862 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3863 }
3864 else
3865 {
3866 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3867 build_int_cst (ptype, 0));
3868 SET_DECL_VALUE_EXPR (new_var, x);
3869 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3870 }
3871 }
3872 else
3873 {
3874 gcc_assert (orig_var == var);
e01d41e5 3875 if (TREE_CODE (d) == INDIRECT_REF)
d9a6bd32
JJ
3876 {
3877 x = create_tmp_var (ptype, name);
3878 TREE_ADDRESSABLE (x) = 1;
e01d41e5 3879 gimplify_assign (x, yb, ilist);
d9a6bd32
JJ
3880 x = build_fold_addr_expr_loc (clause_loc, x);
3881 }
3882 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3883 gimplify_assign (new_var, x, ilist);
3884 }
3885 tree y1 = create_tmp_var (ptype, NULL);
3886 gimplify_assign (y1, y, ilist);
3887 tree i2 = NULL_TREE, y2 = NULL_TREE;
3888 tree body2 = NULL_TREE, end2 = NULL_TREE;
3889 tree y3 = NULL_TREE, y4 = NULL_TREE;
3890 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3891 {
3892 y2 = create_tmp_var (ptype, NULL);
3893 gimplify_assign (y2, y, ilist);
3894 tree ref = build_outer_var_ref (var, ctx);
3895 /* For a reference, build_outer_var_ref already performs this. */
e01d41e5 3896 if (TREE_CODE (d) == INDIRECT_REF)
629b3d75 3897 gcc_assert (omp_is_reference (var));
e01d41e5 3898 else if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32 3899 ref = build_fold_addr_expr (ref);
629b3d75 3900 else if (omp_is_reference (var))
d9a6bd32
JJ
3901 ref = build_fold_addr_expr (ref);
3902 ref = fold_convert_loc (clause_loc, ptype, ref);
3903 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3904 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3905 {
3906 y3 = create_tmp_var (ptype, NULL);
3907 gimplify_assign (y3, unshare_expr (ref), ilist);
3908 }
3909 if (is_simd)
3910 {
3911 y4 = create_tmp_var (ptype, NULL);
3912 gimplify_assign (y4, ref, dlist);
3913 }
3914 }
3915 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3916 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3917 tree body = create_artificial_label (UNKNOWN_LOCATION);
3918 tree end = create_artificial_label (UNKNOWN_LOCATION);
3919 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3920 if (y2)
3921 {
3922 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3923 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3924 body2 = create_artificial_label (UNKNOWN_LOCATION);
3925 end2 = create_artificial_label (UNKNOWN_LOCATION);
3926 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3927 }
3928 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3929 {
3930 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3931 tree decl_placeholder
3932 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3933 SET_DECL_VALUE_EXPR (decl_placeholder,
3934 build_simple_mem_ref (y1));
3935 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3936 SET_DECL_VALUE_EXPR (placeholder,
3937 y3 ? build_simple_mem_ref (y3)
3938 : error_mark_node);
3939 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3940 x = lang_hooks.decls.omp_clause_default_ctor
3941 (c, build_simple_mem_ref (y1),
3942 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3943 if (x)
3944 gimplify_and_add (x, ilist);
3945 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3946 {
3947 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3948 lower_omp (&tseq, ctx);
3949 gimple_seq_add_seq (ilist, tseq);
3950 }
3951 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3952 if (is_simd)
3953 {
3954 SET_DECL_VALUE_EXPR (decl_placeholder,
3955 build_simple_mem_ref (y2));
3956 SET_DECL_VALUE_EXPR (placeholder,
3957 build_simple_mem_ref (y4));
3958 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3959 lower_omp (&tseq, ctx);
3960 gimple_seq_add_seq (dlist, tseq);
3961 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3962 }
3963 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3964 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3965 x = lang_hooks.decls.omp_clause_dtor
3966 (c, build_simple_mem_ref (y2));
3967 if (x)
3968 {
3969 gimple_seq tseq = NULL;
3970 dtor = x;
3971 gimplify_stmt (&dtor, &tseq);
3972 gimple_seq_add_seq (dlist, tseq);
3973 }
3974 }
3975 else
3976 {
3977 x = omp_reduction_init (c, TREE_TYPE (type));
3978 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3979
3980 /* reduction(-:var) sums up the partial results, so it
3981 acts identically to reduction(+:var). */
3982 if (code == MINUS_EXPR)
3983 code = PLUS_EXPR;
3984
3985 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3986 if (is_simd)
3987 {
3988 x = build2 (code, TREE_TYPE (type),
3989 build_simple_mem_ref (y4),
3990 build_simple_mem_ref (y2));
3991 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3992 }
3993 }
3994 gimple *g
3995 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3996 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3997 gimple_seq_add_stmt (ilist, g);
3998 if (y3)
3999 {
4000 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4001 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4002 gimple_seq_add_stmt (ilist, g);
4003 }
4004 g = gimple_build_assign (i, PLUS_EXPR, i,
4005 build_int_cst (TREE_TYPE (i), 1));
4006 gimple_seq_add_stmt (ilist, g);
4007 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4008 gimple_seq_add_stmt (ilist, g);
4009 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4010 if (y2)
4011 {
4012 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4013 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4014 gimple_seq_add_stmt (dlist, g);
4015 if (y4)
4016 {
4017 g = gimple_build_assign
4018 (y4, POINTER_PLUS_EXPR, y4,
4019 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4020 gimple_seq_add_stmt (dlist, g);
4021 }
4022 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4023 build_int_cst (TREE_TYPE (i2), 1));
4024 gimple_seq_add_stmt (dlist, g);
4025 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4026 gimple_seq_add_stmt (dlist, g);
4027 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4028 }
4029 continue;
4030 }
4031 else if (is_variable_sized (var))
4032 {
4033 /* For variable sized types, we need to allocate the
4034 actual storage here. Call alloca and store the
4035 result in the pointer decl that we created elsewhere. */
4036 if (pass == 0)
4037 continue;
4038
4039 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4040 {
4041 gcall *stmt;
4042 tree tmp, atmp;
4043
4044 ptr = DECL_VALUE_EXPR (new_var);
4045 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4046 ptr = TREE_OPERAND (ptr, 0);
a68ab351
JJ
4047 gcc_assert (DECL_P (ptr));
4048 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
726a989a
RB
4049
4050 /* void *tmp = __builtin_alloca_with_align (size, align); */
d9a6bd32
JJ
4051 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4052 stmt = gimple_build_call (atmp, 2, x,
4053 size_int (DECL_ALIGN (var)));
b731b390 4054 tmp = create_tmp_var_raw (ptr_type_node);
726a989a
RB
4055 gimple_add_tmp_var (tmp);
4056 gimple_call_set_lhs (stmt, tmp);
4057
4058 gimple_seq_add_stmt (ilist, stmt);
4059
db3927fb 4060 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
726a989a 4061 gimplify_assign (ptr, x, ilist);
a68ab351 4062 }
953ff289 4063 }
629b3d75 4064 else if (omp_is_reference (var))
953ff289 4065 {
50674e96
DN
4066 /* For references that are being privatized for Fortran,
4067 allocate new backing storage for the new pointer
4068 variable. This allows us to avoid changing all the
4069 code that expects a pointer to something that expects
acf0174b 4070 a direct variable. */
953ff289
DN
4071 if (pass == 0)
4072 continue;
4073
4074 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
a68ab351
JJ
4075 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4076 {
4077 x = build_receiver_ref (var, false, ctx);
db3927fb 4078 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
4079 }
4080 else if (TREE_CONSTANT (x))
953ff289 4081 {
decaaec8
JJ
4082 /* For a reduction in a SIMD loop, defer adding the
4083 initialization of the reference, because if we decide
4084 to use a SIMD array for it, the initialization could cause
4085 expansion ICE. */
4086 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4ceffa27
JJ
4087 x = NULL_TREE;
4088 else
4089 {
4ceffa27 4090 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
d9a6bd32 4091 get_name (var));
4ceffa27
JJ
4092 gimple_add_tmp_var (x);
4093 TREE_ADDRESSABLE (x) = 1;
4094 x = build_fold_addr_expr_loc (clause_loc, x);
4095 }
953ff289
DN
4096 }
4097 else
4098 {
d9a6bd32
JJ
4099 tree atmp
4100 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4101 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4102 tree al = size_int (TYPE_ALIGN (rtype));
4103 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
953ff289
DN
4104 }
4105
4ceffa27
JJ
4106 if (x)
4107 {
4108 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4109 gimplify_assign (new_var, x, ilist);
4110 }
953ff289 4111
70f34814 4112 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289
DN
4113 }
4114 else if (c_kind == OMP_CLAUSE_REDUCTION
4115 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4116 {
4117 if (pass == 0)
4118 continue;
4119 }
4120 else if (pass != 0)
4121 continue;
4122
aaf46ef9 4123 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
4124 {
4125 case OMP_CLAUSE_SHARED:
acf0174b
JJ
4126 /* Ignore shared directives in teams construct. */
4127 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4128 continue;
8ca5b2a2
JJ
4129 /* Shared global vars are just accessed directly. */
4130 if (is_global_var (new_var))
4131 break;
d9a6bd32
JJ
4132 /* For taskloop firstprivate/lastprivate, represented
4133 as firstprivate and shared clause on the task, new_var
4134 is the firstprivate var. */
4135 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4136 break;
953ff289
DN
4137 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4138 needs to be delayed until after fixup_child_record_type so
4139 that we get the correct type during the dereference. */
7c8f7639 4140 by_ref = use_pointer_for_field (var, ctx);
953ff289
DN
4141 x = build_receiver_ref (var, by_ref, ctx);
4142 SET_DECL_VALUE_EXPR (new_var, x);
4143 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4144
4145 /* ??? If VAR is not passed by reference, and the variable
4146 hasn't been initialized yet, then we'll get a warning for
4147 the store into the omp_data_s structure. Ideally, we'd be
b8698a0f 4148 able to notice this and not store anything at all, but
953ff289
DN
4149 we're generating code too early. Suppress the warning. */
4150 if (!by_ref)
4151 TREE_NO_WARNING (var) = 1;
4152 break;
4153
4154 case OMP_CLAUSE_LASTPRIVATE:
4155 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4156 break;
4157 /* FALLTHRU */
4158
4159 case OMP_CLAUSE_PRIVATE:
a68ab351
JJ
4160 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4161 x = build_outer_var_ref (var, ctx);
4162 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4163 {
4164 if (is_task_ctx (ctx))
4165 x = build_receiver_ref (var, false, ctx);
4166 else
c39dad64 4167 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
a68ab351
JJ
4168 }
4169 else
4170 x = NULL;
74bf76ed 4171 do_private:
acf0174b 4172 tree nx;
d9a6bd32
JJ
4173 nx = lang_hooks.decls.omp_clause_default_ctor
4174 (c, unshare_expr (new_var), x);
74bf76ed
JJ
4175 if (is_simd)
4176 {
4177 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
acf0174b 4178 if ((TREE_ADDRESSABLE (new_var) || nx || y
74bf76ed 4179 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
6943af07
AM
4180 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4181 ivar, lvar))
74bf76ed 4182 {
acf0174b 4183 if (nx)
74bf76ed
JJ
4184 x = lang_hooks.decls.omp_clause_default_ctor
4185 (c, unshare_expr (ivar), x);
acf0174b 4186 if (nx && x)
74bf76ed
JJ
4187 gimplify_and_add (x, &llist[0]);
4188 if (y)
4189 {
4190 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4191 if (y)
4192 {
4193 gimple_seq tseq = NULL;
4194
4195 dtor = y;
4196 gimplify_stmt (&dtor, &tseq);
4197 gimple_seq_add_seq (&llist[1], tseq);
4198 }
4199 }
4200 break;
4201 }
4202 }
acf0174b
JJ
4203 if (nx)
4204 gimplify_and_add (nx, ilist);
953ff289
DN
4205 /* FALLTHRU */
4206
4207 do_dtor:
4208 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4209 if (x)
4210 {
726a989a
RB
4211 gimple_seq tseq = NULL;
4212
953ff289 4213 dtor = x;
726a989a 4214 gimplify_stmt (&dtor, &tseq);
355a7673 4215 gimple_seq_add_seq (dlist, tseq);
953ff289
DN
4216 }
4217 break;
4218
74bf76ed
JJ
4219 case OMP_CLAUSE_LINEAR:
4220 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4221 goto do_firstprivate;
4222 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4223 x = NULL;
4224 else
4225 x = build_outer_var_ref (var, ctx);
4226 goto do_private;
4227
953ff289 4228 case OMP_CLAUSE_FIRSTPRIVATE:
a68ab351
JJ
4229 if (is_task_ctx (ctx))
4230 {
629b3d75 4231 if (omp_is_reference (var) || is_variable_sized (var))
a68ab351
JJ
4232 goto do_dtor;
4233 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4234 ctx))
4235 || use_pointer_for_field (var, NULL))
4236 {
4237 x = build_receiver_ref (var, false, ctx);
4238 SET_DECL_VALUE_EXPR (new_var, x);
4239 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4240 goto do_dtor;
4241 }
4242 }
74bf76ed 4243 do_firstprivate:
953ff289 4244 x = build_outer_var_ref (var, ctx);
74bf76ed
JJ
4245 if (is_simd)
4246 {
acf0174b
JJ
4247 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4248 && gimple_omp_for_combined_into_p (ctx->stmt))
4249 {
da6f124d
JJ
4250 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4251 tree stept = TREE_TYPE (t);
629b3d75 4252 tree ct = omp_find_clause (clauses,
da6f124d
JJ
4253 OMP_CLAUSE__LOOPTEMP_);
4254 gcc_assert (ct);
4255 tree l = OMP_CLAUSE_DECL (ct);
56ad0e38
JJ
4256 tree n1 = fd->loop.n1;
4257 tree step = fd->loop.step;
4258 tree itype = TREE_TYPE (l);
4259 if (POINTER_TYPE_P (itype))
4260 itype = signed_type_for (itype);
4261 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4262 if (TYPE_UNSIGNED (itype)
4263 && fd->loop.cond_code == GT_EXPR)
4264 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4265 fold_build1 (NEGATE_EXPR, itype, l),
4266 fold_build1 (NEGATE_EXPR,
4267 itype, step));
4268 else
4269 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
acf0174b
JJ
4270 t = fold_build2 (MULT_EXPR, stept,
4271 fold_convert (stept, l), t);
da6f124d
JJ
4272
4273 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4274 {
4275 x = lang_hooks.decls.omp_clause_linear_ctor
4276 (c, new_var, x, t);
4277 gimplify_and_add (x, ilist);
4278 goto do_dtor;
4279 }
4280
acf0174b
JJ
4281 if (POINTER_TYPE_P (TREE_TYPE (x)))
4282 x = fold_build2 (POINTER_PLUS_EXPR,
4283 TREE_TYPE (x), x, t);
4284 else
4285 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4286 }
4287
74bf76ed
JJ
4288 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4289 || TREE_ADDRESSABLE (new_var))
6943af07
AM
4290 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4291 ivar, lvar))
74bf76ed
JJ
4292 {
4293 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4294 {
b731b390 4295 tree iv = create_tmp_var (TREE_TYPE (new_var));
74bf76ed
JJ
4296 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4297 gimplify_and_add (x, ilist);
4298 gimple_stmt_iterator gsi
4299 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
538dd0b7 4300 gassign *g
74bf76ed
JJ
4301 = gimple_build_assign (unshare_expr (lvar), iv);
4302 gsi_insert_before_without_update (&gsi, g,
4303 GSI_SAME_STMT);
da6f124d 4304 tree t = OMP_CLAUSE_LINEAR_STEP (c);
74bf76ed
JJ
4305 enum tree_code code = PLUS_EXPR;
4306 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4307 code = POINTER_PLUS_EXPR;
0d0e4a03 4308 g = gimple_build_assign (iv, code, iv, t);
74bf76ed
JJ
4309 gsi_insert_before_without_update (&gsi, g,
4310 GSI_SAME_STMT);
4311 break;
4312 }
4313 x = lang_hooks.decls.omp_clause_copy_ctor
4314 (c, unshare_expr (ivar), x);
4315 gimplify_and_add (x, &llist[0]);
4316 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4317 if (x)
4318 {
4319 gimple_seq tseq = NULL;
4320
4321 dtor = x;
4322 gimplify_stmt (&dtor, &tseq);
4323 gimple_seq_add_seq (&llist[1], tseq);
4324 }
4325 break;
4326 }
4327 }
d9a6bd32
JJ
4328 x = lang_hooks.decls.omp_clause_copy_ctor
4329 (c, unshare_expr (new_var), x);
953ff289
DN
4330 gimplify_and_add (x, ilist);
4331 goto do_dtor;
953ff289 4332
acf0174b 4333 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32 4334 gcc_assert (is_taskreg_ctx (ctx));
acf0174b
JJ
4335 x = build_outer_var_ref (var, ctx);
4336 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4337 gimplify_and_add (x, ilist);
4338 break;
4339
953ff289 4340 case OMP_CLAUSE_COPYIN:
7c8f7639 4341 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
4342 x = build_receiver_ref (var, by_ref, ctx);
4343 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4344 append_to_statement_list (x, &copyin_seq);
4345 copyin_by_ref |= by_ref;
4346 break;
4347
4348 case OMP_CLAUSE_REDUCTION:
e5014671
NS
4349 /* OpenACC reductions are initialized using the
4350 GOACC_REDUCTION internal function. */
4351 if (is_gimple_omp_oacc (ctx->stmt))
4352 break;
953ff289
DN
4353 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4354 {
a68ab351 4355 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
355fe088 4356 gimple *tseq;
a68ab351
JJ
4357 x = build_outer_var_ref (var, ctx);
4358
629b3d75 4359 if (omp_is_reference (var)
acf0174b
JJ
4360 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4361 TREE_TYPE (x)))
db3927fb 4362 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
4363 SET_DECL_VALUE_EXPR (placeholder, x);
4364 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
acf0174b 4365 tree new_vard = new_var;
629b3d75 4366 if (omp_is_reference (var))
acf0174b
JJ
4367 {
4368 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4369 new_vard = TREE_OPERAND (new_var, 0);
4370 gcc_assert (DECL_P (new_vard));
4371 }
74bf76ed 4372 if (is_simd
6943af07
AM
4373 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4374 ivar, lvar))
74bf76ed 4375 {
acf0174b
JJ
4376 if (new_vard == new_var)
4377 {
4378 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4379 SET_DECL_VALUE_EXPR (new_var, ivar);
4380 }
4381 else
4382 {
4383 SET_DECL_VALUE_EXPR (new_vard,
4384 build_fold_addr_expr (ivar));
4385 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4386 }
4387 x = lang_hooks.decls.omp_clause_default_ctor
4388 (c, unshare_expr (ivar),
4389 build_outer_var_ref (var, ctx));
4390 if (x)
4391 gimplify_and_add (x, &llist[0]);
4392 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4393 {
4394 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4395 lower_omp (&tseq, ctx);
4396 gimple_seq_add_seq (&llist[0], tseq);
4397 }
4398 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4399 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4400 lower_omp (&tseq, ctx);
4401 gimple_seq_add_seq (&llist[1], tseq);
4402 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4403 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4404 if (new_vard == new_var)
4405 SET_DECL_VALUE_EXPR (new_var, lvar);
4406 else
4407 SET_DECL_VALUE_EXPR (new_vard,
4408 build_fold_addr_expr (lvar));
4409 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4410 if (x)
4411 {
4412 tseq = NULL;
4413 dtor = x;
4414 gimplify_stmt (&dtor, &tseq);
4415 gimple_seq_add_seq (&llist[1], tseq);
4416 }
4417 break;
4418 }
4ceffa27
JJ
4419 /* If this is a reference to constant size reduction var
4420 with placeholder, we haven't emitted the initializer
4421 for it because it is undesirable if SIMD arrays are used.
4422 But if they aren't used, we need to emit the deferred
4423 initialization now. */
629b3d75 4424 else if (omp_is_reference (var) && is_simd)
decaaec8 4425 handle_simd_reference (clause_loc, new_vard, ilist);
acf0174b 4426 x = lang_hooks.decls.omp_clause_default_ctor
92d28cbb
JJ
4427 (c, unshare_expr (new_var),
4428 build_outer_var_ref (var, ctx));
acf0174b
JJ
4429 if (x)
4430 gimplify_and_add (x, ilist);
4431 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4432 {
4433 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4434 lower_omp (&tseq, ctx);
4435 gimple_seq_add_seq (ilist, tseq);
4436 }
4437 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4438 if (is_simd)
4439 {
4440 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4441 lower_omp (&tseq, ctx);
4442 gimple_seq_add_seq (dlist, tseq);
4443 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4444 }
4445 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4446 goto do_dtor;
4447 }
4448 else
4449 {
4450 x = omp_reduction_init (c, TREE_TYPE (new_var));
4451 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
e9792e1d
JJ
4452 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4453
4454 /* reduction(-:var) sums up the partial results, so it
4455 acts identically to reduction(+:var). */
4456 if (code == MINUS_EXPR)
4457 code = PLUS_EXPR;
4458
decaaec8 4459 tree new_vard = new_var;
629b3d75 4460 if (is_simd && omp_is_reference (var))
decaaec8
JJ
4461 {
4462 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4463 new_vard = TREE_OPERAND (new_var, 0);
4464 gcc_assert (DECL_P (new_vard));
4465 }
acf0174b 4466 if (is_simd
6943af07
AM
4467 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4468 ivar, lvar))
acf0174b 4469 {
acf0174b
JJ
4470 tree ref = build_outer_var_ref (var, ctx);
4471
4472 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4473
6943af07 4474 if (sctx.is_simt)
9669b00b
AM
4475 {
4476 if (!simt_lane)
4477 simt_lane = create_tmp_var (unsigned_type_node);
4478 x = build_call_expr_internal_loc
4479 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4480 TREE_TYPE (ivar), 2, ivar, simt_lane);
4481 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4482 gimplify_assign (ivar, x, &llist[2]);
4483 }
acf0174b 4484 x = build2 (code, TREE_TYPE (ref), ref, ivar);
74bf76ed
JJ
4485 ref = build_outer_var_ref (var, ctx);
4486 gimplify_assign (ref, x, &llist[1]);
decaaec8
JJ
4487
4488 if (new_vard != new_var)
4489 {
4490 SET_DECL_VALUE_EXPR (new_vard,
4491 build_fold_addr_expr (lvar));
4492 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4493 }
74bf76ed
JJ
4494 }
4495 else
4496 {
629b3d75 4497 if (omp_is_reference (var) && is_simd)
decaaec8 4498 handle_simd_reference (clause_loc, new_vard, ilist);
74bf76ed
JJ
4499 gimplify_assign (new_var, x, ilist);
4500 if (is_simd)
e9792e1d
JJ
4501 {
4502 tree ref = build_outer_var_ref (var, ctx);
4503
4504 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4505 ref = build_outer_var_ref (var, ctx);
4506 gimplify_assign (ref, x, dlist);
4507 }
74bf76ed 4508 }
953ff289
DN
4509 }
4510 break;
4511
4512 default:
4513 gcc_unreachable ();
4514 }
4515 }
4516 }
4517
0c6b03b5
AM
4518 if (sctx.max_vf == 1)
4519 sctx.is_simt = false;
4520
4521 if (sctx.lane || sctx.is_simt)
74bf76ed 4522 {
0c6b03b5 4523 uid = create_tmp_var (ptr_type_node, "simduid");
8928eff3
JJ
4524 /* Don't want uninit warnings on simduid, it is always uninitialized,
4525 but we use it not for the value, but for the DECL_UID only. */
4526 TREE_NO_WARNING (uid) = 1;
0c6b03b5
AM
4527 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4528 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4529 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4530 gimple_omp_for_set_clauses (ctx->stmt, c);
4531 }
4532 /* Emit calls denoting privatized variables and initializing a pointer to
4533 a structure that holds private variables as fields, after the ompdevlow pass. */
4534 if (sctx.is_simt)
4535 {
4536 sctx.simt_eargs[0] = uid;
4537 gimple *g
4538 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4539 gimple_call_set_lhs (g, uid);
4540 gimple_seq_add_stmt (ilist, g);
4541 sctx.simt_eargs.release ();
4542
4543 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4544 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4545 gimple_call_set_lhs (g, simtrec);
4546 gimple_seq_add_stmt (ilist, g);
4547 }
4548 if (sctx.lane)
4549 {
355fe088 4550 gimple *g
74bf76ed 4551 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
6943af07 4552 gimple_call_set_lhs (g, sctx.lane);
74bf76ed
JJ
4553 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4554 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6943af07 4555 g = gimple_build_assign (sctx.lane, INTEGER_CST,
0d0e4a03 4556 build_int_cst (unsigned_type_node, 0));
74bf76ed 4557 gimple_seq_add_stmt (ilist, g);
9669b00b
AM
4558 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4559 if (llist[2])
4560 {
4561 tree simt_vf = create_tmp_var (unsigned_type_node);
4562 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4563 gimple_call_set_lhs (g, simt_vf);
4564 gimple_seq_add_stmt (dlist, g);
4565
4566 tree t = build_int_cst (unsigned_type_node, 1);
4567 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4568 gimple_seq_add_stmt (dlist, g);
4569
4570 t = build_int_cst (unsigned_type_node, 0);
6943af07 4571 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
9669b00b
AM
4572 gimple_seq_add_stmt (dlist, g);
4573
4574 tree body = create_artificial_label (UNKNOWN_LOCATION);
4575 tree header = create_artificial_label (UNKNOWN_LOCATION);
4576 tree end = create_artificial_label (UNKNOWN_LOCATION);
4577 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4578 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4579
4580 gimple_seq_add_seq (dlist, llist[2]);
4581
4582 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4583 gimple_seq_add_stmt (dlist, g);
4584
4585 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4586 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4587 gimple_seq_add_stmt (dlist, g);
4588
4589 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4590 }
74bf76ed
JJ
4591 for (int i = 0; i < 2; i++)
4592 if (llist[i])
4593 {
b731b390 4594 tree vf = create_tmp_var (unsigned_type_node);
74bf76ed
JJ
4595 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4596 gimple_call_set_lhs (g, vf);
4597 gimple_seq *seq = i == 0 ? ilist : dlist;
4598 gimple_seq_add_stmt (seq, g);
4599 tree t = build_int_cst (unsigned_type_node, 0);
6943af07 4600 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
74bf76ed
JJ
4601 gimple_seq_add_stmt (seq, g);
4602 tree body = create_artificial_label (UNKNOWN_LOCATION);
4603 tree header = create_artificial_label (UNKNOWN_LOCATION);
4604 tree end = create_artificial_label (UNKNOWN_LOCATION);
4605 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4606 gimple_seq_add_stmt (seq, gimple_build_label (body));
4607 gimple_seq_add_seq (seq, llist[i]);
4608 t = build_int_cst (unsigned_type_node, 1);
6943af07 4609 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
74bf76ed
JJ
4610 gimple_seq_add_stmt (seq, g);
4611 gimple_seq_add_stmt (seq, gimple_build_label (header));
6943af07 4612 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
74bf76ed
JJ
4613 gimple_seq_add_stmt (seq, g);
4614 gimple_seq_add_stmt (seq, gimple_build_label (end));
4615 }
4616 }
0c6b03b5
AM
4617 if (sctx.is_simt)
4618 {
4619 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4620 gimple *g
4621 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4622 gimple_seq_add_stmt (dlist, g);
4623 }
74bf76ed 4624
953ff289
DN
4625 /* The copyin sequence is not to be executed by the main thread, since
4626 that would result in self-copies. Perhaps not visible to scalars,
4627 but it certainly is to C++ operator=. */
4628 if (copyin_seq)
4629 {
e79983f4
MM
4630 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4631 0);
953ff289
DN
4632 x = build2 (NE_EXPR, boolean_type_node, x,
4633 build_int_cst (TREE_TYPE (x), 0));
4634 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4635 gimplify_and_add (x, ilist);
4636 }
4637
4638 /* If any copyin variable is passed by reference, we must ensure the
4639 master thread doesn't modify it before it is copied over in all
8ca5b2a2
JJ
4640 threads. Similarly for variables in both firstprivate and
4641 lastprivate clauses we need to ensure the lastprivate copying
acf0174b
JJ
4642 happens after firstprivate copying in all threads. And similarly
4643 for UDRs if the initializer expression refers to omp_orig. */
4644 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
74bf76ed
JJ
4645 {
4646 /* Don't add any barrier for #pragma omp simd or
4647 #pragma omp distribute. */
4648 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
e2110f8f 4649 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
629b3d75 4650 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
74bf76ed
JJ
4651 }
4652
4653 /* If max_vf is non-zero, then we can use only a vectorization factor
4654 up to the max_vf we chose. So stick it into the safelen clause. */
6943af07 4655 if (sctx.max_vf)
74bf76ed 4656 {
629b3d75 4657 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
74bf76ed
JJ
4658 OMP_CLAUSE_SAFELEN);
4659 if (c == NULL_TREE
b46ebd6c
JJ
4660 || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4661 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
6943af07 4662 sctx.max_vf) == 1))
74bf76ed
JJ
4663 {
4664 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4665 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6943af07 4666 sctx.max_vf);
74bf76ed
JJ
4667 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4668 gimple_omp_for_set_clauses (ctx->stmt, c);
4669 }
4670 }
953ff289
DN
4671}
4672
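/* Illustrative shape of the output (added for exposition): for a construct
   like "#pragma omp parallel firstprivate(v) copyin(t)" where v has a copy
   constructor and destructor and t is threadprivate, the sequences built by
   lower_rec_input_clauses end up roughly as

     ILIST:  copy-construct the private v from the received .omp_data_i->v;
	     if (omp_get_thread_num () != 0) copy t from the master thread;
     DLIST:  destroy the private v;

   with the copyin test guarding against self-copies in the master thread, as
   noted in the comment inside the function.  */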
50674e96 4673
953ff289
DN
4674/* Generate code to implement the LASTPRIVATE clauses. This is used for
4675 both parallel and workshare constructs. PREDICATE may be NULL if it's
4676 always true. */
4677
4678static void
726a989a 4679lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
acf0174b 4680 omp_context *ctx)
953ff289 4681{
74bf76ed 4682 tree x, c, label = NULL, orig_clauses = clauses;
a68ab351 4683 bool par_clauses = false;
9669b00b 4684 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
953ff289 4685
74bf76ed
JJ
4686 /* Early exit if there are no lastprivate or linear clauses. */
4687 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4688 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4689 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4690 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4691 break;
953ff289
DN
4692 if (clauses == NULL)
4693 {
4694 /* If this was a workshare clause, see if it had been combined
4695 with its parallel. In that case, look for the clauses on the
4696 parallel statement itself. */
4697 if (is_parallel_ctx (ctx))
4698 return;
4699
4700 ctx = ctx->outer;
4701 if (ctx == NULL || !is_parallel_ctx (ctx))
4702 return;
4703
629b3d75 4704 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
953ff289
DN
4705 OMP_CLAUSE_LASTPRIVATE);
4706 if (clauses == NULL)
4707 return;
a68ab351 4708 par_clauses = true;
953ff289
DN
4709 }
4710
9669b00b
AM
4711 bool maybe_simt = false;
4712 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4713 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4714 {
629b3d75
MJ
4715 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4716 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
9669b00b
AM
4717 if (simduid)
4718 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4719 }
4720
726a989a
RB
4721 if (predicate)
4722 {
538dd0b7 4723 gcond *stmt;
726a989a 4724 tree label_true, arm1, arm2;
56b1c60e 4725 enum tree_code pred_code = TREE_CODE (predicate);
726a989a 4726
c2255bc4
AH
4727 label = create_artificial_label (UNKNOWN_LOCATION);
4728 label_true = create_artificial_label (UNKNOWN_LOCATION);
56b1c60e
MJ
4729 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4730 {
4731 arm1 = TREE_OPERAND (predicate, 0);
4732 arm2 = TREE_OPERAND (predicate, 1);
4733 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4734 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4735 }
4736 else
4737 {
4738 arm1 = predicate;
4739 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4740 arm2 = boolean_false_node;
4741 pred_code = NE_EXPR;
4742 }
9669b00b
AM
4743 if (maybe_simt)
4744 {
56b1c60e 4745 c = build2 (pred_code, boolean_type_node, arm1, arm2);
9669b00b
AM
4746 c = fold_convert (integer_type_node, c);
4747 simtcond = create_tmp_var (integer_type_node);
4748 gimplify_assign (simtcond, c, stmt_list);
4749 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4750 1, simtcond);
4751 c = create_tmp_var (integer_type_node);
4752 gimple_call_set_lhs (g, c);
4753 gimple_seq_add_stmt (stmt_list, g);
4754 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4755 label_true, label);
4756 }
4757 else
56b1c60e 4758 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
726a989a
RB
4759 gimple_seq_add_stmt (stmt_list, stmt);
4760 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4761 }
953ff289 4762
a68ab351 4763 for (c = clauses; c ;)
953ff289
DN
4764 {
4765 tree var, new_var;
db3927fb 4766 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 4767
74bf76ed
JJ
4768 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4769 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4770 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
a68ab351
JJ
4771 {
4772 var = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
4773 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4774 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4775 && is_taskloop_ctx (ctx))
4776 {
4777 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4778 new_var = lookup_decl (var, ctx->outer);
4779 }
4780 else
2187f2a2
JJ
4781 {
4782 new_var = lookup_decl (var, ctx);
4783 /* Avoid uninitialized warnings for lastprivate and
4784 for linear iterators. */
4785 if (predicate
4786 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4787 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4788 TREE_NO_WARNING (new_var) = 1;
4789 }
953ff289 4790
2260d19d 4791 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
74bf76ed
JJ
4792 {
4793 tree val = DECL_VALUE_EXPR (new_var);
2260d19d 4794 if (TREE_CODE (val) == ARRAY_REF
74bf76ed
JJ
4795 && VAR_P (TREE_OPERAND (val, 0))
4796 && lookup_attribute ("omp simd array",
4797 DECL_ATTRIBUTES (TREE_OPERAND (val,
4798 0))))
4799 {
4800 if (lastlane == NULL)
4801 {
b731b390 4802 lastlane = create_tmp_var (unsigned_type_node);
538dd0b7 4803 gcall *g
74bf76ed
JJ
4804 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4805 2, simduid,
4806 TREE_OPERAND (val, 1));
4807 gimple_call_set_lhs (g, lastlane);
4808 gimple_seq_add_stmt (stmt_list, g);
4809 }
4810 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4811 TREE_OPERAND (val, 0), lastlane,
4812 NULL_TREE, NULL_TREE);
0c6b03b5 4813 }
2260d19d
AM
4814 }
4815 else if (maybe_simt)
4816 {
4817 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4818 ? DECL_VALUE_EXPR (new_var)
4819 : new_var);
4820 if (simtlast == NULL)
0c6b03b5 4821 {
2260d19d
AM
4822 simtlast = create_tmp_var (unsigned_type_node);
4823 gcall *g = gimple_build_call_internal
4824 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4825 gimple_call_set_lhs (g, simtlast);
4826 gimple_seq_add_stmt (stmt_list, g);
74bf76ed 4827 }
2260d19d
AM
4828 x = build_call_expr_internal_loc
4829 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4830 TREE_TYPE (val), 2, val, simtlast);
4831 new_var = unshare_expr (new_var);
4832 gimplify_assign (new_var, x, stmt_list);
4833 new_var = unshare_expr (new_var);
74bf76ed
JJ
4834 }
4835
4836 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4837 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
726a989a 4838 {
355a7673 4839 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
726a989a
RB
4840 gimple_seq_add_seq (stmt_list,
4841 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
74bf76ed 4842 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
726a989a 4843 }
f7468577
JJ
4844 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4845 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4846 {
4847 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4848 gimple_seq_add_seq (stmt_list,
4849 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4850 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4851 }
953ff289 4852
d9a6bd32
JJ
4853 x = NULL_TREE;
4854 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4855 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4856 {
4857 gcc_checking_assert (is_taskloop_ctx (ctx));
4858 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4859 ctx->outer->outer);
4860 if (is_global_var (ovar))
4861 x = ovar;
4862 }
4863 if (!x)
c39dad64 4864 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
629b3d75 4865 if (omp_is_reference (var))
70f34814 4866 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
a68ab351 4867 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
726a989a 4868 gimplify_and_add (x, stmt_list);
a68ab351
JJ
4869 }
4870 c = OMP_CLAUSE_CHAIN (c);
4871 if (c == NULL && !par_clauses)
4872 {
4873 /* If this was a workshare clause, see if it had been combined
4874 with its parallel. In that case, continue looking for the
4875 clauses also on the parallel statement itself. */
4876 if (is_parallel_ctx (ctx))
4877 break;
4878
4879 ctx = ctx->outer;
4880 if (ctx == NULL || !is_parallel_ctx (ctx))
4881 break;
4882
629b3d75 4883 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
a68ab351
JJ
4884 OMP_CLAUSE_LASTPRIVATE);
4885 par_clauses = true;
4886 }
953ff289
DN
4887 }
4888
726a989a
RB
4889 if (label)
4890 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
953ff289
DN
4891}
4892
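/* Sketch of the generated control flow (added for exposition): with a
   non-NULL PREDICATE testing for the final iteration, the sequence built by
   lower_lastprivate_clauses has the shape

     if (PREDICATE) goto label_true; else goto label;
     label_true:
       orig_var1 = new_var1;	-> one store per lastprivate/linear clause
       orig_var2 = new_var2;
     label:

   so only the thread (or the SIMD/SIMT lane located via
   IFN_GOMP_SIMD_LAST_LANE / IFN_GOMP_SIMT_LAST_LANE) that ran the last
   iteration writes back to the original list items.  */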
e5014671
NS
4893/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4894 (which might be a placeholder). INNER is true if this is an inner
4895 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4896 join markers. Generate the before-loop forking sequence in
4897 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4898 general form of these sequences is
4899
4900 GOACC_REDUCTION_SETUP
4901 GOACC_FORK
4902 GOACC_REDUCTION_INIT
4903 ...
4904 GOACC_REDUCTION_FINI
4905 GOACC_JOIN
4906 GOACC_REDUCTION_TEARDOWN. */
4907
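/* Worked example (added for exposition; the exact operand order follows the
   build_call_expr_internal_loc calls below): for

     #pragma acc parallel loop reduction(+:sum)

   the loop ends up bracketed approximately as

     GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, offset);
     GOACC_FORK
     GOACC_REDUCTION (INIT, ref_to_res, sum, level, +, offset);
     ... loop body accumulating into the private copy ...
     GOACC_REDUCTION (FINI, ref_to_res, sum, level, +, offset);
     GOACC_JOIN
     sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, sum, level, +, offset);

   which is the general form listed in the comment above.  */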
41dbbb37 4908static void
e5014671
NS
4909lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4910 gcall *fork, gcall *join, gimple_seq *fork_seq,
4911 gimple_seq *join_seq, omp_context *ctx)
41dbbb37 4912{
e5014671
NS
4913 gimple_seq before_fork = NULL;
4914 gimple_seq after_fork = NULL;
4915 gimple_seq before_join = NULL;
4916 gimple_seq after_join = NULL;
4917 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4918 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4919 unsigned offset = 0;
4920
4921 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4922 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4923 {
4924 tree orig = OMP_CLAUSE_DECL (c);
4925 tree var = maybe_lookup_decl (orig, ctx);
4926 tree ref_to_res = NULL_TREE;
c42cfb5c
CP
4927 tree incoming, outgoing, v1, v2, v3;
4928 bool is_private = false;
e5014671
NS
4929
4930 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4931 if (rcode == MINUS_EXPR)
4932 rcode = PLUS_EXPR;
4933 else if (rcode == TRUTH_ANDIF_EXPR)
4934 rcode = BIT_AND_EXPR;
4935 else if (rcode == TRUTH_ORIF_EXPR)
4936 rcode = BIT_IOR_EXPR;
4937 tree op = build_int_cst (unsigned_type_node, rcode);
4938
4939 if (!var)
4940 var = orig;
e5014671
NS
4941
4942 incoming = outgoing = var;
01914336 4943
e5014671
NS
4944 if (!inner)
4945 {
4946 /* See if an outer construct also reduces this variable. */
4947 omp_context *outer = ctx;
41dbbb37 4948
e5014671
NS
4949 while (omp_context *probe = outer->outer)
4950 {
4951 enum gimple_code type = gimple_code (probe->stmt);
4952 tree cls;
41dbbb37 4953
e5014671
NS
4954 switch (type)
4955 {
4956 case GIMPLE_OMP_FOR:
4957 cls = gimple_omp_for_clauses (probe->stmt);
4958 break;
41dbbb37 4959
e5014671
NS
4960 case GIMPLE_OMP_TARGET:
4961 if (gimple_omp_target_kind (probe->stmt)
4962 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4963 goto do_lookup;
41dbbb37 4964
e5014671
NS
4965 cls = gimple_omp_target_clauses (probe->stmt);
4966 break;
41dbbb37 4967
e5014671
NS
4968 default:
4969 goto do_lookup;
4970 }
01914336 4971
e5014671
NS
4972 outer = probe;
4973 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4974 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4975 && orig == OMP_CLAUSE_DECL (cls))
c42cfb5c
CP
4976 {
4977 incoming = outgoing = lookup_decl (orig, probe);
4978 goto has_outer_reduction;
4979 }
4980 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4981 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4982 && orig == OMP_CLAUSE_DECL (cls))
4983 {
4984 is_private = true;
4985 goto do_lookup;
4986 }
e5014671 4987 }
41dbbb37 4988
e5014671
NS
4989 do_lookup:
4990 /* This is the outermost construct with this reduction,
4991 see if there's a mapping for it. */
4992 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
c42cfb5c 4993 && maybe_lookup_field (orig, outer) && !is_private)
e5014671
NS
4994 {
4995 ref_to_res = build_receiver_ref (orig, false, outer);
629b3d75 4996 if (omp_is_reference (orig))
e5014671 4997 ref_to_res = build_simple_mem_ref (ref_to_res);
41dbbb37 4998
c42cfb5c
CP
4999 tree type = TREE_TYPE (var);
5000 if (POINTER_TYPE_P (type))
5001 type = TREE_TYPE (type);
5002
e5014671 5003 outgoing = var;
c42cfb5c 5004 incoming = omp_reduction_init_op (loc, rcode, type);
e5014671
NS
5005 }
5006 else
11c4c4ba
CLT
5007 {
5008 /* Try to look at enclosing contexts for reduction var,
5009 use original if no mapping found. */
5010 tree t = NULL_TREE;
5011 omp_context *c = ctx->outer;
5012 while (c && !t)
5013 {
5014 t = maybe_lookup_decl (orig, c);
5015 c = c->outer;
5016 }
5017 incoming = outgoing = (t ? t : orig);
5018 }
01914336 5019
e5014671
NS
5020 has_outer_reduction:;
5021 }
41dbbb37 5022
e5014671
NS
5023 if (!ref_to_res)
5024 ref_to_res = integer_zero_node;
41dbbb37 5025
01914336 5026 if (omp_is_reference (orig))
c42cfb5c
CP
5027 {
5028 tree type = TREE_TYPE (var);
5029 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5030
5031 if (!inner)
5032 {
5033 tree x = create_tmp_var (TREE_TYPE (type), id);
5034 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5035 }
5036
5037 v1 = create_tmp_var (type, id);
5038 v2 = create_tmp_var (type, id);
5039 v3 = create_tmp_var (type, id);
5040
5041 gimplify_assign (v1, var, fork_seq);
5042 gimplify_assign (v2, var, fork_seq);
5043 gimplify_assign (v3, var, fork_seq);
5044
5045 var = build_simple_mem_ref (var);
5046 v1 = build_simple_mem_ref (v1);
5047 v2 = build_simple_mem_ref (v2);
5048 v3 = build_simple_mem_ref (v3);
5049 outgoing = build_simple_mem_ref (outgoing);
5050
e387fc64 5051 if (!TREE_CONSTANT (incoming))
c42cfb5c
CP
5052 incoming = build_simple_mem_ref (incoming);
5053 }
5054 else
5055 v1 = v2 = v3 = var;
5056
e5014671
NS
5057 /* Determine position in reduction buffer, which may be used
5058 by target. */
b8506a8a 5059 machine_mode mode = TYPE_MODE (TREE_TYPE (var));
e5014671
NS
5060 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5061 offset = (offset + align - 1) & ~(align - 1);
5062 tree off = build_int_cst (sizetype, offset);
5063 offset += GET_MODE_SIZE (mode);
41dbbb37 5064
e5014671
NS
5065 if (!init_code)
5066 {
5067 init_code = build_int_cst (integer_type_node,
5068 IFN_GOACC_REDUCTION_INIT);
5069 fini_code = build_int_cst (integer_type_node,
5070 IFN_GOACC_REDUCTION_FINI);
5071 setup_code = build_int_cst (integer_type_node,
5072 IFN_GOACC_REDUCTION_SETUP);
5073 teardown_code = build_int_cst (integer_type_node,
5074 IFN_GOACC_REDUCTION_TEARDOWN);
5075 }
5076
5077 tree setup_call
5078 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5079 TREE_TYPE (var), 6, setup_code,
5080 unshare_expr (ref_to_res),
5081 incoming, level, op, off);
5082 tree init_call
5083 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5084 TREE_TYPE (var), 6, init_code,
5085 unshare_expr (ref_to_res),
c42cfb5c 5086 v1, level, op, off);
e5014671
NS
5087 tree fini_call
5088 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5089 TREE_TYPE (var), 6, fini_code,
5090 unshare_expr (ref_to_res),
c42cfb5c 5091 v2, level, op, off);
e5014671
NS
5092 tree teardown_call
5093 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5094 TREE_TYPE (var), 6, teardown_code,
c42cfb5c 5095 ref_to_res, v3, level, op, off);
e5014671 5096
c42cfb5c
CP
5097 gimplify_assign (v1, setup_call, &before_fork);
5098 gimplify_assign (v2, init_call, &after_fork);
5099 gimplify_assign (v3, fini_call, &before_join);
e5014671
NS
5100 gimplify_assign (outgoing, teardown_call, &after_join);
5101 }
5102
5103 /* Now stitch things together. */
5104 gimple_seq_add_seq (fork_seq, before_fork);
5105 if (fork)
5106 gimple_seq_add_stmt (fork_seq, fork);
5107 gimple_seq_add_seq (fork_seq, after_fork);
5108
5109 gimple_seq_add_seq (join_seq, before_join);
5110 if (join)
5111 gimple_seq_add_stmt (join_seq, join);
5112 gimple_seq_add_seq (join_seq, after_join);
41dbbb37 5113}
50674e96 5114
953ff289
DN
5115/* Generate code to implement the REDUCTION clauses. */
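/* For example (an illustrative sketch, not taken from a testcase):

     #pragma omp parallel for reduction (+:sum)
     for (i = 0; i < n; i++)
       sum += f (i);

   With exactly one scalar reduction the merge of the private copy into
   the shared variable can be emitted as a single OMP_ATOMIC update;
   with several reductions, or with array/UDR reductions, the merges are
   instead wrapped in GOMP_atomic_start ()/GOMP_atomic_end () calls, as
   done at the end of this function.  */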
5116
5117static void
726a989a 5118lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
953ff289 5119{
726a989a 5120 gimple_seq sub_seq = NULL;
355fe088 5121 gimple *stmt;
374d0225 5122 tree x, c;
953ff289
DN
5123 int count = 0;
5124
e5014671
NS
5125 /* OpenACC loop reductions are handled elsewhere. */
5126 if (is_gimple_omp_oacc (ctx->stmt))
5127 return;
5128
74bf76ed
JJ
5129 /* SIMD reductions are handled in lower_rec_input_clauses. */
5130 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 5131 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
74bf76ed
JJ
5132 return;
5133
953ff289
DN
5134 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5135 update in that case, otherwise use a lock. */
5136 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
aaf46ef9 5137 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
953ff289 5138 {
d9a6bd32
JJ
5139 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5140 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
953ff289 5141 {
acf0174b 5142 /* Never use OMP_ATOMIC for array reductions or UDRs. */
953ff289
DN
5143 count = -1;
5144 break;
5145 }
5146 count++;
5147 }
5148
5149 if (count == 0)
5150 return;
5151
5152 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5153 {
d9a6bd32 5154 tree var, ref, new_var, orig_var;
953ff289 5155 enum tree_code code;
db3927fb 5156 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5157
aaf46ef9 5158 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
953ff289
DN
5159 continue;
5160
c24783c4 5161 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
d9a6bd32
JJ
5162 orig_var = var = OMP_CLAUSE_DECL (c);
5163 if (TREE_CODE (var) == MEM_REF)
5164 {
5165 var = TREE_OPERAND (var, 0);
e01d41e5
JJ
5166 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5167 var = TREE_OPERAND (var, 0);
c24783c4 5168 if (TREE_CODE (var) == ADDR_EXPR)
d9a6bd32 5169 var = TREE_OPERAND (var, 0);
c24783c4
JJ
5170 else
5171 {
 5172 /* If this is a pointer- or reference-based array
 5173 section, the var could be private in the outer
 5174 context, e.g. on an orphaned loop construct. Pretend this
 5175 is the private variable's outer reference. */
5176 ccode = OMP_CLAUSE_PRIVATE;
5177 if (TREE_CODE (var) == INDIRECT_REF)
5178 var = TREE_OPERAND (var, 0);
5179 }
d9a6bd32
JJ
5180 orig_var = var;
5181 if (is_variable_sized (var))
5182 {
5183 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5184 var = DECL_VALUE_EXPR (var);
5185 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5186 var = TREE_OPERAND (var, 0);
5187 gcc_assert (DECL_P (var));
5188 }
5189 }
953ff289 5190 new_var = lookup_decl (var, ctx);
629b3d75 5191 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
70f34814 5192 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
c24783c4 5193 ref = build_outer_var_ref (var, ctx, ccode);
953ff289 5194 code = OMP_CLAUSE_REDUCTION_CODE (c);
50674e96
DN
5195
5196 /* reduction(-:var) sums up the partial results, so it acts
5197 identically to reduction(+:var). */
953ff289
DN
5198 if (code == MINUS_EXPR)
5199 code = PLUS_EXPR;
5200
e5014671 5201 if (count == 1)
953ff289 5202 {
db3927fb 5203 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
5204
5205 addr = save_expr (addr);
5206 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
db3927fb 5207 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
953ff289 5208 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
726a989a 5209 gimplify_and_add (x, stmt_seqp);
953ff289
DN
5210 return;
5211 }
d9a6bd32
JJ
5212 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5213 {
5214 tree d = OMP_CLAUSE_DECL (c);
5215 tree type = TREE_TYPE (d);
5216 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5217 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5218 tree ptype = build_pointer_type (TREE_TYPE (type));
e01d41e5
JJ
5219 tree bias = TREE_OPERAND (d, 1);
5220 d = TREE_OPERAND (d, 0);
5221 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5222 {
5223 tree b = TREE_OPERAND (d, 1);
5224 b = maybe_lookup_decl (b, ctx);
5225 if (b == NULL)
5226 {
5227 b = TREE_OPERAND (d, 1);
5228 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5229 }
5230 if (integer_zerop (bias))
5231 bias = b;
5232 else
5233 {
5234 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5235 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5236 TREE_TYPE (b), b, bias);
5237 }
5238 d = TREE_OPERAND (d, 0);
5239 }
d9a6bd32
JJ
5240 /* For ref build_outer_var_ref already performs this, so
5241 only new_var needs a dereference. */
e01d41e5 5242 if (TREE_CODE (d) == INDIRECT_REF)
d9a6bd32
JJ
5243 {
5244 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
629b3d75 5245 gcc_assert (omp_is_reference (var) && var == orig_var);
d9a6bd32 5246 }
e01d41e5 5247 else if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32
JJ
5248 {
5249 if (orig_var == var)
5250 {
5251 new_var = build_fold_addr_expr (new_var);
5252 ref = build_fold_addr_expr (ref);
5253 }
5254 }
5255 else
5256 {
5257 gcc_assert (orig_var == var);
629b3d75 5258 if (omp_is_reference (var))
d9a6bd32
JJ
5259 ref = build_fold_addr_expr (ref);
5260 }
5261 if (DECL_P (v))
5262 {
5263 tree t = maybe_lookup_decl (v, ctx);
5264 if (t)
5265 v = t;
5266 else
5267 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5268 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5269 }
e01d41e5
JJ
5270 if (!integer_zerop (bias))
5271 {
5272 bias = fold_convert_loc (clause_loc, sizetype, bias);
5273 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5274 TREE_TYPE (new_var), new_var,
5275 unshare_expr (bias));
5276 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5277 TREE_TYPE (ref), ref, bias);
5278 }
d9a6bd32
JJ
5279 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5280 ref = fold_convert_loc (clause_loc, ptype, ref);
5281 tree m = create_tmp_var (ptype, NULL);
5282 gimplify_assign (m, new_var, stmt_seqp);
5283 new_var = m;
5284 m = create_tmp_var (ptype, NULL);
5285 gimplify_assign (m, ref, stmt_seqp);
5286 ref = m;
5287 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5288 tree body = create_artificial_label (UNKNOWN_LOCATION);
5289 tree end = create_artificial_label (UNKNOWN_LOCATION);
5290 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5291 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5292 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5293 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5294 {
5295 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5296 tree decl_placeholder
5297 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5298 SET_DECL_VALUE_EXPR (placeholder, out);
5299 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5300 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5301 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5302 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5303 gimple_seq_add_seq (&sub_seq,
5304 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5305 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5306 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5307 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5308 }
5309 else
5310 {
5311 x = build2 (code, TREE_TYPE (out), out, priv);
5312 out = unshare_expr (out);
5313 gimplify_assign (out, x, &sub_seq);
5314 }
5315 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5316 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5317 gimple_seq_add_stmt (&sub_seq, g);
5318 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5319 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5320 gimple_seq_add_stmt (&sub_seq, g);
5321 g = gimple_build_assign (i, PLUS_EXPR, i,
5322 build_int_cst (TREE_TYPE (i), 1));
5323 gimple_seq_add_stmt (&sub_seq, g);
5324 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5325 gimple_seq_add_stmt (&sub_seq, g);
5326 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5327 }
41dbbb37 5328 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
953ff289
DN
5329 {
5330 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5331
629b3d75 5332 if (omp_is_reference (var)
acf0174b
JJ
5333 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5334 TREE_TYPE (ref)))
db3927fb 5335 ref = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
5336 SET_DECL_VALUE_EXPR (placeholder, ref);
5337 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
355a7673 5338 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
726a989a
RB
5339 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5340 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
953ff289
DN
5341 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5342 }
5343 else
5344 {
5345 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5346 ref = build_outer_var_ref (var, ctx);
726a989a 5347 gimplify_assign (ref, x, &sub_seq);
953ff289
DN
5348 }
5349 }
5350
e79983f4
MM
5351 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5352 0);
726a989a 5353 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289 5354
726a989a 5355 gimple_seq_add_seq (stmt_seqp, sub_seq);
953ff289 5356
e79983f4
MM
5357 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5358 0);
726a989a 5359 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289
DN
5360}
5361
50674e96 5362
953ff289
DN
5363/* Generate code to implement the COPYPRIVATE clauses. */
5364
5365static void
726a989a 5366lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
953ff289
DN
5367 omp_context *ctx)
5368{
5369 tree c;
5370
5371 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5372 {
78db7d92 5373 tree var, new_var, ref, x;
953ff289 5374 bool by_ref;
db3927fb 5375 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5376
aaf46ef9 5377 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
953ff289
DN
5378 continue;
5379
5380 var = OMP_CLAUSE_DECL (c);
7c8f7639 5381 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
5382
5383 ref = build_sender_ref (var, ctx);
78db7d92
JJ
5384 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5385 if (by_ref)
5386 {
5387 x = build_fold_addr_expr_loc (clause_loc, new_var);
5388 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5389 }
726a989a 5390 gimplify_assign (ref, x, slist);
953ff289 5391
78db7d92
JJ
5392 ref = build_receiver_ref (var, false, ctx);
5393 if (by_ref)
5394 {
5395 ref = fold_convert_loc (clause_loc,
5396 build_pointer_type (TREE_TYPE (new_var)),
5397 ref);
5398 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5399 }
629b3d75 5400 if (omp_is_reference (var))
953ff289 5401 {
78db7d92 5402 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
70f34814
RG
5403 ref = build_simple_mem_ref_loc (clause_loc, ref);
5404 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289 5405 }
78db7d92 5406 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
953ff289
DN
5407 gimplify_and_add (x, rlist);
5408 }
5409}
5410
50674e96 5411
953ff289
DN
5412/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5413 and REDUCTION from the sender (aka parent) side. */
5414
5415static void
726a989a
RB
5416lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5417 omp_context *ctx)
953ff289 5418{
d9a6bd32
JJ
5419 tree c, t;
5420 int ignored_looptemp = 0;
5421 bool is_taskloop = false;
5422
 5423 /* For taskloop, ignore the first two _looptemp_ clauses; those are initialized
5424 by GOMP_taskloop. */
5425 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5426 {
5427 ignored_looptemp = 2;
5428 is_taskloop = true;
5429 }
953ff289
DN
5430
5431 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5432 {
50674e96 5433 tree val, ref, x, var;
953ff289 5434 bool by_ref, do_in = false, do_out = false;
db3927fb 5435 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5436
aaf46ef9 5437 switch (OMP_CLAUSE_CODE (c))
953ff289 5438 {
a68ab351
JJ
5439 case OMP_CLAUSE_PRIVATE:
5440 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5441 break;
5442 continue;
953ff289
DN
5443 case OMP_CLAUSE_FIRSTPRIVATE:
5444 case OMP_CLAUSE_COPYIN:
5445 case OMP_CLAUSE_LASTPRIVATE:
5446 case OMP_CLAUSE_REDUCTION:
d9a6bd32
JJ
5447 break;
5448 case OMP_CLAUSE_SHARED:
5449 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5450 break;
5451 continue;
acf0174b 5452 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32
JJ
5453 if (ignored_looptemp)
5454 {
5455 ignored_looptemp--;
5456 continue;
5457 }
953ff289
DN
5458 break;
5459 default:
5460 continue;
5461 }
5462
d2dda7fe 5463 val = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
5464 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5465 && TREE_CODE (val) == MEM_REF)
5466 {
5467 val = TREE_OPERAND (val, 0);
e01d41e5
JJ
5468 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5469 val = TREE_OPERAND (val, 0);
d9a6bd32
JJ
5470 if (TREE_CODE (val) == INDIRECT_REF
5471 || TREE_CODE (val) == ADDR_EXPR)
5472 val = TREE_OPERAND (val, 0);
5473 if (is_variable_sized (val))
5474 continue;
5475 }
5476
5477 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5478 outer taskloop region. */
5479 omp_context *ctx_for_o = ctx;
5480 if (is_taskloop
5481 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5482 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5483 ctx_for_o = ctx->outer;
5484
5485 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
50674e96 5486
8ca5b2a2
JJ
5487 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5488 && is_global_var (var))
5489 continue;
d9a6bd32
JJ
5490
5491 t = omp_member_access_dummy_var (var);
5492 if (t)
5493 {
5494 var = DECL_VALUE_EXPR (var);
5495 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5496 if (o != t)
5497 var = unshare_and_remap (var, t, o);
5498 else
5499 var = unshare_expr (var);
5500 }
5501
5502 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5503 {
5504 /* Handle taskloop firstprivate/lastprivate, where the
5505 lastprivate on GIMPLE_OMP_TASK is represented as
5506 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5507 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5508 x = omp_build_component_ref (ctx->sender_decl, f);
5509 if (use_pointer_for_field (val, ctx))
5510 var = build_fold_addr_expr (var);
5511 gimplify_assign (x, var, ilist);
5512 DECL_ABSTRACT_ORIGIN (f) = NULL;
5513 continue;
5514 }
5515
5516 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5517 || val == OMP_CLAUSE_DECL (c))
5518 && is_variable_sized (val))
953ff289 5519 continue;
7c8f7639 5520 by_ref = use_pointer_for_field (val, NULL);
953ff289 5521
aaf46ef9 5522 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
5523 {
5524 case OMP_CLAUSE_FIRSTPRIVATE:
ec35ea45
JJ
5525 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5526 && !by_ref
5527 && is_task_ctx (ctx))
5528 TREE_NO_WARNING (var) = 1;
5529 do_in = true;
5530 break;
5531
5532 case OMP_CLAUSE_PRIVATE:
953ff289 5533 case OMP_CLAUSE_COPYIN:
acf0174b 5534 case OMP_CLAUSE__LOOPTEMP_:
953ff289
DN
5535 do_in = true;
5536 break;
5537
5538 case OMP_CLAUSE_LASTPRIVATE:
629b3d75 5539 if (by_ref || omp_is_reference (val))
953ff289
DN
5540 {
5541 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5542 continue;
5543 do_in = true;
5544 }
5545 else
a68ab351
JJ
5546 {
5547 do_out = true;
5548 if (lang_hooks.decls.omp_private_outer_ref (val))
5549 do_in = true;
5550 }
953ff289
DN
5551 break;
5552
5553 case OMP_CLAUSE_REDUCTION:
5554 do_in = true;
d9a6bd32 5555 if (val == OMP_CLAUSE_DECL (c))
629b3d75 5556 do_out = !(by_ref || omp_is_reference (val));
d9a6bd32
JJ
5557 else
5558 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
953ff289
DN
5559 break;
5560
5561 default:
5562 gcc_unreachable ();
5563 }
5564
5565 if (do_in)
5566 {
5567 ref = build_sender_ref (val, ctx);
db3927fb 5568 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
726a989a 5569 gimplify_assign (ref, x, ilist);
a68ab351
JJ
5570 if (is_task_ctx (ctx))
5571 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
953ff289 5572 }
50674e96 5573
953ff289
DN
5574 if (do_out)
5575 {
5576 ref = build_sender_ref (val, ctx);
726a989a 5577 gimplify_assign (var, ref, olist);
953ff289
DN
5578 }
5579 }
5580}
5581
726a989a
RB
5582/* Generate code to implement SHARED from the sender (aka parent)
5583 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5584 list things that got automatically shared. */
953ff289
DN
5585
5586static void
726a989a 5587lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
953ff289 5588{
d9a6bd32 5589 tree var, ovar, nvar, t, f, x, record_type;
953ff289
DN
5590
5591 if (ctx->record_type == NULL)
5592 return;
50674e96 5593
a68ab351 5594 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
910ad8de 5595 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
953ff289
DN
5596 {
5597 ovar = DECL_ABSTRACT_ORIGIN (f);
d9a6bd32
JJ
5598 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5599 continue;
5600
953ff289
DN
5601 nvar = maybe_lookup_decl (ovar, ctx);
5602 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5603 continue;
5604
50674e96
DN
 5605 /* If CTX is a nested parallel directive, find the immediately
 5606 enclosing parallel or workshare construct that contains a
 5607 mapping for OVAR. */
d2dda7fe 5608 var = lookup_decl_in_outer_ctx (ovar, ctx);
50674e96 5609
d9a6bd32
JJ
5610 t = omp_member_access_dummy_var (var);
5611 if (t)
5612 {
5613 var = DECL_VALUE_EXPR (var);
5614 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5615 if (o != t)
5616 var = unshare_and_remap (var, t, o);
5617 else
5618 var = unshare_expr (var);
5619 }
5620
7c8f7639 5621 if (use_pointer_for_field (ovar, ctx))
953ff289
DN
5622 {
5623 x = build_sender_ref (ovar, ctx);
50674e96 5624 var = build_fold_addr_expr (var);
726a989a 5625 gimplify_assign (x, var, ilist);
953ff289
DN
5626 }
5627 else
5628 {
5629 x = build_sender_ref (ovar, ctx);
726a989a 5630 gimplify_assign (x, var, ilist);
953ff289 5631
14e5b285
RG
5632 if (!TREE_READONLY (var)
5633 /* We don't need to receive a new reference to a result
5634 or parm decl. In fact we may not store to it as we will
5635 invalidate any pending RSO and generate wrong gimple
5636 during inlining. */
5637 && !((TREE_CODE (var) == RESULT_DECL
5638 || TREE_CODE (var) == PARM_DECL)
5639 && DECL_BY_REFERENCE (var)))
a68ab351
JJ
5640 {
5641 x = build_sender_ref (ovar, ctx);
726a989a 5642 gimplify_assign (var, x, olist);
a68ab351 5643 }
953ff289
DN
5644 }
5645 }
5646}
5647
e4834818
NS
 5648/* Emit an OpenACC head marker call, encapsulating the partitioning and
5649 other information that must be processed by the target compiler.
5650 Return the maximum number of dimensions the associated loop might
5651 be partitioned over. */
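/* As a rough sketch, a loop annotated with, say,

     #pragma acc loop gang worker

   yields a head marker of approximately this shape (the lhs is the
   .data_dep variable created by the caller):

     .data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, levels, tag);

   with LEVELS counting the requested partitioned dimensions, TAG the
   OR of the OLF_* flags gathered from the clauses, and an optional
   trailing argument when gang (static:expr) was used.  */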
5652
5653static unsigned
5654lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5655 gimple_seq *seq, omp_context *ctx)
5656{
5657 unsigned levels = 0;
5658 unsigned tag = 0;
5659 tree gang_static = NULL_TREE;
5660 auto_vec<tree, 5> args;
5661
5662 args.quick_push (build_int_cst
5663 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5664 args.quick_push (ddvar);
5665 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5666 {
5667 switch (OMP_CLAUSE_CODE (c))
5668 {
5669 case OMP_CLAUSE_GANG:
5670 tag |= OLF_DIM_GANG;
5671 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5672 /* static:* is represented by -1, and we can ignore it, as
5673 scheduling is always static. */
5674 if (gang_static && integer_minus_onep (gang_static))
5675 gang_static = NULL_TREE;
5676 levels++;
5677 break;
5678
5679 case OMP_CLAUSE_WORKER:
5680 tag |= OLF_DIM_WORKER;
5681 levels++;
5682 break;
5683
5684 case OMP_CLAUSE_VECTOR:
5685 tag |= OLF_DIM_VECTOR;
5686 levels++;
5687 break;
5688
5689 case OMP_CLAUSE_SEQ:
5690 tag |= OLF_SEQ;
5691 break;
5692
5693 case OMP_CLAUSE_AUTO:
5694 tag |= OLF_AUTO;
5695 break;
5696
5697 case OMP_CLAUSE_INDEPENDENT:
5698 tag |= OLF_INDEPENDENT;
5699 break;
5700
02889d23
CLT
5701 case OMP_CLAUSE_TILE:
5702 tag |= OLF_TILE;
5703 break;
5704
e4834818
NS
5705 default:
5706 continue;
5707 }
5708 }
5709
5710 if (gang_static)
5711 {
5712 if (DECL_P (gang_static))
5713 gang_static = build_outer_var_ref (gang_static, ctx);
5714 tag |= OLF_GANG_STATIC;
5715 }
5716
5717 /* In a parallel region, loops are implicitly INDEPENDENT. */
5718 omp_context *tgt = enclosing_target_ctx (ctx);
5719 if (!tgt || is_oacc_parallel (tgt))
5720 tag |= OLF_INDEPENDENT;
5721
02889d23
CLT
5722 if (tag & OLF_TILE)
5723 /* Tiling could use all 3 levels. */
5724 levels = 3;
5725 else
5726 {
5727 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5728 Ensure at least one level, or 2 for possible auto
 5729 partitioning. */
5730 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5731 << OLF_DIM_BASE) | OLF_SEQ));
5732
5733 if (levels < 1u + maybe_auto)
5734 levels = 1u + maybe_auto;
5735 }
e4834818
NS
5736
5737 args.quick_push (build_int_cst (integer_type_node, levels));
5738 args.quick_push (build_int_cst (integer_type_node, tag));
5739 if (gang_static)
5740 args.quick_push (gang_static);
5741
5742 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5743 gimple_set_location (call, loc);
5744 gimple_set_lhs (call, ddvar);
5745 gimple_seq_add_stmt (seq, call);
5746
5747 return levels;
5748}
5749
 5750/* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5751 partitioning level of the enclosed region. */
5752
5753static void
5754lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5755 tree tofollow, gimple_seq *seq)
5756{
5757 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5758 : IFN_UNIQUE_OACC_TAIL_MARK);
5759 tree marker = build_int_cst (integer_type_node, marker_kind);
5760 int nargs = 2 + (tofollow != NULL_TREE);
5761 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5762 marker, ddvar, tofollow);
5763 gimple_set_location (call, loc);
5764 gimple_set_lhs (call, ddvar);
5765 gimple_seq_add_stmt (seq, call);
5766}
5767
5768/* Generate the before and after OpenACC loop sequences. CLAUSES are
5769 the loop clauses, from which we extract reductions. Initialize
5770 HEAD and TAIL. */
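/* Roughly, for a loop partitioned over two levels the sequences built
   here nest as

     HEAD: head-mark ... fork (outer) ... fork (inner) ...
     TAIL: ... join (inner) ... join (outer) ... tail-mark

   with each level's reduction setup/init emitted around its fork and
   the matching fini/teardown around its join (see
   lower_oacc_reductions above).  */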
5771
5772static void
5773lower_oacc_head_tail (location_t loc, tree clauses,
5774 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5775{
5776 bool inner = false;
5777 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5778 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5779
5780 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
e4834818
NS
5781 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5782 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5783
4877b5a4 5784 gcc_assert (count);
e4834818
NS
5785 for (unsigned done = 1; count; count--, done++)
5786 {
5787 gimple_seq fork_seq = NULL;
5788 gimple_seq join_seq = NULL;
5789
5790 tree place = build_int_cst (integer_type_node, -1);
5791 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5792 fork_kind, ddvar, place);
5793 gimple_set_location (fork, loc);
5794 gimple_set_lhs (fork, ddvar);
5795
5796 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5797 join_kind, ddvar, place);
5798 gimple_set_location (join, loc);
5799 gimple_set_lhs (join, ddvar);
5800
5801 /* Mark the beginning of this level sequence. */
5802 if (inner)
5803 lower_oacc_loop_marker (loc, ddvar, true,
5804 build_int_cst (integer_type_node, count),
5805 &fork_seq);
5806 lower_oacc_loop_marker (loc, ddvar, false,
5807 build_int_cst (integer_type_node, done),
5808 &join_seq);
5809
e5014671
NS
5810 lower_oacc_reductions (loc, clauses, place, inner,
5811 fork, join, &fork_seq, &join_seq, ctx);
e4834818
NS
5812
5813 /* Append this level to head. */
5814 gimple_seq_add_seq (head, fork_seq);
5815 /* Prepend it to tail. */
5816 gimple_seq_add_seq (&join_seq, *tail);
5817 *tail = join_seq;
5818
5819 inner = true;
5820 }
5821
5822 /* Mark the end of the sequence. */
5823 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5824 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5825}
726a989a 5826
629b3d75
MJ
5827/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5828 catch handler and return it. This prevents programs from violating the
5829 structured block semantics with throws. */
726a989a 5830
629b3d75
MJ
5831static gimple_seq
5832maybe_catch_exception (gimple_seq body)
726a989a 5833{
629b3d75
MJ
5834 gimple *g;
5835 tree decl;
b2b40051 5836
629b3d75
MJ
5837 if (!flag_exceptions)
5838 return body;
b2b40051 5839
629b3d75
MJ
5840 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5841 decl = lang_hooks.eh_protect_cleanup_actions ();
5842 else
5843 decl = builtin_decl_explicit (BUILT_IN_TRAP);
b2b40051 5844
629b3d75
MJ
5845 g = gimple_build_eh_must_not_throw (decl);
5846 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5847 GIMPLE_TRY_CATCH);
b2b40051 5848
629b3d75 5849 return gimple_seq_alloc_with_stmt (g);
b2b40051
MJ
5850}
5851
629b3d75
MJ
5852\f
5853/* Routines to lower OMP directives into OMP-GIMPLE. */
726a989a 5854
629b3d75
MJ
5855/* If ctx is a worksharing context inside of a cancellable parallel
 5856 region and it isn't nowait, add a lhs to its GIMPLE_OMP_RETURN
 5857 and a conditional branch to the parallel's cancel_label to handle
5858 cancellation in the implicit barrier. */
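/* A minimal sketch of what gets appended (names are illustrative):

     <cancel_res> = GIMPLE_OMP_RETURN;   <- the added lhs
     if (<cancel_res> != 0) goto <cancel_label>; else goto <fallthru>;
   <fallthru>:

   so a cancelled parallel can leave the worksharing region at its
   implicit barrier.  */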
953ff289
DN
5859
5860static void
629b3d75 5861maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
953ff289 5862{
629b3d75
MJ
5863 gimple *omp_return = gimple_seq_last_stmt (*body);
5864 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5865 if (gimple_omp_return_nowait_p (omp_return))
5866 return;
5867 if (ctx->outer
5868 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5869 && ctx->outer->cancellable)
50674e96 5870 {
629b3d75
MJ
5871 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5872 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5873 tree lhs = create_tmp_var (c_bool_type);
5874 gimple_omp_return_set_lhs (omp_return, lhs);
5875 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5876 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5877 fold_convert (c_bool_type,
5878 boolean_false_node),
5879 ctx->outer->cancel_label, fallthru_label);
5880 gimple_seq_add_stmt (body, g);
5881 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
50674e96 5882 }
629b3d75 5883}
953ff289 5884
629b3d75
MJ
5885/* Lower the OpenMP sections directive in the current statement in GSI_P.
5886 CTX is the enclosing OMP context for the current statement. */
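/* For illustration, a directive such as

     #pragma omp sections nowait
     {
       #pragma omp section
         a ();
       #pragma omp section
         b ();
     }

   keeps its GIMPLE_OMP_SECTIONS statement but is surrounded, in order,
   by the privatization sequence, a sections-switch, a bind holding the
   lowered section bodies, a GIMPLE_OMP_CONTINUE on the ".section"
   control variable, the reduction merges, and a GIMPLE_OMP_RETURN
   whose nowait flag reflects the NOWAIT clause.  */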
953ff289 5887
629b3d75
MJ
5888static void
5889lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5890{
5891 tree block, control;
5892 gimple_stmt_iterator tgsi;
5893 gomp_sections *stmt;
5894 gimple *t;
5895 gbind *new_stmt, *bind;
5896 gimple_seq ilist, dlist, olist, new_body;
953ff289 5897
629b3d75 5898 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
953ff289 5899
629b3d75 5900 push_gimplify_context ();
acf0174b 5901
629b3d75
MJ
5902 dlist = NULL;
5903 ilist = NULL;
5904 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5905 &ilist, &dlist, ctx, NULL);
953ff289 5906
629b3d75
MJ
5907 new_body = gimple_omp_body (stmt);
5908 gimple_omp_set_body (stmt, NULL);
5909 tgsi = gsi_start (new_body);
5910 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
953ff289 5911 {
629b3d75
MJ
5912 omp_context *sctx;
5913 gimple *sec_start;
50674e96 5914
629b3d75
MJ
5915 sec_start = gsi_stmt (tgsi);
5916 sctx = maybe_lookup_ctx (sec_start);
5917 gcc_assert (sctx);
5918
5919 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5920 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5921 GSI_CONTINUE_LINKING);
5922 gimple_omp_set_body (sec_start, NULL);
5923
5924 if (gsi_one_before_end_p (tgsi))
50674e96 5925 {
629b3d75
MJ
5926 gimple_seq l = NULL;
5927 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5928 &l, ctx);
5929 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5930 gimple_omp_section_set_last (sec_start);
5931 }
917948d3 5932
629b3d75
MJ
5933 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5934 GSI_CONTINUE_LINKING);
5935 }
50674e96 5936
629b3d75
MJ
5937 block = make_node (BLOCK);
5938 bind = gimple_build_bind (NULL, new_body, block);
50674e96 5939
629b3d75
MJ
5940 olist = NULL;
5941 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
50674e96 5942
629b3d75
MJ
5943 block = make_node (BLOCK);
5944 new_stmt = gimple_build_bind (NULL, NULL, block);
5945 gsi_replace (gsi_p, new_stmt, true);
50674e96 5946
629b3d75
MJ
5947 pop_gimplify_context (new_stmt);
5948 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5949 BLOCK_VARS (block) = gimple_bind_vars (bind);
5950 if (BLOCK_VARS (block))
5951 TREE_USED (block) = 1;
50674e96 5952
629b3d75
MJ
5953 new_body = NULL;
5954 gimple_seq_add_seq (&new_body, ilist);
5955 gimple_seq_add_stmt (&new_body, stmt);
5956 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5957 gimple_seq_add_stmt (&new_body, bind);
50674e96 5958
629b3d75
MJ
5959 control = create_tmp_var (unsigned_type_node, ".section");
5960 t = gimple_build_omp_continue (control, control);
5961 gimple_omp_sections_set_control (stmt, control);
5962 gimple_seq_add_stmt (&new_body, t);
50674e96 5963
629b3d75
MJ
5964 gimple_seq_add_seq (&new_body, olist);
5965 if (ctx->cancellable)
5966 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5967 gimple_seq_add_seq (&new_body, dlist);
917948d3 5968
629b3d75 5969 new_body = maybe_catch_exception (new_body);
50674e96 5970
01914336
MJ
5971 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5972 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5973 t = gimple_build_omp_return (nowait);
629b3d75
MJ
5974 gimple_seq_add_stmt (&new_body, t);
5975 maybe_add_implicit_barrier_cancel (ctx, &new_body);
953ff289 5976
629b3d75 5977 gimple_bind_set_body (new_stmt, new_body);
953ff289
DN
5978}
5979
9a771876 5980
629b3d75
MJ
5981/* A subroutine of lower_omp_single. Expand the simple form of
5982 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
9a771876 5983
629b3d75
MJ
5984 if (GOMP_single_start ())
5985 BODY;
5986 [ GOMP_barrier (); ] -> unless 'nowait' is present.
9a771876 5987
629b3d75
MJ
5988 FIXME. It may be better to delay expanding the logic of this until
5989 pass_expand_omp. The expanded logic may make the job more difficult
 5990 for a synchronization analysis pass. */
a68ab351
JJ
5991
5992static void
629b3d75 5993lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
a68ab351 5994{
629b3d75
MJ
5995 location_t loc = gimple_location (single_stmt);
5996 tree tlabel = create_artificial_label (loc);
5997 tree flabel = create_artificial_label (loc);
5998 gimple *call, *cond;
5999 tree lhs, decl;
20906c66 6000
629b3d75
MJ
6001 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6002 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6003 call = gimple_build_call (decl, 0);
6004 gimple_call_set_lhs (call, lhs);
6005 gimple_seq_add_stmt (pre_p, call);
a68ab351 6006
629b3d75
MJ
6007 cond = gimple_build_cond (EQ_EXPR, lhs,
6008 fold_convert_loc (loc, TREE_TYPE (lhs),
6009 boolean_true_node),
6010 tlabel, flabel);
6011 gimple_seq_add_stmt (pre_p, cond);
6012 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6013 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6014 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
a68ab351
JJ
6015}
6016
6017
629b3d75
MJ
6018/* A subroutine of lower_omp_single. Expand the simple form of
6019 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
953ff289 6020
629b3d75 6021 #pragma omp single copyprivate (a, b, c)
953ff289 6022
629b3d75 6023 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
953ff289 6024
629b3d75
MJ
6025 {
6026 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6027 {
6028 BODY;
6029 copyout.a = a;
6030 copyout.b = b;
6031 copyout.c = c;
6032 GOMP_single_copy_end (&copyout);
6033 }
6034 else
6035 {
6036 a = copyout_p->a;
6037 b = copyout_p->b;
6038 c = copyout_p->c;
6039 }
6040 GOMP_barrier ();
6041 }
726a989a 6042
629b3d75
MJ
6043 FIXME. It may be better to delay expanding the logic of this until
6044 pass_expand_omp. The expanded logic may make the job more difficult
 6045 for a synchronization analysis pass. */
953ff289 6046
629b3d75
MJ
6047static void
6048lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6049 omp_context *ctx)
6050{
6051 tree ptr_type, t, l0, l1, l2, bfn_decl;
6052 gimple_seq copyin_seq;
6053 location_t loc = gimple_location (single_stmt);
953ff289 6054
629b3d75 6055 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
953ff289 6056
629b3d75
MJ
6057 ptr_type = build_pointer_type (ctx->record_type);
6058 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
953ff289 6059
629b3d75
MJ
6060 l0 = create_artificial_label (loc);
6061 l1 = create_artificial_label (loc);
6062 l2 = create_artificial_label (loc);
953ff289 6063
629b3d75
MJ
6064 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6065 t = build_call_expr_loc (loc, bfn_decl, 0);
6066 t = fold_convert_loc (loc, ptr_type, t);
6067 gimplify_assign (ctx->receiver_decl, t, pre_p);
953ff289 6068
629b3d75
MJ
6069 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6070 build_int_cst (ptr_type, 0));
6071 t = build3 (COND_EXPR, void_type_node, t,
6072 build_and_jump (&l0), build_and_jump (&l1));
6073 gimplify_and_add (t, pre_p);
953ff289 6074
629b3d75 6075 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
953ff289 6076
629b3d75 6077 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
953ff289 6078
629b3d75
MJ
6079 copyin_seq = NULL;
6080 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6081 &copyin_seq, ctx);
953ff289 6082
629b3d75
MJ
6083 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6084 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6085 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6086 gimplify_and_add (t, pre_p);
2aee3e57 6087
629b3d75
MJ
6088 t = build_and_jump (&l2);
6089 gimplify_and_add (t, pre_p);
953ff289 6090
629b3d75 6091 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
953ff289 6092
629b3d75 6093 gimple_seq_add_seq (pre_p, copyin_seq);
777f7f9a 6094
629b3d75 6095 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
777f7f9a 6096}
50674e96 6097
629b3d75
MJ
6098
6099/* Expand code for an OpenMP single directive. */
2b4cf991
JJ
6100
6101static void
629b3d75 6102lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
2b4cf991 6103{
629b3d75 6104 tree block;
629b3d75
MJ
6105 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6106 gbind *bind;
6107 gimple_seq bind_body, bind_body_tail = NULL, dlist;
2b4cf991 6108
629b3d75 6109 push_gimplify_context ();
2b4cf991 6110
629b3d75
MJ
6111 block = make_node (BLOCK);
6112 bind = gimple_build_bind (NULL, NULL, block);
6113 gsi_replace (gsi_p, bind, true);
6114 bind_body = NULL;
6115 dlist = NULL;
6116 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6117 &bind_body, &dlist, ctx, NULL);
6118 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
2b4cf991 6119
629b3d75 6120 gimple_seq_add_stmt (&bind_body, single_stmt);
2b4cf991 6121
629b3d75
MJ
6122 if (ctx->record_type)
6123 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6124 else
6125 lower_omp_single_simple (single_stmt, &bind_body);
2b4cf991 6126
629b3d75 6127 gimple_omp_set_body (single_stmt, NULL);
2b4cf991 6128
629b3d75 6129 gimple_seq_add_seq (&bind_body, dlist);
5a0f4dd3 6130
629b3d75 6131 bind_body = maybe_catch_exception (bind_body);
5a0f4dd3 6132
01914336
MJ
6133 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6134 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6135 gimple *g = gimple_build_omp_return (nowait);
6136 gimple_seq_add_stmt (&bind_body_tail, g);
629b3d75
MJ
6137 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6138 if (ctx->record_type)
6139 {
6140 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6141 tree clobber = build_constructor (ctx->record_type, NULL);
6142 TREE_THIS_VOLATILE (clobber) = 1;
6143 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6144 clobber), GSI_SAME_STMT);
6145 }
6146 gimple_seq_add_seq (&bind_body, bind_body_tail);
6147 gimple_bind_set_body (bind, bind_body);
5a0f4dd3 6148
629b3d75 6149 pop_gimplify_context (bind);
5a0f4dd3 6150
629b3d75
MJ
6151 gimple_bind_append_vars (bind, ctx->block_vars);
6152 BLOCK_VARS (block) = ctx->block_vars;
6153 if (BLOCK_VARS (block))
6154 TREE_USED (block) = 1;
5a0f4dd3
JJ
6155}
6156
74bf76ed 6157
629b3d75 6158/* Expand code for an OpenMP master directive. */
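/* A minimal sketch of the expansion (illustrative pseudocode):

     if (omp_get_thread_num () == 0)
       <lowered body>;
   <lab>:
     GIMPLE_OMP_RETURN (nowait);

   i.e. the body is executed by the master thread only and the construct
   implies no barrier on exit.  */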
953ff289
DN
6159
6160static void
629b3d75 6161lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 6162{
629b3d75
MJ
6163 tree block, lab = NULL, x, bfn_decl;
6164 gimple *stmt = gsi_stmt (*gsi_p);
6165 gbind *bind;
6166 location_t loc = gimple_location (stmt);
6167 gimple_seq tseq;
50674e96 6168
629b3d75 6169 push_gimplify_context ();
50674e96 6170
629b3d75
MJ
6171 block = make_node (BLOCK);
6172 bind = gimple_build_bind (NULL, NULL, block);
6173 gsi_replace (gsi_p, bind, true);
6174 gimple_bind_add_stmt (bind, stmt);
50674e96 6175
629b3d75
MJ
6176 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6177 x = build_call_expr_loc (loc, bfn_decl, 0);
6178 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6179 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6180 tseq = NULL;
6181 gimplify_and_add (x, &tseq);
6182 gimple_bind_add_seq (bind, tseq);
9a771876 6183
629b3d75
MJ
6184 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6185 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6186 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6187 gimple_omp_set_body (stmt, NULL);
b357f682 6188
629b3d75 6189 gimple_bind_add_stmt (bind, gimple_build_label (lab));
99819c63 6190
629b3d75 6191 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
e01d41e5 6192
629b3d75 6193 pop_gimplify_context (bind);
b8698a0f 6194
629b3d75
MJ
6195 gimple_bind_append_vars (bind, ctx->block_vars);
6196 BLOCK_VARS (block) = ctx->block_vars;
953ff289
DN
6197}
6198
e4834818 6199
629b3d75 6200/* Expand code for an OpenMP taskgroup directive. */
e4834818 6201
629b3d75
MJ
6202static void
6203lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
e4834818 6204{
629b3d75
MJ
6205 gimple *stmt = gsi_stmt (*gsi_p);
6206 gcall *x;
6207 gbind *bind;
6208 tree block = make_node (BLOCK);
e4834818 6209
629b3d75
MJ
6210 bind = gimple_build_bind (NULL, NULL, block);
6211 gsi_replace (gsi_p, bind, true);
6212 gimple_bind_add_stmt (bind, stmt);
e4834818 6213
629b3d75
MJ
6214 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6215 0);
6216 gimple_bind_add_stmt (bind, x);
e4834818 6217
629b3d75
MJ
6218 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6219 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6220 gimple_omp_set_body (stmt, NULL);
e4834818 6221
629b3d75 6222 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
e4834818 6223
629b3d75
MJ
6224 gimple_bind_append_vars (bind, ctx->block_vars);
6225 BLOCK_VARS (block) = ctx->block_vars;
e4834818
NS
6226}
6227
50674e96 6228
629b3d75 6229/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
74bf76ed
JJ
6230
6231static void
629b3d75
MJ
6232lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6233 omp_context *ctx)
74bf76ed 6234{
629b3d75
MJ
6235 struct omp_for_data fd;
6236 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6237 return;
74bf76ed 6238
629b3d75
MJ
6239 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6240 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6241 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6242 if (!fd.ordered)
6243 return;
acf0174b 6244
629b3d75
MJ
6245 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6246 tree c = gimple_omp_ordered_clauses (ord_stmt);
6247 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6248 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
74bf76ed 6249 {
629b3d75
MJ
6250 /* Merge depend clauses from multiple adjacent
6251 #pragma omp ordered depend(sink:...) constructs
6252 into one #pragma omp ordered depend(sink:...), so that
6253 we can optimize them together. */
6254 gimple_stmt_iterator gsi = *gsi_p;
6255 gsi_next (&gsi);
6256 while (!gsi_end_p (gsi))
74bf76ed 6257 {
629b3d75
MJ
6258 gimple *stmt = gsi_stmt (gsi);
6259 if (is_gimple_debug (stmt)
6260 || gimple_code (stmt) == GIMPLE_NOP)
74bf76ed 6261 {
629b3d75
MJ
6262 gsi_next (&gsi);
6263 continue;
74bf76ed 6264 }
629b3d75
MJ
6265 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6266 break;
6267 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6268 c = gimple_omp_ordered_clauses (ord_stmt2);
6269 if (c == NULL_TREE
6270 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6271 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6272 break;
6273 while (*list_p)
6274 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6275 *list_p = c;
6276 gsi_remove (&gsi, true);
74bf76ed
JJ
6277 }
6278 }
74bf76ed 6279
629b3d75
MJ
6280 /* Canonicalize sink dependence clauses into one folded clause if
6281 possible.
74bf76ed 6282
629b3d75
MJ
6283 The basic algorithm is to create a sink vector whose first
6284 element is the GCD of all the first elements, and whose remaining
6285 elements are the minimum of the subsequent columns.
74bf76ed 6286
629b3d75
MJ
6287 We ignore dependence vectors whose first element is zero because
6288 such dependencies are known to be executed by the same thread.
acf0174b 6289
629b3d75
MJ
6290 We take into account the direction of the loop, so a minimum
6291 becomes a maximum if the loop is iterating forwards. We also
6292 ignore sink clauses where the loop direction is unknown, or where
6293 the offsets are clearly invalid because they are not a multiple
6294 of the loop increment.
6295
6296 For example:
6297
6298 #pragma omp for ordered(2)
6299 for (i=0; i < N; ++i)
6300 for (j=0; j < M; ++j)
acf0174b 6301 {
629b3d75
MJ
6302 #pragma omp ordered \
6303 depend(sink:i-8,j-2) \
6304 depend(sink:i,j-1) \ // Completely ignored because i+0.
6305 depend(sink:i-4,j-3) \
6306 depend(sink:i-6,j-4)
6307 #pragma omp ordered depend(source)
acf0174b 6308 }
acf0174b 6309
629b3d75 6310 Folded clause is:
74bf76ed 6311
629b3d75
MJ
6312 depend(sink:-gcd(8,4,6),-min(2,3,4))
6313 -or-
6314 depend(sink:-2,-2)
6315 */
74bf76ed 6316
629b3d75
MJ
6317 /* FIXME: Computing GCD's where the first element is zero is
6318 non-trivial in the presence of collapsed loops. Do this later. */
6319 if (fd.collapse > 1)
6320 return;
74bf76ed 6321
629b3d75 6322 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
c3684b7b
MS
6323
6324 /* wide_int is not a POD so it must be default-constructed. */
6325 for (unsigned i = 0; i != 2 * len - 1; ++i)
6326 new (static_cast<void*>(folded_deps + i)) wide_int ();
6327
629b3d75
MJ
6328 tree folded_dep = NULL_TREE;
6329 /* TRUE if the first dimension's offset is negative. */
6330 bool neg_offset_p = false;
74bf76ed 6331
629b3d75
MJ
6332 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6333 unsigned int i;
6334 while ((c = *list_p) != NULL)
74bf76ed 6335 {
629b3d75 6336 bool remove = false;
74bf76ed 6337
629b3d75
MJ
6338 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6339 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6340 goto next_ordered_clause;
74bf76ed 6341
629b3d75
MJ
6342 tree vec;
6343 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6344 vec && TREE_CODE (vec) == TREE_LIST;
6345 vec = TREE_CHAIN (vec), ++i)
74bf76ed 6346 {
629b3d75 6347 gcc_assert (i < len);
74bf76ed 6348
629b3d75
MJ
6349 /* omp_extract_for_data has canonicalized the condition. */
6350 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6351 || fd.loops[i].cond_code == GT_EXPR);
6352 bool forward = fd.loops[i].cond_code == LT_EXPR;
6353 bool maybe_lexically_later = true;
953ff289 6354
629b3d75
MJ
6355 /* While the committee makes up its mind, bail if we have any
6356 non-constant steps. */
6357 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6358 goto lower_omp_ordered_ret;
953ff289 6359
629b3d75
MJ
6360 tree itype = TREE_TYPE (TREE_VALUE (vec));
6361 if (POINTER_TYPE_P (itype))
6362 itype = sizetype;
6363 wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6364 TYPE_PRECISION (itype),
6365 TYPE_SIGN (itype));
a68ab351 6366
629b3d75
MJ
6367 /* Ignore invalid offsets that are not multiples of the step. */
6368 if (!wi::multiple_of_p
6369 (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6370 UNSIGNED))
b4c3a85b 6371 {
629b3d75
MJ
6372 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6373 "ignoring sink clause with offset that is not "
6374 "a multiple of the loop step");
6375 remove = true;
6376 goto next_ordered_clause;
b4c3a85b 6377 }
d9a6bd32 6378
629b3d75
MJ
6379 /* Calculate the first dimension. The first dimension of
6380 the folded dependency vector is the GCD of the first
6381 elements, while ignoring any first elements whose offset
6382 is 0. */
6383 if (i == 0)
b4c3a85b 6384 {
629b3d75
MJ
6385 /* Ignore dependence vectors whose first dimension is 0. */
6386 if (offset == 0)
b4c3a85b 6387 {
629b3d75
MJ
6388 remove = true;
6389 goto next_ordered_clause;
b4c3a85b 6390 }
d9a6bd32 6391 else
629b3d75
MJ
6392 {
6393 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6394 {
6395 error_at (OMP_CLAUSE_LOCATION (c),
6396 "first offset must be in opposite direction "
6397 "of loop iterations");
6398 goto lower_omp_ordered_ret;
6399 }
6400 if (forward)
6401 offset = -offset;
6402 neg_offset_p = forward;
6403 /* Initialize the first time around. */
6404 if (folded_dep == NULL_TREE)
6405 {
6406 folded_dep = c;
6407 folded_deps[0] = offset;
6408 }
6409 else
6410 folded_deps[0] = wi::gcd (folded_deps[0],
6411 offset, UNSIGNED);
6412 }
d9a6bd32 6413 }
629b3d75 6414 /* Calculate minimum for the remaining dimensions. */
d9a6bd32 6415 else
d9a6bd32 6416 {
629b3d75
MJ
6417 folded_deps[len + i - 1] = offset;
6418 if (folded_dep == c)
6419 folded_deps[i] = offset;
6420 else if (maybe_lexically_later
6421 && !wi::eq_p (folded_deps[i], offset))
6422 {
6423 if (forward ^ wi::gts_p (folded_deps[i], offset))
6424 {
6425 unsigned int j;
6426 folded_dep = c;
6427 for (j = 1; j <= i; j++)
6428 folded_deps[j] = folded_deps[len + j - 1];
6429 }
6430 else
6431 maybe_lexically_later = false;
6432 }
d9a6bd32 6433 }
d9a6bd32 6434 }
629b3d75 6435 gcc_assert (i == len);
d9a6bd32 6436
629b3d75
MJ
6437 remove = true;
6438
6439 next_ordered_clause:
6440 if (remove)
6441 *list_p = OMP_CLAUSE_CHAIN (c);
d9a6bd32 6442 else
629b3d75 6443 list_p = &OMP_CLAUSE_CHAIN (c);
d9a6bd32 6444 }
d9a6bd32 6445
629b3d75 6446 if (folded_dep)
d9a6bd32 6447 {
629b3d75
MJ
6448 if (neg_offset_p)
6449 folded_deps[0] = -folded_deps[0];
d9a6bd32 6450
629b3d75
MJ
6451 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6452 if (POINTER_TYPE_P (itype))
6453 itype = sizetype;
6454
6455 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6456 = wide_int_to_tree (itype, folded_deps[0]);
6457 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6458 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
d9a6bd32
JJ
6459 }
6460
629b3d75 6461 lower_omp_ordered_ret:
d9a6bd32 6462
629b3d75
MJ
 6463 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
 6464 while we want a nop instead if we remove all clauses. */
6465 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6466 gsi_replace (gsi_p, gimple_build_nop (), true);
d9a6bd32
JJ
6467}
6468
6469
629b3d75 6470/* Expand code for an OpenMP ordered directive. */
953ff289 6471
777f7f9a 6472static void
629b3d75 6473lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 6474{
629b3d75
MJ
6475 tree block;
6476 gimple *stmt = gsi_stmt (*gsi_p), *g;
6477 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6478 gcall *x;
6479 gbind *bind;
6480 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6481 OMP_CLAUSE_SIMD);
6482 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6483 loop. */
6484 bool maybe_simt
6485 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6486 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6487 OMP_CLAUSE_THREADS);
d9a6bd32 6488
629b3d75
MJ
6489 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6490 OMP_CLAUSE_DEPEND))
d9a6bd32 6491 {
629b3d75
MJ
 6492 /* FIXME: This needs to be moved to the expansion to verify various
6493 conditions only testable on cfg with dominators computed, and also
6494 all the depend clauses to be merged still might need to be available
6495 for the runtime checks. */
6496 if (0)
6497 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6498 return;
a68ab351 6499 }
d9a6bd32 6500
629b3d75
MJ
6501 push_gimplify_context ();
6502
6503 block = make_node (BLOCK);
6504 bind = gimple_build_bind (NULL, NULL, block);
6505 gsi_replace (gsi_p, bind, true);
6506 gimple_bind_add_stmt (bind, stmt);
d9a6bd32 6507
629b3d75 6508 if (simd)
917948d3 6509 {
629b3d75
MJ
6510 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6511 build_int_cst (NULL_TREE, threads));
6512 cfun->has_simduid_loops = true;
917948d3
ZD
6513 }
6514 else
629b3d75
MJ
6515 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6516 0);
6517 gimple_bind_add_stmt (bind, x);
6518
6519 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6520 if (maybe_simt)
953ff289 6521 {
629b3d75
MJ
6522 counter = create_tmp_var (integer_type_node);
6523 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6524 gimple_call_set_lhs (g, counter);
6525 gimple_bind_add_stmt (bind, g);
d9a6bd32 6526
629b3d75
MJ
6527 body = create_artificial_label (UNKNOWN_LOCATION);
6528 test = create_artificial_label (UNKNOWN_LOCATION);
6529 gimple_bind_add_stmt (bind, gimple_build_label (body));
953ff289 6530
629b3d75
MJ
6531 tree simt_pred = create_tmp_var (integer_type_node);
6532 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6533 gimple_call_set_lhs (g, simt_pred);
6534 gimple_bind_add_stmt (bind, g);
d9a6bd32 6535
629b3d75
MJ
6536 tree t = create_artificial_label (UNKNOWN_LOCATION);
6537 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6538 gimple_bind_add_stmt (bind, g);
74bf76ed 6539
629b3d75 6540 gimple_bind_add_stmt (bind, gimple_build_label (t));
acf0174b 6541 }
629b3d75
MJ
6542 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6543 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6544 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6545 gimple_omp_set_body (stmt, NULL);
acf0174b 6546
629b3d75 6547 if (maybe_simt)
d9a6bd32 6548 {
629b3d75
MJ
6549 gimple_bind_add_stmt (bind, gimple_build_label (test));
6550 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6551 gimple_bind_add_stmt (bind, g);
50674e96 6552
629b3d75
MJ
6553 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6554 tree nonneg = create_tmp_var (integer_type_node);
6555 gimple_seq tseq = NULL;
6556 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6557 gimple_bind_add_seq (bind, tseq);
d9a6bd32 6558
629b3d75
MJ
6559 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6560 gimple_call_set_lhs (g, nonneg);
6561 gimple_bind_add_stmt (bind, g);
d9a6bd32 6562
629b3d75
MJ
6563 tree end = create_artificial_label (UNKNOWN_LOCATION);
6564 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6565 gimple_bind_add_stmt (bind, g);
50674e96 6566
629b3d75 6567 gimple_bind_add_stmt (bind, gimple_build_label (end));
e5c95afe 6568 }
629b3d75
MJ
6569 if (simd)
6570 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6571 build_int_cst (NULL_TREE, threads));
777f7f9a 6572 else
629b3d75
MJ
6573 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6574 0);
6575 gimple_bind_add_stmt (bind, x);
917948d3 6576
629b3d75 6577 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
917948d3 6578
629b3d75 6579 pop_gimplify_context (bind);
917948d3 6580
629b3d75
MJ
6581 gimple_bind_append_vars (bind, ctx->block_vars);
6582 BLOCK_VARS (block) = gimple_bind_vars (bind);
6583}
56102c7f 6584
56102c7f 6585
629b3d75
MJ
6586/* Lower a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
 6587 substitution of a couple of function calls. But in the NAMED case,
 6588 it requires that languages coordinate a symbol name. It is therefore
6589 best put here in common code. */
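
/* Illustrative sketch (source-level pseudo code, not the exact GIMPLE built
   below): a named critical section such as

     #pragma omp critical (lock1)
     x++;

   is lowered into a pair of runtime calls bracketing the body,

     GOMP_critical_name_start (&.gomp_critical_user_lock1);
     x++;
     GOMP_critical_name_end (&.gomp_critical_user_lock1);

   where .gomp_critical_user_lock1 is the common, pointer-sized symbol
   created on demand below; the unnamed form uses GOMP_critical_start
   and GOMP_critical_end, which take no argument.  */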
56102c7f 6590
629b3d75 6591static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
56102c7f 6592
629b3d75
MJ
6593static void
6594lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6595{
6596 tree block;
6597 tree name, lock, unlock;
6598 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6599 gbind *bind;
6600 location_t loc = gimple_location (stmt);
6601 gimple_seq tbody;
56102c7f 6602
629b3d75
MJ
6603 name = gimple_omp_critical_name (stmt);
6604 if (name)
6605 {
6606 tree decl;
56102c7f 6607
629b3d75
MJ
6608 if (!critical_name_mutexes)
6609 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
56102c7f 6610
629b3d75
MJ
6611 tree *n = critical_name_mutexes->get (name);
6612 if (n == NULL)
74bf76ed 6613 {
629b3d75 6614 char *new_str;
953ff289 6615
629b3d75 6616 decl = create_tmp_var_raw (ptr_type_node);
953ff289 6617
629b3d75
MJ
6618 new_str = ACONCAT ((".gomp_critical_user_",
6619 IDENTIFIER_POINTER (name), NULL));
6620 DECL_NAME (decl) = get_identifier (new_str);
6621 TREE_PUBLIC (decl) = 1;
6622 TREE_STATIC (decl) = 1;
6623 DECL_COMMON (decl) = 1;
6624 DECL_ARTIFICIAL (decl) = 1;
6625 DECL_IGNORED_P (decl) = 1;
953ff289 6626
629b3d75 6627 varpool_node::finalize_decl (decl);
953ff289 6628
629b3d75
MJ
6629 critical_name_mutexes->put (name, decl);
6630 }
6631 else
6632 decl = *n;
953ff289 6633
629b3d75
MJ
 6634 /* If '#pragma omp critical' is inside an offloaded region or
 6635 inside a function marked as offloadable, the symbol must be
6636 marked as offloadable too. */
6637 omp_context *octx;
6638 if (cgraph_node::get (current_function_decl)->offloadable)
6639 varpool_node::get_create (decl)->offloadable = 1;
6640 else
6641 for (octx = ctx->outer; octx; octx = octx->outer)
6642 if (is_gimple_omp_offloaded (octx->stmt))
6643 {
6644 varpool_node::get_create (decl)->offloadable = 1;
6645 break;
6646 }
777f7f9a 6647
629b3d75 6648 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
01914336
MJ
6649 lock = build_call_expr_loc (loc, lock, 1,
6650 build_fold_addr_expr_loc (loc, decl));
777f7f9a 6651
629b3d75
MJ
6652 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6653 unlock = build_call_expr_loc (loc, unlock, 1,
6654 build_fold_addr_expr_loc (loc, decl));
acf0174b 6655 }
acf0174b 6656 else
5a0f4dd3 6657 {
629b3d75
MJ
6658 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6659 lock = build_call_expr_loc (loc, lock, 0);
5a0f4dd3 6660
629b3d75
MJ
6661 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6662 unlock = build_call_expr_loc (loc, unlock, 0);
acf0174b 6663 }
953ff289 6664
629b3d75 6665 push_gimplify_context ();
fb79f500 6666
629b3d75
MJ
6667 block = make_node (BLOCK);
6668 bind = gimple_build_bind (NULL, NULL, block);
6669 gsi_replace (gsi_p, bind, true);
6670 gimple_bind_add_stmt (bind, stmt);
fb79f500 6671
629b3d75
MJ
6672 tbody = gimple_bind_body (bind);
6673 gimplify_and_add (lock, &tbody);
6674 gimple_bind_set_body (bind, tbody);
fb79f500 6675
629b3d75
MJ
6676 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6677 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6678 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6679 gimple_omp_set_body (stmt, NULL);
953ff289 6680
629b3d75
MJ
6681 tbody = gimple_bind_body (bind);
6682 gimplify_and_add (unlock, &tbody);
6683 gimple_bind_set_body (bind, tbody);
953ff289 6684
629b3d75 6685 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
917948d3 6686
629b3d75
MJ
6687 pop_gimplify_context (bind);
6688 gimple_bind_append_vars (bind, ctx->block_vars);
6689 BLOCK_VARS (block) = gimple_bind_vars (bind);
6690}
50674e96 6691
629b3d75
MJ
6692/* A subroutine of lower_omp_for. Generate code to emit the predicate
6693 for a lastprivate clause. Given a loop control predicate of (V
6694 cond N2), we gate the clause on (!(V cond N2)). The lowered form
6695 is appended to *DLIST, iterator initialization is appended to
6696 *BODY_P. */
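
/* Illustrative sketch, assuming a plain non-collapsed loop: for

     #pragma omp for lastprivate (x)
     for (v = n1; v < n2; v += step)
       x = ...;

   the copy-back emitted by lower_lastprivate_clauses is gated on the
   negated loop condition, roughly

     if (v >= n2)
       x = x_priv;

   so only the thread that executed the sequentially last iteration writes
   back (x_priv is just an illustrative name for the private copy), and V
   is pre-initialized below so that threads which run no iterations never
   satisfy the guard by accident.  */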
50674e96 6697
629b3d75
MJ
6698static void
6699lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6700 gimple_seq *dlist, struct omp_context *ctx)
6701{
6702 tree clauses, cond, vinit;
6703 enum tree_code cond_code;
6704 gimple_seq stmts;
953ff289 6705
629b3d75
MJ
6706 cond_code = fd->loop.cond_code;
6707 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
acf0174b 6708
629b3d75
MJ
 6709 /* When possible, use a strict equality expression. This can let
 6710 VRP-style optimizations deduce the value and remove a copy. */
6711 if (tree_fits_shwi_p (fd->loop.step))
acf0174b 6712 {
629b3d75
MJ
6713 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6714 if (step == 1 || step == -1)
6715 cond_code = EQ_EXPR;
acf0174b 6716 }
629b3d75
MJ
6717
6718 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6719 || gimple_omp_for_grid_phony (fd->for_stmt))
6720 cond = omp_grid_lastprivate_predicate (fd);
a68ab351 6721 else
acf0174b 6722 {
629b3d75
MJ
6723 tree n2 = fd->loop.n2;
6724 if (fd->collapse > 1
6725 && TREE_CODE (n2) != INTEGER_CST
6726 && gimple_omp_for_combined_into_p (fd->for_stmt))
d9a6bd32 6727 {
629b3d75
MJ
6728 struct omp_context *taskreg_ctx = NULL;
6729 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
d9a6bd32 6730 {
629b3d75
MJ
6731 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6732 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6733 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
d9a6bd32 6734 {
629b3d75
MJ
6735 if (gimple_omp_for_combined_into_p (gfor))
6736 {
6737 gcc_assert (ctx->outer->outer
6738 && is_parallel_ctx (ctx->outer->outer));
6739 taskreg_ctx = ctx->outer->outer;
6740 }
6741 else
6742 {
6743 struct omp_for_data outer_fd;
6744 omp_extract_for_data (gfor, &outer_fd, NULL);
6745 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6746 }
d9a6bd32 6747 }
629b3d75
MJ
6748 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6749 taskreg_ctx = ctx->outer->outer;
6750 }
6751 else if (is_taskreg_ctx (ctx->outer))
6752 taskreg_ctx = ctx->outer;
6753 if (taskreg_ctx)
6754 {
6755 int i;
6756 tree taskreg_clauses
6757 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6758 tree innerc = omp_find_clause (taskreg_clauses,
6759 OMP_CLAUSE__LOOPTEMP_);
6760 gcc_assert (innerc);
6761 for (i = 0; i < fd->collapse; i++)
6762 {
6763 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6764 OMP_CLAUSE__LOOPTEMP_);
6765 gcc_assert (innerc);
6766 }
6767 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6768 OMP_CLAUSE__LOOPTEMP_);
6769 if (innerc)
6770 n2 = fold_convert (TREE_TYPE (n2),
6771 lookup_decl (OMP_CLAUSE_DECL (innerc),
6772 taskreg_ctx));
d9a6bd32 6773 }
acf0174b 6774 }
629b3d75 6775 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
acf0174b 6776 }
50674e96 6777
629b3d75
MJ
6778 clauses = gimple_omp_for_clauses (fd->for_stmt);
6779 stmts = NULL;
6780 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6781 if (!gimple_seq_empty_p (stmts))
acf0174b 6782 {
629b3d75
MJ
6783 gimple_seq_add_seq (&stmts, *dlist);
6784 *dlist = stmts;
6093bc06 6785
629b3d75
MJ
6786 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6787 vinit = fd->loop.n1;
6788 if (cond_code == EQ_EXPR
6789 && tree_fits_shwi_p (fd->loop.n2)
6790 && ! integer_zerop (fd->loop.n2))
6791 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6792 else
6793 vinit = unshare_expr (vinit);
e67d7a1e 6794
629b3d75
MJ
6795 /* Initialize the iterator variable, so that threads that don't execute
6796 any iterations don't execute the lastprivate clauses by accident. */
6797 gimplify_assign (fd->loop.v, vinit, body_p);
acf0174b 6798 }
953ff289
DN
6799}
6800
1b96e9a4 6801
629b3d75 6802/* Lower code for an OMP loop directive. */
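
/* In rough outline (a sketch of the sequencing assembled below, not literal
   GIMPLE), the new bind built here ends up containing

     <clause setup>                    lower_rec_input_clauses + pre-body
     #pragma omp for (V = N1; V cond N2; V += STEP)
       <lowered loop body>
     #pragma omp continue (V, V)
     <reduction epilogue>              lower_reduction_clauses
     <lastprivate copy-back>           dlist from lower_omp_for_lastprivate
     #pragma omp return

   The OMP_FOR/OMP_CONTINUE/OMP_RETURN markers are only turned into the real
   loop and the runtime calls later, during OMP expansion.  */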
50674e96 6803
629b3d75
MJ
6804static void
6805lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6806{
6807 tree *rhs_p, block;
6808 struct omp_for_data fd, *fdp = NULL;
6809 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6810 gbind *new_stmt;
6811 gimple_seq omp_for_body, body, dlist;
6812 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6813 size_t i;
953ff289 6814
629b3d75 6815 push_gimplify_context ();
953ff289 6816
629b3d75 6817 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
953ff289 6818
629b3d75
MJ
6819 block = make_node (BLOCK);
6820 new_stmt = gimple_build_bind (NULL, NULL, block);
 6821 /* Replace at gsi right away, so that 'stmt' is no longer a member
 6822 of a sequence, as we're going to add to a different
6823 one below. */
6824 gsi_replace (gsi_p, new_stmt, true);
953ff289 6825
629b3d75
MJ
 6826 /* Move declarations of temporaries in the loop body before we make
6827 it go away. */
6828 omp_for_body = gimple_omp_body (stmt);
6829 if (!gimple_seq_empty_p (omp_for_body)
6830 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
acf0174b 6831 {
629b3d75
MJ
6832 gbind *inner_bind
6833 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6834 tree vars = gimple_bind_vars (inner_bind);
6835 gimple_bind_append_vars (new_stmt, vars);
6836 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
 6837 keep them on the inner_bind and its block. */
6838 gimple_bind_set_vars (inner_bind, NULL_TREE);
6839 if (gimple_bind_block (inner_bind))
6840 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
acf0174b 6841 }
50674e96 6842
629b3d75 6843 if (gimple_omp_for_combined_into_p (stmt))
5a0f4dd3 6844 {
629b3d75
MJ
6845 omp_extract_for_data (stmt, &fd, NULL);
6846 fdp = &fd;
6847
6848 /* We need two temporaries with fd.loop.v type (istart/iend)
6849 and then (fd.collapse - 1) temporaries with the same
6850 type for count2 ... countN-1 vars if not constant. */
6851 size_t count = 2;
6852 tree type = fd.iter_type;
6853 if (fd.collapse > 1
6854 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6855 count += fd.collapse - 1;
6856 bool taskreg_for
6857 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6858 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6859 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6e6cf7b0 6860 tree simtc = NULL;
629b3d75
MJ
6861 tree clauses = *pc;
6862 if (taskreg_for)
6863 outerc
6864 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6865 OMP_CLAUSE__LOOPTEMP_);
6e6cf7b0
JJ
6866 if (ctx->simt_stmt)
6867 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6868 OMP_CLAUSE__LOOPTEMP_);
629b3d75 6869 for (i = 0; i < count; i++)
5a0f4dd3 6870 {
629b3d75
MJ
6871 tree temp;
6872 if (taskreg_for)
6873 {
6874 gcc_assert (outerc);
6875 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6876 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6877 OMP_CLAUSE__LOOPTEMP_);
6878 }
6879 else
5a0f4dd3 6880 {
6e6cf7b0
JJ
6881 /* If there are 2 adjacent SIMD stmts, one with _simt_
6882 clause, another without, make sure they have the same
6883 decls in _looptemp_ clauses, because the outer stmt
6884 they are combined into will look up just one inner_stmt. */
6885 if (ctx->simt_stmt)
6886 temp = OMP_CLAUSE_DECL (simtc);
6887 else
6888 temp = create_tmp_var (type);
629b3d75 6889 insert_decl_map (&ctx->outer->cb, temp, temp);
5a0f4dd3 6890 }
629b3d75
MJ
6891 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6892 OMP_CLAUSE_DECL (*pc) = temp;
6893 pc = &OMP_CLAUSE_CHAIN (*pc);
6e6cf7b0
JJ
6894 if (ctx->simt_stmt)
6895 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6896 OMP_CLAUSE__LOOPTEMP_);
5a0f4dd3 6897 }
629b3d75 6898 *pc = clauses;
5a0f4dd3
JJ
6899 }
6900
629b3d75
MJ
6901 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6902 dlist = NULL;
6903 body = NULL;
6904 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6905 fdp);
6906 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
917948d3 6907
629b3d75 6908 lower_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289 6909
629b3d75
MJ
6910 /* Lower the header expressions. At this point, we can assume that
6911 the header is of the form:
50674e96 6912
629b3d75 6913 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
917948d3 6914
629b3d75
MJ
6915 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6916 using the .omp_data_s mapping, if needed. */
6917 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6918 {
6919 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6920 if (!is_gimple_min_invariant (*rhs_p))
6921 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
50674e96 6922
629b3d75
MJ
6923 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6924 if (!is_gimple_min_invariant (*rhs_p))
6925 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
d9a6bd32 6926
629b3d75
MJ
6927 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6928 if (!is_gimple_min_invariant (*rhs_p))
6929 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6930 }
953ff289 6931
629b3d75
MJ
6932 /* Once lowered, extract the bounds and clauses. */
6933 omp_extract_for_data (stmt, &fd, NULL);
953ff289 6934
629b3d75
MJ
6935 if (is_gimple_omp_oacc (ctx->stmt)
6936 && !ctx_in_oacc_kernels_region (ctx))
6937 lower_oacc_head_tail (gimple_location (stmt),
6938 gimple_omp_for_clauses (stmt),
6939 &oacc_head, &oacc_tail, ctx);
953ff289 6940
01914336 6941 /* Add OpenACC partitioning and reduction markers just before the loop. */
629b3d75
MJ
6942 if (oacc_head)
6943 gimple_seq_add_seq (&body, oacc_head);
01914336 6944
629b3d75 6945 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
acf0174b 6946
629b3d75
MJ
6947 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6948 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
d9a6bd32
JJ
6949 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6950 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6951 {
629b3d75
MJ
6952 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6953 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6954 OMP_CLAUSE_LINEAR_STEP (c)
6955 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6956 ctx);
d9a6bd32 6957 }
acf0174b 6958
629b3d75
MJ
6959 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6960 && gimple_omp_for_grid_phony (stmt));
6961 if (!phony_loop)
6962 gimple_seq_add_stmt (&body, stmt);
6963 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6964
6965 if (!phony_loop)
6966 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6967 fd.loop.v));
917948d3 6968
629b3d75
MJ
6969 /* After the loop, add exit clauses. */
6970 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
b8698a0f 6971
629b3d75
MJ
6972 if (ctx->cancellable)
6973 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
50674e96 6974
629b3d75 6975 gimple_seq_add_seq (&body, dlist);
953ff289 6976
629b3d75 6977 body = maybe_catch_exception (body);
953ff289 6978
629b3d75 6979 if (!phony_loop)
acf0174b 6980 {
629b3d75
MJ
6981 /* Region exit marker goes at the end of the loop body. */
6982 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6983 maybe_add_implicit_barrier_cancel (ctx, &body);
acf0174b 6984 }
953ff289 6985
629b3d75
MJ
6986 /* Add OpenACC joining and reduction markers just after the loop. */
6987 if (oacc_tail)
6988 gimple_seq_add_seq (&body, oacc_tail);
917948d3 6989
629b3d75 6990 pop_gimplify_context (new_stmt);
917948d3 6991
629b3d75
MJ
6992 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6993 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6994 if (BLOCK_VARS (block))
6995 TREE_USED (block) = 1;
917948d3 6996
629b3d75
MJ
6997 gimple_bind_set_body (new_stmt, body);
6998 gimple_omp_set_body (stmt, NULL);
6999 gimple_omp_for_set_pre_body (stmt, NULL);
7000}
17720e84 7001
629b3d75
MJ
7002/* Callback for walk_stmts. Check if the current statement only contains
7003 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
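
/* Illustrative sketch: for

     #pragma omp parallel
     #pragma omp for
     for (i = 0; i < n; i++) ...

   the walk below sees exactly one GIMPLE_OMP_FOR and nothing else, *info
   ends up as 1, and lower_omp_taskreg marks the parallel as combined so
   later expansion can use the combined parallel+workshare runtime entry
   points.  Any other statement, or a second workshare region, forces *info
   to -1 and the regions stay separate.  */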
917948d3 7004
629b3d75
MJ
7005static tree
7006check_combined_parallel (gimple_stmt_iterator *gsi_p,
7007 bool *handled_ops_p,
7008 struct walk_stmt_info *wi)
7009{
7010 int *info = (int *) wi->info;
7011 gimple *stmt = gsi_stmt (*gsi_p);
917948d3 7012
629b3d75
MJ
7013 *handled_ops_p = true;
7014 switch (gimple_code (stmt))
acf0174b 7015 {
629b3d75 7016 WALK_SUBSTMTS;
8cba6b95 7017
629b3d75
MJ
7018 case GIMPLE_OMP_FOR:
7019 case GIMPLE_OMP_SECTIONS:
7020 *info = *info == 0 ? 1 : -1;
7021 break;
7022 default:
7023 *info = -1;
7024 break;
acf0174b 7025 }
629b3d75 7026 return NULL;
953ff289
DN
7027}
7028
629b3d75
MJ
7029struct omp_taskcopy_context
7030{
7031 /* This field must be at the beginning, as we do "inheritance": Some
7032 callback functions for tree-inline.c (e.g., omp_copy_decl)
7033 receive a copy_body_data pointer that is up-casted to an
7034 omp_context pointer. */
7035 copy_body_data cb;
7036 omp_context *ctx;
7037};
9a771876 7038
629b3d75
MJ
7039static tree
7040task_copyfn_copy_decl (tree var, copy_body_data *cb)
7041{
7042 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
9a771876 7043
629b3d75
MJ
7044 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7045 return create_tmp_var (TREE_TYPE (var));
9a771876 7046
629b3d75
MJ
7047 return var;
7048}
9a771876 7049
629b3d75
MJ
7050static tree
7051task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
9a771876 7052{
629b3d75 7053 tree name, new_fields = NULL, type, f;
9a771876 7054
629b3d75
MJ
7055 type = lang_hooks.types.make_type (RECORD_TYPE);
7056 name = DECL_NAME (TYPE_NAME (orig_type));
7057 name = build_decl (gimple_location (tcctx->ctx->stmt),
7058 TYPE_DECL, name, type);
7059 TYPE_NAME (type) = name;
9a771876 7060
629b3d75 7061 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
9a771876 7062 {
629b3d75
MJ
7063 tree new_f = copy_node (f);
7064 DECL_CONTEXT (new_f) = type;
7065 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7066 TREE_CHAIN (new_f) = new_fields;
7067 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7068 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7069 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7070 &tcctx->cb, NULL);
7071 new_fields = new_f;
7072 tcctx->cb.decl_map->put (f, new_f);
9a771876 7073 }
629b3d75
MJ
7074 TYPE_FIELDS (type) = nreverse (new_fields);
7075 layout_type (type);
7076 return type;
7077}
9a771876 7078
629b3d75 7079/* Create task copyfn. */
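
/* A task copy function is only needed when firstprivate data cannot be
   copied by plain assignment at task-creation time, e.g. C++ objects with
   copy constructors or variable-sized arrays.  Illustrative sketch:

     #pragma omp task firstprivate (vla)    where vla is a C99 VLA
     ...

   makes GOMP_task receive this generated function as its copy-function
   argument.  Judging from the code below, it gets two pointers (ARG and
   SARG): the task's own data block and the block captured at the spawn
   point, and copies the fields across in three passes (size/offset
   temporaries, non-VLA copies, then VLA firstprivates).  */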
9a771876 7080
629b3d75
MJ
7081static void
7082create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7083{
7084 struct function *child_cfun;
7085 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7086 tree record_type, srecord_type, bind, list;
7087 bool record_needs_remap = false, srecord_needs_remap = false;
7088 splay_tree_node n;
7089 struct omp_taskcopy_context tcctx;
7090 location_t loc = gimple_location (task_stmt);
9a771876 7091
629b3d75
MJ
7092 child_fn = gimple_omp_task_copy_fn (task_stmt);
7093 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7094 gcc_assert (child_cfun->cfg == NULL);
7095 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
9a771876 7096
629b3d75
MJ
7097 /* Reset DECL_CONTEXT on function arguments. */
7098 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7099 DECL_CONTEXT (t) = child_fn;
9a771876 7100
629b3d75
MJ
7101 /* Populate the function. */
7102 push_gimplify_context ();
7103 push_cfun (child_cfun);
9a771876 7104
629b3d75
MJ
7105 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7106 TREE_SIDE_EFFECTS (bind) = 1;
7107 list = NULL;
7108 DECL_SAVED_TREE (child_fn) = bind;
7109 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
9a771876 7110
629b3d75
MJ
7111 /* Remap src and dst argument types if needed. */
7112 record_type = ctx->record_type;
7113 srecord_type = ctx->srecord_type;
7114 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7115 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7116 {
7117 record_needs_remap = true;
7118 break;
7119 }
7120 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7121 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7122 {
7123 srecord_needs_remap = true;
7124 break;
7125 }
9a771876 7126
629b3d75 7127 if (record_needs_remap || srecord_needs_remap)
9a771876 7128 {
629b3d75
MJ
7129 memset (&tcctx, '\0', sizeof (tcctx));
7130 tcctx.cb.src_fn = ctx->cb.src_fn;
7131 tcctx.cb.dst_fn = child_fn;
7132 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7133 gcc_checking_assert (tcctx.cb.src_node);
7134 tcctx.cb.dst_node = tcctx.cb.src_node;
7135 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7136 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7137 tcctx.cb.eh_lp_nr = 0;
7138 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7139 tcctx.cb.decl_map = new hash_map<tree, tree>;
7140 tcctx.ctx = ctx;
9a771876 7141
629b3d75
MJ
7142 if (record_needs_remap)
7143 record_type = task_copyfn_remap_type (&tcctx, record_type);
7144 if (srecord_needs_remap)
7145 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
9a771876
JJ
7146 }
7147 else
629b3d75 7148 tcctx.cb.decl_map = NULL;
9a771876 7149
629b3d75
MJ
7150 arg = DECL_ARGUMENTS (child_fn);
7151 TREE_TYPE (arg) = build_pointer_type (record_type);
7152 sarg = DECL_CHAIN (arg);
7153 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
9a771876 7154
629b3d75
MJ
7155 /* First pass: initialize temporaries used in record_type and srecord_type
7156 sizes and field offsets. */
7157 if (tcctx.cb.decl_map)
7158 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7159 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7160 {
7161 tree *p;
9a771876 7162
629b3d75
MJ
7163 decl = OMP_CLAUSE_DECL (c);
7164 p = tcctx.cb.decl_map->get (decl);
7165 if (p == NULL)
7166 continue;
7167 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7168 sf = (tree) n->value;
7169 sf = *tcctx.cb.decl_map->get (sf);
7170 src = build_simple_mem_ref_loc (loc, sarg);
7171 src = omp_build_component_ref (src, sf);
7172 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7173 append_to_statement_list (t, &list);
7174 }
9a771876 7175
629b3d75
MJ
7176 /* Second pass: copy shared var pointers and copy construct non-VLA
7177 firstprivate vars. */
7178 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7179 switch (OMP_CLAUSE_CODE (c))
7180 {
7181 splay_tree_key key;
7182 case OMP_CLAUSE_SHARED:
7183 decl = OMP_CLAUSE_DECL (c);
7184 key = (splay_tree_key) decl;
7185 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7186 key = (splay_tree_key) &DECL_UID (decl);
7187 n = splay_tree_lookup (ctx->field_map, key);
7188 if (n == NULL)
7189 break;
7190 f = (tree) n->value;
7191 if (tcctx.cb.decl_map)
7192 f = *tcctx.cb.decl_map->get (f);
7193 n = splay_tree_lookup (ctx->sfield_map, key);
7194 sf = (tree) n->value;
7195 if (tcctx.cb.decl_map)
7196 sf = *tcctx.cb.decl_map->get (sf);
7197 src = build_simple_mem_ref_loc (loc, sarg);
7198 src = omp_build_component_ref (src, sf);
7199 dst = build_simple_mem_ref_loc (loc, arg);
7200 dst = omp_build_component_ref (dst, f);
7201 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7202 append_to_statement_list (t, &list);
7203 break;
7204 case OMP_CLAUSE_FIRSTPRIVATE:
7205 decl = OMP_CLAUSE_DECL (c);
7206 if (is_variable_sized (decl))
7207 break;
7208 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7209 if (n == NULL)
7210 break;
7211 f = (tree) n->value;
7212 if (tcctx.cb.decl_map)
7213 f = *tcctx.cb.decl_map->get (f);
7214 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7215 if (n != NULL)
7216 {
7217 sf = (tree) n->value;
7218 if (tcctx.cb.decl_map)
7219 sf = *tcctx.cb.decl_map->get (sf);
7220 src = build_simple_mem_ref_loc (loc, sarg);
7221 src = omp_build_component_ref (src, sf);
7222 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7223 src = build_simple_mem_ref_loc (loc, src);
7224 }
7225 else
7226 src = decl;
7227 dst = build_simple_mem_ref_loc (loc, arg);
7228 dst = omp_build_component_ref (dst, f);
7229 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7230 append_to_statement_list (t, &list);
7231 break;
7232 case OMP_CLAUSE_PRIVATE:
7233 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7234 break;
7235 decl = OMP_CLAUSE_DECL (c);
7236 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7237 f = (tree) n->value;
7238 if (tcctx.cb.decl_map)
7239 f = *tcctx.cb.decl_map->get (f);
7240 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7241 if (n != NULL)
7242 {
7243 sf = (tree) n->value;
7244 if (tcctx.cb.decl_map)
7245 sf = *tcctx.cb.decl_map->get (sf);
7246 src = build_simple_mem_ref_loc (loc, sarg);
7247 src = omp_build_component_ref (src, sf);
7248 if (use_pointer_for_field (decl, NULL))
7249 src = build_simple_mem_ref_loc (loc, src);
7250 }
7251 else
7252 src = decl;
7253 dst = build_simple_mem_ref_loc (loc, arg);
7254 dst = omp_build_component_ref (dst, f);
7255 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7256 append_to_statement_list (t, &list);
7257 break;
7258 default:
7259 break;
7260 }
74bf76ed 7261
629b3d75
MJ
7262 /* Last pass: handle VLA firstprivates. */
7263 if (tcctx.cb.decl_map)
7264 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7265 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7266 {
7267 tree ind, ptr, df;
74bf76ed 7268
629b3d75
MJ
7269 decl = OMP_CLAUSE_DECL (c);
7270 if (!is_variable_sized (decl))
7271 continue;
7272 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7273 if (n == NULL)
7274 continue;
7275 f = (tree) n->value;
7276 f = *tcctx.cb.decl_map->get (f);
7277 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7278 ind = DECL_VALUE_EXPR (decl);
7279 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7280 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7281 n = splay_tree_lookup (ctx->sfield_map,
7282 (splay_tree_key) TREE_OPERAND (ind, 0));
7283 sf = (tree) n->value;
7284 sf = *tcctx.cb.decl_map->get (sf);
7285 src = build_simple_mem_ref_loc (loc, sarg);
7286 src = omp_build_component_ref (src, sf);
7287 src = build_simple_mem_ref_loc (loc, src);
7288 dst = build_simple_mem_ref_loc (loc, arg);
7289 dst = omp_build_component_ref (dst, f);
7290 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7291 append_to_statement_list (t, &list);
7292 n = splay_tree_lookup (ctx->field_map,
7293 (splay_tree_key) TREE_OPERAND (ind, 0));
7294 df = (tree) n->value;
7295 df = *tcctx.cb.decl_map->get (df);
7296 ptr = build_simple_mem_ref_loc (loc, arg);
7297 ptr = omp_build_component_ref (ptr, df);
7298 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7299 build_fold_addr_expr_loc (loc, dst));
7300 append_to_statement_list (t, &list);
7301 }
74bf76ed 7302
629b3d75
MJ
7303 t = build1 (RETURN_EXPR, void_type_node, NULL);
7304 append_to_statement_list (t, &list);
74bf76ed 7305
629b3d75
MJ
7306 if (tcctx.cb.decl_map)
7307 delete tcctx.cb.decl_map;
7308 pop_gimplify_context (NULL);
7309 BIND_EXPR_BODY (bind) = list;
7310 pop_cfun ();
7311}
74bf76ed
JJ
7312
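/* Build the flat dependency array handed to the runtime for task depend()
   clauses.  Judging from the code below, its layout is

     array[0]     total number of depend clauses (in + out/inout)
     array[1]     number of out/inout entries
     array[2...]  addresses of the out/inout operands, then the in operands

   The array's address is prepended to *PCLAUSES as a new OMP_CLAUSE_DEPEND,
   its initialization goes to *ISEQ and the final clobber to *OSEQ.  */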
7313static void
629b3d75 7314lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
74bf76ed 7315{
629b3d75
MJ
7316 tree c, clauses;
7317 gimple *g;
7318 size_t n_in = 0, n_out = 0, idx = 2, i;
7319
7320 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7321 gcc_assert (clauses);
7322 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7323 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7324 switch (OMP_CLAUSE_DEPEND_KIND (c))
7325 {
7326 case OMP_CLAUSE_DEPEND_IN:
7327 n_in++;
7328 break;
7329 case OMP_CLAUSE_DEPEND_OUT:
7330 case OMP_CLAUSE_DEPEND_INOUT:
7331 n_out++;
7332 break;
7333 case OMP_CLAUSE_DEPEND_SOURCE:
7334 case OMP_CLAUSE_DEPEND_SINK:
7335 /* FALLTHRU */
7336 default:
7337 gcc_unreachable ();
7338 }
7339 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7340 tree array = create_tmp_var (type);
7341 TREE_ADDRESSABLE (array) = 1;
7342 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7343 NULL_TREE);
7344 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7345 gimple_seq_add_stmt (iseq, g);
7346 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7347 NULL_TREE);
7348 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7349 gimple_seq_add_stmt (iseq, g);
7350 for (i = 0; i < 2; i++)
74bf76ed 7351 {
629b3d75
MJ
7352 if ((i ? n_in : n_out) == 0)
7353 continue;
7354 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7355 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7356 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7357 {
7358 tree t = OMP_CLAUSE_DECL (c);
7359 t = fold_convert (ptr_type_node, t);
7360 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7361 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7362 NULL_TREE, NULL_TREE);
7363 g = gimple_build_assign (r, t);
7364 gimple_seq_add_stmt (iseq, g);
7365 }
74bf76ed 7366 }
629b3d75
MJ
7367 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7368 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7369 OMP_CLAUSE_CHAIN (c) = *pclauses;
7370 *pclauses = c;
7371 tree clobber = build_constructor (type, NULL);
7372 TREE_THIS_VOLATILE (clobber) = 1;
7373 g = gimple_build_assign (array, clobber);
7374 gimple_seq_add_stmt (oseq, g);
7375}
7376
7377/* Lower the OpenMP parallel or task directive in the current statement
7378 in GSI_P. CTX holds context information for the directive. */
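
/* Illustrative sketch for '#pragma omp parallel': the body is rewritten to
   reference the captured variables through a mapping record, roughly

     .omp_data_o.<field> = ...;      copy-out of clauses/shared vars (ilist)
     #pragma omp parallel [child fn, data arg .omp_data_o]
       .omp_data_i = &.omp_data_o;   receiver set up at the top of new_body
       ... body uses .omp_data_i-><field> ...

   (.omp_data_o and .omp_data_i name the sender/receiver decls created here
   and during scanning).  Outlining into the child function and the actual
   GOMP_parallel/GOMP_task call happen later, at expansion time.  */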
74bf76ed 7379
629b3d75
MJ
7380static void
7381lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7382{
7383 tree clauses;
7384 tree child_fn, t;
7385 gimple *stmt = gsi_stmt (*gsi_p);
7386 gbind *par_bind, *bind, *dep_bind = NULL;
7387 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7388 location_t loc = gimple_location (stmt);
74bf76ed 7389
629b3d75
MJ
7390 clauses = gimple_omp_taskreg_clauses (stmt);
7391 par_bind
7392 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7393 par_body = gimple_bind_body (par_bind);
7394 child_fn = ctx->cb.dst_fn;
7395 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7396 && !gimple_omp_parallel_combined_p (stmt))
74bf76ed 7397 {
629b3d75
MJ
7398 struct walk_stmt_info wi;
7399 int ws_num = 0;
74bf76ed 7400
629b3d75
MJ
7401 memset (&wi, 0, sizeof (wi));
7402 wi.info = &ws_num;
7403 wi.val_only = true;
7404 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7405 if (ws_num == 1)
7406 gimple_omp_parallel_set_combined_p (stmt, true);
74bf76ed 7407 }
629b3d75
MJ
7408 gimple_seq dep_ilist = NULL;
7409 gimple_seq dep_olist = NULL;
7410 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7411 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
acf0174b 7412 {
629b3d75
MJ
7413 push_gimplify_context ();
7414 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7415 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7416 &dep_ilist, &dep_olist);
9669b00b 7417 }
9669b00b 7418
629b3d75
MJ
7419 if (ctx->srecord_type)
7420 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
9669b00b 7421
629b3d75 7422 push_gimplify_context ();
74bf76ed 7423
629b3d75
MJ
7424 par_olist = NULL;
7425 par_ilist = NULL;
7426 par_rlist = NULL;
7427 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7428 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7429 if (phony_construct && ctx->record_type)
9669b00b 7430 {
629b3d75
MJ
7431 gcc_checking_assert (!ctx->receiver_decl);
7432 ctx->receiver_decl = create_tmp_var
7433 (build_reference_type (ctx->record_type), ".omp_rec");
9669b00b 7434 }
629b3d75
MJ
7435 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7436 lower_omp (&par_body, ctx);
7437 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7438 lower_reduction_clauses (clauses, &par_rlist, ctx);
9669b00b 7439
629b3d75
MJ
7440 /* Declare all the variables created by mapping and the variables
7441 declared in the scope of the parallel body. */
7442 record_vars_into (ctx->block_vars, child_fn);
7443 record_vars_into (gimple_bind_vars (par_bind), child_fn);
74bf76ed 7444
629b3d75 7445 if (ctx->record_type)
74bf76ed 7446 {
629b3d75
MJ
7447 ctx->sender_decl
7448 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7449 : ctx->record_type, ".omp_data_o");
7450 DECL_NAMELESS (ctx->sender_decl) = 1;
7451 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7452 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
74bf76ed 7453 }
74bf76ed 7454
629b3d75
MJ
7455 olist = NULL;
7456 ilist = NULL;
7457 lower_send_clauses (clauses, &ilist, &olist, ctx);
7458 lower_send_shared_vars (&ilist, &olist, ctx);
9669b00b 7459
629b3d75 7460 if (ctx->record_type)
74bf76ed 7461 {
629b3d75
MJ
7462 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7463 TREE_THIS_VOLATILE (clobber) = 1;
7464 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7465 clobber));
d9a6bd32 7466 }
d9a6bd32 7467
629b3d75
MJ
7468 /* Once all the expansions are done, sequence all the different
7469 fragments inside gimple_omp_body. */
d9a6bd32 7470
629b3d75 7471 new_body = NULL;
d9a6bd32 7472
629b3d75 7473 if (ctx->record_type)
d9a6bd32 7474 {
629b3d75
MJ
7475 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7476 /* fixup_child_record_type might have changed receiver_decl's type. */
7477 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7478 gimple_seq_add_stmt (&new_body,
7479 gimple_build_assign (ctx->receiver_decl, t));
d9a6bd32
JJ
7480 }
7481
629b3d75
MJ
7482 gimple_seq_add_seq (&new_body, par_ilist);
7483 gimple_seq_add_seq (&new_body, par_body);
7484 gimple_seq_add_seq (&new_body, par_rlist);
7485 if (ctx->cancellable)
7486 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7487 gimple_seq_add_seq (&new_body, par_olist);
7488 new_body = maybe_catch_exception (new_body);
7489 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7490 gimple_seq_add_stmt (&new_body,
7491 gimple_build_omp_continue (integer_zero_node,
7492 integer_zero_node));
7493 if (!phony_construct)
d9a6bd32 7494 {
629b3d75
MJ
7495 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7496 gimple_omp_set_body (stmt, new_body);
d9a6bd32
JJ
7497 }
7498
629b3d75
MJ
7499 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7500 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7501 gimple_bind_add_seq (bind, ilist);
7502 if (!phony_construct)
7503 gimple_bind_add_stmt (bind, stmt);
d9a6bd32 7504 else
629b3d75
MJ
7505 gimple_bind_add_seq (bind, new_body);
7506 gimple_bind_add_seq (bind, olist);
d9a6bd32 7507
629b3d75
MJ
7508 pop_gimplify_context (NULL);
7509
7510 if (dep_bind)
d9a6bd32 7511 {
629b3d75
MJ
7512 gimple_bind_add_seq (dep_bind, dep_ilist);
7513 gimple_bind_add_stmt (dep_bind, bind);
7514 gimple_bind_add_seq (dep_bind, dep_olist);
7515 pop_gimplify_context (dep_bind);
d9a6bd32 7516 }
d9a6bd32
JJ
7517}
7518
629b3d75
MJ
7519/* Lower the GIMPLE_OMP_TARGET in the current statement
7520 in GSI_P. CTX holds context information for the directive. */
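
/* Illustrative sketch: for an offloaded region such as

     #pragma omp target map(to: a) map(from: b)
     { ... }

   the code below collects one entry per mapped object into three parallel
   arrays consumed by the runtime,

     .omp_data_arr[]    host addresses of the mapped objects
     .omp_data_sizes[]  their sizes in bytes
     .omp_data_kinds[]  map kind plus log2 alignment, packed into a short

   and records them in the statement's data_arg vector; the actual
   GOMP_target* runtime call is emitted at expansion time.  */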
d9a6bd32
JJ
7521
7522static void
629b3d75 7523lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
d9a6bd32 7524{
629b3d75
MJ
7525 tree clauses;
7526 tree child_fn, t, c;
7527 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7528 gbind *tgt_bind, *bind, *dep_bind = NULL;
7529 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7530 location_t loc = gimple_location (stmt);
7531 bool offloaded, data_region;
7532 unsigned int map_cnt = 0;
d9a6bd32 7533
629b3d75
MJ
7534 offloaded = is_gimple_omp_offloaded (stmt);
7535 switch (gimple_omp_target_kind (stmt))
d9a6bd32 7536 {
629b3d75
MJ
7537 case GF_OMP_TARGET_KIND_REGION:
7538 case GF_OMP_TARGET_KIND_UPDATE:
7539 case GF_OMP_TARGET_KIND_ENTER_DATA:
7540 case GF_OMP_TARGET_KIND_EXIT_DATA:
7541 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7542 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7543 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7544 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7545 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7546 data_region = false;
7547 break;
7548 case GF_OMP_TARGET_KIND_DATA:
7549 case GF_OMP_TARGET_KIND_OACC_DATA:
7550 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7551 data_region = true;
7552 break;
7553 default:
7554 gcc_unreachable ();
74bf76ed 7555 }
74bf76ed 7556
629b3d75 7557 clauses = gimple_omp_target_clauses (stmt);
d9a6bd32 7558
629b3d75
MJ
7559 gimple_seq dep_ilist = NULL;
7560 gimple_seq dep_olist = NULL;
7561 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
d9a6bd32 7562 {
629b3d75
MJ
7563 push_gimplify_context ();
7564 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7565 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7566 &dep_ilist, &dep_olist);
d9a6bd32 7567 }
953ff289 7568
629b3d75
MJ
7569 tgt_bind = NULL;
7570 tgt_body = NULL;
7571 if (offloaded)
e4834818 7572 {
629b3d75
MJ
7573 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7574 tgt_body = gimple_bind_body (tgt_bind);
e4834818 7575 }
629b3d75
MJ
7576 else if (data_region)
7577 tgt_body = gimple_omp_body (stmt);
7578 child_fn = ctx->cb.dst_fn;
e4834818 7579
629b3d75
MJ
7580 push_gimplify_context ();
7581 fplist = NULL;
e4834818 7582
629b3d75
MJ
7583 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7584 switch (OMP_CLAUSE_CODE (c))
7585 {
7586 tree var, x;
e4834818 7587
629b3d75
MJ
7588 default:
7589 break;
7590 case OMP_CLAUSE_MAP:
7591#if CHECKING_P
7592 /* First check what we're prepared to handle in the following. */
7593 switch (OMP_CLAUSE_MAP_KIND (c))
7594 {
7595 case GOMP_MAP_ALLOC:
7596 case GOMP_MAP_TO:
7597 case GOMP_MAP_FROM:
7598 case GOMP_MAP_TOFROM:
7599 case GOMP_MAP_POINTER:
7600 case GOMP_MAP_TO_PSET:
7601 case GOMP_MAP_DELETE:
7602 case GOMP_MAP_RELEASE:
7603 case GOMP_MAP_ALWAYS_TO:
7604 case GOMP_MAP_ALWAYS_FROM:
7605 case GOMP_MAP_ALWAYS_TOFROM:
7606 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7607 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7608 case GOMP_MAP_STRUCT:
7609 case GOMP_MAP_ALWAYS_POINTER:
7610 break;
7611 case GOMP_MAP_FORCE_ALLOC:
7612 case GOMP_MAP_FORCE_TO:
7613 case GOMP_MAP_FORCE_FROM:
7614 case GOMP_MAP_FORCE_TOFROM:
7615 case GOMP_MAP_FORCE_PRESENT:
7616 case GOMP_MAP_FORCE_DEVICEPTR:
7617 case GOMP_MAP_DEVICE_RESIDENT:
7618 case GOMP_MAP_LINK:
7619 gcc_assert (is_gimple_omp_oacc (stmt));
7620 break;
7621 default:
7622 gcc_unreachable ();
7623 }
7624#endif
7625 /* FALLTHRU */
7626 case OMP_CLAUSE_TO:
7627 case OMP_CLAUSE_FROM:
7628 oacc_firstprivate:
7629 var = OMP_CLAUSE_DECL (c);
7630 if (!DECL_P (var))
7631 {
7632 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7633 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7634 && (OMP_CLAUSE_MAP_KIND (c)
7635 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7636 map_cnt++;
7637 continue;
7638 }
e4834818 7639
629b3d75
MJ
7640 if (DECL_SIZE (var)
7641 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7642 {
7643 tree var2 = DECL_VALUE_EXPR (var);
7644 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7645 var2 = TREE_OPERAND (var2, 0);
7646 gcc_assert (DECL_P (var2));
7647 var = var2;
7648 }
e4834818 7649
629b3d75
MJ
7650 if (offloaded
7651 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7652 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7653 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7654 {
7655 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7656 {
7657 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7658 && varpool_node::get_create (var)->offloadable)
7659 continue;
e4834818 7660
629b3d75
MJ
7661 tree type = build_pointer_type (TREE_TYPE (var));
7662 tree new_var = lookup_decl (var, ctx);
7663 x = create_tmp_var_raw (type, get_name (new_var));
7664 gimple_add_tmp_var (x);
7665 x = build_simple_mem_ref (x);
7666 SET_DECL_VALUE_EXPR (new_var, x);
7667 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7668 }
7669 continue;
7670 }
e4834818 7671
629b3d75
MJ
7672 if (!maybe_lookup_field (var, ctx))
7673 continue;
e4834818 7674
629b3d75
MJ
7675 /* Don't remap oacc parallel reduction variables, because the
7676 intermediate result must be local to each gang. */
7677 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7678 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7679 {
7680 x = build_receiver_ref (var, true, ctx);
7681 tree new_var = lookup_decl (var, ctx);
e4834818 7682
629b3d75
MJ
7683 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7684 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7685 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7686 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7687 x = build_simple_mem_ref (x);
7688 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7689 {
7690 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7691 if (omp_is_reference (new_var))
7692 {
7693 /* Create a local object to hold the instance
7694 value. */
7695 tree type = TREE_TYPE (TREE_TYPE (new_var));
7696 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7697 tree inst = create_tmp_var (type, id);
7698 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7699 x = build_fold_addr_expr (inst);
7700 }
7701 gimplify_assign (new_var, x, &fplist);
7702 }
7703 else if (DECL_P (new_var))
7704 {
7705 SET_DECL_VALUE_EXPR (new_var, x);
7706 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7707 }
7708 else
7709 gcc_unreachable ();
7710 }
7711 map_cnt++;
7712 break;
e4834818 7713
629b3d75
MJ
7714 case OMP_CLAUSE_FIRSTPRIVATE:
7715 if (is_oacc_parallel (ctx))
7716 goto oacc_firstprivate;
7717 map_cnt++;
7718 var = OMP_CLAUSE_DECL (c);
7719 if (!omp_is_reference (var)
7720 && !is_gimple_reg_type (TREE_TYPE (var)))
7721 {
7722 tree new_var = lookup_decl (var, ctx);
7723 if (is_variable_sized (var))
7724 {
7725 tree pvar = DECL_VALUE_EXPR (var);
7726 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7727 pvar = TREE_OPERAND (pvar, 0);
7728 gcc_assert (DECL_P (pvar));
7729 tree new_pvar = lookup_decl (pvar, ctx);
7730 x = build_fold_indirect_ref (new_pvar);
7731 TREE_THIS_NOTRAP (x) = 1;
7732 }
7733 else
7734 x = build_receiver_ref (var, true, ctx);
7735 SET_DECL_VALUE_EXPR (new_var, x);
7736 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7737 }
7738 break;
e4834818 7739
629b3d75
MJ
7740 case OMP_CLAUSE_PRIVATE:
7741 if (is_gimple_omp_oacc (ctx->stmt))
7742 break;
7743 var = OMP_CLAUSE_DECL (c);
7744 if (is_variable_sized (var))
7745 {
7746 tree new_var = lookup_decl (var, ctx);
7747 tree pvar = DECL_VALUE_EXPR (var);
7748 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7749 pvar = TREE_OPERAND (pvar, 0);
7750 gcc_assert (DECL_P (pvar));
7751 tree new_pvar = lookup_decl (pvar, ctx);
7752 x = build_fold_indirect_ref (new_pvar);
7753 TREE_THIS_NOTRAP (x) = 1;
7754 SET_DECL_VALUE_EXPR (new_var, x);
7755 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7756 }
7757 break;
e4834818 7758
629b3d75
MJ
7759 case OMP_CLAUSE_USE_DEVICE_PTR:
7760 case OMP_CLAUSE_IS_DEVICE_PTR:
7761 var = OMP_CLAUSE_DECL (c);
7762 map_cnt++;
7763 if (is_variable_sized (var))
7764 {
7765 tree new_var = lookup_decl (var, ctx);
7766 tree pvar = DECL_VALUE_EXPR (var);
7767 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7768 pvar = TREE_OPERAND (pvar, 0);
7769 gcc_assert (DECL_P (pvar));
7770 tree new_pvar = lookup_decl (pvar, ctx);
7771 x = build_fold_indirect_ref (new_pvar);
7772 TREE_THIS_NOTRAP (x) = 1;
7773 SET_DECL_VALUE_EXPR (new_var, x);
7774 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7775 }
7776 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7777 {
7778 tree new_var = lookup_decl (var, ctx);
7779 tree type = build_pointer_type (TREE_TYPE (var));
7780 x = create_tmp_var_raw (type, get_name (new_var));
7781 gimple_add_tmp_var (x);
7782 x = build_simple_mem_ref (x);
7783 SET_DECL_VALUE_EXPR (new_var, x);
7784 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7785 }
7786 else
7787 {
7788 tree new_var = lookup_decl (var, ctx);
7789 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7790 gimple_add_tmp_var (x);
7791 SET_DECL_VALUE_EXPR (new_var, x);
7792 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7793 }
7794 break;
7795 }
e4834818 7796
629b3d75 7797 if (offloaded)
e4834818 7798 {
629b3d75
MJ
7799 target_nesting_level++;
7800 lower_omp (&tgt_body, ctx);
7801 target_nesting_level--;
e4834818 7802 }
629b3d75
MJ
7803 else if (data_region)
7804 lower_omp (&tgt_body, ctx);
e4834818 7805
629b3d75 7806 if (offloaded)
e4834818 7807 {
629b3d75
MJ
7808 /* Declare all the variables created by mapping and the variables
7809 declared in the scope of the target body. */
7810 record_vars_into (ctx->block_vars, child_fn);
7811 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
e4834818
NS
7812 }
7813
629b3d75
MJ
7814 olist = NULL;
7815 ilist = NULL;
7816 if (ctx->record_type)
e4834818 7817 {
629b3d75
MJ
7818 ctx->sender_decl
7819 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7820 DECL_NAMELESS (ctx->sender_decl) = 1;
7821 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7822 t = make_tree_vec (3);
7823 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7824 TREE_VEC_ELT (t, 1)
7825 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7826 ".omp_data_sizes");
7827 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7828 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7829 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7830 tree tkind_type = short_unsigned_type_node;
7831 int talign_shift = 8;
7832 TREE_VEC_ELT (t, 2)
7833 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7834 ".omp_data_kinds");
7835 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7836 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7837 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7838 gimple_omp_target_set_data_arg (stmt, t);
953ff289 7839
629b3d75
MJ
7840 vec<constructor_elt, va_gc> *vsize;
7841 vec<constructor_elt, va_gc> *vkind;
7842 vec_alloc (vsize, map_cnt);
7843 vec_alloc (vkind, map_cnt);
7844 unsigned int map_idx = 0;
953ff289 7845
629b3d75
MJ
7846 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7847 switch (OMP_CLAUSE_CODE (c))
953ff289 7848 {
629b3d75
MJ
7849 tree ovar, nc, s, purpose, var, x, type;
7850 unsigned int talign;
953ff289 7851
629b3d75
MJ
7852 default:
7853 break;
953ff289 7854
629b3d75
MJ
7855 case OMP_CLAUSE_MAP:
7856 case OMP_CLAUSE_TO:
7857 case OMP_CLAUSE_FROM:
7858 oacc_firstprivate_map:
7859 nc = c;
7860 ovar = OMP_CLAUSE_DECL (c);
7861 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7862 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7863 || (OMP_CLAUSE_MAP_KIND (c)
7864 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7865 break;
7866 if (!DECL_P (ovar))
c34938a8 7867 {
629b3d75
MJ
7868 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7869 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7870 {
7871 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7872 == get_base_address (ovar));
7873 nc = OMP_CLAUSE_CHAIN (c);
7874 ovar = OMP_CLAUSE_DECL (nc);
7875 }
7876 else
7877 {
7878 tree x = build_sender_ref (ovar, ctx);
7879 tree v
7880 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7881 gimplify_assign (x, v, &ilist);
7882 nc = NULL_TREE;
7883 }
7884 }
7885 else
7886 {
7887 if (DECL_SIZE (ovar)
7888 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7889 {
7890 tree ovar2 = DECL_VALUE_EXPR (ovar);
7891 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7892 ovar2 = TREE_OPERAND (ovar2, 0);
7893 gcc_assert (DECL_P (ovar2));
7894 ovar = ovar2;
7895 }
7896 if (!maybe_lookup_field (ovar, ctx))
7897 continue;
c34938a8 7898 }
777f7f9a 7899
629b3d75
MJ
7900 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7901 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7902 talign = DECL_ALIGN_UNIT (ovar);
7903 if (nc)
7904 {
7905 var = lookup_decl_in_outer_ctx (ovar, ctx);
7906 x = build_sender_ref (ovar, ctx);
777f7f9a 7907
629b3d75
MJ
7908 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7909 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7910 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7911 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7912 {
7913 gcc_assert (offloaded);
7914 tree avar
7915 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7916 mark_addressable (avar);
7917 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7918 talign = DECL_ALIGN_UNIT (avar);
7919 avar = build_fold_addr_expr (avar);
7920 gimplify_assign (x, avar, &ilist);
7921 }
7922 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7923 {
7924 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7925 if (!omp_is_reference (var))
7926 {
7927 if (is_gimple_reg (var)
7928 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7929 TREE_NO_WARNING (var) = 1;
7930 var = build_fold_addr_expr (var);
7931 }
7932 else
7933 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7934 gimplify_assign (x, var, &ilist);
7935 }
7936 else if (is_gimple_reg (var))
7937 {
7938 gcc_assert (offloaded);
7939 tree avar = create_tmp_var (TREE_TYPE (var));
7940 mark_addressable (avar);
7941 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7942 if (GOMP_MAP_COPY_TO_P (map_kind)
7943 || map_kind == GOMP_MAP_POINTER
7944 || map_kind == GOMP_MAP_TO_PSET
7945 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7946 {
7947 /* If we need to initialize a temporary
7948 with VAR because it is not addressable, and
7949 the variable hasn't been initialized yet, then
7950 we'll get a warning for the store to avar.
 7951 Don't warn in that case; the mapping might
7952 be implicit. */
7953 TREE_NO_WARNING (var) = 1;
7954 gimplify_assign (avar, var, &ilist);
7955 }
7956 avar = build_fold_addr_expr (avar);
7957 gimplify_assign (x, avar, &ilist);
7958 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7959 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7960 && !TYPE_READONLY (TREE_TYPE (var)))
7961 {
7962 x = unshare_expr (x);
7963 x = build_simple_mem_ref (x);
7964 gimplify_assign (var, x, &olist);
7965 }
7966 }
7967 else
7968 {
7969 var = build_fold_addr_expr (var);
7970 gimplify_assign (x, var, &ilist);
7971 }
7972 }
7973 s = NULL_TREE;
7974 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7975 {
7976 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7977 s = TREE_TYPE (ovar);
7978 if (TREE_CODE (s) == REFERENCE_TYPE)
7979 s = TREE_TYPE (s);
7980 s = TYPE_SIZE_UNIT (s);
7981 }
7982 else
7983 s = OMP_CLAUSE_SIZE (c);
7984 if (s == NULL_TREE)
7985 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7986 s = fold_convert (size_type_node, s);
7987 purpose = size_int (map_idx++);
7988 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7989 if (TREE_CODE (s) != INTEGER_CST)
7990 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
777f7f9a 7991
629b3d75
MJ
7992 unsigned HOST_WIDE_INT tkind, tkind_zero;
7993 switch (OMP_CLAUSE_CODE (c))
7994 {
7995 case OMP_CLAUSE_MAP:
7996 tkind = OMP_CLAUSE_MAP_KIND (c);
7997 tkind_zero = tkind;
7998 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7999 switch (tkind)
8000 {
8001 case GOMP_MAP_ALLOC:
8002 case GOMP_MAP_TO:
8003 case GOMP_MAP_FROM:
8004 case GOMP_MAP_TOFROM:
8005 case GOMP_MAP_ALWAYS_TO:
8006 case GOMP_MAP_ALWAYS_FROM:
8007 case GOMP_MAP_ALWAYS_TOFROM:
8008 case GOMP_MAP_RELEASE:
8009 case GOMP_MAP_FORCE_TO:
8010 case GOMP_MAP_FORCE_FROM:
8011 case GOMP_MAP_FORCE_TOFROM:
8012 case GOMP_MAP_FORCE_PRESENT:
8013 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
8014 break;
8015 case GOMP_MAP_DELETE:
8016 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
8017 default:
8018 break;
8019 }
8020 if (tkind_zero != tkind)
8021 {
8022 if (integer_zerop (s))
8023 tkind = tkind_zero;
8024 else if (integer_nonzerop (s))
8025 tkind_zero = tkind;
8026 }
8027 break;
8028 case OMP_CLAUSE_FIRSTPRIVATE:
8029 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8030 tkind = GOMP_MAP_TO;
8031 tkind_zero = tkind;
8032 break;
8033 case OMP_CLAUSE_TO:
8034 tkind = GOMP_MAP_TO;
8035 tkind_zero = tkind;
8036 break;
8037 case OMP_CLAUSE_FROM:
8038 tkind = GOMP_MAP_FROM;
8039 tkind_zero = tkind;
8040 break;
8041 default:
8042 gcc_unreachable ();
8043 }
8044 gcc_checking_assert (tkind
8045 < (HOST_WIDE_INT_C (1U) << talign_shift));
8046 gcc_checking_assert (tkind_zero
8047 < (HOST_WIDE_INT_C (1U) << talign_shift));
8048 talign = ceil_log2 (talign);
8049 tkind |= talign << talign_shift;
8050 tkind_zero |= talign << talign_shift;
8051 gcc_checking_assert (tkind
8052 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8053 gcc_checking_assert (tkind_zero
8054 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8055 if (tkind == tkind_zero)
8056 x = build_int_cstu (tkind_type, tkind);
8057 else
8058 {
8059 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8060 x = build3 (COND_EXPR, tkind_type,
8061 fold_build2 (EQ_EXPR, boolean_type_node,
8062 unshare_expr (s), size_zero_node),
8063 build_int_cstu (tkind_type, tkind_zero),
8064 build_int_cstu (tkind_type, tkind));
8065 }
8066 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8067 if (nc && nc != c)
8068 c = nc;
8069 break;
05409788 8070
629b3d75
MJ
8071 case OMP_CLAUSE_FIRSTPRIVATE:
8072 if (is_oacc_parallel (ctx))
8073 goto oacc_firstprivate_map;
8074 ovar = OMP_CLAUSE_DECL (c);
8075 if (omp_is_reference (ovar))
8076 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8077 else
8078 talign = DECL_ALIGN_UNIT (ovar);
8079 var = lookup_decl_in_outer_ctx (ovar, ctx);
8080 x = build_sender_ref (ovar, ctx);
8081 tkind = GOMP_MAP_FIRSTPRIVATE;
8082 type = TREE_TYPE (ovar);
8083 if (omp_is_reference (ovar))
8084 type = TREE_TYPE (type);
8085 if ((INTEGRAL_TYPE_P (type)
8086 && TYPE_PRECISION (type) <= POINTER_SIZE)
8087 || TREE_CODE (type) == POINTER_TYPE)
8088 {
8089 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8090 tree t = var;
8091 if (omp_is_reference (var))
8092 t = build_simple_mem_ref (var);
8093 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8094 TREE_NO_WARNING (var) = 1;
8095 if (TREE_CODE (type) != POINTER_TYPE)
8096 t = fold_convert (pointer_sized_int_node, t);
8097 t = fold_convert (TREE_TYPE (x), t);
8098 gimplify_assign (x, t, &ilist);
8099 }
8100 else if (omp_is_reference (var))
8101 gimplify_assign (x, var, &ilist);
8102 else if (is_gimple_reg (var))
8103 {
8104 tree avar = create_tmp_var (TREE_TYPE (var));
8105 mark_addressable (avar);
8106 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8107 TREE_NO_WARNING (var) = 1;
8108 gimplify_assign (avar, var, &ilist);
8109 avar = build_fold_addr_expr (avar);
8110 gimplify_assign (x, avar, &ilist);
8111 }
8112 else
8113 {
8114 var = build_fold_addr_expr (var);
8115 gimplify_assign (x, var, &ilist);
8116 }
8117 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8118 s = size_int (0);
8119 else if (omp_is_reference (ovar))
8120 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8121 else
8122 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8123 s = fold_convert (size_type_node, s);
8124 purpose = size_int (map_idx++);
8125 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8126 if (TREE_CODE (s) != INTEGER_CST)
8127 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
05409788 8128
629b3d75
MJ
8129 gcc_checking_assert (tkind
8130 < (HOST_WIDE_INT_C (1U) << talign_shift));
8131 talign = ceil_log2 (talign);
8132 tkind |= talign << talign_shift;
8133 gcc_checking_assert (tkind
8134 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8135 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8136 build_int_cstu (tkind_type, tkind));
8137 break;
05409788 8138
629b3d75
MJ
8139 case OMP_CLAUSE_USE_DEVICE_PTR:
8140 case OMP_CLAUSE_IS_DEVICE_PTR:
8141 ovar = OMP_CLAUSE_DECL (c);
8142 var = lookup_decl_in_outer_ctx (ovar, ctx);
8143 x = build_sender_ref (ovar, ctx);
8144 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8145 tkind = GOMP_MAP_USE_DEVICE_PTR;
8146 else
8147 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8148 type = TREE_TYPE (ovar);
8149 if (TREE_CODE (type) == ARRAY_TYPE)
8150 var = build_fold_addr_expr (var);
8151 else
8152 {
8153 if (omp_is_reference (ovar))
8154 {
8155 type = TREE_TYPE (type);
8156 if (TREE_CODE (type) != ARRAY_TYPE)
8157 var = build_simple_mem_ref (var);
8158 var = fold_convert (TREE_TYPE (x), var);
8159 }
8160 }
8161 gimplify_assign (x, var, &ilist);
8162 s = size_int (0);
8163 purpose = size_int (map_idx++);
8164 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8165 gcc_checking_assert (tkind
8166 < (HOST_WIDE_INT_C (1U) << talign_shift));
8167 gcc_checking_assert (tkind
8168 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8169 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8170 build_int_cstu (tkind_type, tkind));
8171 break;
8172 }
05409788 8173
629b3d75 8174 gcc_assert (map_idx == map_cnt);
20906c66 8175
629b3d75
MJ
8176 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8177 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8178 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8179 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8180 for (int i = 1; i <= 2; i++)
8181 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8182 {
8183 gimple_seq initlist = NULL;
8184 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8185 TREE_VEC_ELT (t, i)),
8186 &initlist, true, NULL_TREE);
8187 gimple_seq_add_seq (&ilist, initlist);
20906c66 8188
629b3d75
MJ
8189 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8190 NULL);
8191 TREE_THIS_VOLATILE (clobber) = 1;
8192 gimple_seq_add_stmt (&olist,
8193 gimple_build_assign (TREE_VEC_ELT (t, i),
8194 clobber));
8195 }
05409788 8196
629b3d75
MJ
8197 tree clobber = build_constructor (ctx->record_type, NULL);
8198 TREE_THIS_VOLATILE (clobber) = 1;
8199 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8200 clobber));
8201 }
05409788 8202
629b3d75
MJ
8203 /* Once all the expansions are done, sequence all the different
8204 fragments inside gimple_omp_body. */
05409788 8205
629b3d75 8206 new_body = NULL;
05409788 8207
629b3d75
MJ
8208 if (offloaded
8209 && ctx->record_type)
05409788 8210 {
629b3d75
MJ
8211 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8212 /* fixup_child_record_type might have changed receiver_decl's type. */
8213 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8214 gimple_seq_add_stmt (&new_body,
8215 gimple_build_assign (ctx->receiver_decl, t));
05409788 8216 }
629b3d75 8217 gimple_seq_add_seq (&new_body, fplist);
05409788 8218
629b3d75 8219 if (offloaded || data_region)
0645c1a2 8220 {
629b3d75
MJ
8221 tree prev = NULL_TREE;
8222 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8223 switch (OMP_CLAUSE_CODE (c))
0645c1a2 8224 {
629b3d75
MJ
8225 tree var, x;
8226 default:
8227 break;
8228 case OMP_CLAUSE_FIRSTPRIVATE:
8229 if (is_gimple_omp_oacc (ctx->stmt))
8230 break;
8231 var = OMP_CLAUSE_DECL (c);
8232 if (omp_is_reference (var)
8233 || is_gimple_reg_type (TREE_TYPE (var)))
0645c1a2 8234 {
629b3d75
MJ
8235 tree new_var = lookup_decl (var, ctx);
8236 tree type;
8237 type = TREE_TYPE (var);
8238 if (omp_is_reference (var))
8239 type = TREE_TYPE (type);
8240 if ((INTEGRAL_TYPE_P (type)
8241 && TYPE_PRECISION (type) <= POINTER_SIZE)
8242 || TREE_CODE (type) == POINTER_TYPE)
8243 {
8244 x = build_receiver_ref (var, false, ctx);
8245 if (TREE_CODE (type) != POINTER_TYPE)
8246 x = fold_convert (pointer_sized_int_node, x);
8247 x = fold_convert (type, x);
8248 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8249 fb_rvalue);
8250 if (omp_is_reference (var))
8251 {
8252 tree v = create_tmp_var_raw (type, get_name (var));
8253 gimple_add_tmp_var (v);
8254 TREE_ADDRESSABLE (v) = 1;
8255 gimple_seq_add_stmt (&new_body,
8256 gimple_build_assign (v, x));
8257 x = build_fold_addr_expr (v);
8258 }
8259 gimple_seq_add_stmt (&new_body,
8260 gimple_build_assign (new_var, x));
8261 }
8262 else
8263 {
8264 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8265 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8266 fb_rvalue);
8267 gimple_seq_add_stmt (&new_body,
8268 gimple_build_assign (new_var, x));
8269 }
8270 }
8271 else if (is_variable_sized (var))
8272 {
8273 tree pvar = DECL_VALUE_EXPR (var);
8274 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8275 pvar = TREE_OPERAND (pvar, 0);
8276 gcc_assert (DECL_P (pvar));
8277 tree new_var = lookup_decl (pvar, ctx);
8278 x = build_receiver_ref (var, false, ctx);
8279 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8280 gimple_seq_add_stmt (&new_body,
8281 gimple_build_assign (new_var, x));
8282 }
8283 break;
8284 case OMP_CLAUSE_PRIVATE:
8285 if (is_gimple_omp_oacc (ctx->stmt))
8286 break;
8287 var = OMP_CLAUSE_DECL (c);
8288 if (omp_is_reference (var))
8289 {
8290 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8291 tree new_var = lookup_decl (var, ctx);
8292 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8293 if (TREE_CONSTANT (x))
8294 {
8295 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8296 get_name (var));
8297 gimple_add_tmp_var (x);
8298 TREE_ADDRESSABLE (x) = 1;
8299 x = build_fold_addr_expr_loc (clause_loc, x);
8300 }
8301 else
8302 break;
9bd46bc9 8303
629b3d75
MJ
8304 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8305 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8306 gimple_seq_add_stmt (&new_body,
8307 gimple_build_assign (new_var, x));
8308 }
8309 break;
8310 case OMP_CLAUSE_USE_DEVICE_PTR:
8311 case OMP_CLAUSE_IS_DEVICE_PTR:
8312 var = OMP_CLAUSE_DECL (c);
8313 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8314 x = build_sender_ref (var, ctx);
8315 else
8316 x = build_receiver_ref (var, false, ctx);
8317 if (is_variable_sized (var))
8318 {
8319 tree pvar = DECL_VALUE_EXPR (var);
8320 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8321 pvar = TREE_OPERAND (pvar, 0);
8322 gcc_assert (DECL_P (pvar));
8323 tree new_var = lookup_decl (pvar, ctx);
8324 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8325 gimple_seq_add_stmt (&new_body,
8326 gimple_build_assign (new_var, x));
8327 }
8328 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8329 {
8330 tree new_var = lookup_decl (var, ctx);
8331 new_var = DECL_VALUE_EXPR (new_var);
8332 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8333 new_var = TREE_OPERAND (new_var, 0);
8334 gcc_assert (DECL_P (new_var));
8335 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8336 gimple_seq_add_stmt (&new_body,
8337 gimple_build_assign (new_var, x));
8338 }
9bd46bc9 8339 else
629b3d75
MJ
8340 {
8341 tree type = TREE_TYPE (var);
8342 tree new_var = lookup_decl (var, ctx);
8343 if (omp_is_reference (var))
8344 {
8345 type = TREE_TYPE (type);
8346 if (TREE_CODE (type) != ARRAY_TYPE)
8347 {
8348 tree v = create_tmp_var_raw (type, get_name (var));
8349 gimple_add_tmp_var (v);
8350 TREE_ADDRESSABLE (v) = 1;
8351 x = fold_convert (type, x);
8352 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8353 fb_rvalue);
8354 gimple_seq_add_stmt (&new_body,
8355 gimple_build_assign (v, x));
8356 x = build_fold_addr_expr (v);
8357 }
8358 }
8359 new_var = DECL_VALUE_EXPR (new_var);
8360 x = fold_convert (TREE_TYPE (new_var), x);
8361 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8362 gimple_seq_add_stmt (&new_body,
8363 gimple_build_assign (new_var, x));
8364 }
8365 break;
9bd46bc9 8366 }
629b3d75
MJ
8367	  /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8368	     so that any firstprivate vars that OMP_CLAUSE_SIZE may refer to
8369	     have already been handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
8370	     or references to VLAs.  */
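	  /* Editorial sketch, not part of the original sources: assuming a
	     pointer-based array-section mapping such as

	         int *p = ...;
	         #pragma omp target map(tofrom: p[10:n])

	     the map of the section is followed by a
	     GOMP_MAP_FIRSTPRIVATE_POINTER clause for 'p' whose
	     OMP_CLAUSE_SIZE carries the bias (roughly 10 * sizeof (int));
	     the second pass below rewrites the privatized 'p' so that it
	     points at the received section address minus that bias.  */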
8371 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8372 switch (OMP_CLAUSE_CODE (c))
8373 {
8374 tree var;
8375 default:
8376 break;
8377 case OMP_CLAUSE_MAP:
8378 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8379 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8380 {
8381 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8382 HOST_WIDE_INT offset = 0;
8383 gcc_assert (prev);
8384 var = OMP_CLAUSE_DECL (c);
8385 if (DECL_P (var)
8386 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8387 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8388 ctx))
8389 && varpool_node::get_create (var)->offloadable)
8390 break;
8391 if (TREE_CODE (var) == INDIRECT_REF
8392 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8393 var = TREE_OPERAND (var, 0);
8394 if (TREE_CODE (var) == COMPONENT_REF)
8395 {
8396 var = get_addr_base_and_unit_offset (var, &offset);
8397 gcc_assert (var != NULL_TREE && DECL_P (var));
8398 }
8399 else if (DECL_SIZE (var)
8400 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8401 {
8402 tree var2 = DECL_VALUE_EXPR (var);
8403 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8404 var2 = TREE_OPERAND (var2, 0);
8405 gcc_assert (DECL_P (var2));
8406 var = var2;
8407 }
8408 tree new_var = lookup_decl (var, ctx), x;
8409 tree type = TREE_TYPE (new_var);
8410 bool is_ref;
8411 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8412 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8413 == COMPONENT_REF))
8414 {
8415 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8416 is_ref = true;
8417 new_var = build2 (MEM_REF, type,
8418 build_fold_addr_expr (new_var),
8419 build_int_cst (build_pointer_type (type),
8420 offset));
8421 }
8422 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8423 {
8424 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8425 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8426 new_var = build2 (MEM_REF, type,
8427 build_fold_addr_expr (new_var),
8428 build_int_cst (build_pointer_type (type),
8429 offset));
8430 }
8431 else
8432 is_ref = omp_is_reference (var);
8433 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8434 is_ref = false;
8435 bool ref_to_array = false;
8436 if (is_ref)
8437 {
8438 type = TREE_TYPE (type);
8439 if (TREE_CODE (type) == ARRAY_TYPE)
8440 {
8441 type = build_pointer_type (type);
8442 ref_to_array = true;
8443 }
8444 }
8445 else if (TREE_CODE (type) == ARRAY_TYPE)
8446 {
8447 tree decl2 = DECL_VALUE_EXPR (new_var);
8448 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8449 decl2 = TREE_OPERAND (decl2, 0);
8450 gcc_assert (DECL_P (decl2));
8451 new_var = decl2;
8452 type = TREE_TYPE (new_var);
8453 }
8454 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8455 x = fold_convert_loc (clause_loc, type, x);
8456 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8457 {
8458 tree bias = OMP_CLAUSE_SIZE (c);
8459 if (DECL_P (bias))
8460 bias = lookup_decl (bias, ctx);
8461 bias = fold_convert_loc (clause_loc, sizetype, bias);
8462 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8463 bias);
8464 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8465 TREE_TYPE (x), x, bias);
8466 }
8467 if (ref_to_array)
8468 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8469 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8470 if (is_ref && !ref_to_array)
8471 {
8472 tree t = create_tmp_var_raw (type, get_name (var));
8473 gimple_add_tmp_var (t);
8474 TREE_ADDRESSABLE (t) = 1;
8475 gimple_seq_add_stmt (&new_body,
8476 gimple_build_assign (t, x));
8477 x = build_fold_addr_expr_loc (clause_loc, t);
8478 }
8479 gimple_seq_add_stmt (&new_body,
8480 gimple_build_assign (new_var, x));
8481 prev = NULL_TREE;
8482 }
8483 else if (OMP_CLAUSE_CHAIN (c)
8484 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8485 == OMP_CLAUSE_MAP
8486 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8487 == GOMP_MAP_FIRSTPRIVATE_POINTER
8488 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8489 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8490 prev = c;
8491 break;
8492 case OMP_CLAUSE_PRIVATE:
8493 var = OMP_CLAUSE_DECL (c);
8494 if (is_variable_sized (var))
8495 {
8496 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8497 tree new_var = lookup_decl (var, ctx);
8498 tree pvar = DECL_VALUE_EXPR (var);
8499 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8500 pvar = TREE_OPERAND (pvar, 0);
8501 gcc_assert (DECL_P (pvar));
8502 tree new_pvar = lookup_decl (pvar, ctx);
8503 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8504 tree al = size_int (DECL_ALIGN (var));
8505 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8506 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8507 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8508 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8509 gimple_seq_add_stmt (&new_body,
8510 gimple_build_assign (new_pvar, x));
8511 }
8512 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8513 {
8514 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8515 tree new_var = lookup_decl (var, ctx);
8516 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8517 if (TREE_CONSTANT (x))
8518 break;
8519 else
8520 {
8521 tree atmp
8522 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8523 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8524 tree al = size_int (TYPE_ALIGN (rtype));
8525 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8526 }
9bd46bc9 8527
629b3d75
MJ
8528 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8529 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8530 gimple_seq_add_stmt (&new_body,
8531 gimple_build_assign (new_var, x));
8532 }
8533 break;
8534 }
9bd46bc9 8535
629b3d75
MJ
8536 gimple_seq fork_seq = NULL;
8537 gimple_seq join_seq = NULL;
9bd46bc9 8538
629b3d75 8539 if (is_oacc_parallel (ctx))
9bd46bc9 8540 {
629b3d75
MJ
8541 /* If there are reductions on the offloaded region itself, treat
8542 them as a dummy GANG loop. */
8543 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
9bd46bc9 8544
629b3d75
MJ
8545 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8546 false, NULL, NULL, &fork_seq, &join_seq, ctx);
9bd46bc9 8547 }
9bd46bc9 8548
629b3d75
MJ
8549 gimple_seq_add_seq (&new_body, fork_seq);
8550 gimple_seq_add_seq (&new_body, tgt_body);
8551 gimple_seq_add_seq (&new_body, join_seq);
9bd46bc9 8552
629b3d75
MJ
8553 if (offloaded)
8554 new_body = maybe_catch_exception (new_body);
9bd46bc9 8555
629b3d75
MJ
8556 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8557 gimple_omp_set_body (stmt, new_body);
9bd46bc9
NS
8558 }
8559
629b3d75
MJ
8560 bind = gimple_build_bind (NULL, NULL,
8561 tgt_bind ? gimple_bind_block (tgt_bind)
8562 : NULL_TREE);
8563 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8564 gimple_bind_add_seq (bind, ilist);
8565 gimple_bind_add_stmt (bind, stmt);
8566 gimple_bind_add_seq (bind, olist);
9bd46bc9
NS
8567
8568 pop_gimplify_context (NULL);
8569
629b3d75 8570 if (dep_bind)
b6adbb9f 8571 {
629b3d75
MJ
8572 gimple_bind_add_seq (dep_bind, dep_ilist);
8573 gimple_bind_add_stmt (dep_bind, bind);
8574 gimple_bind_add_seq (dep_bind, dep_olist);
8575 pop_gimplify_context (dep_bind);
b6adbb9f 8576 }
b6adbb9f
NS
8577}
8578
629b3d75 8579/* Expand code for an OpenMP teams directive. */
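/* Editorial illustration, not part of the original sources: a directive
   such as

       #pragma omp teams num_teams(4) thread_limit(8)
       { ... }

   is lowered here so that the teams statement is followed by a runtime
   call roughly equivalent to

       GOMP_teams (4, 8);

   with 0 passed for an argument whose clause is absent, as built
   explicitly below.  */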
94829f87 8580
f8393eb0 8581static void
629b3d75 8582lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
94829f87 8583{
629b3d75
MJ
8584 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8585 push_gimplify_context ();
94829f87 8586
629b3d75
MJ
8587 tree block = make_node (BLOCK);
8588 gbind *bind = gimple_build_bind (NULL, NULL, block);
8589 gsi_replace (gsi_p, bind, true);
8590 gimple_seq bind_body = NULL;
8591 gimple_seq dlist = NULL;
8592 gimple_seq olist = NULL;
94829f87 8593
629b3d75
MJ
8594 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8595 OMP_CLAUSE_NUM_TEAMS);
8596 if (num_teams == NULL_TREE)
8597 num_teams = build_int_cst (unsigned_type_node, 0);
8598 else
94829f87 8599 {
629b3d75
MJ
8600 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8601 num_teams = fold_convert (unsigned_type_node, num_teams);
8602 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
94829f87 8603 }
629b3d75
MJ
8604 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8605 OMP_CLAUSE_THREAD_LIMIT);
8606 if (thread_limit == NULL_TREE)
8607 thread_limit = build_int_cst (unsigned_type_node, 0);
8608 else
94829f87 8609 {
629b3d75
MJ
8610 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8611 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8612 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8613 fb_rvalue);
94829f87 8614 }
9bd46bc9 8615
629b3d75
MJ
8616 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8617 &bind_body, &dlist, ctx, NULL);
8618 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8619 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8620 if (!gimple_omp_teams_grid_phony (teams_stmt))
9bd46bc9 8621 {
629b3d75
MJ
8622 gimple_seq_add_stmt (&bind_body, teams_stmt);
8623 location_t loc = gimple_location (teams_stmt);
8624 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8625 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8626 gimple_set_location (call, loc);
8627 gimple_seq_add_stmt (&bind_body, call);
9bd46bc9
NS
8628 }
8629
629b3d75
MJ
8630 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8631 gimple_omp_set_body (teams_stmt, NULL);
8632 gimple_seq_add_seq (&bind_body, olist);
8633 gimple_seq_add_seq (&bind_body, dlist);
8634 if (!gimple_omp_teams_grid_phony (teams_stmt))
8635 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8636 gimple_bind_set_body (bind, bind_body);
9bd46bc9 8637
629b3d75 8638 pop_gimplify_context (bind);
9bd46bc9 8639
629b3d75
MJ
8640 gimple_bind_append_vars (bind, ctx->block_vars);
8641 BLOCK_VARS (block) = ctx->block_vars;
8642 if (BLOCK_VARS (block))
8643 TREE_USED (block) = 1;
9bd46bc9
NS
8644}
8645
629b3d75 8646/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
9bd46bc9 8647
629b3d75
MJ
8648static void
8649lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9bd46bc9 8650{
629b3d75
MJ
8651 gimple *stmt = gsi_stmt (*gsi_p);
8652 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8653 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8654 gimple_build_omp_return (false));
9bd46bc9
NS
8655}
8656
9bd46bc9 8657
629b3d75
MJ
8658/* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8659 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8660 of OMP context, but with task_shared_vars set. */
9bd46bc9 8661
629b3d75
MJ
8662static tree
8663lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8664 void *data)
9bd46bc9 8665{
629b3d75 8666 tree t = *tp;
9bd46bc9 8667
629b3d75
MJ
8668 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8669 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8670 return t;
9bd46bc9 8671
629b3d75
MJ
8672 if (task_shared_vars
8673 && DECL_P (t)
8674 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8675 return t;
9bd46bc9 8676
629b3d75
MJ
8677 /* If a global variable has been privatized, TREE_CONSTANT on
8678 ADDR_EXPR might be wrong. */
8679 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8680 recompute_tree_invariant_for_addr_expr (t);
9bd46bc9 8681
629b3d75
MJ
8682 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8683 return NULL_TREE;
9bd46bc9
NS
8684}
8685
629b3d75
MJ
8686/* Data to be communicated between lower_omp_regimplify_operands and
8687 lower_omp_regimplify_operands_p. */
9bd46bc9 8688
629b3d75 8689struct lower_omp_regimplify_operands_data
9bd46bc9 8690{
629b3d75
MJ
8691 omp_context *ctx;
8692 vec<tree> *decls;
8693};
9bd46bc9 8694
629b3d75
MJ
8695/* Helper function for lower_omp_regimplify_operands. Find
8696   omp_member_access_dummy_var vars and temporarily adjust their
8697   DECL_VALUE_EXPRs if needed.  */
9bd46bc9 8698
629b3d75
MJ
8699static tree
8700lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8701 void *data)
9bd46bc9 8702{
629b3d75
MJ
8703 tree t = omp_member_access_dummy_var (*tp);
8704 if (t)
9bd46bc9 8705 {
629b3d75
MJ
8706 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8707 lower_omp_regimplify_operands_data *ldata
8708 = (lower_omp_regimplify_operands_data *) wi->info;
8709 tree o = maybe_lookup_decl (t, ldata->ctx);
8710 if (o != t)
9bd46bc9 8711 {
629b3d75
MJ
8712 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8713 ldata->decls->safe_push (*tp);
8714 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8715 SET_DECL_VALUE_EXPR (*tp, v);
9bd46bc9 8716 }
9bd46bc9 8717 }
629b3d75
MJ
8718 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8719 return NULL_TREE;
9bd46bc9
NS
8720}
8721
629b3d75
MJ
8722/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8723 of omp_member_access_dummy_var vars during regimplification. */
9bd46bc9
NS
8724
8725static void
629b3d75
MJ
8726lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8727 gimple_stmt_iterator *gsi_p)
9bd46bc9 8728{
629b3d75
MJ
8729 auto_vec<tree, 10> decls;
8730 if (ctx)
8731 {
8732 struct walk_stmt_info wi;
8733 memset (&wi, '\0', sizeof (wi));
8734 struct lower_omp_regimplify_operands_data data;
8735 data.ctx = ctx;
8736 data.decls = &decls;
8737 wi.info = &data;
8738 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8739 }
8740 gimple_regimplify_operands (stmt, gsi_p);
8741 while (!decls.is_empty ())
8742 {
8743 tree t = decls.pop ();
8744 tree v = decls.pop ();
8745 SET_DECL_VALUE_EXPR (t, v);
8746 }
9bd46bc9
NS
8747}
8748
9bd46bc9 8749static void
629b3d75 8750lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9bd46bc9 8751{
629b3d75
MJ
8752 gimple *stmt = gsi_stmt (*gsi_p);
8753 struct walk_stmt_info wi;
8754 gcall *call_stmt;
9bd46bc9 8755
629b3d75
MJ
8756 if (gimple_has_location (stmt))
8757 input_location = gimple_location (stmt);
9bd46bc9 8758
629b3d75
MJ
8759 if (task_shared_vars)
8760 memset (&wi, '\0', sizeof (wi));
9bd46bc9 8761
629b3d75
MJ
8762 /* If we have issued syntax errors, avoid doing any heavy lifting.
8763 Just replace the OMP directives with a NOP to avoid
8764 confusing RTL expansion. */
8765 if (seen_error () && is_gimple_omp (stmt))
9bd46bc9 8766 {
629b3d75
MJ
8767 gsi_replace (gsi_p, gimple_build_nop (), true);
8768 return;
8769 }
9bd46bc9 8770
629b3d75
MJ
8771 switch (gimple_code (stmt))
8772 {
8773 case GIMPLE_COND:
8774 {
8775 gcond *cond_stmt = as_a <gcond *> (stmt);
8776 if ((ctx || task_shared_vars)
8777 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8778 lower_omp_regimplify_p,
8779 ctx ? NULL : &wi, NULL)
8780 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8781 lower_omp_regimplify_p,
8782 ctx ? NULL : &wi, NULL)))
8783 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8784 }
8785 break;
8786 case GIMPLE_CATCH:
8787 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8788 break;
8789 case GIMPLE_EH_FILTER:
8790 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8791 break;
8792 case GIMPLE_TRY:
8793 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8794 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8795 break;
8796 case GIMPLE_TRANSACTION:
01914336 8797 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
629b3d75
MJ
8798 ctx);
8799 break;
8800 case GIMPLE_BIND:
8801 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8802 break;
8803 case GIMPLE_OMP_PARALLEL:
8804 case GIMPLE_OMP_TASK:
8805 ctx = maybe_lookup_ctx (stmt);
8806 gcc_assert (ctx);
8807 if (ctx->cancellable)
8808 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8809 lower_omp_taskreg (gsi_p, ctx);
8810 break;
8811 case GIMPLE_OMP_FOR:
8812 ctx = maybe_lookup_ctx (stmt);
8813 gcc_assert (ctx);
8814 if (ctx->cancellable)
8815 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8816 lower_omp_for (gsi_p, ctx);
8817 break;
8818 case GIMPLE_OMP_SECTIONS:
8819 ctx = maybe_lookup_ctx (stmt);
8820 gcc_assert (ctx);
8821 if (ctx->cancellable)
8822 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8823 lower_omp_sections (gsi_p, ctx);
8824 break;
8825 case GIMPLE_OMP_SINGLE:
8826 ctx = maybe_lookup_ctx (stmt);
8827 gcc_assert (ctx);
8828 lower_omp_single (gsi_p, ctx);
8829 break;
8830 case GIMPLE_OMP_MASTER:
8831 ctx = maybe_lookup_ctx (stmt);
8832 gcc_assert (ctx);
8833 lower_omp_master (gsi_p, ctx);
8834 break;
8835 case GIMPLE_OMP_TASKGROUP:
8836 ctx = maybe_lookup_ctx (stmt);
8837 gcc_assert (ctx);
8838 lower_omp_taskgroup (gsi_p, ctx);
8839 break;
8840 case GIMPLE_OMP_ORDERED:
8841 ctx = maybe_lookup_ctx (stmt);
8842 gcc_assert (ctx);
8843 lower_omp_ordered (gsi_p, ctx);
8844 break;
8845 case GIMPLE_OMP_CRITICAL:
8846 ctx = maybe_lookup_ctx (stmt);
8847 gcc_assert (ctx);
8848 lower_omp_critical (gsi_p, ctx);
8849 break;
8850 case GIMPLE_OMP_ATOMIC_LOAD:
8851 if ((ctx || task_shared_vars)
8852 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8853 as_a <gomp_atomic_load *> (stmt)),
8854 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8855 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8856 break;
8857 case GIMPLE_OMP_TARGET:
8858 ctx = maybe_lookup_ctx (stmt);
8859 gcc_assert (ctx);
8860 lower_omp_target (gsi_p, ctx);
8861 break;
8862 case GIMPLE_OMP_TEAMS:
8863 ctx = maybe_lookup_ctx (stmt);
8864 gcc_assert (ctx);
8865 lower_omp_teams (gsi_p, ctx);
8866 break;
8867 case GIMPLE_OMP_GRID_BODY:
8868 ctx = maybe_lookup_ctx (stmt);
8869 gcc_assert (ctx);
8870 lower_omp_grid_body (gsi_p, ctx);
8871 break;
8872 case GIMPLE_CALL:
8873 tree fndecl;
8874 call_stmt = as_a <gcall *> (stmt);
8875 fndecl = gimple_call_fndecl (call_stmt);
8876 if (fndecl
8877 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8878 switch (DECL_FUNCTION_CODE (fndecl))
9bd46bc9 8879 {
629b3d75
MJ
8880 case BUILT_IN_GOMP_BARRIER:
8881 if (ctx == NULL)
8882 break;
8883 /* FALLTHRU */
8884 case BUILT_IN_GOMP_CANCEL:
8885 case BUILT_IN_GOMP_CANCELLATION_POINT:
8886 omp_context *cctx;
8887 cctx = ctx;
8888 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8889 cctx = cctx->outer;
8890 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8891 if (!cctx->cancellable)
8892 {
8893 if (DECL_FUNCTION_CODE (fndecl)
8894 == BUILT_IN_GOMP_CANCELLATION_POINT)
8895 {
8896 stmt = gimple_build_nop ();
8897 gsi_replace (gsi_p, stmt, false);
8898 }
8899 break;
8900 }
8901 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8902 {
8903 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8904 gimple_call_set_fndecl (call_stmt, fndecl);
8905 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8906 }
8907 tree lhs;
8908 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8909 gimple_call_set_lhs (call_stmt, lhs);
8910 tree fallthru_label;
8911 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8912 gimple *g;
8913 g = gimple_build_label (fallthru_label);
8914 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8915 g = gimple_build_cond (NE_EXPR, lhs,
8916 fold_convert (TREE_TYPE (lhs),
8917 boolean_false_node),
8918 cctx->cancel_label, fallthru_label);
8919 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8920 break;
8921 default:
8922 break;
9bd46bc9 8923 }
629b3d75
MJ
8924 /* FALLTHRU */
8925 default:
8926 if ((ctx || task_shared_vars)
8927 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8928 ctx ? NULL : &wi))
9bd46bc9 8929 {
629b3d75
MJ
8930	  /* Just remove clobbers; this should happen only if we have
8931	     "privatized" local addressable variables in SIMD regions.
8932	     The clobber isn't needed in that case, and gimplifying the address
8933	     of the ARRAY_REF into a pointer and creating a MEM_REF based
8934	     clobber would create worse code than we get with the clobber
8935	     dropped.  */
8936 if (gimple_clobber_p (stmt))
4ae13300 8937 {
629b3d75
MJ
8938 gsi_replace (gsi_p, gimple_build_nop (), true);
8939 break;
9bd46bc9 8940 }
629b3d75 8941 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
9bd46bc9 8942 }
629b3d75 8943 break;
9bd46bc9 8944 }
9bd46bc9
NS
8945}
8946
9bd46bc9 8947static void
629b3d75 8948lower_omp (gimple_seq *body, omp_context *ctx)
9bd46bc9 8949{
629b3d75
MJ
8950 location_t saved_location = input_location;
8951 gimple_stmt_iterator gsi;
8952 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8953 lower_omp_1 (&gsi, ctx);
8954   /* During gimplification, we haven't folded statements inside offloading
8955 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8956 if (target_nesting_level || taskreg_nesting_level)
8957 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8958 fold_stmt (&gsi);
8959 input_location = saved_location;
9bd46bc9
NS
8960}
8961
629b3d75 8962/* Main entry point. */
9bd46bc9 8963
629b3d75
MJ
8964static unsigned int
8965execute_lower_omp (void)
9bd46bc9 8966{
629b3d75
MJ
8967 gimple_seq body;
8968 int i;
8969 omp_context *ctx;
9bd46bc9 8970
629b3d75
MJ
8971 /* This pass always runs, to provide PROP_gimple_lomp.
8972 But often, there is nothing to do. */
8973 if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
8974 && flag_openmp_simd == 0)
8975 return 0;
9bd46bc9 8976
629b3d75
MJ
8977 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8978 delete_omp_context);
9bd46bc9 8979
629b3d75 8980 body = gimple_body (current_function_decl);
9bd46bc9 8981
629b3d75
MJ
8982 if (hsa_gen_requested_p ())
8983 omp_grid_gridify_all_targets (&body);
8984
8985 scan_omp (&body, NULL);
8986 gcc_assert (taskreg_nesting_level == 0);
8987 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8988 finish_taskreg_scan (ctx);
8989 taskreg_contexts.release ();
9bd46bc9 8990
629b3d75
MJ
8991 if (all_contexts->root)
8992 {
8993 if (task_shared_vars)
8994 push_gimplify_context ();
8995 lower_omp (&body, NULL);
8996 if (task_shared_vars)
8997 pop_gimplify_context (NULL);
8998 }
8999
9000 if (all_contexts)
9001 {
9002 splay_tree_delete (all_contexts);
9003 all_contexts = NULL;
9bd46bc9 9004 }
629b3d75
MJ
9005 BITMAP_FREE (task_shared_vars);
9006 return 0;
9bd46bc9
NS
9007}
9008
629b3d75 9009namespace {
9bd46bc9 9010
629b3d75 9011const pass_data pass_data_lower_omp =
9bd46bc9 9012{
629b3d75
MJ
9013 GIMPLE_PASS, /* type */
9014 "omplower", /* name */
fd2b8c8b 9015 OPTGROUP_OMP, /* optinfo_flags */
629b3d75
MJ
9016 TV_NONE, /* tv_id */
9017 PROP_gimple_any, /* properties_required */
9018 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
9019 0, /* properties_destroyed */
9020 0, /* todo_flags_start */
9021 0, /* todo_flags_finish */
9022};
9bd46bc9 9023
629b3d75
MJ
9024class pass_lower_omp : public gimple_opt_pass
9025{
9026public:
9027 pass_lower_omp (gcc::context *ctxt)
9028 : gimple_opt_pass (pass_data_lower_omp, ctxt)
9029 {}
9bd46bc9 9030
629b3d75
MJ
9031 /* opt_pass methods: */
9032 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9bd46bc9 9033
629b3d75 9034}; // class pass_lower_omp
9bd46bc9 9035
629b3d75 9036} // anon namespace
9bd46bc9 9037
629b3d75
MJ
9038gimple_opt_pass *
9039make_pass_lower_omp (gcc::context *ctxt)
9040{
9041 return new pass_lower_omp (ctxt);
9bd46bc9 9042}
629b3d75
MJ
9043\f
9044/* The following is a utility to diagnose structured block violations.
9045 It is not part of the "omplower" pass, as that's invoked too late. It
9046 should be invoked by the respective front ends after gimplification. */
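/* Editorial example (hypothetical user code, not from the original
   sources) of the kind of violation this utility reports: the goto
   below jumps into the body of an OpenMP structured block, which
   diagnose_sb_0 rejects with "invalid entry to OpenMP structured
   block".  The function name is made up for illustration.  */
#if 0
void
example_bad_jump (int x)
{
  if (x)
    goto inside;	/* Branch from outside into the structured block.  */
#pragma omp parallel
  {
  inside:
    x++;
  }
}
#endif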
9bd46bc9 9047
629b3d75 9048static splay_tree all_labels;
9bd46bc9 9049
629b3d75
MJ
9050/* Check for mismatched contexts and generate an error if needed. Return
9051 true if an error is detected. */
9bd46bc9 9052
629b3d75
MJ
9053static bool
9054diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9055 gimple *branch_ctx, gimple *label_ctx)
9056{
9057 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9058 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9bd46bc9 9059
629b3d75
MJ
9060 if (label_ctx == branch_ctx)
9061 return false;
9bd46bc9 9062
629b3d75 9063 const char* kind = NULL;
9bd46bc9 9064
629b3d75 9065 if (flag_cilkplus)
9bd46bc9 9066 {
629b3d75
MJ
9067 if ((branch_ctx
9068 && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
9069 && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
9070 || (label_ctx
9071 && gimple_code (label_ctx) == GIMPLE_OMP_FOR
9072 && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
9073 kind = "Cilk Plus";
9bd46bc9 9074 }
629b3d75 9075 if (flag_openacc)
9bd46bc9 9076 {
629b3d75
MJ
9077 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9078 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9bd46bc9 9079 {
629b3d75
MJ
9080 gcc_checking_assert (kind == NULL);
9081 kind = "OpenACC";
9bd46bc9
NS
9082 }
9083 }
629b3d75 9084 if (kind == NULL)
5b37e866 9085 {
629b3d75
MJ
9086 gcc_checking_assert (flag_openmp);
9087 kind = "OpenMP";
5b37e866 9088 }
9bd46bc9 9089
01914336 9090 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
629b3d75
MJ
9091 so we could traverse it and issue a correct "exit" or "enter" error
9092 message upon a structured block violation.
c5a64cfe 9093
629b3d75
MJ
9094   We built the context by tree_cons'ing a list, but there is
9095 no easy counterpart in gimple tuples. It seems like far too much work
9096 for issuing exit/enter error messages. If someone really misses the
01914336 9097 distinct error message... patches welcome. */
c5a64cfe 9098
629b3d75
MJ
9099#if 0
9100   /* Try to avoid confusing the user by producing an error message
9101 with correct "exit" or "enter" verbiage. We prefer "exit"
9102 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9103 if (branch_ctx == NULL)
9104 exit_p = false;
9105 else
5b37e866 9106 {
629b3d75
MJ
9107 while (label_ctx)
9108 {
9109 if (TREE_VALUE (label_ctx) == branch_ctx)
9110 {
9111 exit_p = false;
9112 break;
9113 }
9114 label_ctx = TREE_CHAIN (label_ctx);
9115 }
5b37e866
NS
9116 }
9117
629b3d75
MJ
9118 if (exit_p)
9119 error ("invalid exit from %s structured block", kind);
9120 else
9121 error ("invalid entry to %s structured block", kind);
9122#endif
5b37e866 9123
629b3d75
MJ
9124 /* If it's obvious we have an invalid entry, be specific about the error. */
9125 if (branch_ctx == NULL)
9126 error ("invalid entry to %s structured block", kind);
9127 else
c5a64cfe 9128 {
629b3d75
MJ
9129 /* Otherwise, be vague and lazy, but efficient. */
9130 error ("invalid branch to/from %s structured block", kind);
c5a64cfe 9131 }
5b37e866 9132
629b3d75
MJ
9133 gsi_replace (gsi_p, gimple_build_nop (), false);
9134 return true;
c5a64cfe
NS
9135}
9136
629b3d75
MJ
9137/* Pass 1: Create a minimal tree of structured blocks, and record
9138 where each label is found. */
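/* For instance (editorial note, assumed example): given

       #pragma omp parallel
       {
       lab:
         ...;
       }

   pass 1 records the mapping from LAB to the enclosing GIMPLE_OMP_PARALLEL
   statement in ALL_LABELS, and pass 2 later compares that recorded context
   with the context of every branch that targets LAB.  */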
9bd46bc9 9139
629b3d75
MJ
9140static tree
9141diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9142 struct walk_stmt_info *wi)
9bd46bc9 9143{
629b3d75
MJ
9144 gimple *context = (gimple *) wi->info;
9145 gimple *inner_context;
9146 gimple *stmt = gsi_stmt (*gsi_p);
9bd46bc9 9147
629b3d75 9148 *handled_ops_p = true;
6e91acf8 9149
629b3d75
MJ
9150 switch (gimple_code (stmt))
9151 {
9152 WALK_SUBSTMTS;
6e91acf8 9153
629b3d75
MJ
9154 case GIMPLE_OMP_PARALLEL:
9155 case GIMPLE_OMP_TASK:
9156 case GIMPLE_OMP_SECTIONS:
9157 case GIMPLE_OMP_SINGLE:
9158 case GIMPLE_OMP_SECTION:
9159 case GIMPLE_OMP_MASTER:
9160 case GIMPLE_OMP_ORDERED:
9161 case GIMPLE_OMP_CRITICAL:
9162 case GIMPLE_OMP_TARGET:
9163 case GIMPLE_OMP_TEAMS:
9164 case GIMPLE_OMP_TASKGROUP:
9165 /* The minimal context here is just the current OMP construct. */
9166 inner_context = stmt;
9167 wi->info = inner_context;
9168 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9169 wi->info = context;
9170 break;
e5014671 9171
629b3d75
MJ
9172 case GIMPLE_OMP_FOR:
9173 inner_context = stmt;
9174 wi->info = inner_context;
9175 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9176 walk them. */
9177 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9178 diagnose_sb_1, NULL, wi);
9179 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9180 wi->info = context;
9181 break;
e5014671 9182
629b3d75
MJ
9183 case GIMPLE_LABEL:
9184 splay_tree_insert (all_labels,
9185 (splay_tree_key) gimple_label_label (
9186 as_a <glabel *> (stmt)),
9187 (splay_tree_value) context);
9188 break;
e5014671 9189
629b3d75
MJ
9190 default:
9191 break;
e5014671
NS
9192 }
9193
629b3d75 9194 return NULL_TREE;
e5014671
NS
9195}
9196
629b3d75
MJ
9197/* Pass 2: Check each branch and see if its context differs from that of
9198 the destination label's context. */
94829f87 9199
629b3d75
MJ
9200static tree
9201diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9202 struct walk_stmt_info *wi)
94829f87 9203{
629b3d75
MJ
9204 gimple *context = (gimple *) wi->info;
9205 splay_tree_node n;
9206 gimple *stmt = gsi_stmt (*gsi_p);
f8393eb0 9207
629b3d75 9208 *handled_ops_p = true;
f8393eb0 9209
629b3d75 9210 switch (gimple_code (stmt))
9bd46bc9 9211 {
629b3d75 9212 WALK_SUBSTMTS;
9bd46bc9 9213
629b3d75
MJ
9214 case GIMPLE_OMP_PARALLEL:
9215 case GIMPLE_OMP_TASK:
9216 case GIMPLE_OMP_SECTIONS:
9217 case GIMPLE_OMP_SINGLE:
9218 case GIMPLE_OMP_SECTION:
9219 case GIMPLE_OMP_MASTER:
9220 case GIMPLE_OMP_ORDERED:
9221 case GIMPLE_OMP_CRITICAL:
9222 case GIMPLE_OMP_TARGET:
9223 case GIMPLE_OMP_TEAMS:
9224 case GIMPLE_OMP_TASKGROUP:
9225 wi->info = stmt;
9226 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9227 wi->info = context;
9228 break;
e5014671 9229
629b3d75
MJ
9230 case GIMPLE_OMP_FOR:
9231 wi->info = stmt;
9232 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9233 walk them. */
9234 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9235 diagnose_sb_2, NULL, wi);
9236 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9237 wi->info = context;
9238 break;
e5014671 9239
629b3d75
MJ
9240 case GIMPLE_COND:
9241 {
9242 gcond *cond_stmt = as_a <gcond *> (stmt);
9243 tree lab = gimple_cond_true_label (cond_stmt);
9244 if (lab)
9bd46bc9 9245 {
629b3d75
MJ
9246 n = splay_tree_lookup (all_labels,
9247 (splay_tree_key) lab);
9248 diagnose_sb_0 (gsi_p, context,
9249 n ? (gimple *) n->value : NULL);
9bd46bc9 9250 }
629b3d75
MJ
9251 lab = gimple_cond_false_label (cond_stmt);
9252 if (lab)
9253 {
9254 n = splay_tree_lookup (all_labels,
9255 (splay_tree_key) lab);
9256 diagnose_sb_0 (gsi_p, context,
9257 n ? (gimple *) n->value : NULL);
9258 }
9259 }
9260 break;
9bd46bc9 9261
629b3d75
MJ
9262 case GIMPLE_GOTO:
9263 {
9264 tree lab = gimple_goto_dest (stmt);
9265 if (TREE_CODE (lab) != LABEL_DECL)
9266 break;
9267
9268 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9269 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9270 }
9271 break;
9bd46bc9 9272
629b3d75
MJ
9273 case GIMPLE_SWITCH:
9274 {
9275 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9276 unsigned int i;
9277 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9bd46bc9 9278 {
629b3d75
MJ
9279 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9280 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9281 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9282 break;
9bd46bc9 9283 }
9bd46bc9 9284 }
629b3d75 9285 break;
9bd46bc9 9286
629b3d75
MJ
9287 case GIMPLE_RETURN:
9288 diagnose_sb_0 (gsi_p, context, NULL);
9289 break;
94829f87 9290
629b3d75
MJ
9291 default:
9292 break;
94829f87
NS
9293 }
9294
629b3d75 9295 return NULL_TREE;
bd751975
NS
9296}
9297
629b3d75
MJ
9298static unsigned int
9299diagnose_omp_structured_block_errors (void)
94829f87 9300{
629b3d75
MJ
9301 struct walk_stmt_info wi;
9302 gimple_seq body = gimple_body (current_function_decl);
346a966e 9303
629b3d75 9304 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
94829f87 9305
629b3d75
MJ
9306 memset (&wi, 0, sizeof (wi));
9307 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
94829f87 9308
629b3d75
MJ
9309 memset (&wi, 0, sizeof (wi));
9310 wi.want_locations = true;
9311 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
94829f87 9312
629b3d75 9313 gimple_set_body (current_function_decl, body);
9669b00b 9314
629b3d75
MJ
9315 splay_tree_delete (all_labels);
9316 all_labels = NULL;
9669b00b 9317
9669b00b
AM
9318 return 0;
9319}
9320
9321namespace {
9322
629b3d75 9323const pass_data pass_data_diagnose_omp_blocks =
9669b00b
AM
9324{
9325 GIMPLE_PASS, /* type */
629b3d75 9326 "*diagnose_omp_blocks", /* name */
fd2b8c8b 9327 OPTGROUP_OMP, /* optinfo_flags */
9669b00b 9328 TV_NONE, /* tv_id */
629b3d75
MJ
9329 PROP_gimple_any, /* properties_required */
9330 0, /* properties_provided */
9669b00b
AM
9331 0, /* properties_destroyed */
9332 0, /* todo_flags_start */
629b3d75 9333 0, /* todo_flags_finish */
9669b00b
AM
9334};
9335
629b3d75 9336class pass_diagnose_omp_blocks : public gimple_opt_pass
9669b00b
AM
9337{
9338public:
629b3d75
MJ
9339 pass_diagnose_omp_blocks (gcc::context *ctxt)
9340 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9669b00b
AM
9341 {}
9342
9343 /* opt_pass methods: */
629b3d75
MJ
9344 virtual bool gate (function *)
9345 {
9346 return flag_cilkplus || flag_openacc || flag_openmp;
9347 }
9669b00b
AM
9348 virtual unsigned int execute (function *)
9349 {
629b3d75 9350 return diagnose_omp_structured_block_errors ();
4a38b02b
IV
9351 }
9352
629b3d75 9353}; // class pass_diagnose_omp_blocks
4a38b02b
IV
9354
9355} // anon namespace
9356
9357gimple_opt_pass *
629b3d75 9358make_pass_diagnose_omp_blocks (gcc::context *ctxt)
4a38b02b 9359{
629b3d75 9360 return new pass_diagnose_omp_blocks (ctxt);
4a38b02b 9361}
629b3d75 9362\f
4a38b02b 9363
953ff289 9364#include "gt-omp-low.h"