]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/omp-low.c
Skip tests for GNU extensions when testing with strict mode
[thirdparty/gcc.git] / gcc / omp-low.c
CommitLineData
41dbbb37
TS
1/* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
953ff289
DN
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
85ec4feb 7 Copyright (C) 2005-2018 Free Software Foundation, Inc.
953ff289
DN
8
9This file is part of GCC.
10
11GCC is free software; you can redistribute it and/or modify it under
12the terms of the GNU General Public License as published by the Free
9dcd6f09 13Software Foundation; either version 3, or (at your option) any later
953ff289
DN
14version.
15
16GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17WARRANTY; without even the implied warranty of MERCHANTABILITY or
18FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19for more details.
20
21You should have received a copy of the GNU General Public License
9dcd6f09
NC
22along with GCC; see the file COPYING3. If not see
23<http://www.gnu.org/licenses/>. */
953ff289
DN
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
c7131fb2 28#include "backend.h"
957060b5 29#include "target.h"
953ff289 30#include "tree.h"
c7131fb2 31#include "gimple.h"
957060b5 32#include "tree-pass.h"
c7131fb2 33#include "ssa.h"
957060b5
AM
34#include "cgraph.h"
35#include "pretty-print.h"
36#include "diagnostic-core.h"
40e23961 37#include "fold-const.h"
d8a2d370 38#include "stor-layout.h"
2fb9a547
AM
39#include "internal-fn.h"
40#include "gimple-fold.h"
45b0be94 41#include "gimplify.h"
5be5c238 42#include "gimple-iterator.h"
18f429e2 43#include "gimplify-me.h"
5be5c238 44#include "gimple-walk.h"
726a989a 45#include "tree-iterator.h"
953ff289
DN
46#include "tree-inline.h"
47#include "langhooks.h"
442b4905 48#include "tree-dfa.h"
7a300452 49#include "tree-ssa.h"
6be42dd4 50#include "splay-tree.h"
629b3d75 51#include "omp-general.h"
0645c1a2 52#include "omp-low.h"
629b3d75 53#include "omp-grid.h"
4484a35a 54#include "gimple-low.h"
dd912cb8 55#include "symbol-summary.h"
1fe37220 56#include "tree-nested.h"
1f6be682 57#include "context.h"
41dbbb37 58#include "gomp-constants.h"
9bd46bc9 59#include "gimple-pretty-print.h"
13293add 60#include "hsa-common.h"
314e6352
ML
61#include "stringpool.h"
62#include "attribs.h"
953ff289 63
41dbbb37 64/* Lowering of OMP parallel and workshare constructs proceeds in two
953ff289
DN
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
c0220ea4 68 re-gimplifying things when variables have been replaced with complex
953ff289
DN
69 expressions.
70
7ebaeab5 71 Final code generation is done by pass_expand_omp. The flowgraph is
41dbbb37
TS
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
953ff289
DN
74
75/* Context structure. Used to store information about each parallel
76 directive in the code. */
77
a79683d5 78struct omp_context
953ff289
DN
79{
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
85
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
355fe088 88 gimple *stmt;
953ff289 89
b8698a0f 90 /* Map variables to fields in a structure that allows communication
953ff289
DN
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
96
a68ab351
JJ
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
104
953ff289
DN
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
108
acf0174b
JJ
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
111 tree cancel_label;
112
6e6cf7b0
JJ
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
116
953ff289
DN
117 /* Nesting depth of this context. Used to beautify error messages re
118 invalid gotos. The outermost ctx is depth 1, with depth 0 being
119 reserved for the main body of the function. */
120 int depth;
121
953ff289
DN
122 /* True if this parallel directive is nested within another. */
123 bool is_nested;
acf0174b
JJ
124
125 /* True if this construct can be cancelled. */
126 bool cancellable;
a79683d5 127};
953ff289 128
953ff289 129static splay_tree all_contexts;
a68ab351 130static int taskreg_nesting_level;
acf0174b 131static int target_nesting_level;
a68ab351 132static bitmap task_shared_vars;
5771c391 133static vec<omp_context *> taskreg_contexts;
953ff289 134
26127932 135static void scan_omp (gimple_seq *, omp_context *);
726a989a
RB
136static tree scan_omp_1_op (tree *, int *, void *);
137
138#define WALK_SUBSTMTS \
139 case GIMPLE_BIND: \
140 case GIMPLE_TRY: \
141 case GIMPLE_CATCH: \
142 case GIMPLE_EH_FILTER: \
0a35513e 143 case GIMPLE_TRANSACTION: \
726a989a
RB
144 /* The sub-statements for these should be walked. */ \
145 *handled_ops_p = false; \
146 break;
147
e4834818
NS
148/* Return true if CTX corresponds to an oacc parallel region. */
149
150static bool
151is_oacc_parallel (omp_context *ctx)
152{
153 enum gimple_code outer_type = gimple_code (ctx->stmt);
154 return ((outer_type == GIMPLE_OMP_TARGET)
155 && (gimple_omp_target_kind (ctx->stmt)
156 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
157}
158
159/* Return true if CTX corresponds to an oacc kernels region. */
160
161static bool
162is_oacc_kernels (omp_context *ctx)
163{
164 enum gimple_code outer_type = gimple_code (ctx->stmt);
165 return ((outer_type == GIMPLE_OMP_TARGET)
166 && (gimple_omp_target_kind (ctx->stmt)
167 == GF_OMP_TARGET_KIND_OACC_KERNELS));
168}
169
d9a6bd32
JJ
170/* If DECL is the artificial dummy VAR_DECL created for non-static
171 data member privatization, return the underlying "this" parameter,
172 otherwise return NULL. */
173
174tree
175omp_member_access_dummy_var (tree decl)
176{
177 if (!VAR_P (decl)
178 || !DECL_ARTIFICIAL (decl)
179 || !DECL_IGNORED_P (decl)
180 || !DECL_HAS_VALUE_EXPR_P (decl)
181 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
182 return NULL_TREE;
183
184 tree v = DECL_VALUE_EXPR (decl);
185 if (TREE_CODE (v) != COMPONENT_REF)
186 return NULL_TREE;
187
188 while (1)
189 switch (TREE_CODE (v))
190 {
191 case COMPONENT_REF:
192 case MEM_REF:
193 case INDIRECT_REF:
194 CASE_CONVERT:
195 case POINTER_PLUS_EXPR:
196 v = TREE_OPERAND (v, 0);
197 continue;
198 case PARM_DECL:
199 if (DECL_CONTEXT (v) == current_function_decl
200 && DECL_ARTIFICIAL (v)
201 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
202 return v;
203 return NULL_TREE;
204 default:
205 return NULL_TREE;
206 }
207}
208
209/* Helper for unshare_and_remap, called through walk_tree. */
210
211static tree
212unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
213{
214 tree *pair = (tree *) data;
215 if (*tp == pair[0])
216 {
217 *tp = unshare_expr (pair[1]);
218 *walk_subtrees = 0;
219 }
220 else if (IS_TYPE_OR_DECL_P (*tp))
221 *walk_subtrees = 0;
222 return NULL_TREE;
223}
224
225/* Return unshare_expr (X) with all occurrences of FROM
226 replaced with TO. */
227
228static tree
229unshare_and_remap (tree x, tree from, tree to)
230{
231 tree pair[2] = { from, to };
232 x = unshare_expr (x);
233 walk_tree (&x, unshare_and_remap_1, pair, NULL);
234 return x;
235}
236
726a989a
RB
237/* Convenience function for calling scan_omp_1_op on tree operands. */
238
239static inline tree
240scan_omp_op (tree *tp, omp_context *ctx)
241{
242 struct walk_stmt_info wi;
243
244 memset (&wi, 0, sizeof (wi));
245 wi.info = ctx;
246 wi.want_locations = true;
247
248 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
249}
250
355a7673 251static void lower_omp (gimple_seq *, omp_context *);
8ca5b2a2
JJ
252static tree lookup_decl_in_outer_ctx (tree, omp_context *);
253static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
953ff289 254
953ff289
DN
255/* Return true if CTX is for an omp parallel. */
256
257static inline bool
258is_parallel_ctx (omp_context *ctx)
259{
726a989a 260 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
953ff289
DN
261}
262
50674e96 263
a68ab351
JJ
264/* Return true if CTX is for an omp task. */
265
266static inline bool
267is_task_ctx (omp_context *ctx)
268{
726a989a 269 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
a68ab351
JJ
270}
271
272
d9a6bd32
JJ
273/* Return true if CTX is for an omp taskloop. */
274
275static inline bool
276is_taskloop_ctx (omp_context *ctx)
277{
278 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
279 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
280}
281
282
a68ab351
JJ
283/* Return true if CTX is for an omp parallel or omp task. */
284
285static inline bool
286is_taskreg_ctx (omp_context *ctx)
287{
d9a6bd32 288 return is_parallel_ctx (ctx) || is_task_ctx (ctx);
a68ab351
JJ
289}
290
953ff289
DN
291/* Return true if EXPR is variable sized. */
292
293static inline bool
22ea9ec0 294is_variable_sized (const_tree expr)
953ff289
DN
295{
296 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
297}
298
41dbbb37 299/* Lookup variables. The "maybe" form
953ff289
DN
300 allows for the variable form to not have been entered, otherwise we
301 assert that the variable must have been entered. */
302
303static inline tree
304lookup_decl (tree var, omp_context *ctx)
305{
b787e7a2 306 tree *n = ctx->cb.decl_map->get (var);
6be42dd4 307 return *n;
953ff289
DN
308}
309
310static inline tree
7c8f7639 311maybe_lookup_decl (const_tree var, omp_context *ctx)
953ff289 312{
b787e7a2 313 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
6be42dd4 314 return n ? *n : NULL_TREE;
953ff289
DN
315}
316
317static inline tree
318lookup_field (tree var, omp_context *ctx)
319{
320 splay_tree_node n;
321 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
322 return (tree) n->value;
323}
324
a68ab351 325static inline tree
d9a6bd32 326lookup_sfield (splay_tree_key key, omp_context *ctx)
a68ab351
JJ
327{
328 splay_tree_node n;
329 n = splay_tree_lookup (ctx->sfield_map
d9a6bd32 330 ? ctx->sfield_map : ctx->field_map, key);
a68ab351
JJ
331 return (tree) n->value;
332}
333
953ff289 334static inline tree
d9a6bd32
JJ
335lookup_sfield (tree var, omp_context *ctx)
336{
337 return lookup_sfield ((splay_tree_key) var, ctx);
338}
339
340static inline tree
341maybe_lookup_field (splay_tree_key key, omp_context *ctx)
953ff289
DN
342{
343 splay_tree_node n;
d9a6bd32 344 n = splay_tree_lookup (ctx->field_map, key);
953ff289
DN
345 return n ? (tree) n->value : NULL_TREE;
346}
347
d9a6bd32
JJ
348static inline tree
349maybe_lookup_field (tree var, omp_context *ctx)
350{
351 return maybe_lookup_field ((splay_tree_key) var, ctx);
352}
353
7c8f7639
JJ
354/* Return true if DECL should be copied by pointer. SHARED_CTX is
355 the parallel context if DECL is to be shared. */
953ff289
DN
356
357static bool
a68ab351 358use_pointer_for_field (tree decl, omp_context *shared_ctx)
953ff289 359{
9dc5773f
JJ
360 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
361 || TYPE_ATOMIC (TREE_TYPE (decl)))
953ff289
DN
362 return true;
363
6fc0bb99 364 /* We can only use copy-in/copy-out semantics for shared variables
953ff289 365 when we know the value is not accessible from an outer scope. */
7c8f7639 366 if (shared_ctx)
953ff289 367 {
41dbbb37
TS
368 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
369
953ff289
DN
370 /* ??? Trivially accessible from anywhere. But why would we even
371 be passing an address in this case? Should we simply assert
372 this to be false, or should we have a cleanup pass that removes
373 these from the list of mappings? */
374 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
375 return true;
376
377 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
378 without analyzing the expression whether or not its location
379 is accessible to anyone else. In the case of nested parallel
380 regions it certainly may be. */
077b0dfb 381 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
953ff289
DN
382 return true;
383
384 /* Do not use copy-in/copy-out for variables that have their
385 address taken. */
386 if (TREE_ADDRESSABLE (decl))
387 return true;
7c8f7639 388
6d840d99
JJ
389 /* lower_send_shared_vars only uses copy-in, but not copy-out
390 for these. */
391 if (TREE_READONLY (decl)
392 || ((TREE_CODE (decl) == RESULT_DECL
393 || TREE_CODE (decl) == PARM_DECL)
394 && DECL_BY_REFERENCE (decl)))
395 return false;
396
7c8f7639
JJ
397 /* Disallow copy-in/out in nested parallel if
398 decl is shared in outer parallel, otherwise
399 each thread could store the shared variable
400 in its own copy-in location, making the
401 variable no longer really shared. */
6d840d99 402 if (shared_ctx->is_nested)
7c8f7639
JJ
403 {
404 omp_context *up;
405
406 for (up = shared_ctx->outer; up; up = up->outer)
d9c194cb 407 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
7c8f7639
JJ
408 break;
409
d9c194cb 410 if (up)
7c8f7639
JJ
411 {
412 tree c;
413
726a989a 414 for (c = gimple_omp_taskreg_clauses (up->stmt);
7c8f7639
JJ
415 c; c = OMP_CLAUSE_CHAIN (c))
416 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
417 && OMP_CLAUSE_DECL (c) == decl)
418 break;
419
420 if (c)
25142650 421 goto maybe_mark_addressable_and_ret;
7c8f7639
JJ
422 }
423 }
a68ab351 424
6d840d99 425 /* For tasks avoid using copy-in/out. As tasks can be
a68ab351
JJ
426 deferred or executed in different thread, when GOMP_task
427 returns, the task hasn't necessarily terminated. */
6d840d99 428 if (is_task_ctx (shared_ctx))
a68ab351 429 {
25142650
JJ
430 tree outer;
431 maybe_mark_addressable_and_ret:
432 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
d9a6bd32 433 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
a68ab351
JJ
434 {
435 /* Taking address of OUTER in lower_send_shared_vars
436 might need regimplification of everything that uses the
437 variable. */
438 if (!task_shared_vars)
439 task_shared_vars = BITMAP_ALLOC (NULL);
440 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
441 TREE_ADDRESSABLE (outer) = 1;
442 }
443 return true;
444 }
953ff289
DN
445 }
446
447 return false;
448}
449
917948d3
ZD
450/* Construct a new automatic decl similar to VAR. */
451
452static tree
453omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
454{
455 tree copy = copy_var_decl (var, name, type);
456
457 DECL_CONTEXT (copy) = current_function_decl;
910ad8de 458 DECL_CHAIN (copy) = ctx->block_vars;
d9a6bd32
JJ
459 /* If VAR is listed in task_shared_vars, it means it wasn't
460 originally addressable and is just because task needs to take
461 it's address. But we don't need to take address of privatizations
462 from that var. */
463 if (TREE_ADDRESSABLE (var)
464 && task_shared_vars
465 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
466 TREE_ADDRESSABLE (copy) = 0;
953ff289
DN
467 ctx->block_vars = copy;
468
469 return copy;
470}
471
472static tree
473omp_copy_decl_1 (tree var, omp_context *ctx)
474{
475 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
476}
477
a9a58711
JJ
478/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
479 as appropriate. */
480static tree
481omp_build_component_ref (tree obj, tree field)
482{
483 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
484 if (TREE_THIS_VOLATILE (field))
485 TREE_THIS_VOLATILE (ret) |= 1;
486 if (TREE_READONLY (field))
487 TREE_READONLY (ret) |= 1;
488 return ret;
489}
490
953ff289
DN
491/* Build tree nodes to access the field for VAR on the receiver side. */
492
493static tree
494build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
495{
496 tree x, field = lookup_field (var, ctx);
497
498 /* If the receiver record type was remapped in the child function,
499 remap the field into the new record type. */
500 x = maybe_lookup_field (field, ctx);
501 if (x != NULL)
502 field = x;
503
70f34814 504 x = build_simple_mem_ref (ctx->receiver_decl);
f1b9b669 505 TREE_THIS_NOTRAP (x) = 1;
a9a58711 506 x = omp_build_component_ref (x, field);
953ff289 507 if (by_ref)
096b85f4
TV
508 {
509 x = build_simple_mem_ref (x);
510 TREE_THIS_NOTRAP (x) = 1;
511 }
953ff289
DN
512
513 return x;
514}
515
516/* Build tree nodes to access VAR in the scope outer to CTX. In the case
517 of a parallel, this is a component reference; for workshare constructs
518 this is some variable. */
519
520static tree
c39dad64
JJ
521build_outer_var_ref (tree var, omp_context *ctx,
522 enum omp_clause_code code = OMP_CLAUSE_ERROR)
953ff289
DN
523{
524 tree x;
525
8ca5b2a2 526 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
953ff289
DN
527 x = var;
528 else if (is_variable_sized (var))
529 {
530 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
c39dad64 531 x = build_outer_var_ref (x, ctx, code);
70f34814 532 x = build_simple_mem_ref (x);
953ff289 533 }
a68ab351 534 else if (is_taskreg_ctx (ctx))
953ff289 535 {
7c8f7639 536 bool by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
537 x = build_receiver_ref (var, by_ref, ctx);
538 }
c39dad64
JJ
539 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
540 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
541 || (code == OMP_CLAUSE_PRIVATE
542 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
543 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
544 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
74bf76ed 545 {
c39dad64
JJ
546 /* #pragma omp simd isn't a worksharing construct, and can reference
547 even private vars in its linear etc. clauses.
548 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
549 to private vars in all worksharing constructs. */
74bf76ed
JJ
550 x = NULL_TREE;
551 if (ctx->outer && is_taskreg_ctx (ctx))
552 x = lookup_decl (var, ctx->outer);
553 else if (ctx->outer)
f3b331d1 554 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
74bf76ed
JJ
555 if (x == NULL_TREE)
556 x = var;
557 }
c39dad64 558 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
d9a6bd32
JJ
559 {
560 gcc_assert (ctx->outer);
561 splay_tree_node n
562 = splay_tree_lookup (ctx->outer->field_map,
563 (splay_tree_key) &DECL_UID (var));
564 if (n == NULL)
565 {
566 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
567 x = var;
568 else
569 x = lookup_decl (var, ctx->outer);
570 }
571 else
572 {
573 tree field = (tree) n->value;
574 /* If the receiver record type was remapped in the child function,
575 remap the field into the new record type. */
576 x = maybe_lookup_field (field, ctx->outer);
577 if (x != NULL)
578 field = x;
579
580 x = build_simple_mem_ref (ctx->outer->receiver_decl);
581 x = omp_build_component_ref (x, field);
582 if (use_pointer_for_field (var, ctx->outer))
583 x = build_simple_mem_ref (x);
584 }
585 }
953ff289 586 else if (ctx->outer)
b2b40051
MJ
587 {
588 omp_context *outer = ctx->outer;
589 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
590 {
591 outer = outer->outer;
592 gcc_assert (outer
593 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
594 }
c39dad64 595 x = lookup_decl (var, outer);
b2b40051 596 }
629b3d75 597 else if (omp_is_reference (var))
eeb1d9e0
JJ
598 /* This can happen with orphaned constructs. If var is reference, it is
599 possible it is shared and as such valid. */
600 x = var;
d9a6bd32
JJ
601 else if (omp_member_access_dummy_var (var))
602 x = var;
953ff289
DN
603 else
604 gcc_unreachable ();
605
d9a6bd32
JJ
606 if (x == var)
607 {
608 tree t = omp_member_access_dummy_var (var);
609 if (t)
610 {
611 x = DECL_VALUE_EXPR (var);
612 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
613 if (o != t)
614 x = unshare_and_remap (x, t, o);
615 else
616 x = unshare_expr (x);
617 }
618 }
619
629b3d75 620 if (omp_is_reference (var))
70f34814 621 x = build_simple_mem_ref (x);
953ff289
DN
622
623 return x;
624}
625
626/* Build tree nodes to access the field for VAR on the sender side. */
627
628static tree
d9a6bd32 629build_sender_ref (splay_tree_key key, omp_context *ctx)
953ff289 630{
d9a6bd32 631 tree field = lookup_sfield (key, ctx);
a9a58711 632 return omp_build_component_ref (ctx->sender_decl, field);
953ff289
DN
633}
634
d9a6bd32
JJ
635static tree
636build_sender_ref (tree var, omp_context *ctx)
637{
638 return build_sender_ref ((splay_tree_key) var, ctx);
639}
640
86938de6
TV
641/* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
642 BASE_POINTERS_RESTRICT, declare the field with restrict. */
953ff289
DN
643
644static void
829c6349 645install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
953ff289 646{
a68ab351 647 tree field, type, sfield = NULL_TREE;
d9a6bd32 648 splay_tree_key key = (splay_tree_key) var;
953ff289 649
d9a6bd32
JJ
650 if ((mask & 8) != 0)
651 {
652 key = (splay_tree_key) &DECL_UID (var);
653 gcc_checking_assert (key != (splay_tree_key) var);
654 }
a68ab351 655 gcc_assert ((mask & 1) == 0
d9a6bd32 656 || !splay_tree_lookup (ctx->field_map, key));
a68ab351 657 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
d9a6bd32 658 || !splay_tree_lookup (ctx->sfield_map, key));
41dbbb37
TS
659 gcc_assert ((mask & 3) == 3
660 || !is_gimple_omp_oacc (ctx->stmt));
953ff289
DN
661
662 type = TREE_TYPE (var);
8498c16b
TV
663 /* Prevent redeclaring the var in the split-off function with a restrict
664 pointer type. Note that we only clear type itself, restrict qualifiers in
665 the pointed-to type will be ignored by points-to analysis. */
666 if (POINTER_TYPE_P (type)
667 && TYPE_RESTRICT (type))
668 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
669
acf0174b
JJ
670 if (mask & 4)
671 {
672 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
673 type = build_pointer_type (build_pointer_type (type));
674 }
675 else if (by_ref)
829c6349 676 type = build_pointer_type (type);
629b3d75 677 else if ((mask & 3) == 1 && omp_is_reference (var))
a68ab351 678 type = TREE_TYPE (type);
953ff289 679
c2255bc4
AH
680 field = build_decl (DECL_SOURCE_LOCATION (var),
681 FIELD_DECL, DECL_NAME (var), type);
953ff289
DN
682
683 /* Remember what variable this field was created for. This does have a
684 side effect of making dwarf2out ignore this member, so for helpful
685 debugging we clear it later in delete_omp_context. */
686 DECL_ABSTRACT_ORIGIN (field) = var;
a68ab351
JJ
687 if (type == TREE_TYPE (var))
688 {
fe37c7af 689 SET_DECL_ALIGN (field, DECL_ALIGN (var));
a68ab351
JJ
690 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
691 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
692 }
693 else
fe37c7af 694 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
953ff289 695
a68ab351
JJ
696 if ((mask & 3) == 3)
697 {
698 insert_field_into_struct (ctx->record_type, field);
699 if (ctx->srecord_type)
700 {
c2255bc4
AH
701 sfield = build_decl (DECL_SOURCE_LOCATION (var),
702 FIELD_DECL, DECL_NAME (var), type);
a68ab351 703 DECL_ABSTRACT_ORIGIN (sfield) = var;
fe37c7af 704 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
a68ab351
JJ
705 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
706 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
707 insert_field_into_struct (ctx->srecord_type, sfield);
708 }
709 }
710 else
711 {
712 if (ctx->srecord_type == NULL_TREE)
713 {
714 tree t;
715
716 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
717 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
718 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
719 {
d9a6bd32 720 sfield = build_decl (DECL_SOURCE_LOCATION (t),
c2255bc4 721 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
a68ab351
JJ
722 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
723 insert_field_into_struct (ctx->srecord_type, sfield);
724 splay_tree_insert (ctx->sfield_map,
725 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
726 (splay_tree_value) sfield);
727 }
728 }
729 sfield = field;
730 insert_field_into_struct ((mask & 1) ? ctx->record_type
731 : ctx->srecord_type, field);
732 }
953ff289 733
a68ab351 734 if (mask & 1)
d9a6bd32 735 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
a68ab351 736 if ((mask & 2) && ctx->sfield_map)
d9a6bd32 737 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
953ff289
DN
738}
739
740static tree
741install_var_local (tree var, omp_context *ctx)
742{
743 tree new_var = omp_copy_decl_1 (var, ctx);
744 insert_decl_map (&ctx->cb, var, new_var);
745 return new_var;
746}
747
748/* Adjust the replacement for DECL in CTX for the new context. This means
749 copying the DECL_VALUE_EXPR, and fixing up the type. */
750
751static void
752fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
753{
754 tree new_decl, size;
755
756 new_decl = lookup_decl (decl, ctx);
757
758 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
759
760 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
761 && DECL_HAS_VALUE_EXPR_P (decl))
762 {
763 tree ve = DECL_VALUE_EXPR (decl);
726a989a 764 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
953ff289
DN
765 SET_DECL_VALUE_EXPR (new_decl, ve);
766 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
767 }
768
769 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
770 {
771 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
772 if (size == error_mark_node)
773 size = TYPE_SIZE (TREE_TYPE (new_decl));
774 DECL_SIZE (new_decl) = size;
775
776 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
777 if (size == error_mark_node)
778 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
779 DECL_SIZE_UNIT (new_decl) = size;
780 }
781}
782
783/* The callback for remap_decl. Search all containing contexts for a
784 mapping of the variable; this avoids having to duplicate the splay
785 tree ahead of time. We know a mapping doesn't already exist in the
786 given context. Create new mappings to implement default semantics. */
787
788static tree
789omp_copy_decl (tree var, copy_body_data *cb)
790{
791 omp_context *ctx = (omp_context *) cb;
792 tree new_var;
793
953ff289
DN
794 if (TREE_CODE (var) == LABEL_DECL)
795 {
50aa16c3
JJ
796 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
797 return var;
c2255bc4 798 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
50674e96 799 DECL_CONTEXT (new_var) = current_function_decl;
953ff289
DN
800 insert_decl_map (&ctx->cb, var, new_var);
801 return new_var;
802 }
803
a68ab351 804 while (!is_taskreg_ctx (ctx))
953ff289
DN
805 {
806 ctx = ctx->outer;
807 if (ctx == NULL)
808 return var;
809 new_var = maybe_lookup_decl (var, ctx);
810 if (new_var)
811 return new_var;
812 }
813
8ca5b2a2
JJ
814 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
815 return var;
816
953ff289
DN
817 return error_mark_node;
818}
819
629b3d75 820/* Create a new context, with OUTER_CTX being the surrounding context. */
50674e96 821
629b3d75
MJ
822static omp_context *
823new_omp_context (gimple *stmt, omp_context *outer_ctx)
50674e96 824{
629b3d75 825 omp_context *ctx = XCNEW (omp_context);
50674e96 826
629b3d75
MJ
827 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
828 (splay_tree_value) ctx);
829 ctx->stmt = stmt;
50674e96 830
629b3d75 831 if (outer_ctx)
777f7f9a 832 {
629b3d75
MJ
833 ctx->outer = outer_ctx;
834 ctx->cb = outer_ctx->cb;
835 ctx->cb.block = NULL;
836 ctx->depth = outer_ctx->depth + 1;
953ff289
DN
837 }
838 else
839 {
840 ctx->cb.src_fn = current_function_decl;
841 ctx->cb.dst_fn = current_function_decl;
d52f5295 842 ctx->cb.src_node = cgraph_node::get (current_function_decl);
fe660d7b 843 gcc_checking_assert (ctx->cb.src_node);
953ff289
DN
844 ctx->cb.dst_node = ctx->cb.src_node;
845 ctx->cb.src_cfun = cfun;
846 ctx->cb.copy_decl = omp_copy_decl;
1d65f45c 847 ctx->cb.eh_lp_nr = 0;
953ff289
DN
848 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
849 ctx->depth = 1;
850 }
851
b787e7a2 852 ctx->cb.decl_map = new hash_map<tree, tree>;
953ff289
DN
853
854 return ctx;
855}
856
726a989a 857static gimple_seq maybe_catch_exception (gimple_seq);
2368a460
JJ
858
859/* Finalize task copyfn. */
860
861static void
538dd0b7 862finalize_task_copyfn (gomp_task *task_stmt)
2368a460
JJ
863{
864 struct function *child_cfun;
af16bc76 865 tree child_fn;
355a7673 866 gimple_seq seq = NULL, new_seq;
538dd0b7 867 gbind *bind;
2368a460 868
726a989a 869 child_fn = gimple_omp_task_copy_fn (task_stmt);
2368a460
JJ
870 if (child_fn == NULL_TREE)
871 return;
872
873 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
d7ed20db 874 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
2368a460 875
2368a460 876 push_cfun (child_cfun);
3ad065ef 877 bind = gimplify_body (child_fn, false);
726a989a
RB
878 gimple_seq_add_stmt (&seq, bind);
879 new_seq = maybe_catch_exception (seq);
880 if (new_seq != seq)
881 {
882 bind = gimple_build_bind (NULL, new_seq, NULL);
355a7673 883 seq = NULL;
726a989a
RB
884 gimple_seq_add_stmt (&seq, bind);
885 }
886 gimple_set_body (child_fn, seq);
2368a460 887 pop_cfun ();
2368a460 888
d7ed20db 889 /* Inform the callgraph about the new function. */
edafad14
TV
890 cgraph_node *node = cgraph_node::get_create (child_fn);
891 node->parallelized_function = 1;
d52f5295 892 cgraph_node::add_new_function (child_fn, false);
2368a460
JJ
893}
894
953ff289
DN
895/* Destroy a omp_context data structures. Called through the splay tree
896 value delete callback. */
897
898static void
899delete_omp_context (splay_tree_value value)
900{
901 omp_context *ctx = (omp_context *) value;
902
b787e7a2 903 delete ctx->cb.decl_map;
953ff289
DN
904
905 if (ctx->field_map)
906 splay_tree_delete (ctx->field_map);
a68ab351
JJ
907 if (ctx->sfield_map)
908 splay_tree_delete (ctx->sfield_map);
953ff289
DN
909
910 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
911 it produces corrupt debug information. */
912 if (ctx->record_type)
913 {
914 tree t;
910ad8de 915 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
953ff289
DN
916 DECL_ABSTRACT_ORIGIN (t) = NULL;
917 }
a68ab351
JJ
918 if (ctx->srecord_type)
919 {
920 tree t;
910ad8de 921 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
a68ab351
JJ
922 DECL_ABSTRACT_ORIGIN (t) = NULL;
923 }
953ff289 924
2368a460 925 if (is_task_ctx (ctx))
538dd0b7 926 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
2368a460 927
953ff289
DN
928 XDELETE (ctx);
929}
930
931/* Fix up RECEIVER_DECL with a type that has been remapped to the child
932 context. */
933
934static void
935fixup_child_record_type (omp_context *ctx)
936{
937 tree f, type = ctx->record_type;
938
b2b40051
MJ
939 if (!ctx->receiver_decl)
940 return;
953ff289
DN
941 /* ??? It isn't sufficient to just call remap_type here, because
942 variably_modified_type_p doesn't work the way we expect for
943 record types. Testing each field for whether it needs remapping
944 and creating a new record by hand works, however. */
910ad8de 945 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
953ff289
DN
946 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
947 break;
948 if (f)
949 {
950 tree name, new_fields = NULL;
951
952 type = lang_hooks.types.make_type (RECORD_TYPE);
953 name = DECL_NAME (TYPE_NAME (ctx->record_type));
c2255bc4
AH
954 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
955 TYPE_DECL, name, type);
953ff289
DN
956 TYPE_NAME (type) = name;
957
910ad8de 958 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
953ff289
DN
959 {
960 tree new_f = copy_node (f);
961 DECL_CONTEXT (new_f) = type;
962 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
910ad8de 963 DECL_CHAIN (new_f) = new_fields;
726a989a
RB
964 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
965 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
966 &ctx->cb, NULL);
967 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
968 &ctx->cb, NULL);
953ff289
DN
969 new_fields = new_f;
970
971 /* Arrange to be able to look up the receiver field
972 given the sender field. */
973 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
974 (splay_tree_value) new_f);
975 }
976 TYPE_FIELDS (type) = nreverse (new_fields);
977 layout_type (type);
978 }
979
d9a6bd32
JJ
980 /* In a target region we never modify any of the pointers in *.omp_data_i,
981 so attempt to help the optimizers. */
982 if (is_gimple_omp_offloaded (ctx->stmt))
983 type = build_qualified_type (type, TYPE_QUAL_CONST);
984
a2a2fe4b
RB
985 TREE_TYPE (ctx->receiver_decl)
986 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
953ff289
DN
987}
988
989/* Instantiate decls as necessary in CTX to satisfy the data sharing
829c6349 990 specified by CLAUSES. */
953ff289
DN
991
992static void
829c6349 993scan_sharing_clauses (tree clauses, omp_context *ctx)
953ff289
DN
994{
995 tree c, decl;
996 bool scan_array_reductions = false;
997
998 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
999 {
1000 bool by_ref;
1001
aaf46ef9 1002 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
1003 {
1004 case OMP_CLAUSE_PRIVATE:
1005 decl = OMP_CLAUSE_DECL (c);
a68ab351
JJ
1006 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1007 goto do_private;
1008 else if (!is_variable_sized (decl))
953ff289
DN
1009 install_var_local (decl, ctx);
1010 break;
1011
1012 case OMP_CLAUSE_SHARED:
9cf32741 1013 decl = OMP_CLAUSE_DECL (c);
acf0174b
JJ
1014 /* Ignore shared directives in teams construct. */
1015 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
9cf32741
JJ
1016 {
1017 /* Global variables don't need to be copied,
1018 the receiver side will use them directly. */
1019 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1020 if (is_global_var (odecl))
1021 break;
1022 insert_decl_map (&ctx->cb, decl, odecl);
1023 break;
1024 }
a68ab351 1025 gcc_assert (is_taskreg_ctx (ctx));
5da250fc
JJ
1026 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1027 || !is_variable_sized (decl));
8ca5b2a2
JJ
1028 /* Global variables don't need to be copied,
1029 the receiver side will use them directly. */
1030 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1031 break;
d9a6bd32 1032 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1a80d6b8
JJ
1033 {
1034 use_pointer_for_field (decl, ctx);
1035 break;
1036 }
1037 by_ref = use_pointer_for_field (decl, NULL);
1038 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
953ff289
DN
1039 || TREE_ADDRESSABLE (decl)
1040 || by_ref
629b3d75 1041 || omp_is_reference (decl))
953ff289 1042 {
1a80d6b8 1043 by_ref = use_pointer_for_field (decl, ctx);
a68ab351 1044 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1045 install_var_local (decl, ctx);
1046 break;
1047 }
1048 /* We don't need to copy const scalar vars back. */
aaf46ef9 1049 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
953ff289
DN
1050 goto do_private;
1051
d9a6bd32
JJ
1052 case OMP_CLAUSE_REDUCTION:
1053 decl = OMP_CLAUSE_DECL (c);
1054 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1055 && TREE_CODE (decl) == MEM_REF)
1056 {
1057 tree t = TREE_OPERAND (decl, 0);
e01d41e5
JJ
1058 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1059 t = TREE_OPERAND (t, 0);
d9a6bd32
JJ
1060 if (TREE_CODE (t) == INDIRECT_REF
1061 || TREE_CODE (t) == ADDR_EXPR)
1062 t = TREE_OPERAND (t, 0);
1063 install_var_local (t, ctx);
1064 if (is_taskreg_ctx (ctx)
1065 && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1066 && !is_variable_sized (t))
1067 {
1068 by_ref = use_pointer_for_field (t, ctx);
1069 install_var_field (t, by_ref, 3, ctx);
1070 }
1071 break;
1072 }
1073 goto do_private;
1074
953ff289
DN
1075 case OMP_CLAUSE_LASTPRIVATE:
1076 /* Let the corresponding firstprivate clause create
1077 the variable. */
1078 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1079 break;
1080 /* FALLTHRU */
1081
1082 case OMP_CLAUSE_FIRSTPRIVATE:
74bf76ed 1083 case OMP_CLAUSE_LINEAR:
953ff289
DN
1084 decl = OMP_CLAUSE_DECL (c);
1085 do_private:
d9a6bd32
JJ
1086 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1087 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1088 && is_gimple_omp_offloaded (ctx->stmt))
1089 {
1090 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
629b3d75 1091 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
d9a6bd32
JJ
1092 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1093 install_var_field (decl, true, 3, ctx);
1094 else
1095 install_var_field (decl, false, 3, ctx);
1096 }
953ff289 1097 if (is_variable_sized (decl))
953ff289 1098 {
a68ab351
JJ
1099 if (is_task_ctx (ctx))
1100 install_var_field (decl, false, 1, ctx);
1101 break;
1102 }
1103 else if (is_taskreg_ctx (ctx))
1104 {
1105 bool global
1106 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
7c8f7639 1107 by_ref = use_pointer_for_field (decl, NULL);
a68ab351
JJ
1108
1109 if (is_task_ctx (ctx)
629b3d75 1110 && (global || by_ref || omp_is_reference (decl)))
a68ab351
JJ
1111 {
1112 install_var_field (decl, false, 1, ctx);
1113 if (!global)
1114 install_var_field (decl, by_ref, 2, ctx);
1115 }
1116 else if (!global)
1117 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1118 }
1119 install_var_local (decl, ctx);
1120 break;
1121
d9a6bd32
JJ
1122 case OMP_CLAUSE_USE_DEVICE_PTR:
1123 decl = OMP_CLAUSE_DECL (c);
1124 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1125 install_var_field (decl, true, 3, ctx);
1126 else
1127 install_var_field (decl, false, 3, ctx);
1128 if (DECL_SIZE (decl)
1129 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1130 {
1131 tree decl2 = DECL_VALUE_EXPR (decl);
1132 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1133 decl2 = TREE_OPERAND (decl2, 0);
1134 gcc_assert (DECL_P (decl2));
1135 install_var_local (decl2, ctx);
1136 }
1137 install_var_local (decl, ctx);
1138 break;
1139
1140 case OMP_CLAUSE_IS_DEVICE_PTR:
1141 decl = OMP_CLAUSE_DECL (c);
1142 goto do_private;
1143
acf0174b 1144 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32 1145 gcc_assert (is_taskreg_ctx (ctx));
acf0174b
JJ
1146 decl = OMP_CLAUSE_DECL (c);
1147 install_var_field (decl, false, 3, ctx);
1148 install_var_local (decl, ctx);
1149 break;
1150
953ff289 1151 case OMP_CLAUSE_COPYPRIVATE:
953ff289
DN
1152 case OMP_CLAUSE_COPYIN:
1153 decl = OMP_CLAUSE_DECL (c);
7c8f7639 1154 by_ref = use_pointer_for_field (decl, NULL);
a68ab351 1155 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1156 break;
1157
20906c66 1158 case OMP_CLAUSE_FINAL:
953ff289
DN
1159 case OMP_CLAUSE_IF:
1160 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
1161 case OMP_CLAUSE_NUM_TEAMS:
1162 case OMP_CLAUSE_THREAD_LIMIT:
1163 case OMP_CLAUSE_DEVICE:
953ff289 1164 case OMP_CLAUSE_SCHEDULE:
acf0174b
JJ
1165 case OMP_CLAUSE_DIST_SCHEDULE:
1166 case OMP_CLAUSE_DEPEND:
d9a6bd32
JJ
1167 case OMP_CLAUSE_PRIORITY:
1168 case OMP_CLAUSE_GRAINSIZE:
1169 case OMP_CLAUSE_NUM_TASKS:
41dbbb37
TS
1170 case OMP_CLAUSE_NUM_GANGS:
1171 case OMP_CLAUSE_NUM_WORKERS:
1172 case OMP_CLAUSE_VECTOR_LENGTH:
953ff289 1173 if (ctx->outer)
726a989a 1174 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
953ff289
DN
1175 break;
1176
acf0174b
JJ
1177 case OMP_CLAUSE_TO:
1178 case OMP_CLAUSE_FROM:
1179 case OMP_CLAUSE_MAP:
1180 if (ctx->outer)
1181 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1182 decl = OMP_CLAUSE_DECL (c);
1183 /* Global variables with "omp declare target" attribute
1184 don't need to be copied, the receiver side will use them
4a38b02b 1185 directly. However, global variables with "omp declare target link"
5883c5cc 1186 attribute need to be copied. Or when ALWAYS modifier is used. */
acf0174b
JJ
1187 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1188 && DECL_P (decl)
e01d41e5
JJ
1189 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1190 && (OMP_CLAUSE_MAP_KIND (c)
1191 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32 1192 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
5883c5cc
JJ
1193 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1194 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1195 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
acf0174b 1196 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
4a38b02b
IV
1197 && varpool_node::get_create (decl)->offloadable
1198 && !lookup_attribute ("omp declare target link",
1199 DECL_ATTRIBUTES (decl)))
acf0174b
JJ
1200 break;
1201 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
41dbbb37 1202 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
acf0174b 1203 {
41dbbb37
TS
1204 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1205 not offloaded; there is nothing to map for those. */
1206 if (!is_gimple_omp_offloaded (ctx->stmt)
b8910447
JJ
1207 && !POINTER_TYPE_P (TREE_TYPE (decl))
1208 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
acf0174b
JJ
1209 break;
1210 }
d9a6bd32 1211 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
e01d41e5
JJ
1212 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1213 || (OMP_CLAUSE_MAP_KIND (c)
1214 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
d9a6bd32
JJ
1215 {
1216 if (TREE_CODE (decl) == COMPONENT_REF
1217 || (TREE_CODE (decl) == INDIRECT_REF
1218 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1219 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1220 == REFERENCE_TYPE)))
1221 break;
1222 if (DECL_SIZE (decl)
1223 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1224 {
1225 tree decl2 = DECL_VALUE_EXPR (decl);
1226 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1227 decl2 = TREE_OPERAND (decl2, 0);
1228 gcc_assert (DECL_P (decl2));
1229 install_var_local (decl2, ctx);
1230 }
1231 install_var_local (decl, ctx);
1232 break;
1233 }
acf0174b
JJ
1234 if (DECL_P (decl))
1235 {
1236 if (DECL_SIZE (decl)
1237 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1238 {
1239 tree decl2 = DECL_VALUE_EXPR (decl);
1240 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1241 decl2 = TREE_OPERAND (decl2, 0);
1242 gcc_assert (DECL_P (decl2));
e01d41e5 1243 install_var_field (decl2, true, 3, ctx);
acf0174b
JJ
1244 install_var_local (decl2, ctx);
1245 install_var_local (decl, ctx);
1246 }
1247 else
1248 {
1249 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
41dbbb37 1250 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
acf0174b
JJ
1251 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1252 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1253 install_var_field (decl, true, 7, ctx);
1254 else
829c6349 1255 install_var_field (decl, true, 3, ctx);
c42cfb5c
CP
1256 if (is_gimple_omp_offloaded (ctx->stmt)
1257 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
acf0174b
JJ
1258 install_var_local (decl, ctx);
1259 }
1260 }
1261 else
1262 {
1263 tree base = get_base_address (decl);
1264 tree nc = OMP_CLAUSE_CHAIN (c);
1265 if (DECL_P (base)
1266 && nc != NULL_TREE
1267 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1268 && OMP_CLAUSE_DECL (nc) == base
41dbbb37 1269 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
acf0174b
JJ
1270 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1271 {
1272 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1273 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1274 }
1275 else
1276 {
f014c653
JJ
1277 if (ctx->outer)
1278 {
1279 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1280 decl = OMP_CLAUSE_DECL (c);
1281 }
acf0174b
JJ
1282 gcc_assert (!splay_tree_lookup (ctx->field_map,
1283 (splay_tree_key) decl));
1284 tree field
1285 = build_decl (OMP_CLAUSE_LOCATION (c),
1286 FIELD_DECL, NULL_TREE, ptr_type_node);
fe37c7af 1287 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
acf0174b
JJ
1288 insert_field_into_struct (ctx->record_type, field);
1289 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1290 (splay_tree_value) field);
1291 }
1292 }
1293 break;
1294
b2b40051
MJ
1295 case OMP_CLAUSE__GRIDDIM_:
1296 if (ctx->outer)
1297 {
1298 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1299 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1300 }
1301 break;
1302
953ff289
DN
1303 case OMP_CLAUSE_NOWAIT:
1304 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
1305 case OMP_CLAUSE_COLLAPSE:
1306 case OMP_CLAUSE_UNTIED:
20906c66 1307 case OMP_CLAUSE_MERGEABLE:
acf0174b 1308 case OMP_CLAUSE_PROC_BIND:
74bf76ed 1309 case OMP_CLAUSE_SAFELEN:
d9a6bd32
JJ
1310 case OMP_CLAUSE_SIMDLEN:
1311 case OMP_CLAUSE_THREADS:
1312 case OMP_CLAUSE_SIMD:
1313 case OMP_CLAUSE_NOGROUP:
1314 case OMP_CLAUSE_DEFAULTMAP:
41dbbb37
TS
1315 case OMP_CLAUSE_ASYNC:
1316 case OMP_CLAUSE_WAIT:
1317 case OMP_CLAUSE_GANG:
1318 case OMP_CLAUSE_WORKER:
1319 case OMP_CLAUSE_VECTOR:
c5a64cfe
NS
1320 case OMP_CLAUSE_INDEPENDENT:
1321 case OMP_CLAUSE_AUTO:
1322 case OMP_CLAUSE_SEQ:
02889d23 1323 case OMP_CLAUSE_TILE:
6c7509bc 1324 case OMP_CLAUSE__SIMT_:
8a4674bb 1325 case OMP_CLAUSE_DEFAULT:
829c6349
CLT
1326 case OMP_CLAUSE_IF_PRESENT:
1327 case OMP_CLAUSE_FINALIZE:
953ff289
DN
1328 break;
1329
acf0174b
JJ
1330 case OMP_CLAUSE_ALIGNED:
1331 decl = OMP_CLAUSE_DECL (c);
1332 if (is_global_var (decl)
1333 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1334 install_var_local (decl, ctx);
1335 break;
1336
41dbbb37 1337 case OMP_CLAUSE__CACHE_:
953ff289
DN
1338 default:
1339 gcc_unreachable ();
1340 }
1341 }
1342
1343 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1344 {
aaf46ef9 1345 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
1346 {
1347 case OMP_CLAUSE_LASTPRIVATE:
1348 /* Let the corresponding firstprivate clause create
1349 the variable. */
726a989a 1350 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
a68ab351 1351 scan_array_reductions = true;
953ff289
DN
1352 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1353 break;
1354 /* FALLTHRU */
1355
953ff289 1356 case OMP_CLAUSE_FIRSTPRIVATE:
41dbbb37 1357 case OMP_CLAUSE_PRIVATE:
74bf76ed 1358 case OMP_CLAUSE_LINEAR:
d9a6bd32 1359 case OMP_CLAUSE_IS_DEVICE_PTR:
953ff289
DN
1360 decl = OMP_CLAUSE_DECL (c);
1361 if (is_variable_sized (decl))
d9a6bd32
JJ
1362 {
1363 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1364 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1365 && is_gimple_omp_offloaded (ctx->stmt))
1366 {
1367 tree decl2 = DECL_VALUE_EXPR (decl);
1368 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1369 decl2 = TREE_OPERAND (decl2, 0);
1370 gcc_assert (DECL_P (decl2));
1371 install_var_local (decl2, ctx);
1372 fixup_remapped_decl (decl2, ctx, false);
1373 }
1374 install_var_local (decl, ctx);
1375 }
953ff289 1376 fixup_remapped_decl (decl, ctx,
aaf46ef9 1377 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
953ff289 1378 && OMP_CLAUSE_PRIVATE_DEBUG (c));
d9a6bd32
JJ
1379 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1380 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
953ff289 1381 scan_array_reductions = true;
d9a6bd32
JJ
1382 break;
1383
1384 case OMP_CLAUSE_REDUCTION:
1385 decl = OMP_CLAUSE_DECL (c);
1386 if (TREE_CODE (decl) != MEM_REF)
1387 {
1388 if (is_variable_sized (decl))
1389 install_var_local (decl, ctx);
1390 fixup_remapped_decl (decl, ctx, false);
1391 }
1392 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
f7468577 1393 scan_array_reductions = true;
953ff289
DN
1394 break;
1395
1396 case OMP_CLAUSE_SHARED:
acf0174b
JJ
1397 /* Ignore shared directives in teams construct. */
1398 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1399 break;
953ff289 1400 decl = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
1401 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1402 break;
1403 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1404 {
1405 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1406 ctx->outer)))
1407 break;
1408 bool by_ref = use_pointer_for_field (decl, ctx);
1409 install_var_field (decl, by_ref, 11, ctx);
1410 break;
1411 }
1412 fixup_remapped_decl (decl, ctx, false);
953ff289
DN
1413 break;
1414
acf0174b 1415 case OMP_CLAUSE_MAP:
41dbbb37 1416 if (!is_gimple_omp_offloaded (ctx->stmt))
acf0174b
JJ
1417 break;
1418 decl = OMP_CLAUSE_DECL (c);
1419 if (DECL_P (decl)
e01d41e5
JJ
1420 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1421 && (OMP_CLAUSE_MAP_KIND (c)
1422 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32 1423 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
acf0174b 1424 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1f6be682 1425 && varpool_node::get_create (decl)->offloadable)
acf0174b
JJ
1426 break;
1427 if (DECL_P (decl))
1428 {
d9a6bd32
JJ
1429 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1430 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
acf0174b
JJ
1431 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1432 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1433 {
1434 tree new_decl = lookup_decl (decl, ctx);
1435 TREE_TYPE (new_decl)
1436 = remap_type (TREE_TYPE (decl), &ctx->cb);
1437 }
1438 else if (DECL_SIZE (decl)
1439 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1440 {
1441 tree decl2 = DECL_VALUE_EXPR (decl);
1442 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1443 decl2 = TREE_OPERAND (decl2, 0);
1444 gcc_assert (DECL_P (decl2));
1445 fixup_remapped_decl (decl2, ctx, false);
1446 fixup_remapped_decl (decl, ctx, true);
1447 }
1448 else
1449 fixup_remapped_decl (decl, ctx, false);
1450 }
1451 break;
1452
953ff289
DN
1453 case OMP_CLAUSE_COPYPRIVATE:
1454 case OMP_CLAUSE_COPYIN:
1455 case OMP_CLAUSE_DEFAULT:
1456 case OMP_CLAUSE_IF:
1457 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
1458 case OMP_CLAUSE_NUM_TEAMS:
1459 case OMP_CLAUSE_THREAD_LIMIT:
1460 case OMP_CLAUSE_DEVICE:
953ff289 1461 case OMP_CLAUSE_SCHEDULE:
acf0174b 1462 case OMP_CLAUSE_DIST_SCHEDULE:
953ff289
DN
1463 case OMP_CLAUSE_NOWAIT:
1464 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
1465 case OMP_CLAUSE_COLLAPSE:
1466 case OMP_CLAUSE_UNTIED:
20906c66
JJ
1467 case OMP_CLAUSE_FINAL:
1468 case OMP_CLAUSE_MERGEABLE:
acf0174b 1469 case OMP_CLAUSE_PROC_BIND:
74bf76ed 1470 case OMP_CLAUSE_SAFELEN:
d9a6bd32 1471 case OMP_CLAUSE_SIMDLEN:
acf0174b
JJ
1472 case OMP_CLAUSE_ALIGNED:
1473 case OMP_CLAUSE_DEPEND:
1474 case OMP_CLAUSE__LOOPTEMP_:
1475 case OMP_CLAUSE_TO:
1476 case OMP_CLAUSE_FROM:
d9a6bd32
JJ
1477 case OMP_CLAUSE_PRIORITY:
1478 case OMP_CLAUSE_GRAINSIZE:
1479 case OMP_CLAUSE_NUM_TASKS:
1480 case OMP_CLAUSE_THREADS:
1481 case OMP_CLAUSE_SIMD:
1482 case OMP_CLAUSE_NOGROUP:
1483 case OMP_CLAUSE_DEFAULTMAP:
1484 case OMP_CLAUSE_USE_DEVICE_PTR:
41dbbb37
TS
1485 case OMP_CLAUSE_ASYNC:
1486 case OMP_CLAUSE_WAIT:
1487 case OMP_CLAUSE_NUM_GANGS:
1488 case OMP_CLAUSE_NUM_WORKERS:
1489 case OMP_CLAUSE_VECTOR_LENGTH:
1490 case OMP_CLAUSE_GANG:
1491 case OMP_CLAUSE_WORKER:
1492 case OMP_CLAUSE_VECTOR:
c5a64cfe
NS
1493 case OMP_CLAUSE_INDEPENDENT:
1494 case OMP_CLAUSE_AUTO:
1495 case OMP_CLAUSE_SEQ:
02889d23 1496 case OMP_CLAUSE_TILE:
b2b40051 1497 case OMP_CLAUSE__GRIDDIM_:
6c7509bc 1498 case OMP_CLAUSE__SIMT_:
829c6349
CLT
1499 case OMP_CLAUSE_IF_PRESENT:
1500 case OMP_CLAUSE_FINALIZE:
41dbbb37
TS
1501 break;
1502
41dbbb37 1503 case OMP_CLAUSE__CACHE_:
953ff289
DN
1504 default:
1505 gcc_unreachable ();
1506 }
1507 }
1508
41dbbb37
TS
1509 gcc_checking_assert (!scan_array_reductions
1510 || !is_gimple_omp_oacc (ctx->stmt));
953ff289 1511 if (scan_array_reductions)
6b37bdaf
PP
1512 {
1513 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1514 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1515 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1516 {
1517 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1518 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1519 }
1520 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1521 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1522 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1523 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1524 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1525 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1526 }
953ff289
DN
1527}
1528
5e9d6aa4 1529/* Create a new name for omp child function. Returns an identifier. */
953ff289 1530
953ff289 1531static tree
5e9d6aa4 1532create_omp_child_function_name (bool task_copy)
953ff289 1533{
9a771876
JJ
1534 return clone_function_name (current_function_decl,
1535 task_copy ? "_omp_cpyfn" : "_omp_fn");
1536}
1537
9669b00b
AM
1538/* Return true if CTX may belong to offloaded code: either if current function
1539 is offloaded, or any enclosing context corresponds to a target region. */
1540
1541static bool
1542omp_maybe_offloaded_ctx (omp_context *ctx)
1543{
1544 if (cgraph_node::get (current_function_decl)->offloadable)
1545 return true;
1546 for (; ctx; ctx = ctx->outer)
1547 if (is_gimple_omp_offloaded (ctx->stmt))
1548 return true;
1549 return false;
1550}
1551
953ff289
DN
1552/* Build a decl for the omp child function. It'll not contain a body
1553 yet, just the bare decl. */
1554
1555static void
a68ab351 1556create_omp_child_function (omp_context *ctx, bool task_copy)
953ff289
DN
1557{
1558 tree decl, type, name, t;
1559
5e9d6aa4 1560 name = create_omp_child_function_name (task_copy);
a68ab351
JJ
1561 if (task_copy)
1562 type = build_function_type_list (void_type_node, ptr_type_node,
1563 ptr_type_node, NULL_TREE);
1564 else
1565 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
953ff289 1566
9a771876 1567 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
953ff289 1568
41dbbb37
TS
1569 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1570 || !task_copy);
a68ab351
JJ
1571 if (!task_copy)
1572 ctx->cb.dst_fn = decl;
1573 else
726a989a 1574 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
953ff289
DN
1575
1576 TREE_STATIC (decl) = 1;
1577 TREE_USED (decl) = 1;
1578 DECL_ARTIFICIAL (decl) = 1;
1579 DECL_IGNORED_P (decl) = 0;
1580 TREE_PUBLIC (decl) = 0;
1581 DECL_UNINLINABLE (decl) = 1;
1582 DECL_EXTERNAL (decl) = 0;
1583 DECL_CONTEXT (decl) = NULL_TREE;
50674e96 1584 DECL_INITIAL (decl) = make_node (BLOCK);
01771d43 1585 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
5c38262d 1586 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
f1542d9a
JJ
1587 /* Remove omp declare simd attribute from the new attributes. */
1588 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1589 {
1590 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1591 a = a2;
1592 a = TREE_CHAIN (a);
1593 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1594 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1595 *p = TREE_CHAIN (*p);
1596 else
1597 {
1598 tree chain = TREE_CHAIN (*p);
1599 *p = copy_node (*p);
1600 p = &TREE_CHAIN (*p);
1601 *p = chain;
1602 }
1603 }
5c38262d
JJ
1604 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1605 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1606 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1607 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1608 DECL_FUNCTION_VERSIONED (decl)
1609 = DECL_FUNCTION_VERSIONED (current_function_decl);
1610
9669b00b 1611 if (omp_maybe_offloaded_ctx (ctx))
acf0174b 1612 {
9669b00b
AM
1613 cgraph_node::get_create (decl)->offloadable = 1;
1614 if (ENABLE_OFFLOADING)
1615 g->have_offload = true;
acf0174b 1616 }
953ff289 1617
d7823208
BS
1618 if (cgraph_node::get_create (decl)->offloadable
1619 && !lookup_attribute ("omp declare target",
1620 DECL_ATTRIBUTES (current_function_decl)))
9669b00b
AM
1621 {
1622 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1623 ? "omp target entrypoint"
1624 : "omp declare target");
1625 DECL_ATTRIBUTES (decl)
1626 = tree_cons (get_identifier (target_attr),
1627 NULL_TREE, DECL_ATTRIBUTES (decl));
1628 }
d7823208 1629
c2255bc4
AH
1630 t = build_decl (DECL_SOURCE_LOCATION (decl),
1631 RESULT_DECL, NULL_TREE, void_type_node);
953ff289
DN
1632 DECL_ARTIFICIAL (t) = 1;
1633 DECL_IGNORED_P (t) = 1;
07485407 1634 DECL_CONTEXT (t) = decl;
953ff289
DN
1635 DECL_RESULT (decl) = t;
1636
9a771876
JJ
1637 tree data_name = get_identifier (".omp_data_i");
1638 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1639 ptr_type_node);
953ff289 1640 DECL_ARTIFICIAL (t) = 1;
cd3f04c8 1641 DECL_NAMELESS (t) = 1;
953ff289 1642 DECL_ARG_TYPE (t) = ptr_type_node;
50674e96 1643 DECL_CONTEXT (t) = current_function_decl;
953ff289 1644 TREE_USED (t) = 1;
d9a6bd32 1645 TREE_READONLY (t) = 1;
953ff289 1646 DECL_ARGUMENTS (decl) = t;
a68ab351
JJ
1647 if (!task_copy)
1648 ctx->receiver_decl = t;
1649 else
1650 {
c2255bc4
AH
1651 t = build_decl (DECL_SOURCE_LOCATION (decl),
1652 PARM_DECL, get_identifier (".omp_data_o"),
a68ab351
JJ
1653 ptr_type_node);
1654 DECL_ARTIFICIAL (t) = 1;
cd3f04c8 1655 DECL_NAMELESS (t) = 1;
a68ab351
JJ
1656 DECL_ARG_TYPE (t) = ptr_type_node;
1657 DECL_CONTEXT (t) = current_function_decl;
1658 TREE_USED (t) = 1;
628c189e 1659 TREE_ADDRESSABLE (t) = 1;
910ad8de 1660 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
a68ab351
JJ
1661 DECL_ARGUMENTS (decl) = t;
1662 }
953ff289 1663
b8698a0f 1664 /* Allocate memory for the function structure. The call to
50674e96 1665 allocate_struct_function clobbers CFUN, so we need to restore
953ff289 1666 it afterward. */
db2960f4 1667 push_struct_function (decl);
726a989a 1668 cfun->function_end_locus = gimple_location (ctx->stmt);
381cdae4 1669 init_tree_ssa (cfun);
db2960f4 1670 pop_cfun ();
953ff289
DN
1671}
1672
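/* Illustrative sketch only (not part of omp-low.c): the kind of user-level
   code for which the child function above is built.  The outlined body ends
   up as a separate function whose single argument is the ".omp_data_i"
   pointer created above; all names in the example are invented for
   illustration.  */

#include <omp.h>

void
count_threads (int *nthreads)
{
  /* The body of this parallel region is outlined into a child function that
     receives shared data (here, NTHREADS) through one marshalling pointer.  */
#pragma omp parallel
  {
#pragma omp single
    *nthreads = omp_get_num_threads ();
  }
}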
acf0174b
JJ
1673/* Callback for walk_gimple_seq.  Check if a combined parallel
1674 contains a GIMPLE_OMP_FOR satisfying gimple_omp_for_combined_into_p. */
1675
629b3d75
MJ
1676tree
1677omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1678 bool *handled_ops_p,
1679 struct walk_stmt_info *wi)
acf0174b 1680{
355fe088 1681 gimple *stmt = gsi_stmt (*gsi_p);
acf0174b
JJ
1682
1683 *handled_ops_p = true;
1684 switch (gimple_code (stmt))
1685 {
1686 WALK_SUBSTMTS;
1687
1688 case GIMPLE_OMP_FOR:
1689 if (gimple_omp_for_combined_into_p (stmt)
d9a6bd32
JJ
1690 && gimple_omp_for_kind (stmt)
1691 == *(const enum gf_mask *) (wi->info))
acf0174b
JJ
1692 {
1693 wi->info = stmt;
1694 return integer_zero_node;
1695 }
1696 break;
1697 default:
1698 break;
1699 }
1700 return NULL;
1701}
1702
d9a6bd32
JJ
1703/* Add _LOOPTEMP_ clauses on OpenMP parallel or task. */
1704
1705static void
1706add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1707 omp_context *outer_ctx)
1708{
1709 struct walk_stmt_info wi;
1710
1711 memset (&wi, 0, sizeof (wi));
1712 wi.val_only = true;
1713 wi.info = (void *) &msk;
629b3d75 1714 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
d9a6bd32
JJ
1715 if (wi.info != (void *) &msk)
1716 {
1717 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1718 struct omp_for_data fd;
629b3d75 1719 omp_extract_for_data (for_stmt, &fd, NULL);
d9a6bd32
JJ
1720 /* We need two temporaries with fd.loop.v type (istart/iend)
1721 and then (fd.collapse - 1) temporaries with the same
1722 type for count2 ... countN-1 vars if not constant. */
1723 size_t count = 2, i;
1724 tree type = fd.iter_type;
1725 if (fd.collapse > 1
1726 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1727 {
1728 count += fd.collapse - 1;
e01d41e5 1729 /* If there are lastprivate clauses on the inner
d9a6bd32
JJ
1730 GIMPLE_OMP_FOR, add one more temporary for the total number
1731 of iterations (product of count1 ... countN-1). */
629b3d75 1732 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
e01d41e5
JJ
1733 OMP_CLAUSE_LASTPRIVATE))
1734 count++;
1735 else if (msk == GF_OMP_FOR_KIND_FOR
629b3d75 1736 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
e01d41e5 1737 OMP_CLAUSE_LASTPRIVATE))
d9a6bd32
JJ
1738 count++;
1739 }
1740 for (i = 0; i < count; i++)
1741 {
1742 tree temp = create_tmp_var (type);
1743 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1744 insert_decl_map (&outer_ctx->cb, temp, temp);
1745 OMP_CLAUSE_DECL (c) = temp;
1746 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1747 gimple_omp_taskreg_set_clauses (stmt, c);
1748 }
1749 }
1750}
1751
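/* Illustrative sketch only (not part of omp-low.c): a combined construct for
   which add_taskreg_looptemp_clauses above adds _looptemp_ clauses.  With
   collapse(2), non-constant bounds and a lastprivate clause, temporaries are
   needed for istart/iend, the inner iteration count and the total iteration
   count; the function and variable names are invented for illustration.  */

void
fill_matrix (int n, int m, double *a)
{
  int last = 0;
#pragma omp parallel for collapse(2) lastprivate(last)
  for (int i = 0; i < n; i++)
    for (int j = 0; j < m; j++)
      {
        a[(long) i * m + j] = 0.0;
        last = i * m + j;
      }
  a[0] = last;
}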
953ff289
DN
1752/* Scan an OpenMP parallel directive. */
1753
1754static void
726a989a 1755scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
953ff289
DN
1756{
1757 omp_context *ctx;
1758 tree name;
538dd0b7 1759 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
953ff289
DN
1760
1761 /* Ignore parallel directives with empty bodies, unless there
1762 are copyin clauses. */
1763 if (optimize > 0
726a989a 1764 && empty_body_p (gimple_omp_body (stmt))
629b3d75 1765 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
726a989a 1766 OMP_CLAUSE_COPYIN) == NULL)
953ff289 1767 {
726a989a 1768 gsi_replace (gsi, gimple_build_nop (), false);
953ff289
DN
1769 return;
1770 }
1771
acf0174b 1772 if (gimple_omp_parallel_combined_p (stmt))
d9a6bd32 1773 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
acf0174b 1774
726a989a 1775 ctx = new_omp_context (stmt, outer_ctx);
5771c391 1776 taskreg_contexts.safe_push (ctx);
a68ab351 1777 if (taskreg_nesting_level > 1)
50674e96 1778 ctx->is_nested = true;
953ff289 1779 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
953ff289 1780 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
953ff289 1781 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
1782 name = build_decl (gimple_location (stmt),
1783 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
1784 DECL_ARTIFICIAL (name) = 1;
1785 DECL_NAMELESS (name) = 1;
953ff289 1786 TYPE_NAME (ctx->record_type) = name;
f7484978 1787 TYPE_ARTIFICIAL (ctx->record_type) = 1;
b2b40051
MJ
1788 if (!gimple_omp_parallel_grid_phony (stmt))
1789 {
1790 create_omp_child_function (ctx, false);
1791 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1792 }
953ff289 1793
726a989a 1794 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
26127932 1795 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
1796
1797 if (TYPE_FIELDS (ctx->record_type) == NULL)
1798 ctx->record_type = ctx->receiver_decl = NULL;
953ff289
DN
1799}
1800
a68ab351
JJ
1801/* Scan an OpenMP task directive. */
1802
1803static void
726a989a 1804scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
a68ab351
JJ
1805{
1806 omp_context *ctx;
726a989a 1807 tree name, t;
538dd0b7 1808 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
a68ab351 1809
fbc698e0
JJ
1810 /* Ignore task directives with empty bodies, unless they have a depend
1811 clause. */
a68ab351 1812 if (optimize > 0
fbc698e0
JJ
1813 && empty_body_p (gimple_omp_body (stmt))
1814 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
a68ab351 1815 {
726a989a 1816 gsi_replace (gsi, gimple_build_nop (), false);
a68ab351
JJ
1817 return;
1818 }
1819
d9a6bd32
JJ
1820 if (gimple_omp_task_taskloop_p (stmt))
1821 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1822
726a989a 1823 ctx = new_omp_context (stmt, outer_ctx);
5771c391 1824 taskreg_contexts.safe_push (ctx);
a68ab351
JJ
1825 if (taskreg_nesting_level > 1)
1826 ctx->is_nested = true;
1827 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
a68ab351
JJ
1828 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1829 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
1830 name = build_decl (gimple_location (stmt),
1831 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
1832 DECL_ARTIFICIAL (name) = 1;
1833 DECL_NAMELESS (name) = 1;
a68ab351 1834 TYPE_NAME (ctx->record_type) = name;
f7484978 1835 TYPE_ARTIFICIAL (ctx->record_type) = 1;
a68ab351 1836 create_omp_child_function (ctx, false);
726a989a 1837 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
a68ab351 1838
726a989a 1839 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
a68ab351
JJ
1840
1841 if (ctx->srecord_type)
1842 {
1843 name = create_tmp_var_name (".omp_data_a");
c2255bc4
AH
1844 name = build_decl (gimple_location (stmt),
1845 TYPE_DECL, name, ctx->srecord_type);
cd3f04c8
JJ
1846 DECL_ARTIFICIAL (name) = 1;
1847 DECL_NAMELESS (name) = 1;
a68ab351 1848 TYPE_NAME (ctx->srecord_type) = name;
f7484978 1849 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
a68ab351
JJ
1850 create_omp_child_function (ctx, true);
1851 }
1852
26127932 1853 scan_omp (gimple_omp_body_ptr (stmt), ctx);
a68ab351
JJ
1854
1855 if (TYPE_FIELDS (ctx->record_type) == NULL)
1856 {
1857 ctx->record_type = ctx->receiver_decl = NULL;
726a989a
RB
1858 t = build_int_cst (long_integer_type_node, 0);
1859 gimple_omp_task_set_arg_size (stmt, t);
1860 t = build_int_cst (long_integer_type_node, 1);
1861 gimple_omp_task_set_arg_align (stmt, t);
a68ab351 1862 }
5771c391
JJ
1863}
1864
655e5265
JJ
1865/* Helper function for finish_taskreg_scan, called through walk_tree.
1866 If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
1867 tree, replace it in the expression. */
1868
1869static tree
1870finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1871{
1872 if (VAR_P (*tp))
1873 {
1874 omp_context *ctx = (omp_context *) data;
1875 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1876 if (t != *tp)
1877 {
1878 if (DECL_HAS_VALUE_EXPR_P (t))
1879 t = unshare_expr (DECL_VALUE_EXPR (t));
1880 *tp = t;
1881 }
1882 *walk_subtrees = 0;
1883 }
1884 else if (IS_TYPE_OR_DECL_P (*tp))
1885 *walk_subtrees = 0;
1886 return NULL_TREE;
1887}
5771c391
JJ
1888
1889/* If any decls have been made addressable during scan_omp,
1890 adjust their fields if needed, and lay out the record types
1891 of parallel/task constructs. */
1892
1893static void
1894finish_taskreg_scan (omp_context *ctx)
1895{
1896 if (ctx->record_type == NULL_TREE)
1897 return;
1898
1899 /* If any task_shared_vars were needed, verify for all
1900 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1901 statements that use_pointer_for_field hasn't changed
1902 because of that.  If it did, update the field types now. */
1903 if (task_shared_vars)
1904 {
1905 tree c;
1906
1907 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1908 c; c = OMP_CLAUSE_CHAIN (c))
d9a6bd32
JJ
1909 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1910 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5771c391
JJ
1911 {
1912 tree decl = OMP_CLAUSE_DECL (c);
1913
1914 /* Global variables don't need to be copied,
1915 the receiver side will use them directly. */
1916 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1917 continue;
1918 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1919 || !use_pointer_for_field (decl, ctx))
1920 continue;
1921 tree field = lookup_field (decl, ctx);
1922 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1923 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1924 continue;
1925 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1926 TREE_THIS_VOLATILE (field) = 0;
1927 DECL_USER_ALIGN (field) = 0;
fe37c7af 1928 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
5771c391 1929 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
fe37c7af 1930 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
5771c391
JJ
1931 if (ctx->srecord_type)
1932 {
1933 tree sfield = lookup_sfield (decl, ctx);
1934 TREE_TYPE (sfield) = TREE_TYPE (field);
1935 TREE_THIS_VOLATILE (sfield) = 0;
1936 DECL_USER_ALIGN (sfield) = 0;
fe37c7af 1937 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
5771c391 1938 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
fe37c7af 1939 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
5771c391
JJ
1940 }
1941 }
1942 }
1943
1944 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1945 {
1946 layout_type (ctx->record_type);
1947 fixup_child_record_type (ctx);
1948 }
a68ab351
JJ
1949 else
1950 {
5771c391 1951 location_t loc = gimple_location (ctx->stmt);
a68ab351
JJ
1952 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1953 /* Move VLA fields to the end. */
1954 p = &TYPE_FIELDS (ctx->record_type);
1955 while (*p)
1956 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1957 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1958 {
1959 *q = *p;
1960 *p = TREE_CHAIN (*p);
1961 TREE_CHAIN (*q) = NULL_TREE;
1962 q = &TREE_CHAIN (*q);
1963 }
1964 else
910ad8de 1965 p = &DECL_CHAIN (*p);
a68ab351 1966 *p = vla_fields;
d9a6bd32
JJ
1967 if (gimple_omp_task_taskloop_p (ctx->stmt))
1968 {
1969 /* Move fields corresponding to first and second _looptemp_
1970 clauses first.  These are filled by GOMP_taskloop
1971 and thus need to be in specific positions. */
1972 tree c1 = gimple_omp_task_clauses (ctx->stmt);
629b3d75
MJ
1973 c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1974 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
d9a6bd32
JJ
1975 OMP_CLAUSE__LOOPTEMP_);
1976 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1977 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1978 p = &TYPE_FIELDS (ctx->record_type);
1979 while (*p)
1980 if (*p == f1 || *p == f2)
1981 *p = DECL_CHAIN (*p);
1982 else
1983 p = &DECL_CHAIN (*p);
1984 DECL_CHAIN (f1) = f2;
1985 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
1986 TYPE_FIELDS (ctx->record_type) = f1;
1987 if (ctx->srecord_type)
1988 {
1989 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
1990 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
1991 p = &TYPE_FIELDS (ctx->srecord_type);
1992 while (*p)
1993 if (*p == f1 || *p == f2)
1994 *p = DECL_CHAIN (*p);
1995 else
1996 p = &DECL_CHAIN (*p);
1997 DECL_CHAIN (f1) = f2;
1998 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
1999 TYPE_FIELDS (ctx->srecord_type) = f1;
2000 }
2001 }
a68ab351
JJ
2002 layout_type (ctx->record_type);
2003 fixup_child_record_type (ctx);
2004 if (ctx->srecord_type)
2005 layout_type (ctx->srecord_type);
5771c391
JJ
2006 tree t = fold_convert_loc (loc, long_integer_type_node,
2007 TYPE_SIZE_UNIT (ctx->record_type));
655e5265
JJ
2008 if (TREE_CODE (t) != INTEGER_CST)
2009 {
2010 t = unshare_expr (t);
2011 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2012 }
5771c391 2013 gimple_omp_task_set_arg_size (ctx->stmt, t);
726a989a 2014 t = build_int_cst (long_integer_type_node,
a68ab351 2015 TYPE_ALIGN_UNIT (ctx->record_type));
5771c391 2016 gimple_omp_task_set_arg_align (ctx->stmt, t);
a68ab351
JJ
2017 }
2018}
2019
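/* Illustrative sketch only (not part of omp-low.c): a task whose data record
   contains a variable-length field, so TYPE_SIZE_UNIT of the record is not an
   INTEGER_CST.  That is the case the code above handles by moving VLA fields
   to the end and computing the GOMP_task arg_size at run time; the function
   name is invented for illustration.  */

void
sum_vla (int n, double *result)
{
  double buf[n];
  for (int i = 0; i < n; i++)
    buf[i] = i;
#pragma omp task firstprivate(buf, n) shared(result)
  {
    double s = 0.0;
    for (int i = 0; i < n; i++)
      s += buf[i];
    *result = s;
  }
#pragma omp taskwait
}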
e4834818 2020/* Find the enclosing offload context. */
953ff289 2021
41dbbb37
TS
2022static omp_context *
2023enclosing_target_ctx (omp_context *ctx)
2024{
e4834818
NS
2025 for (; ctx; ctx = ctx->outer)
2026 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2027 break;
2028
41dbbb37
TS
2029 return ctx;
2030}
2031
e4834818
NS
2032/* Return true if ctx is part of an oacc kernels region. */
2033
41dbbb37 2034static bool
e4834818 2035ctx_in_oacc_kernels_region (omp_context *ctx)
41dbbb37 2036{
e4834818
NS
2037 for (;ctx != NULL; ctx = ctx->outer)
2038 {
2039 gimple *stmt = ctx->stmt;
2040 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2041 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2042 return true;
2043 }
2044
2045 return false;
2046}
2047
2048/* Check the parallelism clauses inside a kernels region.
2049 Until kernels handling moves to use the same loop indirection
2050 scheme as parallel, we need to do this checking early. */
2051
2052static unsigned
2053check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2054{
2055 bool checking = true;
2056 unsigned outer_mask = 0;
2057 unsigned this_mask = 0;
2058 bool has_seq = false, has_auto = false;
2059
2060 if (ctx->outer)
2061 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2062 if (!stmt)
2063 {
2064 checking = false;
2065 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2066 return outer_mask;
2067 stmt = as_a <gomp_for *> (ctx->stmt);
2068 }
2069
2070 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2071 {
2072 switch (OMP_CLAUSE_CODE (c))
2073 {
2074 case OMP_CLAUSE_GANG:
2075 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2076 break;
2077 case OMP_CLAUSE_WORKER:
2078 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2079 break;
2080 case OMP_CLAUSE_VECTOR:
2081 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2082 break;
2083 case OMP_CLAUSE_SEQ:
2084 has_seq = true;
2085 break;
2086 case OMP_CLAUSE_AUTO:
2087 has_auto = true;
2088 break;
2089 default:
2090 break;
2091 }
2092 }
2093
2094 if (checking)
2095 {
2096 if (has_seq && (this_mask || has_auto))
2097 error_at (gimple_location (stmt), "%<seq%> overrides other"
2098 " OpenACC loop specifiers");
2099 else if (has_auto && this_mask)
2100 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2101 " OpenACC loop specifiers");
2102
2103 if (this_mask & outer_mask)
2104 error_at (gimple_location (stmt), "inner loop uses same"
2105 " OpenACC parallelism as containing loop");
2106 }
2107
2108 return outer_mask | this_mask;
41dbbb37
TS
2109}
2110
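/* Illustrative sketch only (not part of omp-low.c): OpenACC loops inside a
   kernels region of the sort the early check above inspects.  The gang/vector
   split shown is accepted; repeating "gang" on the inner loop would trigger
   the "inner loop uses same OpenACC parallelism as containing loop" error.
   All names are invented for illustration.  */

void
scale (int n, int m, float alpha, float *a)
{
#pragma acc kernels copy(a[0:n*m])
  {
#pragma acc loop gang
    for (int i = 0; i < n; i++)
#pragma acc loop vector
      for (int j = 0; j < m; j++)
        a[i * m + j] *= alpha;
  }
}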
2111/* Scan a GIMPLE_OMP_FOR. */
953ff289 2112
6e6cf7b0 2113static omp_context *
538dd0b7 2114scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
953ff289 2115{
50674e96 2116 omp_context *ctx;
726a989a 2117 size_t i;
41dbbb37
TS
2118 tree clauses = gimple_omp_for_clauses (stmt);
2119
50674e96 2120 ctx = new_omp_context (stmt, outer_ctx);
953ff289 2121
41dbbb37
TS
2122 if (is_gimple_omp_oacc (stmt))
2123 {
e4834818
NS
2124 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2125
2126 if (!tgt || is_oacc_parallel (tgt))
2127 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2128 {
2129 char const *check = NULL;
2130
2131 switch (OMP_CLAUSE_CODE (c))
2132 {
2133 case OMP_CLAUSE_GANG:
2134 check = "gang";
2135 break;
2136
2137 case OMP_CLAUSE_WORKER:
2138 check = "worker";
2139 break;
2140
2141 case OMP_CLAUSE_VECTOR:
2142 check = "vector";
2143 break;
2144
2145 default:
2146 break;
2147 }
2148
2149 if (check && OMP_CLAUSE_OPERAND (c, 0))
2150 error_at (gimple_location (stmt),
2151 "argument not permitted on %qs clause in"
2152 " OpenACC %<parallel%>", check);
2153 }
2154
2155 if (tgt && is_oacc_kernels (tgt))
2156 {
2157 /* Strip out reductions, as they are not handled yet. */
2158 tree *prev_ptr = &clauses;
2159
2160 while (tree probe = *prev_ptr)
41dbbb37 2161 {
e4834818
NS
2162 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2163
2164 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2165 *prev_ptr = *next_ptr;
2166 else
2167 prev_ptr = next_ptr;
41dbbb37 2168 }
e4834818
NS
2169
2170 gimple_omp_for_set_clauses (stmt, clauses);
2171 check_oacc_kernel_gwv (stmt, ctx);
41dbbb37
TS
2172 }
2173 }
2174
2175 scan_sharing_clauses (clauses, ctx);
953ff289 2176
26127932 2177 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
726a989a 2178 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
a68ab351 2179 {
726a989a
RB
2180 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2181 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2182 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2183 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
a68ab351 2184 }
26127932 2185 scan_omp (gimple_omp_body_ptr (stmt), ctx);
6e6cf7b0 2186 return ctx;
953ff289
DN
2187}
2188
6c7509bc
JJ
2189/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2190
2191static void
2192scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2193 omp_context *outer_ctx)
2194{
2195 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2196 gsi_replace (gsi, bind, false);
2197 gimple_seq seq = NULL;
2198 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2199 tree cond = create_tmp_var_raw (integer_type_node);
2200 DECL_CONTEXT (cond) = current_function_decl;
2201 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2202 gimple_bind_set_vars (bind, cond);
2203 gimple_call_set_lhs (g, cond);
2204 gimple_seq_add_stmt (&seq, g);
2205 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2206 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2207 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2208 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2209 gimple_seq_add_stmt (&seq, g);
2210 g = gimple_build_label (lab1);
2211 gimple_seq_add_stmt (&seq, g);
2212 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2213 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2214 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2215 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2216 gimple_omp_for_set_clauses (new_stmt, clause);
2217 gimple_seq_add_stmt (&seq, new_stmt);
2218 g = gimple_build_goto (lab3);
2219 gimple_seq_add_stmt (&seq, g);
2220 g = gimple_build_label (lab2);
2221 gimple_seq_add_stmt (&seq, g);
2222 gimple_seq_add_stmt (&seq, stmt);
2223 g = gimple_build_label (lab3);
2224 gimple_seq_add_stmt (&seq, g);
2225 gimple_bind_set_body (bind, seq);
2226 update_stmt (bind);
2227 scan_omp_for (new_stmt, outer_ctx);
6e6cf7b0 2228 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
6c7509bc
JJ
2229}
2230
953ff289
DN
2231/* Scan an OpenMP sections directive. */
2232
2233static void
538dd0b7 2234scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
953ff289 2235{
953ff289
DN
2236 omp_context *ctx;
2237
2238 ctx = new_omp_context (stmt, outer_ctx);
726a989a 2239 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
26127932 2240 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2241}
2242
2243/* Scan an OpenMP single directive. */
2244
2245static void
538dd0b7 2246scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
953ff289 2247{
953ff289
DN
2248 omp_context *ctx;
2249 tree name;
2250
2251 ctx = new_omp_context (stmt, outer_ctx);
2252 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2253 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2254 name = create_tmp_var_name (".omp_copy_s");
c2255bc4
AH
2255 name = build_decl (gimple_location (stmt),
2256 TYPE_DECL, name, ctx->record_type);
953ff289
DN
2257 TYPE_NAME (ctx->record_type) = name;
2258
726a989a 2259 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
26127932 2260 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2261
2262 if (TYPE_FIELDS (ctx->record_type) == NULL)
2263 ctx->record_type = NULL;
2264 else
2265 layout_type (ctx->record_type);
2266}
2267
41dbbb37 2268/* Scan a GIMPLE_OMP_TARGET. */
acf0174b
JJ
2269
2270static void
538dd0b7 2271scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
acf0174b
JJ
2272{
2273 omp_context *ctx;
2274 tree name;
41dbbb37
TS
2275 bool offloaded = is_gimple_omp_offloaded (stmt);
2276 tree clauses = gimple_omp_target_clauses (stmt);
acf0174b
JJ
2277
2278 ctx = new_omp_context (stmt, outer_ctx);
2279 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
acf0174b
JJ
2280 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2281 name = create_tmp_var_name (".omp_data_t");
2282 name = build_decl (gimple_location (stmt),
2283 TYPE_DECL, name, ctx->record_type);
2284 DECL_ARTIFICIAL (name) = 1;
2285 DECL_NAMELESS (name) = 1;
2286 TYPE_NAME (ctx->record_type) = name;
f7484978 2287 TYPE_ARTIFICIAL (ctx->record_type) = 1;
86938de6 2288
41dbbb37 2289 if (offloaded)
acf0174b
JJ
2290 {
2291 create_omp_child_function (ctx, false);
2292 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2293 }
2294
829c6349 2295 scan_sharing_clauses (clauses, ctx);
acf0174b
JJ
2296 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2297
2298 if (TYPE_FIELDS (ctx->record_type) == NULL)
2299 ctx->record_type = ctx->receiver_decl = NULL;
2300 else
2301 {
2302 TYPE_FIELDS (ctx->record_type)
2303 = nreverse (TYPE_FIELDS (ctx->record_type));
b2b29377
MM
2304 if (flag_checking)
2305 {
2306 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2307 for (tree field = TYPE_FIELDS (ctx->record_type);
2308 field;
2309 field = DECL_CHAIN (field))
2310 gcc_assert (DECL_ALIGN (field) == align);
2311 }
acf0174b 2312 layout_type (ctx->record_type);
41dbbb37 2313 if (offloaded)
acf0174b
JJ
2314 fixup_child_record_type (ctx);
2315 }
2316}
2317
2318/* Scan an OpenMP teams directive. */
2319
2320static void
538dd0b7 2321scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
acf0174b
JJ
2322{
2323 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2324 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2325 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2326}
953ff289 2327
41dbbb37 2328/* Check nesting restrictions. */
26127932 2329static bool
355fe088 2330check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
a6fc8e21 2331{
d9a6bd32
JJ
2332 tree c;
2333
b2b40051
MJ
2334 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2335 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2336 the original copy of its contents. */
2337 return true;
2338
41dbbb37
TS
2339 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2340 inside an OpenACC CTX. */
2341 if (!(is_gimple_omp (stmt)
640b7e74
TV
2342 && is_gimple_omp_oacc (stmt))
2343 /* Except for atomic codes that we share with OpenMP. */
2344 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2345 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2346 {
629b3d75 2347 if (oacc_get_fn_attrib (cfun->decl) != NULL)
640b7e74
TV
2348 {
2349 error_at (gimple_location (stmt),
2350 "non-OpenACC construct inside of OpenACC routine");
2351 return false;
2352 }
2353 else
2354 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2355 if (is_gimple_omp (octx->stmt)
2356 && is_gimple_omp_oacc (octx->stmt))
2357 {
2358 error_at (gimple_location (stmt),
2359 "non-OpenACC construct inside of OpenACC region");
2360 return false;
2361 }
41dbbb37
TS
2362 }
2363
74bf76ed
JJ
2364 if (ctx != NULL)
2365 {
2366 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 2367 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
74bf76ed 2368 {
d9a6bd32
JJ
2369 c = NULL_TREE;
2370 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2371 {
2372 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
629b3d75 2373 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
d9f4ea18 2374 {
629b3d75 2375 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
d9f4ea18
JJ
2376 && (ctx->outer == NULL
2377 || !gimple_omp_for_combined_into_p (ctx->stmt)
2378 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2379 || (gimple_omp_for_kind (ctx->outer->stmt)
2380 != GF_OMP_FOR_KIND_FOR)
2381 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2382 {
2383 error_at (gimple_location (stmt),
2384 "%<ordered simd threads%> must be closely "
2385 "nested inside of %<for simd%> region");
2386 return false;
2387 }
2388 return true;
2389 }
d9a6bd32 2390 }
74bf76ed 2391 error_at (gimple_location (stmt),
d9a6bd32 2392 "OpenMP constructs other than %<#pragma omp ordered simd%>"
d9f4ea18 2393 " may not be nested inside %<simd%> region");
74bf76ed
JJ
2394 return false;
2395 }
acf0174b
JJ
2396 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2397 {
2398 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
56b1c60e
MJ
2399 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2400 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
acf0174b
JJ
2401 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2402 {
2403 error_at (gimple_location (stmt),
d9f4ea18
JJ
2404 "only %<distribute%> or %<parallel%> regions are "
2405 "allowed to be strictly nested inside %<teams%> "
2406 "region");
acf0174b
JJ
2407 return false;
2408 }
2409 }
74bf76ed 2410 }
726a989a 2411 switch (gimple_code (stmt))
a6fc8e21 2412 {
726a989a 2413 case GIMPLE_OMP_FOR:
0aadce73 2414 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
74bf76ed 2415 return true;
acf0174b
JJ
2416 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2417 {
2418 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2419 {
2420 error_at (gimple_location (stmt),
d9f4ea18
JJ
2421 "%<distribute%> region must be strictly nested "
2422 "inside %<teams%> construct");
acf0174b
JJ
2423 return false;
2424 }
2425 return true;
2426 }
d9a6bd32
JJ
2427 /* We split taskloop into task and nested taskloop in it. */
2428 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2429 return true;
68d58afb
NS
2430 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2431 {
2432 bool ok = false;
01914336 2433
68d58afb
NS
2434 if (ctx)
2435 switch (gimple_code (ctx->stmt))
2436 {
2437 case GIMPLE_OMP_FOR:
2438 ok = (gimple_omp_for_kind (ctx->stmt)
2439 == GF_OMP_FOR_KIND_OACC_LOOP);
2440 break;
2441
2442 case GIMPLE_OMP_TARGET:
2443 switch (gimple_omp_target_kind (ctx->stmt))
2444 {
2445 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2446 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2447 ok = true;
2448 break;
2449
2450 default:
2451 break;
2452 }
2453
2454 default:
2455 break;
2456 }
629b3d75 2457 else if (oacc_get_fn_attrib (current_function_decl))
68d58afb
NS
2458 ok = true;
2459 if (!ok)
2460 {
2461 error_at (gimple_location (stmt),
2462 "OpenACC loop directive must be associated with"
2463 " an OpenACC compute region");
2464 return false;
2465 }
2466 }
acf0174b
JJ
2467 /* FALLTHRU */
2468 case GIMPLE_CALL:
2469 if (is_gimple_call (stmt)
2470 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2471 == BUILT_IN_GOMP_CANCEL
2472 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2473 == BUILT_IN_GOMP_CANCELLATION_POINT))
2474 {
2475 const char *bad = NULL;
2476 const char *kind = NULL;
d9f4ea18
JJ
2477 const char *construct
2478 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2479 == BUILT_IN_GOMP_CANCEL)
2480 ? "#pragma omp cancel"
2481 : "#pragma omp cancellation point";
acf0174b
JJ
2482 if (ctx == NULL)
2483 {
2484 error_at (gimple_location (stmt), "orphaned %qs construct",
d9f4ea18 2485 construct);
acf0174b
JJ
2486 return false;
2487 }
9541ffee 2488 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
9439e9a1 2489 ? tree_to_shwi (gimple_call_arg (stmt, 0))
acf0174b
JJ
2490 : 0)
2491 {
2492 case 1:
2493 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2494 bad = "#pragma omp parallel";
2495 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2496 == BUILT_IN_GOMP_CANCEL
2497 && !integer_zerop (gimple_call_arg (stmt, 1)))
2498 ctx->cancellable = true;
2499 kind = "parallel";
2500 break;
2501 case 2:
2502 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2503 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2504 bad = "#pragma omp for";
2505 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2506 == BUILT_IN_GOMP_CANCEL
2507 && !integer_zerop (gimple_call_arg (stmt, 1)))
2508 {
2509 ctx->cancellable = true;
629b3d75 2510 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
acf0174b
JJ
2511 OMP_CLAUSE_NOWAIT))
2512 warning_at (gimple_location (stmt), 0,
2513 "%<#pragma omp cancel for%> inside "
2514 "%<nowait%> for construct");
629b3d75 2515 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
acf0174b
JJ
2516 OMP_CLAUSE_ORDERED))
2517 warning_at (gimple_location (stmt), 0,
2518 "%<#pragma omp cancel for%> inside "
2519 "%<ordered%> for construct");
2520 }
2521 kind = "for";
2522 break;
2523 case 4:
2524 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2525 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2526 bad = "#pragma omp sections";
2527 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2528 == BUILT_IN_GOMP_CANCEL
2529 && !integer_zerop (gimple_call_arg (stmt, 1)))
2530 {
2531 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2532 {
2533 ctx->cancellable = true;
629b3d75 2534 if (omp_find_clause (gimple_omp_sections_clauses
acf0174b
JJ
2535 (ctx->stmt),
2536 OMP_CLAUSE_NOWAIT))
2537 warning_at (gimple_location (stmt), 0,
2538 "%<#pragma omp cancel sections%> inside "
2539 "%<nowait%> sections construct");
2540 }
2541 else
2542 {
2543 gcc_assert (ctx->outer
2544 && gimple_code (ctx->outer->stmt)
2545 == GIMPLE_OMP_SECTIONS);
2546 ctx->outer->cancellable = true;
629b3d75 2547 if (omp_find_clause (gimple_omp_sections_clauses
acf0174b
JJ
2548 (ctx->outer->stmt),
2549 OMP_CLAUSE_NOWAIT))
2550 warning_at (gimple_location (stmt), 0,
2551 "%<#pragma omp cancel sections%> inside "
2552 "%<nowait%> sections construct");
2553 }
2554 }
2555 kind = "sections";
2556 break;
2557 case 8:
2558 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2559 bad = "#pragma omp task";
2560 else
d9f4ea18
JJ
2561 {
2562 for (omp_context *octx = ctx->outer;
2563 octx; octx = octx->outer)
2564 {
2565 switch (gimple_code (octx->stmt))
2566 {
2567 case GIMPLE_OMP_TASKGROUP:
2568 break;
2569 case GIMPLE_OMP_TARGET:
2570 if (gimple_omp_target_kind (octx->stmt)
2571 != GF_OMP_TARGET_KIND_REGION)
2572 continue;
2573 /* FALLTHRU */
2574 case GIMPLE_OMP_PARALLEL:
2575 case GIMPLE_OMP_TEAMS:
2576 error_at (gimple_location (stmt),
2577 "%<%s taskgroup%> construct not closely "
2578 "nested inside of %<taskgroup%> region",
2579 construct);
2580 return false;
2581 default:
2582 continue;
2583 }
2584 break;
2585 }
2586 ctx->cancellable = true;
2587 }
acf0174b
JJ
2588 kind = "taskgroup";
2589 break;
2590 default:
2591 error_at (gimple_location (stmt), "invalid arguments");
2592 return false;
2593 }
2594 if (bad)
2595 {
2596 error_at (gimple_location (stmt),
2597 "%<%s %s%> construct not closely nested inside of %qs",
d9f4ea18 2598 construct, kind, bad);
acf0174b
JJ
2599 return false;
2600 }
2601 }
74bf76ed 2602 /* FALLTHRU */
726a989a
RB
2603 case GIMPLE_OMP_SECTIONS:
2604 case GIMPLE_OMP_SINGLE:
a6fc8e21 2605 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2606 switch (gimple_code (ctx->stmt))
a6fc8e21 2607 {
726a989a 2608 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2609 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2610 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2611 break;
2612 /* FALLTHRU */
726a989a
RB
2613 case GIMPLE_OMP_SECTIONS:
2614 case GIMPLE_OMP_SINGLE:
2615 case GIMPLE_OMP_ORDERED:
2616 case GIMPLE_OMP_MASTER:
2617 case GIMPLE_OMP_TASK:
acf0174b 2618 case GIMPLE_OMP_CRITICAL:
726a989a 2619 if (is_gimple_call (stmt))
a68ab351 2620 {
acf0174b
JJ
2621 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2622 != BUILT_IN_GOMP_BARRIER)
2623 return true;
26127932
JJ
2624 error_at (gimple_location (stmt),
2625 "barrier region may not be closely nested inside "
d9f4ea18
JJ
2626 "of work-sharing, %<critical%>, %<ordered%>, "
2627 "%<master%>, explicit %<task%> or %<taskloop%> "
2628 "region");
26127932 2629 return false;
a68ab351 2630 }
26127932
JJ
2631 error_at (gimple_location (stmt),
2632 "work-sharing region may not be closely nested inside "
d9f4ea18
JJ
2633 "of work-sharing, %<critical%>, %<ordered%>, "
2634 "%<master%>, explicit %<task%> or %<taskloop%> region");
26127932 2635 return false;
726a989a 2636 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2637 case GIMPLE_OMP_TEAMS:
26127932 2638 return true;
d9f4ea18
JJ
2639 case GIMPLE_OMP_TARGET:
2640 if (gimple_omp_target_kind (ctx->stmt)
2641 == GF_OMP_TARGET_KIND_REGION)
2642 return true;
2643 break;
a6fc8e21
JJ
2644 default:
2645 break;
2646 }
2647 break;
726a989a 2648 case GIMPLE_OMP_MASTER:
a6fc8e21 2649 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2650 switch (gimple_code (ctx->stmt))
a6fc8e21 2651 {
726a989a 2652 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2653 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2654 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2655 break;
2656 /* FALLTHRU */
726a989a
RB
2657 case GIMPLE_OMP_SECTIONS:
2658 case GIMPLE_OMP_SINGLE:
2659 case GIMPLE_OMP_TASK:
26127932 2660 error_at (gimple_location (stmt),
d9f4ea18
JJ
2661 "%<master%> region may not be closely nested inside "
2662 "of work-sharing, explicit %<task%> or %<taskloop%> "
2663 "region");
26127932 2664 return false;
726a989a 2665 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2666 case GIMPLE_OMP_TEAMS:
26127932 2667 return true;
d9f4ea18
JJ
2668 case GIMPLE_OMP_TARGET:
2669 if (gimple_omp_target_kind (ctx->stmt)
2670 == GF_OMP_TARGET_KIND_REGION)
2671 return true;
2672 break;
a6fc8e21
JJ
2673 default:
2674 break;
2675 }
2676 break;
d9a6bd32
JJ
2677 case GIMPLE_OMP_TASK:
2678 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2679 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2680 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2681 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2682 {
2683 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2684 error_at (OMP_CLAUSE_LOCATION (c),
2685 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2686 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2687 return false;
2688 }
2689 break;
726a989a 2690 case GIMPLE_OMP_ORDERED:
d9a6bd32
JJ
2691 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2692 c; c = OMP_CLAUSE_CHAIN (c))
2693 {
2694 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2695 {
2696 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
d9f4ea18 2697 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
d9a6bd32
JJ
2698 continue;
2699 }
2700 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2701 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2702 || kind == OMP_CLAUSE_DEPEND_SINK)
2703 {
2704 tree oclause;
2705 /* Look for containing ordered(N) loop. */
2706 if (ctx == NULL
2707 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2708 || (oclause
629b3d75 2709 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
d9a6bd32
JJ
2710 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2711 {
2712 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2713 "%<ordered%> construct with %<depend%> clause "
2714 "must be closely nested inside an %<ordered%> "
2715 "loop");
d9a6bd32
JJ
2716 return false;
2717 }
2718 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2719 {
2720 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2721 "%<ordered%> construct with %<depend%> clause "
2722 "must be closely nested inside a loop with "
2723 "%<ordered%> clause with a parameter");
d9a6bd32
JJ
2724 return false;
2725 }
2726 }
2727 else
2728 {
2729 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2730 "invalid depend kind in omp %<ordered%> %<depend%>");
2731 return false;
2732 }
2733 }
2734 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
629b3d75 2735 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
d9f4ea18
JJ
2736 {
2737 /* ordered simd must be closely nested inside of simd region,
2738 and simd region must not encounter constructs other than
2739 ordered simd, therefore ordered simd may be either orphaned,
2740 or ctx->stmt must be simd. The latter case is handled already
2741 earlier. */
2742 if (ctx != NULL)
2743 {
2744 error_at (gimple_location (stmt),
2745 "%<ordered%> %<simd%> must be closely nested inside "
2746 "%<simd%> region");
d9a6bd32
JJ
2747 return false;
2748 }
2749 }
a6fc8e21 2750 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2751 switch (gimple_code (ctx->stmt))
a6fc8e21 2752 {
726a989a
RB
2753 case GIMPLE_OMP_CRITICAL:
2754 case GIMPLE_OMP_TASK:
d9f4ea18
JJ
2755 case GIMPLE_OMP_ORDERED:
2756 ordered_in_taskloop:
26127932 2757 error_at (gimple_location (stmt),
d9f4ea18
JJ
2758 "%<ordered%> region may not be closely nested inside "
2759 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2760 "%<taskloop%> region");
26127932 2761 return false;
726a989a 2762 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2763 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2764 goto ordered_in_taskloop;
629b3d75 2765 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
a6fc8e21 2766 OMP_CLAUSE_ORDERED) == NULL)
26127932
JJ
2767 {
2768 error_at (gimple_location (stmt),
d9f4ea18
JJ
2769 "%<ordered%> region must be closely nested inside "
2770 "a loop region with an %<ordered%> clause");
26127932
JJ
2771 return false;
2772 }
2773 return true;
d9f4ea18
JJ
2774 case GIMPLE_OMP_TARGET:
2775 if (gimple_omp_target_kind (ctx->stmt)
2776 != GF_OMP_TARGET_KIND_REGION)
2777 break;
2778 /* FALLTHRU */
726a989a 2779 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2780 case GIMPLE_OMP_TEAMS:
acf0174b 2781 error_at (gimple_location (stmt),
d9f4ea18
JJ
2782 "%<ordered%> region must be closely nested inside "
2783 "a loop region with an %<ordered%> clause");
acf0174b 2784 return false;
a6fc8e21
JJ
2785 default:
2786 break;
2787 }
2788 break;
726a989a 2789 case GIMPLE_OMP_CRITICAL:
538dd0b7
DM
2790 {
2791 tree this_stmt_name
2792 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2793 for (; ctx != NULL; ctx = ctx->outer)
2794 if (gomp_critical *other_crit
2795 = dyn_cast <gomp_critical *> (ctx->stmt))
2796 if (this_stmt_name == gimple_omp_critical_name (other_crit))
2797 {
2798 error_at (gimple_location (stmt),
d9f4ea18
JJ
2799 "%<critical%> region may not be nested inside "
2800 "a %<critical%> region with the same name");
538dd0b7
DM
2801 return false;
2802 }
2803 }
a6fc8e21 2804 break;
acf0174b
JJ
2805 case GIMPLE_OMP_TEAMS:
2806 if (ctx == NULL
2807 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2808 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2809 {
2810 error_at (gimple_location (stmt),
d9f4ea18
JJ
2811 "%<teams%> construct not closely nested inside of "
2812 "%<target%> construct");
acf0174b
JJ
2813 return false;
2814 }
2815 break;
f014c653 2816 case GIMPLE_OMP_TARGET:
d9a6bd32
JJ
2817 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2818 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2819 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2820 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2821 {
2822 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2823 error_at (OMP_CLAUSE_LOCATION (c),
2824 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2825 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2826 return false;
2827 }
640b7e74 2828 if (is_gimple_omp_offloaded (stmt)
629b3d75 2829 && oacc_get_fn_attrib (cfun->decl) != NULL)
640b7e74
TV
2830 {
2831 error_at (gimple_location (stmt),
2832 "OpenACC region inside of OpenACC routine, nested "
2833 "parallelism not supported yet");
2834 return false;
2835 }
f014c653 2836 for (; ctx != NULL; ctx = ctx->outer)
41dbbb37
TS
2837 {
2838 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2839 {
2840 if (is_gimple_omp (stmt)
2841 && is_gimple_omp_oacc (stmt)
2842 && is_gimple_omp (ctx->stmt))
2843 {
2844 error_at (gimple_location (stmt),
2845 "OpenACC construct inside of non-OpenACC region");
2846 return false;
2847 }
2848 continue;
2849 }
2850
2851 const char *stmt_name, *ctx_stmt_name;
2852 switch (gimple_omp_target_kind (stmt))
2853 {
2854 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2855 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2856 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
d9a6bd32
JJ
2857 case GF_OMP_TARGET_KIND_ENTER_DATA:
2858 stmt_name = "target enter data"; break;
2859 case GF_OMP_TARGET_KIND_EXIT_DATA:
2860 stmt_name = "target exit data"; break;
41dbbb37
TS
2861 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2862 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2863 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2864 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
d9a6bd32
JJ
2865 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2866 stmt_name = "enter/exit data"; break;
37d5ad46
JB
2867 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2868 break;
41dbbb37
TS
2869 default: gcc_unreachable ();
2870 }
2871 switch (gimple_omp_target_kind (ctx->stmt))
2872 {
2873 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2874 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
d9a6bd32
JJ
2875 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2876 ctx_stmt_name = "parallel"; break;
2877 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2878 ctx_stmt_name = "kernels"; break;
41dbbb37 2879 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
37d5ad46
JB
2880 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2881 ctx_stmt_name = "host_data"; break;
41dbbb37
TS
2882 default: gcc_unreachable ();
2883 }
2884
2885 /* OpenACC/OpenMP mismatch? */
2886 if (is_gimple_omp_oacc (stmt)
2887 != is_gimple_omp_oacc (ctx->stmt))
2888 {
2889 error_at (gimple_location (stmt),
d9f4ea18 2890 "%s %qs construct inside of %s %qs region",
41dbbb37
TS
2891 (is_gimple_omp_oacc (stmt)
2892 ? "OpenACC" : "OpenMP"), stmt_name,
2893 (is_gimple_omp_oacc (ctx->stmt)
2894 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2895 return false;
2896 }
2897 if (is_gimple_omp_offloaded (ctx->stmt))
2898 {
2899 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
2900 if (is_gimple_omp_oacc (ctx->stmt))
2901 {
2902 error_at (gimple_location (stmt),
d9f4ea18 2903 "%qs construct inside of %qs region",
41dbbb37
TS
2904 stmt_name, ctx_stmt_name);
2905 return false;
2906 }
2907 else
2908 {
41dbbb37 2909 warning_at (gimple_location (stmt), 0,
d9f4ea18 2910 "%qs construct inside of %qs region",
41dbbb37
TS
2911 stmt_name, ctx_stmt_name);
2912 }
2913 }
2914 }
f014c653 2915 break;
a6fc8e21
JJ
2916 default:
2917 break;
2918 }
26127932 2919 return true;
a6fc8e21
JJ
2920}
2921
2922
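/* Illustrative sketch only (not part of omp-low.c): nesting that satisfies
   the restrictions checked above -- "teams" strictly inside "target", and
   "distribute"/"parallel" strictly inside "teams", here spelled as a single
   combined construct.  The function name is invented for illustration.  */

void
vec_add (int n, const double *x, const double *y, double *z)
{
#pragma omp target teams distribute parallel for \
  map(to: x[0:n], y[0:n]) map(from: z[0:n])
  for (int i = 0; i < n; i++)
    z[i] = x[i] + y[i];
}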
726a989a
RB
2923/* Helper function scan_omp.
2924
2925 Callback for walk_tree or operators in walk_gimple_stmt used to
41dbbb37 2926 scan for OMP directives in TP. */
953ff289
DN
2927
2928static tree
726a989a 2929scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
953ff289 2930{
d3bfe4de
KG
2931 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2932 omp_context *ctx = (omp_context *) wi->info;
953ff289
DN
2933 tree t = *tp;
2934
726a989a
RB
2935 switch (TREE_CODE (t))
2936 {
2937 case VAR_DECL:
2938 case PARM_DECL:
2939 case LABEL_DECL:
2940 case RESULT_DECL:
2941 if (ctx)
b2b40051
MJ
2942 {
2943 tree repl = remap_decl (t, &ctx->cb);
2944 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
2945 *tp = repl;
2946 }
726a989a
RB
2947 break;
2948
2949 default:
2950 if (ctx && TYPE_P (t))
2951 *tp = remap_type (t, &ctx->cb);
2952 else if (!DECL_P (t))
a900ae6b
JJ
2953 {
2954 *walk_subtrees = 1;
2955 if (ctx)
70f34814
RG
2956 {
2957 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
2958 if (tem != TREE_TYPE (t))
2959 {
2960 if (TREE_CODE (t) == INTEGER_CST)
8e6cdc90 2961 *tp = wide_int_to_tree (tem, wi::to_wide (t));
70f34814
RG
2962 else
2963 TREE_TYPE (t) = tem;
2964 }
2965 }
a900ae6b 2966 }
726a989a
RB
2967 break;
2968 }
2969
2970 return NULL_TREE;
2971}
2972
c02065fc
AH
2973/* Return true if FNDECL is a setjmp or a longjmp. */
2974
2975static bool
2976setjmp_or_longjmp_p (const_tree fndecl)
2977{
3d78e008
ML
2978 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
2979 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
c02065fc
AH
2980 return true;
2981
2982 tree declname = DECL_NAME (fndecl);
2983 if (!declname)
2984 return false;
2985 const char *name = IDENTIFIER_POINTER (declname);
2986 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
2987}
2988
726a989a
RB
2989
2990/* Helper function for scan_omp.
2991
41dbbb37 2992 Callback for walk_gimple_stmt used to scan for OMP directives in
726a989a
RB
2993 the current statement in GSI. */
2994
2995static tree
2996scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2997 struct walk_stmt_info *wi)
2998{
355fe088 2999 gimple *stmt = gsi_stmt (*gsi);
726a989a
RB
3000 omp_context *ctx = (omp_context *) wi->info;
3001
3002 if (gimple_has_location (stmt))
3003 input_location = gimple_location (stmt);
953ff289 3004
41dbbb37 3005 /* Check the nesting restrictions. */
acf0174b
JJ
3006 bool remove = false;
3007 if (is_gimple_omp (stmt))
3008 remove = !check_omp_nesting_restrictions (stmt, ctx);
3009 else if (is_gimple_call (stmt))
3010 {
3011 tree fndecl = gimple_call_fndecl (stmt);
c02065fc
AH
3012 if (fndecl)
3013 {
3014 if (setjmp_or_longjmp_p (fndecl)
3015 && ctx
3016 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 3017 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
c02065fc
AH
3018 {
3019 remove = true;
3020 error_at (gimple_location (stmt),
3021 "setjmp/longjmp inside simd construct");
3022 }
3023 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3024 switch (DECL_FUNCTION_CODE (fndecl))
3025 {
3026 case BUILT_IN_GOMP_BARRIER:
3027 case BUILT_IN_GOMP_CANCEL:
3028 case BUILT_IN_GOMP_CANCELLATION_POINT:
3029 case BUILT_IN_GOMP_TASKYIELD:
3030 case BUILT_IN_GOMP_TASKWAIT:
3031 case BUILT_IN_GOMP_TASKGROUP_START:
3032 case BUILT_IN_GOMP_TASKGROUP_END:
3033 remove = !check_omp_nesting_restrictions (stmt, ctx);
3034 break;
3035 default:
3036 break;
3037 }
3038 }
acf0174b
JJ
3039 }
3040 if (remove)
3041 {
3042 stmt = gimple_build_nop ();
3043 gsi_replace (gsi, stmt, false);
a68ab351 3044 }
a6fc8e21 3045
726a989a
RB
3046 *handled_ops_p = true;
3047
3048 switch (gimple_code (stmt))
953ff289 3049 {
726a989a 3050 case GIMPLE_OMP_PARALLEL:
a68ab351 3051 taskreg_nesting_level++;
726a989a 3052 scan_omp_parallel (gsi, ctx);
a68ab351
JJ
3053 taskreg_nesting_level--;
3054 break;
3055
726a989a 3056 case GIMPLE_OMP_TASK:
a68ab351 3057 taskreg_nesting_level++;
726a989a 3058 scan_omp_task (gsi, ctx);
a68ab351 3059 taskreg_nesting_level--;
953ff289
DN
3060 break;
3061
726a989a 3062 case GIMPLE_OMP_FOR:
6c7509bc
JJ
3063 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3064 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3065 && omp_maybe_offloaded_ctx (ctx)
3066 && omp_max_simt_vf ())
3067 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3068 else
3069 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
953ff289
DN
3070 break;
3071
726a989a 3072 case GIMPLE_OMP_SECTIONS:
538dd0b7 3073 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
953ff289
DN
3074 break;
3075
726a989a 3076 case GIMPLE_OMP_SINGLE:
538dd0b7 3077 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
953ff289
DN
3078 break;
3079
726a989a
RB
3080 case GIMPLE_OMP_SECTION:
3081 case GIMPLE_OMP_MASTER:
acf0174b 3082 case GIMPLE_OMP_TASKGROUP:
726a989a
RB
3083 case GIMPLE_OMP_ORDERED:
3084 case GIMPLE_OMP_CRITICAL:
b2b40051 3085 case GIMPLE_OMP_GRID_BODY:
726a989a 3086 ctx = new_omp_context (stmt, ctx);
26127932 3087 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
3088 break;
3089
acf0174b 3090 case GIMPLE_OMP_TARGET:
538dd0b7 3091 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
acf0174b
JJ
3092 break;
3093
3094 case GIMPLE_OMP_TEAMS:
538dd0b7 3095 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
acf0174b
JJ
3096 break;
3097
726a989a 3098 case GIMPLE_BIND:
953ff289
DN
3099 {
3100 tree var;
953ff289 3101
726a989a
RB
3102 *handled_ops_p = false;
3103 if (ctx)
538dd0b7
DM
3104 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3105 var ;
3106 var = DECL_CHAIN (var))
726a989a 3107 insert_decl_map (&ctx->cb, var, var);
953ff289
DN
3108 }
3109 break;
953ff289 3110 default:
726a989a 3111 *handled_ops_p = false;
953ff289
DN
3112 break;
3113 }
3114
3115 return NULL_TREE;
3116}
3117
3118
726a989a 3119/* Scan all the statements starting at the current statement. CTX
41dbbb37 3120 contains context information about the OMP directives and
726a989a 3121 clauses found during the scan. */
953ff289
DN
3122
3123static void
26127932 3124scan_omp (gimple_seq *body_p, omp_context *ctx)
953ff289
DN
3125{
3126 location_t saved_location;
3127 struct walk_stmt_info wi;
3128
3129 memset (&wi, 0, sizeof (wi));
953ff289 3130 wi.info = ctx;
953ff289
DN
3131 wi.want_locations = true;
3132
3133 saved_location = input_location;
26127932 3134 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
953ff289
DN
3135 input_location = saved_location;
3136}
3137\f
3138/* Re-gimplification and code generation routines. */
3139
6724f8a6
JJ
3140/* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3141 of BIND if in a method. */
3142
3143static void
3144maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3145{
3146 if (DECL_ARGUMENTS (current_function_decl)
3147 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3148 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3149 == POINTER_TYPE))
3150 {
3151 tree vars = gimple_bind_vars (bind);
3152 for (tree *pvar = &vars; *pvar; )
3153 if (omp_member_access_dummy_var (*pvar))
3154 *pvar = DECL_CHAIN (*pvar);
3155 else
3156 pvar = &DECL_CHAIN (*pvar);
3157 gimple_bind_set_vars (bind, vars);
3158 }
3159}
3160
3161/* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3162 block and its subblocks. */
3163
3164static void
3165remove_member_access_dummy_vars (tree block)
3166{
3167 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3168 if (omp_member_access_dummy_var (*pvar))
3169 *pvar = DECL_CHAIN (*pvar);
3170 else
3171 pvar = &DECL_CHAIN (*pvar);
3172
3173 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3174 remove_member_access_dummy_vars (block);
3175}
3176
953ff289
DN
3177/* If a context was created for STMT when it was scanned, return it. */
3178
3179static omp_context *
355fe088 3180maybe_lookup_ctx (gimple *stmt)
953ff289
DN
3181{
3182 splay_tree_node n;
3183 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3184 return n ? (omp_context *) n->value : NULL;
3185}
3186
50674e96
DN
3187
3188/* Find the mapping for DECL in CTX or the immediately enclosing
3189 context that has a mapping for DECL.
3190
3191 If CTX is a nested parallel directive, we may have to use the decl
3192 mappings created in CTX's parent context. Suppose that we have the
3193 following parallel nesting (variable UIDs shown for clarity):
3194
3195 iD.1562 = 0;
3196 #omp parallel shared(iD.1562) -> outer parallel
3197 iD.1562 = iD.1562 + 1;
3198
3199 #omp parallel shared (iD.1562) -> inner parallel
3200 iD.1562 = iD.1562 - 1;
3201
3202 Each parallel structure will create a distinct .omp_data_s structure
3203 for copying iD.1562 in/out of the directive:
3204
3205 outer parallel .omp_data_s.1.i -> iD.1562
3206 inner parallel .omp_data_s.2.i -> iD.1562
3207
3208 A shared variable mapping will produce a copy-out operation before
3209 the parallel directive and a copy-in operation after it. So, in
3210 this case we would have:
3211
3212 iD.1562 = 0;
3213 .omp_data_o.1.i = iD.1562;
3214 #omp parallel shared(iD.1562) -> outer parallel
3215 .omp_data_i.1 = &.omp_data_o.1
3216 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3217
3218 .omp_data_o.2.i = iD.1562; -> **
3219 #omp parallel shared(iD.1562) -> inner parallel
3220 .omp_data_i.2 = &.omp_data_o.2
3221 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3222
3223
3224 ** This is a problem. The symbol iD.1562 cannot be referenced
3225 inside the body of the outer parallel region. But since we are
3226 emitting this copy operation while expanding the inner parallel
3227 directive, we need to access the CTX structure of the outer
3228 parallel directive to get the correct mapping:
3229
3230 .omp_data_o.2.i = .omp_data_i.1->i
3231
3232 Since there may be other workshare or parallel directives enclosing
3233 the parallel directive, it may be necessary to walk up the context
3234 parent chain. This is not a problem in general because nested
3235 parallelism happens only rarely. */
3236
3237static tree
3238lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3239{
3240 tree t;
3241 omp_context *up;
3242
50674e96
DN
3243 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3244 t = maybe_lookup_decl (decl, up);
3245
d2dda7fe 3246 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
50674e96 3247
64964499 3248 return t ? t : decl;
50674e96
DN
3249}
3250
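/* Illustrative sketch only (not part of omp-low.c): user-level source roughly
   matching the iD.1562 example in the comment above.  The updates race and
   are only meant to show where the two .omp_data_s copies and the
   outer-to-inner mapping come from.  */

void
nested_parallel (void)
{
  int i = 0;
#pragma omp parallel shared(i)          /* outer parallel */
  {
    i = i + 1;
#pragma omp parallel shared(i)          /* inner parallel */
    i = i - 1;
  }
}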
3251
8ca5b2a2
JJ
3252/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3253 in outer contexts. */
3254
3255static tree
3256maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3257{
3258 tree t = NULL;
3259 omp_context *up;
3260
d2dda7fe
JJ
3261 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3262 t = maybe_lookup_decl (decl, up);
8ca5b2a2
JJ
3263
3264 return t ? t : decl;
3265}
3266
3267
f2c9f71d 3268/* Construct the initialization value for reduction operation OP. */
953ff289
DN
3269
3270tree
f2c9f71d 3271omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
953ff289 3272{
f2c9f71d 3273 switch (op)
953ff289
DN
3274 {
3275 case PLUS_EXPR:
3276 case MINUS_EXPR:
3277 case BIT_IOR_EXPR:
3278 case BIT_XOR_EXPR:
3279 case TRUTH_OR_EXPR:
3280 case TRUTH_ORIF_EXPR:
3281 case TRUTH_XOR_EXPR:
3282 case NE_EXPR:
e8160c9a 3283 return build_zero_cst (type);
953ff289
DN
3284
3285 case MULT_EXPR:
3286 case TRUTH_AND_EXPR:
3287 case TRUTH_ANDIF_EXPR:
3288 case EQ_EXPR:
db3927fb 3289 return fold_convert_loc (loc, type, integer_one_node);
953ff289
DN
3290
3291 case BIT_AND_EXPR:
db3927fb 3292 return fold_convert_loc (loc, type, integer_minus_one_node);
953ff289
DN
3293
3294 case MAX_EXPR:
3295 if (SCALAR_FLOAT_TYPE_P (type))
3296 {
3297 REAL_VALUE_TYPE max, min;
3d3dbadd 3298 if (HONOR_INFINITIES (type))
953ff289
DN
3299 {
3300 real_inf (&max);
3301 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3302 }
3303 else
3304 real_maxval (&min, 1, TYPE_MODE (type));
3305 return build_real (type, min);
3306 }
3ff2d74e
TV
3307 else if (POINTER_TYPE_P (type))
3308 {
3309 wide_int min
3310 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3311 return wide_int_to_tree (type, min);
3312 }
953ff289
DN
3313 else
3314 {
3315 gcc_assert (INTEGRAL_TYPE_P (type));
3316 return TYPE_MIN_VALUE (type);
3317 }
3318
3319 case MIN_EXPR:
3320 if (SCALAR_FLOAT_TYPE_P (type))
3321 {
3322 REAL_VALUE_TYPE max;
3d3dbadd 3323 if (HONOR_INFINITIES (type))
953ff289
DN
3324 real_inf (&max);
3325 else
3326 real_maxval (&max, 0, TYPE_MODE (type));
3327 return build_real (type, max);
3328 }
3ff2d74e
TV
3329 else if (POINTER_TYPE_P (type))
3330 {
3331 wide_int max
3332 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3333 return wide_int_to_tree (type, max);
3334 }
953ff289
DN
3335 else
3336 {
3337 gcc_assert (INTEGRAL_TYPE_P (type));
3338 return TYPE_MAX_VALUE (type);
3339 }
3340
3341 default:
3342 gcc_unreachable ();
3343 }
3344}
3345
f2c9f71d
TS
3346/* Construct the initialization value for reduction CLAUSE. */
3347
3348tree
3349omp_reduction_init (tree clause, tree type)
3350{
3351 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3352 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3353}
3354
acf0174b
JJ
3355/* Return alignment to be assumed for var in CLAUSE, which should be
3356 OMP_CLAUSE_ALIGNED. */
3357
3358static tree
3359omp_clause_aligned_alignment (tree clause)
3360{
3361 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3362 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3363
3364 /* Otherwise return implementation defined alignment. */
3365 unsigned int al = 1;
16d22000 3366 opt_scalar_mode mode_iter;
86e36728
RS
3367 auto_vector_sizes sizes;
3368 targetm.vectorize.autovectorize_vector_sizes (&sizes);
3369 poly_uint64 vs = 0;
3370 for (unsigned int i = 0; i < sizes.length (); ++i)
3371 vs = ordered_max (vs, sizes[i]);
acf0174b
JJ
3372 static enum mode_class classes[]
3373 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3374 for (int i = 0; i < 4; i += 2)
16d22000
RS
3375 /* The for loop above dictates that we only walk through scalar classes. */
3376 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
acf0174b 3377 {
16d22000
RS
3378 scalar_mode mode = mode_iter.require ();
3379 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
acf0174b
JJ
3380 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3381 continue;
86e36728
RS
3382 while (maybe_ne (vs, 0U)
3383 && known_lt (GET_MODE_SIZE (vmode), vs)
490d0f6c
RS
3384 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3385 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
01914336 3386
acf0174b
JJ
3387 tree type = lang_hooks.types.type_for_mode (mode, 1);
3388 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3389 continue;
cf098191
RS
3390 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3391 GET_MODE_SIZE (mode));
86e36728 3392 type = build_vector_type (type, nelts);
acf0174b
JJ
3393 if (TYPE_MODE (type) != vmode)
3394 continue;
3395 if (TYPE_ALIGN_UNIT (type) > al)
3396 al = TYPE_ALIGN_UNIT (type);
3397 }
3398 return build_int_cst (integer_type_node, al);
3399}
3400
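/* Illustration only, as the result is entirely target-dependent: on a
   target whose widest supported integer vector mode is, say, 256 bits
   wide, the loop above typically ends up with al == 32, so an aligned (p)
   clause without an explicit alignment assumes 32-byte alignment for p.  */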
6943af07
AM
3401
3402/* This structure is part of the interface between lower_rec_simd_input_clauses
3403 and lower_rec_input_clauses. */
3404
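/* Roughly: IDX and LANE are the per-iteration index and the privatized
   lane number used to subscript "omp simd array" copies; SIMT_EARGS and
   SIMT_DLIST collect the extra arguments for the GOMP_SIMT_ENTER call and
   the clobbers emitted on exit for SIMT privatization; MAX_VF is the upper
   bound chosen for the vectorization factor; IS_SIMT selects SIMT rather
   than SIMD lowering.  */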
3405struct omplow_simd_context {
9d2f08ab 3406 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
6943af07
AM
3407 tree idx;
3408 tree lane;
0c6b03b5
AM
3409 vec<tree, va_heap> simt_eargs;
3410 gimple_seq simt_dlist;
9d2f08ab 3411 poly_uint64_pod max_vf;
6943af07
AM
3412 bool is_simt;
3413};
3414
74bf76ed
JJ
3415/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3416 privatization. */
3417
3418static bool
6943af07
AM
3419lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3420 omplow_simd_context *sctx, tree &ivar, tree &lvar)
74bf76ed 3421{
9d2f08ab 3422 if (known_eq (sctx->max_vf, 0U))
74bf76ed 3423 {
6943af07 3424 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
9d2f08ab 3425 if (maybe_gt (sctx->max_vf, 1U))
74bf76ed 3426 {
629b3d75 3427 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
74bf76ed 3428 OMP_CLAUSE_SAFELEN);
9d2f08ab
RS
3429 if (c)
3430 {
3431 poly_uint64 safe_len;
3432 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3433 || maybe_lt (safe_len, 1U))
3434 sctx->max_vf = 1;
3435 else
3436 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3437 }
74bf76ed 3438 }
9d2f08ab 3439 if (maybe_gt (sctx->max_vf, 1U))
74bf76ed 3440 {
6943af07
AM
3441 sctx->idx = create_tmp_var (unsigned_type_node);
3442 sctx->lane = create_tmp_var (unsigned_type_node);
74bf76ed
JJ
3443 }
3444 }
9d2f08ab 3445 if (known_eq (sctx->max_vf, 1U))
74bf76ed
JJ
3446 return false;
3447
0c6b03b5
AM
3448 if (sctx->is_simt)
3449 {
3450 if (is_gimple_reg (new_var))
3451 {
3452 ivar = lvar = new_var;
3453 return true;
3454 }
3455 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3456 ivar = lvar = create_tmp_var (type);
3457 TREE_ADDRESSABLE (ivar) = 1;
3458 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3459 NULL, DECL_ATTRIBUTES (ivar));
3460 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3461 tree clobber = build_constructor (type, NULL);
3462 TREE_THIS_VOLATILE (clobber) = 1;
3463 gimple *g = gimple_build_assign (ivar, clobber);
3464 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3465 }
3466 else
3467 {
3468 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3469 tree avar = create_tmp_var_raw (atype);
3470 if (TREE_ADDRESSABLE (new_var))
3471 TREE_ADDRESSABLE (avar) = 1;
3472 DECL_ATTRIBUTES (avar)
3473 = tree_cons (get_identifier ("omp simd array"), NULL,
3474 DECL_ATTRIBUTES (avar));
3475 gimple_add_tmp_var (avar);
3476 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3477 NULL_TREE, NULL_TREE);
3478 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3479 NULL_TREE, NULL_TREE);
3480 }
acf0174b
JJ
3481 if (DECL_P (new_var))
3482 {
3483 SET_DECL_VALUE_EXPR (new_var, lvar);
3484 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3485 }
74bf76ed
JJ
3486 return true;
3487}
3488
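/* An illustrative sketch of the effect: for a privatized scalar D.1234 in
   a simd loop, the non-SIMT path above creates an "omp simd array"
   D.1234[max_vf]; uses inside the loop body then go through
   IVAR == D.1234[idx], while the sequential prologue/epilogue code uses
   LVAR == D.1234[lane], with the actual lane chosen later via the
   GOMP_SIMD_LANE / GOMP_SIMD_LAST_LANE internal functions.  */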
decaaec8
JJ
3489/* Helper function of lower_rec_input_clauses. For a reference
3490 in simd reduction, add an underlying variable it will reference. */
3491
3492static void
3493handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3494{
3495 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3496 if (TREE_CONSTANT (z))
3497 {
d9a6bd32
JJ
3498 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3499 get_name (new_vard));
decaaec8
JJ
3500 gimple_add_tmp_var (z);
3501 TREE_ADDRESSABLE (z) = 1;
3502 z = build_fold_addr_expr_loc (loc, z);
3503 gimplify_assign (new_vard, z, ilist);
3504 }
3505}
3506
953ff289
DN
3507/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3508 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3509 private variables. Initialization statements go in ILIST, while calls
3510 to destructors go in DLIST. */
3511
3512static void
726a989a 3513lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
acf0174b 3514 omp_context *ctx, struct omp_for_data *fd)
953ff289 3515{
5039610b 3516 tree c, dtor, copyin_seq, x, ptr;
953ff289 3517 bool copyin_by_ref = false;
8ca5b2a2 3518 bool lastprivate_firstprivate = false;
acf0174b 3519 bool reduction_omp_orig_ref = false;
953ff289 3520 int pass;
74bf76ed 3521 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 3522 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
6943af07 3523 omplow_simd_context sctx = omplow_simd_context ();
0c6b03b5
AM
3524 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3525 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
9669b00b 3526 gimple_seq llist[3] = { };
953ff289 3527
953ff289 3528 copyin_seq = NULL;
6943af07 3529 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
953ff289 3530
74bf76ed
JJ
3531 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3532 with data sharing clauses referencing variable sized vars. That
3533 is unnecessarily hard to support and very unlikely to result in
3534 vectorized code anyway. */
3535 if (is_simd)
3536 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3537 switch (OMP_CLAUSE_CODE (c))
3538 {
da6f124d
JJ
3539 case OMP_CLAUSE_LINEAR:
3540 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6943af07 3541 sctx.max_vf = 1;
da6f124d 3542 /* FALLTHRU */
74bf76ed
JJ
3543 case OMP_CLAUSE_PRIVATE:
3544 case OMP_CLAUSE_FIRSTPRIVATE:
3545 case OMP_CLAUSE_LASTPRIVATE:
74bf76ed 3546 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
6943af07 3547 sctx.max_vf = 1;
74bf76ed 3548 break;
d9a6bd32
JJ
3549 case OMP_CLAUSE_REDUCTION:
3550 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3551 || is_variable_sized (OMP_CLAUSE_DECL (c)))
6943af07 3552 sctx.max_vf = 1;
d9a6bd32 3553 break;
74bf76ed
JJ
3554 default:
3555 continue;
3556 }
3557
0c6b03b5 3558 /* Add a placeholder for simduid. */
9d2f08ab 3559 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
0c6b03b5
AM
3560 sctx.simt_eargs.safe_push (NULL_TREE);
3561
953ff289
DN
3562 /* Do all the fixed sized types in the first pass, and the variable sized
3563 types in the second pass. This makes sure that the scalar arguments to
b8698a0f 3564 the variable sized types are processed before we use them in the
953ff289
DN
3565 variable sized operations. */
3566 for (pass = 0; pass < 2; ++pass)
3567 {
3568 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3569 {
aaf46ef9 3570 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
953ff289
DN
3571 tree var, new_var;
3572 bool by_ref;
db3927fb 3573 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289
DN
3574
3575 switch (c_kind)
3576 {
3577 case OMP_CLAUSE_PRIVATE:
3578 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3579 continue;
3580 break;
3581 case OMP_CLAUSE_SHARED:
acf0174b
JJ
3582 /* Ignore shared directives in teams construct. */
3583 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3584 continue;
8ca5b2a2
JJ
3585 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3586 {
d9a6bd32
JJ
3587 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3588 || is_global_var (OMP_CLAUSE_DECL (c)));
8ca5b2a2
JJ
3589 continue;
3590 }
953ff289 3591 case OMP_CLAUSE_FIRSTPRIVATE:
953ff289 3592 case OMP_CLAUSE_COPYIN:
d9a6bd32 3593 break;
acf0174b 3594 case OMP_CLAUSE_LINEAR:
d9a6bd32
JJ
3595 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3596 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3597 lastprivate_firstprivate = true;
acf0174b 3598 break;
953ff289 3599 case OMP_CLAUSE_REDUCTION:
acf0174b
JJ
3600 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3601 reduction_omp_orig_ref = true;
953ff289 3602 break;
acf0174b 3603 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32 3604 /* Handle _looptemp_ clauses only on parallel/task. */
acf0174b
JJ
3605 if (fd)
3606 continue;
74bf76ed 3607 break;
077b0dfb 3608 case OMP_CLAUSE_LASTPRIVATE:
8ca5b2a2
JJ
3609 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3610 {
3611 lastprivate_firstprivate = true;
d9a6bd32 3612 if (pass != 0 || is_taskloop_ctx (ctx))
8ca5b2a2
JJ
3613 continue;
3614 }
92d28cbb
JJ
3615 /* Even without corresponding firstprivate, if
3616 decl is Fortran allocatable, it needs outer var
3617 reference. */
3618 else if (pass == 0
3619 && lang_hooks.decls.omp_private_outer_ref
3620 (OMP_CLAUSE_DECL (c)))
3621 lastprivate_firstprivate = true;
077b0dfb 3622 break;
acf0174b
JJ
3623 case OMP_CLAUSE_ALIGNED:
3624 if (pass == 0)
3625 continue;
3626 var = OMP_CLAUSE_DECL (c);
3627 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3628 && !is_global_var (var))
3629 {
3630 new_var = maybe_lookup_decl (var, ctx);
3631 if (new_var == NULL_TREE)
3632 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3633 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
37e373c2
JJ
3634 tree alarg = omp_clause_aligned_alignment (c);
3635 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3636 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
acf0174b
JJ
3637 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3638 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3639 gimplify_and_add (x, ilist);
3640 }
3641 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3642 && is_global_var (var))
3643 {
3644 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3645 new_var = lookup_decl (var, ctx);
3646 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3647 t = build_fold_addr_expr_loc (clause_loc, t);
3648 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
37e373c2
JJ
3649 tree alarg = omp_clause_aligned_alignment (c);
3650 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3651 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
acf0174b 3652 t = fold_convert_loc (clause_loc, ptype, t);
b731b390 3653 x = create_tmp_var (ptype);
acf0174b
JJ
3654 t = build2 (MODIFY_EXPR, ptype, x, t);
3655 gimplify_and_add (t, ilist);
3656 t = build_simple_mem_ref_loc (clause_loc, x);
3657 SET_DECL_VALUE_EXPR (new_var, t);
3658 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3659 }
3660 continue;
953ff289
DN
3661 default:
3662 continue;
3663 }
3664
3665 new_var = var = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
3666 if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3667 {
3668 var = TREE_OPERAND (var, 0);
e01d41e5
JJ
3669 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3670 var = TREE_OPERAND (var, 0);
d9a6bd32
JJ
3671 if (TREE_CODE (var) == INDIRECT_REF
3672 || TREE_CODE (var) == ADDR_EXPR)
3673 var = TREE_OPERAND (var, 0);
3674 if (is_variable_sized (var))
3675 {
3676 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3677 var = DECL_VALUE_EXPR (var);
3678 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3679 var = TREE_OPERAND (var, 0);
3680 gcc_assert (DECL_P (var));
3681 }
3682 new_var = var;
3683 }
953ff289
DN
3684 if (c_kind != OMP_CLAUSE_COPYIN)
3685 new_var = lookup_decl (var, ctx);
3686
3687 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3688 {
3689 if (pass != 0)
3690 continue;
3691 }
d9a6bd32
JJ
3692 /* C/C++ array section reductions. */
3693 else if (c_kind == OMP_CLAUSE_REDUCTION
3694 && var != OMP_CLAUSE_DECL (c))
953ff289
DN
3695 {
3696 if (pass == 0)
3697 continue;
3698
e01d41e5 3699 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
d9a6bd32 3700 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
e01d41e5
JJ
3701 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3702 {
3703 tree b = TREE_OPERAND (orig_var, 1);
3704 b = maybe_lookup_decl (b, ctx);
3705 if (b == NULL)
3706 {
3707 b = TREE_OPERAND (orig_var, 1);
3708 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3709 }
3710 if (integer_zerop (bias))
3711 bias = b;
3712 else
3713 {
3714 bias = fold_convert_loc (clause_loc,
3715 TREE_TYPE (b), bias);
3716 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3717 TREE_TYPE (b), b, bias);
3718 }
3719 orig_var = TREE_OPERAND (orig_var, 0);
3720 }
d9a6bd32
JJ
3721 if (TREE_CODE (orig_var) == INDIRECT_REF
3722 || TREE_CODE (orig_var) == ADDR_EXPR)
3723 orig_var = TREE_OPERAND (orig_var, 0);
3724 tree d = OMP_CLAUSE_DECL (c);
3725 tree type = TREE_TYPE (d);
3726 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3727 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3728 const char *name = get_name (orig_var);
3729 if (TREE_CONSTANT (v))
a68ab351 3730 {
d9a6bd32
JJ
3731 x = create_tmp_var_raw (type, name);
3732 gimple_add_tmp_var (x);
3733 TREE_ADDRESSABLE (x) = 1;
3734 x = build_fold_addr_expr_loc (clause_loc, x);
3735 }
3736 else
3737 {
3738 tree atmp
3739 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3740 tree t = maybe_lookup_decl (v, ctx);
3741 if (t)
3742 v = t;
3743 else
3744 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3745 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3746 t = fold_build2_loc (clause_loc, PLUS_EXPR,
3747 TREE_TYPE (v), v,
3748 build_int_cst (TREE_TYPE (v), 1));
3749 t = fold_build2_loc (clause_loc, MULT_EXPR,
3750 TREE_TYPE (v), t,
3751 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3752 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3753 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3754 }
3755
3756 tree ptype = build_pointer_type (TREE_TYPE (type));
3757 x = fold_convert_loc (clause_loc, ptype, x);
3758 tree y = create_tmp_var (ptype, name);
3759 gimplify_assign (y, x, ilist);
3760 x = y;
e01d41e5
JJ
3761 tree yb = y;
3762
3763 if (!integer_zerop (bias))
3764 {
48a78aee
JJ
3765 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3766 bias);
3767 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3768 x);
3769 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3770 pointer_sized_int_node, yb, bias);
3771 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
e01d41e5
JJ
3772 yb = create_tmp_var (ptype, name);
3773 gimplify_assign (yb, x, ilist);
3774 x = yb;
3775 }
3776
3777 d = TREE_OPERAND (d, 0);
3778 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3779 d = TREE_OPERAND (d, 0);
3780 if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32
JJ
3781 {
3782 if (orig_var != var)
3783 {
3784 gcc_assert (is_variable_sized (orig_var));
3785 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3786 x);
3787 gimplify_assign (new_var, x, ilist);
3788 tree new_orig_var = lookup_decl (orig_var, ctx);
3789 tree t = build_fold_indirect_ref (new_var);
3790 DECL_IGNORED_P (new_var) = 0;
3791 TREE_THIS_NOTRAP (t) = 1;
3792 SET_DECL_VALUE_EXPR (new_orig_var, t);
3793 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3794 }
3795 else
3796 {
3797 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3798 build_int_cst (ptype, 0));
3799 SET_DECL_VALUE_EXPR (new_var, x);
3800 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3801 }
3802 }
3803 else
3804 {
3805 gcc_assert (orig_var == var);
e01d41e5 3806 if (TREE_CODE (d) == INDIRECT_REF)
d9a6bd32
JJ
3807 {
3808 x = create_tmp_var (ptype, name);
3809 TREE_ADDRESSABLE (x) = 1;
e01d41e5 3810 gimplify_assign (x, yb, ilist);
d9a6bd32
JJ
3811 x = build_fold_addr_expr_loc (clause_loc, x);
3812 }
3813 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3814 gimplify_assign (new_var, x, ilist);
3815 }
3816 tree y1 = create_tmp_var (ptype, NULL);
3817 gimplify_assign (y1, y, ilist);
3818 tree i2 = NULL_TREE, y2 = NULL_TREE;
3819 tree body2 = NULL_TREE, end2 = NULL_TREE;
3820 tree y3 = NULL_TREE, y4 = NULL_TREE;
3821 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3822 {
3823 y2 = create_tmp_var (ptype, NULL);
3824 gimplify_assign (y2, y, ilist);
3825 tree ref = build_outer_var_ref (var, ctx);
3826 /* For references, build_outer_var_ref already performs this. */
e01d41e5 3827 if (TREE_CODE (d) == INDIRECT_REF)
629b3d75 3828 gcc_assert (omp_is_reference (var));
e01d41e5 3829 else if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32 3830 ref = build_fold_addr_expr (ref);
629b3d75 3831 else if (omp_is_reference (var))
d9a6bd32
JJ
3832 ref = build_fold_addr_expr (ref);
3833 ref = fold_convert_loc (clause_loc, ptype, ref);
3834 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3835 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3836 {
3837 y3 = create_tmp_var (ptype, NULL);
3838 gimplify_assign (y3, unshare_expr (ref), ilist);
3839 }
3840 if (is_simd)
3841 {
3842 y4 = create_tmp_var (ptype, NULL);
3843 gimplify_assign (y4, ref, dlist);
3844 }
3845 }
3846 tree i = create_tmp_var (TREE_TYPE (v), NULL);
3847 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3848 tree body = create_artificial_label (UNKNOWN_LOCATION);
3849 tree end = create_artificial_label (UNKNOWN_LOCATION);
3850 gimple_seq_add_stmt (ilist, gimple_build_label (body));
3851 if (y2)
3852 {
3853 i2 = create_tmp_var (TREE_TYPE (v), NULL);
3854 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3855 body2 = create_artificial_label (UNKNOWN_LOCATION);
3856 end2 = create_artificial_label (UNKNOWN_LOCATION);
3857 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3858 }
3859 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3860 {
3861 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3862 tree decl_placeholder
3863 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3864 SET_DECL_VALUE_EXPR (decl_placeholder,
3865 build_simple_mem_ref (y1));
3866 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3867 SET_DECL_VALUE_EXPR (placeholder,
3868 y3 ? build_simple_mem_ref (y3)
3869 : error_mark_node);
3870 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3871 x = lang_hooks.decls.omp_clause_default_ctor
3872 (c, build_simple_mem_ref (y1),
3873 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3874 if (x)
3875 gimplify_and_add (x, ilist);
3876 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3877 {
3878 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3879 lower_omp (&tseq, ctx);
3880 gimple_seq_add_seq (ilist, tseq);
3881 }
3882 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3883 if (is_simd)
3884 {
3885 SET_DECL_VALUE_EXPR (decl_placeholder,
3886 build_simple_mem_ref (y2));
3887 SET_DECL_VALUE_EXPR (placeholder,
3888 build_simple_mem_ref (y4));
3889 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3890 lower_omp (&tseq, ctx);
3891 gimple_seq_add_seq (dlist, tseq);
3892 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3893 }
3894 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3895 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3896 x = lang_hooks.decls.omp_clause_dtor
3897 (c, build_simple_mem_ref (y2));
3898 if (x)
3899 {
3900 gimple_seq tseq = NULL;
3901 dtor = x;
3902 gimplify_stmt (&dtor, &tseq);
3903 gimple_seq_add_seq (dlist, tseq);
3904 }
3905 }
3906 else
3907 {
3908 x = omp_reduction_init (c, TREE_TYPE (type));
3909 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3910
3911 /* reduction(-:var) sums up the partial results, so it
3912 acts identically to reduction(+:var). */
3913 if (code == MINUS_EXPR)
3914 code = PLUS_EXPR;
3915
3916 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3917 if (is_simd)
3918 {
3919 x = build2 (code, TREE_TYPE (type),
3920 build_simple_mem_ref (y4),
3921 build_simple_mem_ref (y2));
3922 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3923 }
3924 }
3925 gimple *g
3926 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3927 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3928 gimple_seq_add_stmt (ilist, g);
3929 if (y3)
3930 {
3931 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
3932 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3933 gimple_seq_add_stmt (ilist, g);
3934 }
3935 g = gimple_build_assign (i, PLUS_EXPR, i,
3936 build_int_cst (TREE_TYPE (i), 1));
3937 gimple_seq_add_stmt (ilist, g);
3938 g = gimple_build_cond (LE_EXPR, i, v, body, end);
3939 gimple_seq_add_stmt (ilist, g);
3940 gimple_seq_add_stmt (ilist, gimple_build_label (end));
3941 if (y2)
3942 {
3943 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
3944 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3945 gimple_seq_add_stmt (dlist, g);
3946 if (y4)
3947 {
3948 g = gimple_build_assign
3949 (y4, POINTER_PLUS_EXPR, y4,
3950 TYPE_SIZE_UNIT (TREE_TYPE (type)));
3951 gimple_seq_add_stmt (dlist, g);
3952 }
3953 g = gimple_build_assign (i2, PLUS_EXPR, i2,
3954 build_int_cst (TREE_TYPE (i2), 1));
3955 gimple_seq_add_stmt (dlist, g);
3956 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
3957 gimple_seq_add_stmt (dlist, g);
3958 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
3959 }
3960 continue;
3961 }
3962 else if (is_variable_sized (var))
3963 {
3964 /* For variable sized types, we need to allocate the
3965 actual storage here. Call alloca and store the
3966 result in the pointer decl that we created elsewhere. */
3967 if (pass == 0)
3968 continue;
3969
3970 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3971 {
3972 gcall *stmt;
3973 tree tmp, atmp;
3974
3975 ptr = DECL_VALUE_EXPR (new_var);
3976 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3977 ptr = TREE_OPERAND (ptr, 0);
a68ab351
JJ
3978 gcc_assert (DECL_P (ptr));
3979 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
726a989a
RB
3980
3981 /* void *tmp = __builtin_alloca */
d9a6bd32
JJ
3982 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3983 stmt = gimple_build_call (atmp, 2, x,
3984 size_int (DECL_ALIGN (var)));
b731b390 3985 tmp = create_tmp_var_raw (ptr_type_node);
726a989a
RB
3986 gimple_add_tmp_var (tmp);
3987 gimple_call_set_lhs (stmt, tmp);
3988
3989 gimple_seq_add_stmt (ilist, stmt);
3990
db3927fb 3991 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
726a989a 3992 gimplify_assign (ptr, x, ilist);
a68ab351 3993 }
953ff289 3994 }
629b3d75 3995 else if (omp_is_reference (var))
953ff289 3996 {
50674e96
DN
3997 /* For references that are being privatized for Fortran,
3998 allocate new backing storage for the new pointer
3999 variable. This allows us to avoid changing all the
4000 code that expects a pointer to something that expects
acf0174b 4001 a direct variable. */
953ff289
DN
4002 if (pass == 0)
4003 continue;
4004
4005 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
a68ab351
JJ
4006 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4007 {
4008 x = build_receiver_ref (var, false, ctx);
db3927fb 4009 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
4010 }
4011 else if (TREE_CONSTANT (x))
953ff289 4012 {
decaaec8
JJ
4013 /* For reduction in SIMD loop, defer adding the
4014 initialization of the reference, because if we decide
4015 to use SIMD array for it, the initialization could cause
4016 expansion ICE. */
4017 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4ceffa27
JJ
4018 x = NULL_TREE;
4019 else
4020 {
4ceffa27 4021 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
d9a6bd32 4022 get_name (var));
4ceffa27
JJ
4023 gimple_add_tmp_var (x);
4024 TREE_ADDRESSABLE (x) = 1;
4025 x = build_fold_addr_expr_loc (clause_loc, x);
4026 }
953ff289
DN
4027 }
4028 else
4029 {
d9a6bd32
JJ
4030 tree atmp
4031 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4032 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4033 tree al = size_int (TYPE_ALIGN (rtype));
4034 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
953ff289
DN
4035 }
4036
4ceffa27
JJ
4037 if (x)
4038 {
4039 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4040 gimplify_assign (new_var, x, ilist);
4041 }
953ff289 4042
70f34814 4043 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289
DN
4044 }
4045 else if (c_kind == OMP_CLAUSE_REDUCTION
4046 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4047 {
4048 if (pass == 0)
4049 continue;
4050 }
4051 else if (pass != 0)
4052 continue;
4053
aaf46ef9 4054 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
4055 {
4056 case OMP_CLAUSE_SHARED:
acf0174b
JJ
4057 /* Ignore shared directives in teams construct. */
4058 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4059 continue;
8ca5b2a2
JJ
4060 /* Shared global vars are just accessed directly. */
4061 if (is_global_var (new_var))
4062 break;
d9a6bd32
JJ
4063 /* For taskloop firstprivate/lastprivate, represented
4064 as firstprivate and shared clause on the task, new_var
4065 is the firstprivate var. */
4066 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4067 break;
953ff289
DN
4068 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4069 needs to be delayed until after fixup_child_record_type so
4070 that we get the correct type during the dereference. */
7c8f7639 4071 by_ref = use_pointer_for_field (var, ctx);
953ff289
DN
4072 x = build_receiver_ref (var, by_ref, ctx);
4073 SET_DECL_VALUE_EXPR (new_var, x);
4074 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4075
4076 /* ??? If VAR is not passed by reference, and the variable
4077 hasn't been initialized yet, then we'll get a warning for
4078 the store into the omp_data_s structure. Ideally, we'd be
b8698a0f 4079 able to notice this and not store anything at all, but
953ff289
DN
4080 we're generating code too early. Suppress the warning. */
4081 if (!by_ref)
4082 TREE_NO_WARNING (var) = 1;
4083 break;
4084
4085 case OMP_CLAUSE_LASTPRIVATE:
4086 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4087 break;
4088 /* FALLTHRU */
4089
4090 case OMP_CLAUSE_PRIVATE:
a68ab351
JJ
4091 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4092 x = build_outer_var_ref (var, ctx);
4093 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4094 {
4095 if (is_task_ctx (ctx))
4096 x = build_receiver_ref (var, false, ctx);
4097 else
c39dad64 4098 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
a68ab351
JJ
4099 }
4100 else
4101 x = NULL;
74bf76ed 4102 do_private:
acf0174b 4103 tree nx;
d9a6bd32
JJ
4104 nx = lang_hooks.decls.omp_clause_default_ctor
4105 (c, unshare_expr (new_var), x);
74bf76ed
JJ
4106 if (is_simd)
4107 {
4108 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
acf0174b 4109 if ((TREE_ADDRESSABLE (new_var) || nx || y
74bf76ed 4110 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
6943af07
AM
4111 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4112 ivar, lvar))
74bf76ed 4113 {
acf0174b 4114 if (nx)
74bf76ed
JJ
4115 x = lang_hooks.decls.omp_clause_default_ctor
4116 (c, unshare_expr (ivar), x);
acf0174b 4117 if (nx && x)
74bf76ed
JJ
4118 gimplify_and_add (x, &llist[0]);
4119 if (y)
4120 {
4121 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4122 if (y)
4123 {
4124 gimple_seq tseq = NULL;
4125
4126 dtor = y;
4127 gimplify_stmt (&dtor, &tseq);
4128 gimple_seq_add_seq (&llist[1], tseq);
4129 }
4130 }
4131 break;
4132 }
4133 }
acf0174b
JJ
4134 if (nx)
4135 gimplify_and_add (nx, ilist);
953ff289
DN
4136 /* FALLTHRU */
4137
4138 do_dtor:
4139 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4140 if (x)
4141 {
726a989a
RB
4142 gimple_seq tseq = NULL;
4143
953ff289 4144 dtor = x;
726a989a 4145 gimplify_stmt (&dtor, &tseq);
355a7673 4146 gimple_seq_add_seq (dlist, tseq);
953ff289
DN
4147 }
4148 break;
4149
74bf76ed
JJ
4150 case OMP_CLAUSE_LINEAR:
4151 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4152 goto do_firstprivate;
4153 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4154 x = NULL;
4155 else
4156 x = build_outer_var_ref (var, ctx);
4157 goto do_private;
4158
953ff289 4159 case OMP_CLAUSE_FIRSTPRIVATE:
a68ab351
JJ
4160 if (is_task_ctx (ctx))
4161 {
629b3d75 4162 if (omp_is_reference (var) || is_variable_sized (var))
a68ab351
JJ
4163 goto do_dtor;
4164 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4165 ctx))
4166 || use_pointer_for_field (var, NULL))
4167 {
4168 x = build_receiver_ref (var, false, ctx);
4169 SET_DECL_VALUE_EXPR (new_var, x);
4170 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4171 goto do_dtor;
4172 }
4173 }
74bf76ed 4174 do_firstprivate:
953ff289 4175 x = build_outer_var_ref (var, ctx);
74bf76ed
JJ
4176 if (is_simd)
4177 {
acf0174b
JJ
4178 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4179 && gimple_omp_for_combined_into_p (ctx->stmt))
4180 {
da6f124d
JJ
4181 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4182 tree stept = TREE_TYPE (t);
629b3d75 4183 tree ct = omp_find_clause (clauses,
da6f124d
JJ
4184 OMP_CLAUSE__LOOPTEMP_);
4185 gcc_assert (ct);
4186 tree l = OMP_CLAUSE_DECL (ct);
56ad0e38
JJ
4187 tree n1 = fd->loop.n1;
4188 tree step = fd->loop.step;
4189 tree itype = TREE_TYPE (l);
4190 if (POINTER_TYPE_P (itype))
4191 itype = signed_type_for (itype);
4192 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4193 if (TYPE_UNSIGNED (itype)
4194 && fd->loop.cond_code == GT_EXPR)
4195 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4196 fold_build1 (NEGATE_EXPR, itype, l),
4197 fold_build1 (NEGATE_EXPR,
4198 itype, step));
4199 else
4200 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
acf0174b
JJ
4201 t = fold_build2 (MULT_EXPR, stept,
4202 fold_convert (stept, l), t);
da6f124d
JJ
4203
4204 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4205 {
4206 x = lang_hooks.decls.omp_clause_linear_ctor
4207 (c, new_var, x, t);
4208 gimplify_and_add (x, ilist);
4209 goto do_dtor;
4210 }
4211
acf0174b
JJ
4212 if (POINTER_TYPE_P (TREE_TYPE (x)))
4213 x = fold_build2 (POINTER_PLUS_EXPR,
4214 TREE_TYPE (x), x, t);
4215 else
4216 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4217 }
4218
74bf76ed
JJ
4219 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4220 || TREE_ADDRESSABLE (new_var))
6943af07
AM
4221 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4222 ivar, lvar))
74bf76ed
JJ
4223 {
4224 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4225 {
b731b390 4226 tree iv = create_tmp_var (TREE_TYPE (new_var));
74bf76ed
JJ
4227 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4228 gimplify_and_add (x, ilist);
4229 gimple_stmt_iterator gsi
4230 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
538dd0b7 4231 gassign *g
74bf76ed
JJ
4232 = gimple_build_assign (unshare_expr (lvar), iv);
4233 gsi_insert_before_without_update (&gsi, g,
4234 GSI_SAME_STMT);
da6f124d 4235 tree t = OMP_CLAUSE_LINEAR_STEP (c);
74bf76ed
JJ
4236 enum tree_code code = PLUS_EXPR;
4237 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4238 code = POINTER_PLUS_EXPR;
0d0e4a03 4239 g = gimple_build_assign (iv, code, iv, t);
74bf76ed
JJ
4240 gsi_insert_before_without_update (&gsi, g,
4241 GSI_SAME_STMT);
4242 break;
4243 }
4244 x = lang_hooks.decls.omp_clause_copy_ctor
4245 (c, unshare_expr (ivar), x);
4246 gimplify_and_add (x, &llist[0]);
4247 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4248 if (x)
4249 {
4250 gimple_seq tseq = NULL;
4251
4252 dtor = x;
4253 gimplify_stmt (&dtor, &tseq);
4254 gimple_seq_add_seq (&llist[1], tseq);
4255 }
4256 break;
4257 }
4258 }
d9a6bd32
JJ
4259 x = lang_hooks.decls.omp_clause_copy_ctor
4260 (c, unshare_expr (new_var), x);
953ff289
DN
4261 gimplify_and_add (x, ilist);
4262 goto do_dtor;
953ff289 4263
acf0174b 4264 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32 4265 gcc_assert (is_taskreg_ctx (ctx));
acf0174b
JJ
4266 x = build_outer_var_ref (var, ctx);
4267 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4268 gimplify_and_add (x, ilist);
4269 break;
4270
953ff289 4271 case OMP_CLAUSE_COPYIN:
7c8f7639 4272 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
4273 x = build_receiver_ref (var, by_ref, ctx);
4274 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4275 append_to_statement_list (x, &copyin_seq);
4276 copyin_by_ref |= by_ref;
4277 break;
4278
4279 case OMP_CLAUSE_REDUCTION:
e5014671
NS
4280 /* OpenACC reductions are initialized using the
4281 GOACC_REDUCTION internal function. */
4282 if (is_gimple_omp_oacc (ctx->stmt))
4283 break;
953ff289
DN
4284 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4285 {
a68ab351 4286 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
355fe088 4287 gimple *tseq;
a68ab351
JJ
4288 x = build_outer_var_ref (var, ctx);
4289
629b3d75 4290 if (omp_is_reference (var)
acf0174b
JJ
4291 && !useless_type_conversion_p (TREE_TYPE (placeholder),
4292 TREE_TYPE (x)))
db3927fb 4293 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
4294 SET_DECL_VALUE_EXPR (placeholder, x);
4295 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
acf0174b 4296 tree new_vard = new_var;
629b3d75 4297 if (omp_is_reference (var))
acf0174b
JJ
4298 {
4299 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4300 new_vard = TREE_OPERAND (new_var, 0);
4301 gcc_assert (DECL_P (new_vard));
4302 }
74bf76ed 4303 if (is_simd
6943af07
AM
4304 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4305 ivar, lvar))
74bf76ed 4306 {
acf0174b
JJ
4307 if (new_vard == new_var)
4308 {
4309 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4310 SET_DECL_VALUE_EXPR (new_var, ivar);
4311 }
4312 else
4313 {
4314 SET_DECL_VALUE_EXPR (new_vard,
4315 build_fold_addr_expr (ivar));
4316 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4317 }
4318 x = lang_hooks.decls.omp_clause_default_ctor
4319 (c, unshare_expr (ivar),
4320 build_outer_var_ref (var, ctx));
4321 if (x)
4322 gimplify_and_add (x, &llist[0]);
4323 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4324 {
4325 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4326 lower_omp (&tseq, ctx);
4327 gimple_seq_add_seq (&llist[0], tseq);
4328 }
4329 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4330 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4331 lower_omp (&tseq, ctx);
4332 gimple_seq_add_seq (&llist[1], tseq);
4333 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4334 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4335 if (new_vard == new_var)
4336 SET_DECL_VALUE_EXPR (new_var, lvar);
4337 else
4338 SET_DECL_VALUE_EXPR (new_vard,
4339 build_fold_addr_expr (lvar));
4340 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4341 if (x)
4342 {
4343 tseq = NULL;
4344 dtor = x;
4345 gimplify_stmt (&dtor, &tseq);
4346 gimple_seq_add_seq (&llist[1], tseq);
4347 }
4348 break;
4349 }
4ceffa27
JJ
4350 /* If this is a reference to constant size reduction var
4351 with placeholder, we haven't emitted the initializer
4352 for it because it is undesirable if SIMD arrays are used.
4353 But if they aren't used, we need to emit the deferred
4354 initialization now. */
629b3d75 4355 else if (omp_is_reference (var) && is_simd)
decaaec8 4356 handle_simd_reference (clause_loc, new_vard, ilist);
acf0174b 4357 x = lang_hooks.decls.omp_clause_default_ctor
92d28cbb
JJ
4358 (c, unshare_expr (new_var),
4359 build_outer_var_ref (var, ctx));
acf0174b
JJ
4360 if (x)
4361 gimplify_and_add (x, ilist);
4362 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4363 {
4364 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4365 lower_omp (&tseq, ctx);
4366 gimple_seq_add_seq (ilist, tseq);
4367 }
4368 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4369 if (is_simd)
4370 {
4371 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4372 lower_omp (&tseq, ctx);
4373 gimple_seq_add_seq (dlist, tseq);
4374 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4375 }
4376 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4377 goto do_dtor;
4378 }
4379 else
4380 {
4381 x = omp_reduction_init (c, TREE_TYPE (new_var));
4382 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
e9792e1d
JJ
4383 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4384
4385 /* reduction(-:var) sums up the partial results, so it
4386 acts identically to reduction(+:var). */
4387 if (code == MINUS_EXPR)
4388 code = PLUS_EXPR;
4389
decaaec8 4390 tree new_vard = new_var;
629b3d75 4391 if (is_simd && omp_is_reference (var))
decaaec8
JJ
4392 {
4393 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4394 new_vard = TREE_OPERAND (new_var, 0);
4395 gcc_assert (DECL_P (new_vard));
4396 }
acf0174b 4397 if (is_simd
6943af07
AM
4398 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4399 ivar, lvar))
acf0174b 4400 {
acf0174b
JJ
4401 tree ref = build_outer_var_ref (var, ctx);
4402
4403 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4404
6943af07 4405 if (sctx.is_simt)
9669b00b
AM
4406 {
4407 if (!simt_lane)
4408 simt_lane = create_tmp_var (unsigned_type_node);
4409 x = build_call_expr_internal_loc
4410 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4411 TREE_TYPE (ivar), 2, ivar, simt_lane);
4412 x = build2 (code, TREE_TYPE (ivar), ivar, x);
4413 gimplify_assign (ivar, x, &llist[2]);
4414 }
acf0174b 4415 x = build2 (code, TREE_TYPE (ref), ref, ivar);
74bf76ed
JJ
4416 ref = build_outer_var_ref (var, ctx);
4417 gimplify_assign (ref, x, &llist[1]);
decaaec8
JJ
4418
4419 if (new_vard != new_var)
4420 {
4421 SET_DECL_VALUE_EXPR (new_vard,
4422 build_fold_addr_expr (lvar));
4423 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4424 }
74bf76ed
JJ
4425 }
4426 else
4427 {
629b3d75 4428 if (omp_is_reference (var) && is_simd)
decaaec8 4429 handle_simd_reference (clause_loc, new_vard, ilist);
74bf76ed
JJ
4430 gimplify_assign (new_var, x, ilist);
4431 if (is_simd)
e9792e1d
JJ
4432 {
4433 tree ref = build_outer_var_ref (var, ctx);
4434
4435 x = build2 (code, TREE_TYPE (ref), ref, new_var);
4436 ref = build_outer_var_ref (var, ctx);
4437 gimplify_assign (ref, x, dlist);
4438 }
74bf76ed 4439 }
953ff289
DN
4440 }
4441 break;
4442
4443 default:
4444 gcc_unreachable ();
4445 }
4446 }
4447 }
4448
9d2f08ab 4449 if (known_eq (sctx.max_vf, 1U))
0c6b03b5
AM
4450 sctx.is_simt = false;
4451
4452 if (sctx.lane || sctx.is_simt)
74bf76ed 4453 {
0c6b03b5 4454 uid = create_tmp_var (ptr_type_node, "simduid");
8928eff3
JJ
4455 /* Suppress uninitialized warnings on simduid: it is always uninitialized,
4456 but only its DECL_UID is used, never its value. */
4457 TREE_NO_WARNING (uid) = 1;
0c6b03b5
AM
4458 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4459 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4460 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4461 gimple_omp_for_set_clauses (ctx->stmt, c);
4462 }
4463 /* Emit calls denoting privatized variables and initializing a pointer to
4464 structure that holds private variables as fields after ompdevlow pass. */
4465 if (sctx.is_simt)
4466 {
4467 sctx.simt_eargs[0] = uid;
4468 gimple *g
4469 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4470 gimple_call_set_lhs (g, uid);
4471 gimple_seq_add_stmt (ilist, g);
4472 sctx.simt_eargs.release ();
4473
4474 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4475 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4476 gimple_call_set_lhs (g, simtrec);
4477 gimple_seq_add_stmt (ilist, g);
4478 }
4479 if (sctx.lane)
4480 {
355fe088 4481 gimple *g
74bf76ed 4482 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
6943af07 4483 gimple_call_set_lhs (g, sctx.lane);
74bf76ed
JJ
4484 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4485 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6943af07 4486 g = gimple_build_assign (sctx.lane, INTEGER_CST,
0d0e4a03 4487 build_int_cst (unsigned_type_node, 0));
74bf76ed 4488 gimple_seq_add_stmt (ilist, g);
9669b00b
AM
4489 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
4490 if (llist[2])
4491 {
4492 tree simt_vf = create_tmp_var (unsigned_type_node);
4493 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4494 gimple_call_set_lhs (g, simt_vf);
4495 gimple_seq_add_stmt (dlist, g);
4496
4497 tree t = build_int_cst (unsigned_type_node, 1);
4498 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4499 gimple_seq_add_stmt (dlist, g);
4500
4501 t = build_int_cst (unsigned_type_node, 0);
6943af07 4502 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
9669b00b
AM
4503 gimple_seq_add_stmt (dlist, g);
4504
4505 tree body = create_artificial_label (UNKNOWN_LOCATION);
4506 tree header = create_artificial_label (UNKNOWN_LOCATION);
4507 tree end = create_artificial_label (UNKNOWN_LOCATION);
4508 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4509 gimple_seq_add_stmt (dlist, gimple_build_label (body));
4510
4511 gimple_seq_add_seq (dlist, llist[2]);
4512
4513 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4514 gimple_seq_add_stmt (dlist, g);
4515
4516 gimple_seq_add_stmt (dlist, gimple_build_label (header));
4517 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4518 gimple_seq_add_stmt (dlist, g);
4519
4520 gimple_seq_add_stmt (dlist, gimple_build_label (end));
4521 }
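 /* The sequence built above amounts to a butterfly reduction across the
 SIMT lanes, i.e. roughly
 for (bit = 1; bit < simt_vf; bit <<= 1)
 ivar = ivar OP .GOMP_SIMT_XCHG_BFLY (ivar, bit);
 which combines all lanes' partial results in log2(simt_vf) steps.  */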
74bf76ed
JJ
4522 for (int i = 0; i < 2; i++)
4523 if (llist[i])
4524 {
b731b390 4525 tree vf = create_tmp_var (unsigned_type_node);
74bf76ed
JJ
4526 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4527 gimple_call_set_lhs (g, vf);
4528 gimple_seq *seq = i == 0 ? ilist : dlist;
4529 gimple_seq_add_stmt (seq, g);
4530 tree t = build_int_cst (unsigned_type_node, 0);
6943af07 4531 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
74bf76ed
JJ
4532 gimple_seq_add_stmt (seq, g);
4533 tree body = create_artificial_label (UNKNOWN_LOCATION);
4534 tree header = create_artificial_label (UNKNOWN_LOCATION);
4535 tree end = create_artificial_label (UNKNOWN_LOCATION);
4536 gimple_seq_add_stmt (seq, gimple_build_goto (header));
4537 gimple_seq_add_stmt (seq, gimple_build_label (body));
4538 gimple_seq_add_seq (seq, llist[i]);
4539 t = build_int_cst (unsigned_type_node, 1);
6943af07 4540 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
74bf76ed
JJ
4541 gimple_seq_add_stmt (seq, g);
4542 gimple_seq_add_stmt (seq, gimple_build_label (header));
6943af07 4543 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
74bf76ed
JJ
4544 gimple_seq_add_stmt (seq, g);
4545 gimple_seq_add_stmt (seq, gimple_build_label (end));
4546 }
4547 }
0c6b03b5
AM
4548 if (sctx.is_simt)
4549 {
4550 gimple_seq_add_seq (dlist, sctx.simt_dlist);
4551 gimple *g
4552 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4553 gimple_seq_add_stmt (dlist, g);
4554 }
74bf76ed 4555
953ff289
DN
4556 /* The copyin sequence is not to be executed by the main thread, since
4557 that would result in self-copies. Perhaps not visible to scalars,
4558 but it certainly is to C++ operator=. */
4559 if (copyin_seq)
4560 {
e79983f4
MM
4561 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4562 0);
953ff289
DN
4563 x = build2 (NE_EXPR, boolean_type_node, x,
4564 build_int_cst (TREE_TYPE (x), 0));
4565 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4566 gimplify_and_add (x, ilist);
4567 }
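 /* I.e. roughly: if (omp_get_thread_num () != 0) { <copyin sequence> }
 so every thread other than the master copies the master's threadprivate
 values into its own copies before using them.  */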
4568
4569 /* If any copyin variable is passed by reference, we must ensure the
4570 master thread doesn't modify it before it is copied over in all
8ca5b2a2
JJ
4571 threads. Similarly for variables in both firstprivate and
4572 lastprivate clauses we need to ensure the lastprivate copying
acf0174b
JJ
4573 happens after firstprivate copying in all threads. And similarly
4574 for UDRs if initializer expression refers to omp_orig. */
4575 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
74bf76ed
JJ
4576 {
4577 /* Don't add any barrier for #pragma omp simd or
4578 #pragma omp distribute. */
4579 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
e2110f8f 4580 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
629b3d75 4581 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
74bf76ed
JJ
4582 }
4583
4584 /* If max_vf is non-zero, then we can use only a vectorization factor
4585 up to the max_vf we chose. So stick it into the safelen clause. */
9d2f08ab 4586 if (maybe_ne (sctx.max_vf, 0U))
74bf76ed 4587 {
629b3d75 4588 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
74bf76ed 4589 OMP_CLAUSE_SAFELEN);
9d2f08ab 4590 poly_uint64 safe_len;
74bf76ed 4591 if (c == NULL_TREE
9d2f08ab
RS
4592 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4593 && maybe_gt (safe_len, sctx.max_vf)))
74bf76ed
JJ
4594 {
4595 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4596 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6943af07 4597 sctx.max_vf);
74bf76ed
JJ
4598 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4599 gimple_omp_for_set_clauses (ctx->stmt, c);
4600 }
4601 }
953ff289
DN
4602}
4603
50674e96 4604
953ff289
DN
4605/* Generate code to implement the LASTPRIVATE clauses. This is used for
4606 both parallel and workshare constructs. PREDICATE may be NULL if it's
4607 always true. */
4608
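/* For example, for "#pragma omp for lastprivate(x)" PREDICATE is typically
   a comparison of the loop's iteration variable against the value it takes
   in the sequentially last iteration, so only the thread that executed that
   iteration stores its private x back into the original variable.  */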
4609static void
726a989a 4610lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
acf0174b 4611 omp_context *ctx)
953ff289 4612{
74bf76ed 4613 tree x, c, label = NULL, orig_clauses = clauses;
a68ab351 4614 bool par_clauses = false;
9669b00b 4615 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
953ff289 4616
74bf76ed
JJ
4617 /* Early exit if there are no lastprivate or linear clauses. */
4618 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4619 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4620 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4621 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4622 break;
953ff289
DN
4623 if (clauses == NULL)
4624 {
4625 /* If this was a workshare clause, see if it had been combined
4626 with its parallel. In that case, look for the clauses on the
4627 parallel statement itself. */
4628 if (is_parallel_ctx (ctx))
4629 return;
4630
4631 ctx = ctx->outer;
4632 if (ctx == NULL || !is_parallel_ctx (ctx))
4633 return;
4634
629b3d75 4635 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
953ff289
DN
4636 OMP_CLAUSE_LASTPRIVATE);
4637 if (clauses == NULL)
4638 return;
a68ab351 4639 par_clauses = true;
953ff289
DN
4640 }
4641
9669b00b
AM
4642 bool maybe_simt = false;
4643 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4644 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4645 {
629b3d75
MJ
4646 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4647 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
9669b00b
AM
4648 if (simduid)
4649 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4650 }
4651
726a989a
RB
4652 if (predicate)
4653 {
538dd0b7 4654 gcond *stmt;
726a989a 4655 tree label_true, arm1, arm2;
56b1c60e 4656 enum tree_code pred_code = TREE_CODE (predicate);
726a989a 4657
c2255bc4
AH
4658 label = create_artificial_label (UNKNOWN_LOCATION);
4659 label_true = create_artificial_label (UNKNOWN_LOCATION);
56b1c60e
MJ
4660 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4661 {
4662 arm1 = TREE_OPERAND (predicate, 0);
4663 arm2 = TREE_OPERAND (predicate, 1);
4664 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4665 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4666 }
4667 else
4668 {
4669 arm1 = predicate;
4670 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4671 arm2 = boolean_false_node;
4672 pred_code = NE_EXPR;
4673 }
9669b00b
AM
4674 if (maybe_simt)
4675 {
56b1c60e 4676 c = build2 (pred_code, boolean_type_node, arm1, arm2);
9669b00b
AM
4677 c = fold_convert (integer_type_node, c);
4678 simtcond = create_tmp_var (integer_type_node);
4679 gimplify_assign (simtcond, c, stmt_list);
4680 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4681 1, simtcond);
4682 c = create_tmp_var (integer_type_node);
4683 gimple_call_set_lhs (g, c);
4684 gimple_seq_add_stmt (stmt_list, g);
4685 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4686 label_true, label);
4687 }
4688 else
56b1c60e 4689 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
726a989a
RB
4690 gimple_seq_add_stmt (stmt_list, stmt);
4691 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4692 }
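 /* For SIMT targets the predicate is evaluated per lane and combined with
 GOMP_SIMT_VOTE_ANY above, so the copy-out code below is entered whenever
 any lane executed the last iteration; the lane actually holding the final
 values is selected further down via GOMP_SIMT_LAST_LANE and
 GOMP_SIMT_XCHG_IDX.  */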
953ff289 4693
a68ab351 4694 for (c = clauses; c ;)
953ff289
DN
4695 {
4696 tree var, new_var;
db3927fb 4697 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 4698
74bf76ed
JJ
4699 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4700 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4701 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
a68ab351
JJ
4702 {
4703 var = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
4704 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4705 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4706 && is_taskloop_ctx (ctx))
4707 {
4708 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4709 new_var = lookup_decl (var, ctx->outer);
4710 }
4711 else
2187f2a2
JJ
4712 {
4713 new_var = lookup_decl (var, ctx);
4714 /* Avoid uninitialized warnings for lastprivate and
4715 for linear iterators. */
4716 if (predicate
4717 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4718 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4719 TREE_NO_WARNING (new_var) = 1;
4720 }
953ff289 4721
2260d19d 4722 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
74bf76ed
JJ
4723 {
4724 tree val = DECL_VALUE_EXPR (new_var);
2260d19d 4725 if (TREE_CODE (val) == ARRAY_REF
74bf76ed
JJ
4726 && VAR_P (TREE_OPERAND (val, 0))
4727 && lookup_attribute ("omp simd array",
4728 DECL_ATTRIBUTES (TREE_OPERAND (val,
4729 0))))
4730 {
4731 if (lastlane == NULL)
4732 {
b731b390 4733 lastlane = create_tmp_var (unsigned_type_node);
538dd0b7 4734 gcall *g
74bf76ed
JJ
4735 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4736 2, simduid,
4737 TREE_OPERAND (val, 1));
4738 gimple_call_set_lhs (g, lastlane);
4739 gimple_seq_add_stmt (stmt_list, g);
4740 }
4741 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4742 TREE_OPERAND (val, 0), lastlane,
4743 NULL_TREE, NULL_TREE);
0c6b03b5 4744 }
2260d19d
AM
4745 }
4746 else if (maybe_simt)
4747 {
4748 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4749 ? DECL_VALUE_EXPR (new_var)
4750 : new_var);
4751 if (simtlast == NULL)
0c6b03b5 4752 {
2260d19d
AM
4753 simtlast = create_tmp_var (unsigned_type_node);
4754 gcall *g = gimple_build_call_internal
4755 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4756 gimple_call_set_lhs (g, simtlast);
4757 gimple_seq_add_stmt (stmt_list, g);
74bf76ed 4758 }
2260d19d
AM
4759 x = build_call_expr_internal_loc
4760 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4761 TREE_TYPE (val), 2, val, simtlast);
4762 new_var = unshare_expr (new_var);
4763 gimplify_assign (new_var, x, stmt_list);
4764 new_var = unshare_expr (new_var);
74bf76ed
JJ
4765 }
4766
4767 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4768 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
726a989a 4769 {
355a7673 4770 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
726a989a
RB
4771 gimple_seq_add_seq (stmt_list,
4772 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
74bf76ed 4773 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
726a989a 4774 }
f7468577
JJ
4775 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4776 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4777 {
4778 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4779 gimple_seq_add_seq (stmt_list,
4780 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4781 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4782 }
953ff289 4783
d9a6bd32
JJ
4784 x = NULL_TREE;
4785 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4786 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4787 {
4788 gcc_checking_assert (is_taskloop_ctx (ctx));
4789 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4790 ctx->outer->outer);
4791 if (is_global_var (ovar))
4792 x = ovar;
4793 }
4794 if (!x)
c39dad64 4795 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
629b3d75 4796 if (omp_is_reference (var))
70f34814 4797 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
a68ab351 4798 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
726a989a 4799 gimplify_and_add (x, stmt_list);
a68ab351
JJ
4800 }
4801 c = OMP_CLAUSE_CHAIN (c);
4802 if (c == NULL && !par_clauses)
4803 {
4804 /* If this was a workshare clause, see if it had been combined
4805 with its parallel. In that case, continue looking for the
4806 clauses also on the parallel statement itself. */
4807 if (is_parallel_ctx (ctx))
4808 break;
4809
4810 ctx = ctx->outer;
4811 if (ctx == NULL || !is_parallel_ctx (ctx))
4812 break;
4813
629b3d75 4814 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
a68ab351
JJ
4815 OMP_CLAUSE_LASTPRIVATE);
4816 par_clauses = true;
4817 }
953ff289
DN
4818 }
4819
726a989a
RB
4820 if (label)
4821 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
953ff289
DN
4822}
4823
e5014671
NS
4824/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4825 (which might be a placeholder). INNER is true if this is an inner
4826 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
4827 join markers. Generate the before-loop forking sequence in
4828 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
4829 general form of these sequences is
4830
4831 GOACC_REDUCTION_SETUP
4832 GOACC_FORK
4833 GOACC_REDUCTION_INIT
4834 ...
4835 GOACC_REDUCTION_FINI
4836 GOACC_JOIN
4837 GOACC_REDUCTION_TEARDOWN. */
4838
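/* As a rough example, for "reduction (+:sum)" the code below produces
   (OFF being sum's slot in the reduction buffer used by the target):

   sum = GOACC_REDUCTION (SETUP, ref_to_res, sum, LEVEL, +, OFF);
   GOACC_FORK
   sum = GOACC_REDUCTION (INIT, ref_to_res, sum, LEVEL, +, OFF);
   ... loop body accumulating into sum ...
   sum = GOACC_REDUCTION (FINI, ref_to_res, sum, LEVEL, +, OFF);
   GOACC_JOIN
   sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, sum, LEVEL, +, OFF);

   and the oacc_device_lower pass later expands these internal calls for
   the target.  */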
41dbbb37 4839static void
e5014671
NS
4840lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4841 gcall *fork, gcall *join, gimple_seq *fork_seq,
4842 gimple_seq *join_seq, omp_context *ctx)
41dbbb37 4843{
e5014671
NS
4844 gimple_seq before_fork = NULL;
4845 gimple_seq after_fork = NULL;
4846 gimple_seq before_join = NULL;
4847 gimple_seq after_join = NULL;
4848 tree init_code = NULL_TREE, fini_code = NULL_TREE,
4849 setup_code = NULL_TREE, teardown_code = NULL_TREE;
4850 unsigned offset = 0;
4851
4852 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4853 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4854 {
4855 tree orig = OMP_CLAUSE_DECL (c);
4856 tree var = maybe_lookup_decl (orig, ctx);
4857 tree ref_to_res = NULL_TREE;
c42cfb5c
CP
4858 tree incoming, outgoing, v1, v2, v3;
4859 bool is_private = false;
e5014671
NS
4860
4861 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4862 if (rcode == MINUS_EXPR)
4863 rcode = PLUS_EXPR;
4864 else if (rcode == TRUTH_ANDIF_EXPR)
4865 rcode = BIT_AND_EXPR;
4866 else if (rcode == TRUTH_ORIF_EXPR)
4867 rcode = BIT_IOR_EXPR;
4868 tree op = build_int_cst (unsigned_type_node, rcode);
4869
4870 if (!var)
4871 var = orig;
e5014671
NS
4872
4873 incoming = outgoing = var;
01914336 4874
e5014671
NS
4875 if (!inner)
4876 {
4877 /* See if an outer construct also reduces this variable. */
4878 omp_context *outer = ctx;
41dbbb37 4879
e5014671
NS
4880 while (omp_context *probe = outer->outer)
4881 {
4882 enum gimple_code type = gimple_code (probe->stmt);
4883 tree cls;
41dbbb37 4884
e5014671
NS
4885 switch (type)
4886 {
4887 case GIMPLE_OMP_FOR:
4888 cls = gimple_omp_for_clauses (probe->stmt);
4889 break;
41dbbb37 4890
e5014671
NS
4891 case GIMPLE_OMP_TARGET:
4892 if (gimple_omp_target_kind (probe->stmt)
4893 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
4894 goto do_lookup;
41dbbb37 4895
e5014671
NS
4896 cls = gimple_omp_target_clauses (probe->stmt);
4897 break;
41dbbb37 4898
e5014671
NS
4899 default:
4900 goto do_lookup;
4901 }
01914336 4902
e5014671
NS
4903 outer = probe;
4904 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
4905 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4906 && orig == OMP_CLAUSE_DECL (cls))
c42cfb5c
CP
4907 {
4908 incoming = outgoing = lookup_decl (orig, probe);
4909 goto has_outer_reduction;
4910 }
4911 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4912 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4913 && orig == OMP_CLAUSE_DECL (cls))
4914 {
4915 is_private = true;
4916 goto do_lookup;
4917 }
e5014671 4918 }
41dbbb37 4919
e5014671
NS
4920 do_lookup:
4921 /* This is the outermost construct with this reduction,
4922 see if there's a mapping for it. */
4923 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
c42cfb5c 4924 && maybe_lookup_field (orig, outer) && !is_private)
e5014671
NS
4925 {
4926 ref_to_res = build_receiver_ref (orig, false, outer);
629b3d75 4927 if (omp_is_reference (orig))
e5014671 4928 ref_to_res = build_simple_mem_ref (ref_to_res);
41dbbb37 4929
c42cfb5c
CP
4930 tree type = TREE_TYPE (var);
4931 if (POINTER_TYPE_P (type))
4932 type = TREE_TYPE (type);
4933
e5014671 4934 outgoing = var;
c42cfb5c 4935 incoming = omp_reduction_init_op (loc, rcode, type);
e5014671
NS
4936 }
4937 else
11c4c4ba
CLT
4938 {
4939 /* Try to look at enclosing contexts for reduction var,
4940 use original if no mapping found. */
4941 tree t = NULL_TREE;
4942 omp_context *c = ctx->outer;
4943 while (c && !t)
4944 {
4945 t = maybe_lookup_decl (orig, c);
4946 c = c->outer;
4947 }
4948 incoming = outgoing = (t ? t : orig);
4949 }
01914336 4950
e5014671
NS
4951 has_outer_reduction:;
4952 }
41dbbb37 4953
e5014671
NS
4954 if (!ref_to_res)
4955 ref_to_res = integer_zero_node;
41dbbb37 4956
01914336 4957 if (omp_is_reference (orig))
c42cfb5c
CP
4958 {
4959 tree type = TREE_TYPE (var);
4960 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
4961
4962 if (!inner)
4963 {
4964 tree x = create_tmp_var (TREE_TYPE (type), id);
4965 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
4966 }
4967
4968 v1 = create_tmp_var (type, id);
4969 v2 = create_tmp_var (type, id);
4970 v3 = create_tmp_var (type, id);
4971
4972 gimplify_assign (v1, var, fork_seq);
4973 gimplify_assign (v2, var, fork_seq);
4974 gimplify_assign (v3, var, fork_seq);
4975
4976 var = build_simple_mem_ref (var);
4977 v1 = build_simple_mem_ref (v1);
4978 v2 = build_simple_mem_ref (v2);
4979 v3 = build_simple_mem_ref (v3);
4980 outgoing = build_simple_mem_ref (outgoing);
4981
e387fc64 4982 if (!TREE_CONSTANT (incoming))
c42cfb5c
CP
4983 incoming = build_simple_mem_ref (incoming);
4984 }
4985 else
4986 v1 = v2 = v3 = var;
4987
e5014671 4988 /* Determine position in reduction buffer, which may be used
ef1d3b57
RS
4989 by target. The parser has ensured that this is not a
4990 variable-sized type. */
4991 fixed_size_mode mode
4992 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
e5014671
NS
4993 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
4994 offset = (offset + align - 1) & ~(align - 1);
4995 tree off = build_int_cst (sizetype, offset);
4996 offset += GET_MODE_SIZE (mode);
41dbbb37 4997
e5014671
NS
4998 if (!init_code)
4999 {
5000 init_code = build_int_cst (integer_type_node,
5001 IFN_GOACC_REDUCTION_INIT);
5002 fini_code = build_int_cst (integer_type_node,
5003 IFN_GOACC_REDUCTION_FINI);
5004 setup_code = build_int_cst (integer_type_node,
5005 IFN_GOACC_REDUCTION_SETUP);
5006 teardown_code = build_int_cst (integer_type_node,
5007 IFN_GOACC_REDUCTION_TEARDOWN);
5008 }
5009
5010 tree setup_call
5011 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5012 TREE_TYPE (var), 6, setup_code,
5013 unshare_expr (ref_to_res),
5014 incoming, level, op, off);
5015 tree init_call
5016 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5017 TREE_TYPE (var), 6, init_code,
5018 unshare_expr (ref_to_res),
c42cfb5c 5019 v1, level, op, off);
e5014671
NS
5020 tree fini_call
5021 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5022 TREE_TYPE (var), 6, fini_code,
5023 unshare_expr (ref_to_res),
c42cfb5c 5024 v2, level, op, off);
e5014671
NS
5025 tree teardown_call
5026 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5027 TREE_TYPE (var), 6, teardown_code,
c42cfb5c 5028 ref_to_res, v3, level, op, off);
e5014671 5029
c42cfb5c
CP
5030 gimplify_assign (v1, setup_call, &before_fork);
5031 gimplify_assign (v2, init_call, &after_fork);
5032 gimplify_assign (v3, fini_call, &before_join);
e5014671
NS
5033 gimplify_assign (outgoing, teardown_call, &after_join);
5034 }
5035
5036 /* Now stitch things together. */
5037 gimple_seq_add_seq (fork_seq, before_fork);
5038 if (fork)
5039 gimple_seq_add_stmt (fork_seq, fork);
5040 gimple_seq_add_seq (fork_seq, after_fork);
5041
5042 gimple_seq_add_seq (join_seq, before_join);
5043 if (join)
5044 gimple_seq_add_stmt (join_seq, join);
5045 gimple_seq_add_seq (join_seq, after_join);
41dbbb37 5046}
50674e96 5047
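/* Illustrative sketch (approximate, not verbatim GIMPLE): for a single
   "reduction (+:sum)" clause the sequences built above line up as

       v1  = GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, offset);
       <fork call supplied by the caller>
       v2  = GOACC_REDUCTION (INIT, ref_to_res, v1, level, +, offset);
       ... loop body ...
       v3  = GOACC_REDUCTION (FINI, ref_to_res, v2, level, +, offset);
       <join call supplied by the caller>
       sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, +, offset);

   i.e. SETUP/INIT bracket the fork and FINI/TEARDOWN bracket the join;
   the internal calls are resolved later by the target's OpenACC
   device lowering.  */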
953ff289
DN
5048/* Generate code to implement the REDUCTION clauses. */
5049
5050static void
726a989a 5051lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
953ff289 5052{
726a989a 5053 gimple_seq sub_seq = NULL;
355fe088 5054 gimple *stmt;
374d0225 5055 tree x, c;
953ff289
DN
5056 int count = 0;
5057
e5014671
NS
5058 /* OpenACC loop reductions are handled elsewhere. */
5059 if (is_gimple_omp_oacc (ctx->stmt))
5060 return;
5061
74bf76ed
JJ
5062 /* SIMD reductions are handled in lower_rec_input_clauses. */
5063 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 5064 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
74bf76ed
JJ
5065 return;
5066
953ff289
DN
5067 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5068 update in that case, otherwise use a lock. */
5069 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
aaf46ef9 5070 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
953ff289 5071 {
d9a6bd32
JJ
5072 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5073 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
953ff289 5074 {
acf0174b 5075 /* Never use OMP_ATOMIC for array reductions or UDRs. */
953ff289
DN
5076 count = -1;
5077 break;
5078 }
5079 count++;
5080 }
5081
5082 if (count == 0)
5083 return;
5084
5085 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5086 {
d9a6bd32 5087 tree var, ref, new_var, orig_var;
953ff289 5088 enum tree_code code;
db3927fb 5089 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5090
aaf46ef9 5091 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
953ff289
DN
5092 continue;
5093
c24783c4 5094 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
d9a6bd32
JJ
5095 orig_var = var = OMP_CLAUSE_DECL (c);
5096 if (TREE_CODE (var) == MEM_REF)
5097 {
5098 var = TREE_OPERAND (var, 0);
e01d41e5
JJ
5099 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5100 var = TREE_OPERAND (var, 0);
c24783c4 5101 if (TREE_CODE (var) == ADDR_EXPR)
d9a6bd32 5102 var = TREE_OPERAND (var, 0);
c24783c4
JJ
5103 else
5104 {
 5105 /* If this is a pointer or reference based array
 5106 section, the var could be private in the outer
 5107 context e.g. on an orphaned loop construct. Pretend this
 5108 is the private variable's outer reference. */
5109 ccode = OMP_CLAUSE_PRIVATE;
5110 if (TREE_CODE (var) == INDIRECT_REF)
5111 var = TREE_OPERAND (var, 0);
5112 }
d9a6bd32
JJ
5113 orig_var = var;
5114 if (is_variable_sized (var))
5115 {
5116 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5117 var = DECL_VALUE_EXPR (var);
5118 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5119 var = TREE_OPERAND (var, 0);
5120 gcc_assert (DECL_P (var));
5121 }
5122 }
953ff289 5123 new_var = lookup_decl (var, ctx);
629b3d75 5124 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
70f34814 5125 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
c24783c4 5126 ref = build_outer_var_ref (var, ctx, ccode);
953ff289 5127 code = OMP_CLAUSE_REDUCTION_CODE (c);
50674e96
DN
5128
5129 /* reduction(-:var) sums up the partial results, so it acts
5130 identically to reduction(+:var). */
953ff289
DN
5131 if (code == MINUS_EXPR)
5132 code = PLUS_EXPR;
5133
e5014671 5134 if (count == 1)
953ff289 5135 {
db3927fb 5136 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
5137
5138 addr = save_expr (addr);
5139 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
db3927fb 5140 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
953ff289 5141 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
726a989a 5142 gimplify_and_add (x, stmt_seqp);
953ff289
DN
5143 return;
5144 }
d9a6bd32
JJ
5145 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5146 {
5147 tree d = OMP_CLAUSE_DECL (c);
5148 tree type = TREE_TYPE (d);
5149 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5150 tree i = create_tmp_var (TREE_TYPE (v), NULL);
5151 tree ptype = build_pointer_type (TREE_TYPE (type));
e01d41e5
JJ
5152 tree bias = TREE_OPERAND (d, 1);
5153 d = TREE_OPERAND (d, 0);
5154 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5155 {
5156 tree b = TREE_OPERAND (d, 1);
5157 b = maybe_lookup_decl (b, ctx);
5158 if (b == NULL)
5159 {
5160 b = TREE_OPERAND (d, 1);
5161 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5162 }
5163 if (integer_zerop (bias))
5164 bias = b;
5165 else
5166 {
5167 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5168 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5169 TREE_TYPE (b), b, bias);
5170 }
5171 d = TREE_OPERAND (d, 0);
5172 }
d9a6bd32
JJ
5173 /* For ref build_outer_var_ref already performs this, so
5174 only new_var needs a dereference. */
e01d41e5 5175 if (TREE_CODE (d) == INDIRECT_REF)
d9a6bd32
JJ
5176 {
5177 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
629b3d75 5178 gcc_assert (omp_is_reference (var) && var == orig_var);
d9a6bd32 5179 }
e01d41e5 5180 else if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32
JJ
5181 {
5182 if (orig_var == var)
5183 {
5184 new_var = build_fold_addr_expr (new_var);
5185 ref = build_fold_addr_expr (ref);
5186 }
5187 }
5188 else
5189 {
5190 gcc_assert (orig_var == var);
629b3d75 5191 if (omp_is_reference (var))
d9a6bd32
JJ
5192 ref = build_fold_addr_expr (ref);
5193 }
5194 if (DECL_P (v))
5195 {
5196 tree t = maybe_lookup_decl (v, ctx);
5197 if (t)
5198 v = t;
5199 else
5200 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5201 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5202 }
e01d41e5
JJ
5203 if (!integer_zerop (bias))
5204 {
5205 bias = fold_convert_loc (clause_loc, sizetype, bias);
5206 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5207 TREE_TYPE (new_var), new_var,
5208 unshare_expr (bias));
5209 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5210 TREE_TYPE (ref), ref, bias);
5211 }
d9a6bd32
JJ
5212 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5213 ref = fold_convert_loc (clause_loc, ptype, ref);
5214 tree m = create_tmp_var (ptype, NULL);
5215 gimplify_assign (m, new_var, stmt_seqp);
5216 new_var = m;
5217 m = create_tmp_var (ptype, NULL);
5218 gimplify_assign (m, ref, stmt_seqp);
5219 ref = m;
5220 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5221 tree body = create_artificial_label (UNKNOWN_LOCATION);
5222 tree end = create_artificial_label (UNKNOWN_LOCATION);
5223 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5224 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5225 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5226 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5227 {
5228 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5229 tree decl_placeholder
5230 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5231 SET_DECL_VALUE_EXPR (placeholder, out);
5232 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5233 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5234 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5235 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5236 gimple_seq_add_seq (&sub_seq,
5237 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5238 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5239 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5240 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5241 }
5242 else
5243 {
5244 x = build2 (code, TREE_TYPE (out), out, priv);
5245 out = unshare_expr (out);
5246 gimplify_assign (out, x, &sub_seq);
5247 }
5248 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5249 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5250 gimple_seq_add_stmt (&sub_seq, g);
5251 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5252 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5253 gimple_seq_add_stmt (&sub_seq, g);
5254 g = gimple_build_assign (i, PLUS_EXPR, i,
5255 build_int_cst (TREE_TYPE (i), 1));
5256 gimple_seq_add_stmt (&sub_seq, g);
5257 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5258 gimple_seq_add_stmt (&sub_seq, g);
5259 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5260 }
41dbbb37 5261 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
953ff289
DN
5262 {
5263 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5264
629b3d75 5265 if (omp_is_reference (var)
acf0174b
JJ
5266 && !useless_type_conversion_p (TREE_TYPE (placeholder),
5267 TREE_TYPE (ref)))
db3927fb 5268 ref = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
5269 SET_DECL_VALUE_EXPR (placeholder, ref);
5270 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
355a7673 5271 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
726a989a
RB
5272 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5273 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
953ff289
DN
5274 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5275 }
5276 else
5277 {
5278 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5279 ref = build_outer_var_ref (var, ctx);
726a989a 5280 gimplify_assign (ref, x, &sub_seq);
953ff289
DN
5281 }
5282 }
5283
e79983f4
MM
5284 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5285 0);
726a989a 5286 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289 5287
726a989a 5288 gimple_seq_add_seq (stmt_seqp, sub_seq);
953ff289 5289
e79983f4
MM
5290 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5291 0);
726a989a 5292 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289
DN
5293}
5294
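/* Illustrative sketch (approximate source form, not verbatim): with a
   single "reduction (+:x)" clause the atomic path above emits roughly

       #pragma omp atomic
       *&shared_x = *&shared_x + private_x;

   while two or more reduction clauses (or array/UDR reductions) take
   the locked path instead:

       GOMP_atomic_start ();
       shared_x = shared_x + private_x;
       shared_y = shared_y op private_y;
       GOMP_atomic_end ();  */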
50674e96 5295
953ff289
DN
5296/* Generate code to implement the COPYPRIVATE clauses. */
5297
5298static void
726a989a 5299lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
953ff289
DN
5300 omp_context *ctx)
5301{
5302 tree c;
5303
5304 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5305 {
78db7d92 5306 tree var, new_var, ref, x;
953ff289 5307 bool by_ref;
db3927fb 5308 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5309
aaf46ef9 5310 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
953ff289
DN
5311 continue;
5312
5313 var = OMP_CLAUSE_DECL (c);
7c8f7639 5314 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
5315
5316 ref = build_sender_ref (var, ctx);
78db7d92
JJ
5317 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5318 if (by_ref)
5319 {
5320 x = build_fold_addr_expr_loc (clause_loc, new_var);
5321 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5322 }
726a989a 5323 gimplify_assign (ref, x, slist);
953ff289 5324
78db7d92
JJ
5325 ref = build_receiver_ref (var, false, ctx);
5326 if (by_ref)
5327 {
5328 ref = fold_convert_loc (clause_loc,
5329 build_pointer_type (TREE_TYPE (new_var)),
5330 ref);
5331 ref = build_fold_indirect_ref_loc (clause_loc, ref);
5332 }
629b3d75 5333 if (omp_is_reference (var))
953ff289 5334 {
78db7d92 5335 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
70f34814
RG
5336 ref = build_simple_mem_ref_loc (clause_loc, ref);
5337 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289 5338 }
78db7d92 5339 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
953ff289
DN
5340 gimplify_and_add (x, rlist);
5341 }
5342}
5343
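/* Illustrative sketch: for "copyprivate (x)" the two sequences built
   above amount to, on the thread that executed the single block,

       .omp_copy_o.x = x;        <-- slist (or &x when passed by reference)

   and, on every other thread once it has received a pointer to that
   record,

       x = copyout_p->x;         <-- rlist

   (record and field names follow the conventions used by
   lower_omp_single_copy below).  */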
50674e96 5344
953ff289
DN
5345/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5346 and REDUCTION from the sender (aka parent) side. */
5347
5348static void
726a989a
RB
5349lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5350 omp_context *ctx)
953ff289 5351{
d9a6bd32
JJ
5352 tree c, t;
5353 int ignored_looptemp = 0;
5354 bool is_taskloop = false;
5355
5356 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5357 by GOMP_taskloop. */
5358 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5359 {
5360 ignored_looptemp = 2;
5361 is_taskloop = true;
5362 }
953ff289
DN
5363
5364 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5365 {
50674e96 5366 tree val, ref, x, var;
953ff289 5367 bool by_ref, do_in = false, do_out = false;
db3927fb 5368 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5369
aaf46ef9 5370 switch (OMP_CLAUSE_CODE (c))
953ff289 5371 {
a68ab351
JJ
5372 case OMP_CLAUSE_PRIVATE:
5373 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5374 break;
5375 continue;
953ff289
DN
5376 case OMP_CLAUSE_FIRSTPRIVATE:
5377 case OMP_CLAUSE_COPYIN:
5378 case OMP_CLAUSE_LASTPRIVATE:
5379 case OMP_CLAUSE_REDUCTION:
d9a6bd32
JJ
5380 break;
5381 case OMP_CLAUSE_SHARED:
5382 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5383 break;
5384 continue;
acf0174b 5385 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32
JJ
5386 if (ignored_looptemp)
5387 {
5388 ignored_looptemp--;
5389 continue;
5390 }
953ff289
DN
5391 break;
5392 default:
5393 continue;
5394 }
5395
d2dda7fe 5396 val = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
5397 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5398 && TREE_CODE (val) == MEM_REF)
5399 {
5400 val = TREE_OPERAND (val, 0);
e01d41e5
JJ
5401 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5402 val = TREE_OPERAND (val, 0);
d9a6bd32
JJ
5403 if (TREE_CODE (val) == INDIRECT_REF
5404 || TREE_CODE (val) == ADDR_EXPR)
5405 val = TREE_OPERAND (val, 0);
5406 if (is_variable_sized (val))
5407 continue;
5408 }
5409
5410 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5411 outer taskloop region. */
5412 omp_context *ctx_for_o = ctx;
5413 if (is_taskloop
5414 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5415 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5416 ctx_for_o = ctx->outer;
5417
5418 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
50674e96 5419
8ca5b2a2
JJ
5420 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5421 && is_global_var (var))
5422 continue;
d9a6bd32
JJ
5423
5424 t = omp_member_access_dummy_var (var);
5425 if (t)
5426 {
5427 var = DECL_VALUE_EXPR (var);
5428 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5429 if (o != t)
5430 var = unshare_and_remap (var, t, o);
5431 else
5432 var = unshare_expr (var);
5433 }
5434
5435 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5436 {
5437 /* Handle taskloop firstprivate/lastprivate, where the
5438 lastprivate on GIMPLE_OMP_TASK is represented as
5439 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
5440 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5441 x = omp_build_component_ref (ctx->sender_decl, f);
5442 if (use_pointer_for_field (val, ctx))
5443 var = build_fold_addr_expr (var);
5444 gimplify_assign (x, var, ilist);
5445 DECL_ABSTRACT_ORIGIN (f) = NULL;
5446 continue;
5447 }
5448
5449 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5450 || val == OMP_CLAUSE_DECL (c))
5451 && is_variable_sized (val))
953ff289 5452 continue;
7c8f7639 5453 by_ref = use_pointer_for_field (val, NULL);
953ff289 5454
aaf46ef9 5455 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
5456 {
5457 case OMP_CLAUSE_FIRSTPRIVATE:
ec35ea45
JJ
5458 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5459 && !by_ref
5460 && is_task_ctx (ctx))
5461 TREE_NO_WARNING (var) = 1;
5462 do_in = true;
5463 break;
5464
5465 case OMP_CLAUSE_PRIVATE:
953ff289 5466 case OMP_CLAUSE_COPYIN:
acf0174b 5467 case OMP_CLAUSE__LOOPTEMP_:
953ff289
DN
5468 do_in = true;
5469 break;
5470
5471 case OMP_CLAUSE_LASTPRIVATE:
629b3d75 5472 if (by_ref || omp_is_reference (val))
953ff289
DN
5473 {
5474 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5475 continue;
5476 do_in = true;
5477 }
5478 else
a68ab351
JJ
5479 {
5480 do_out = true;
5481 if (lang_hooks.decls.omp_private_outer_ref (val))
5482 do_in = true;
5483 }
953ff289
DN
5484 break;
5485
5486 case OMP_CLAUSE_REDUCTION:
5487 do_in = true;
d9a6bd32 5488 if (val == OMP_CLAUSE_DECL (c))
629b3d75 5489 do_out = !(by_ref || omp_is_reference (val));
d9a6bd32
JJ
5490 else
5491 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
953ff289
DN
5492 break;
5493
5494 default:
5495 gcc_unreachable ();
5496 }
5497
5498 if (do_in)
5499 {
5500 ref = build_sender_ref (val, ctx);
db3927fb 5501 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
726a989a 5502 gimplify_assign (ref, x, ilist);
a68ab351
JJ
5503 if (is_task_ctx (ctx))
5504 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
953ff289 5505 }
50674e96 5506
953ff289
DN
5507 if (do_out)
5508 {
5509 ref = build_sender_ref (val, ctx);
726a989a 5510 gimplify_assign (var, ref, olist);
953ff289
DN
5511 }
5512 }
5513}
5514
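/* Illustrative sketch (variable and field names are only for
   exposition): for firstprivate (a), a by-reference item p and a
   lastprivate (b), the sender side built above does roughly

       .omp_data_o.a = a;        <-- ilist, do_in by value
       .omp_data_o.p = &p;       <-- ilist, do_in by reference
       ... the region runs ...
       b = .omp_data_o.b;        <-- olist, do_out copy-back

   where .omp_data_o stands for the sender record whose fields were
   laid out during scanning.  */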
726a989a
RB
5515/* Generate code to implement SHARED from the sender (aka parent)
5516 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5517 list things that got automatically shared. */
953ff289
DN
5518
5519static void
726a989a 5520lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
953ff289 5521{
d9a6bd32 5522 tree var, ovar, nvar, t, f, x, record_type;
953ff289
DN
5523
5524 if (ctx->record_type == NULL)
5525 return;
50674e96 5526
a68ab351 5527 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
910ad8de 5528 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
953ff289
DN
5529 {
5530 ovar = DECL_ABSTRACT_ORIGIN (f);
d9a6bd32
JJ
5531 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5532 continue;
5533
953ff289
DN
5534 nvar = maybe_lookup_decl (ovar, ctx);
5535 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5536 continue;
5537
50674e96
DN
5538 /* If CTX is a nested parallel directive. Find the immediately
5539 enclosing parallel or workshare construct that contains a
5540 mapping for OVAR. */
d2dda7fe 5541 var = lookup_decl_in_outer_ctx (ovar, ctx);
50674e96 5542
d9a6bd32
JJ
5543 t = omp_member_access_dummy_var (var);
5544 if (t)
5545 {
5546 var = DECL_VALUE_EXPR (var);
5547 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5548 if (o != t)
5549 var = unshare_and_remap (var, t, o);
5550 else
5551 var = unshare_expr (var);
5552 }
5553
7c8f7639 5554 if (use_pointer_for_field (ovar, ctx))
953ff289
DN
5555 {
5556 x = build_sender_ref (ovar, ctx);
50674e96 5557 var = build_fold_addr_expr (var);
726a989a 5558 gimplify_assign (x, var, ilist);
953ff289
DN
5559 }
5560 else
5561 {
5562 x = build_sender_ref (ovar, ctx);
726a989a 5563 gimplify_assign (x, var, ilist);
953ff289 5564
14e5b285
RG
5565 if (!TREE_READONLY (var)
5566 /* We don't need to receive a new reference to a result
5567 or parm decl. In fact we may not store to it as we will
5568 invalidate any pending RSO and generate wrong gimple
5569 during inlining. */
5570 && !((TREE_CODE (var) == RESULT_DECL
5571 || TREE_CODE (var) == PARM_DECL)
5572 && DECL_BY_REFERENCE (var)))
a68ab351
JJ
5573 {
5574 x = build_sender_ref (ovar, ctx);
726a989a 5575 gimplify_assign (var, x, olist);
a68ab351 5576 }
953ff289
DN
5577 }
5578 }
5579}
5580
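/* Illustrative sketch: for an automatically shared scalar "n" and an
   aggregate "buf" passed via a pointer field, the code above produces
   roughly

       .omp_data_o.n = n;        <-- ilist
       .omp_data_o.buf = &buf;   <-- use_pointer_for_field
       ... region ...
       n = .omp_data_o.n;        <-- olist, skipped when n is TREE_READONLY

   (names are for exposition only).  */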
e4834818
NS
 5581/* Emit an OpenACC head marker call, encapsulating the partitioning and
5582 other information that must be processed by the target compiler.
5583 Return the maximum number of dimensions the associated loop might
5584 be partitioned over. */
5585
5586static unsigned
5587lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5588 gimple_seq *seq, omp_context *ctx)
5589{
5590 unsigned levels = 0;
5591 unsigned tag = 0;
5592 tree gang_static = NULL_TREE;
5593 auto_vec<tree, 5> args;
5594
5595 args.quick_push (build_int_cst
5596 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5597 args.quick_push (ddvar);
5598 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5599 {
5600 switch (OMP_CLAUSE_CODE (c))
5601 {
5602 case OMP_CLAUSE_GANG:
5603 tag |= OLF_DIM_GANG;
5604 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5605 /* static:* is represented by -1, and we can ignore it, as
5606 scheduling is always static. */
5607 if (gang_static && integer_minus_onep (gang_static))
5608 gang_static = NULL_TREE;
5609 levels++;
5610 break;
5611
5612 case OMP_CLAUSE_WORKER:
5613 tag |= OLF_DIM_WORKER;
5614 levels++;
5615 break;
5616
5617 case OMP_CLAUSE_VECTOR:
5618 tag |= OLF_DIM_VECTOR;
5619 levels++;
5620 break;
5621
5622 case OMP_CLAUSE_SEQ:
5623 tag |= OLF_SEQ;
5624 break;
5625
5626 case OMP_CLAUSE_AUTO:
5627 tag |= OLF_AUTO;
5628 break;
5629
5630 case OMP_CLAUSE_INDEPENDENT:
5631 tag |= OLF_INDEPENDENT;
5632 break;
5633
02889d23
CLT
5634 case OMP_CLAUSE_TILE:
5635 tag |= OLF_TILE;
5636 break;
5637
e4834818
NS
5638 default:
5639 continue;
5640 }
5641 }
5642
5643 if (gang_static)
5644 {
5645 if (DECL_P (gang_static))
5646 gang_static = build_outer_var_ref (gang_static, ctx);
5647 tag |= OLF_GANG_STATIC;
5648 }
5649
5650 /* In a parallel region, loops are implicitly INDEPENDENT. */
5651 omp_context *tgt = enclosing_target_ctx (ctx);
5652 if (!tgt || is_oacc_parallel (tgt))
5653 tag |= OLF_INDEPENDENT;
5654
02889d23
CLT
5655 if (tag & OLF_TILE)
5656 /* Tiling could use all 3 levels. */
5657 levels = 3;
5658 else
5659 {
5660 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5661 Ensure at least one level, or 2 for possible auto
 5662 partitioning. */
5663 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5664 << OLF_DIM_BASE) | OLF_SEQ));
5665
5666 if (levels < 1u + maybe_auto)
5667 levels = 1u + maybe_auto;
5668 }
e4834818
NS
5669
5670 args.quick_push (build_int_cst (integer_type_node, levels));
5671 args.quick_push (build_int_cst (integer_type_node, tag));
5672 if (gang_static)
5673 args.quick_push (gang_static);
5674
5675 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5676 gimple_set_location (call, loc);
5677 gimple_set_lhs (call, ddvar);
5678 gimple_seq_add_stmt (seq, call);
5679
5680 return levels;
5681}
5682
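/* Illustrative sketch: for "#pragma acc loop gang vector" the call
   built above looks roughly like

       .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep, 2, tag);

   where 2 is the number of partitioning levels requested and TAG
   encodes OLF_DIM_GANG | OLF_DIM_VECTOR (plus OLF_INDEPENDENT inside a
   parallel region); a "gang(static:n)" argument would be appended as a
   trailing operand.  */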
 5683/* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
5684 partitioning level of the enclosed region. */
5685
5686static void
5687lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5688 tree tofollow, gimple_seq *seq)
5689{
5690 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5691 : IFN_UNIQUE_OACC_TAIL_MARK);
5692 tree marker = build_int_cst (integer_type_node, marker_kind);
5693 int nargs = 2 + (tofollow != NULL_TREE);
5694 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5695 marker, ddvar, tofollow);
5696 gimple_set_location (call, loc);
5697 gimple_set_lhs (call, ddvar);
5698 gimple_seq_add_stmt (seq, call);
5699}
5700
5701/* Generate the before and after OpenACC loop sequences. CLAUSES are
5702 the loop clauses, from which we extract reductions. Initialize
5703 HEAD and TAIL. */
5704
5705static void
5706lower_oacc_head_tail (location_t loc, tree clauses,
5707 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5708{
5709 bool inner = false;
5710 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5711 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5712
5713 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
e4834818
NS
5714 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5715 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5716
4877b5a4 5717 gcc_assert (count);
e4834818
NS
5718 for (unsigned done = 1; count; count--, done++)
5719 {
5720 gimple_seq fork_seq = NULL;
5721 gimple_seq join_seq = NULL;
5722
5723 tree place = build_int_cst (integer_type_node, -1);
5724 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5725 fork_kind, ddvar, place);
5726 gimple_set_location (fork, loc);
5727 gimple_set_lhs (fork, ddvar);
5728
5729 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5730 join_kind, ddvar, place);
5731 gimple_set_location (join, loc);
5732 gimple_set_lhs (join, ddvar);
5733
5734 /* Mark the beginning of this level sequence. */
5735 if (inner)
5736 lower_oacc_loop_marker (loc, ddvar, true,
5737 build_int_cst (integer_type_node, count),
5738 &fork_seq);
5739 lower_oacc_loop_marker (loc, ddvar, false,
5740 build_int_cst (integer_type_node, done),
5741 &join_seq);
5742
e5014671
NS
5743 lower_oacc_reductions (loc, clauses, place, inner,
5744 fork, join, &fork_seq, &join_seq, ctx);
e4834818
NS
5745
5746 /* Append this level to head. */
5747 gimple_seq_add_seq (head, fork_seq);
5748 /* Prepend it to tail. */
5749 gimple_seq_add_seq (&join_seq, *tail);
5750 *tail = join_seq;
5751
5752 inner = true;
5753 }
5754
5755 /* Mark the end of the sequence. */
5756 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5757 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5758}
726a989a 5759
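/* Illustrative sketch: for a loop partitioned over two levels the
   sequences built above come out roughly as

     HEAD:  .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep, 2, tag);
            .data_dep = UNIQUE (OACC_FORK, .data_dep, -1);   <-- outer level
            .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep, 1);
            .data_dep = UNIQUE (OACC_FORK, .data_dep, -1);   <-- inner level
            .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep);

     TAIL:  .data_dep = UNIQUE (OACC_TAIL_MARK, .data_dep, 2);
            .data_dep = UNIQUE (OACC_JOIN, .data_dep, -1);   <-- inner level
            .data_dep = UNIQUE (OACC_TAIL_MARK, .data_dep, 1);
            .data_dep = UNIQUE (OACC_JOIN, .data_dep, -1);   <-- outer level
            .data_dep = UNIQUE (OACC_TAIL_MARK, .data_dep);

   with any reduction setup/teardown from lower_oacc_reductions
   interleaved around each fork/join, and the -1 placeholders filled in
   once the actual partitioning is decided.  */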
629b3d75
MJ
5760/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5761 catch handler and return it. This prevents programs from violating the
5762 structured block semantics with throws. */
726a989a 5763
629b3d75
MJ
5764static gimple_seq
5765maybe_catch_exception (gimple_seq body)
726a989a 5766{
629b3d75
MJ
5767 gimple *g;
5768 tree decl;
b2b40051 5769
629b3d75
MJ
5770 if (!flag_exceptions)
5771 return body;
b2b40051 5772
629b3d75
MJ
5773 if (lang_hooks.eh_protect_cleanup_actions != NULL)
5774 decl = lang_hooks.eh_protect_cleanup_actions ();
5775 else
5776 decl = builtin_decl_explicit (BUILT_IN_TRAP);
b2b40051 5777
629b3d75
MJ
5778 g = gimple_build_eh_must_not_throw (decl);
5779 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5780 GIMPLE_TRY_CATCH);
b2b40051 5781
629b3d75 5782 return gimple_seq_alloc_with_stmt (g);
b2b40051
MJ
5783}
5784
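/* Illustrative sketch: with exceptions enabled the body is wrapped as

       try
         {
           ... BODY ...
         }
       catch (...)                        <-- GIMPLE_TRY_CATCH
         {
           <eh_protect_cleanup_actions, e.g. std::terminate for C++,
            or __builtin_trap as the fallback>;
         }

   using a MUST_NOT_THROW handler, so throwing out of the structured
   block becomes a hard runtime error rather than an escaping
   exception.  */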
629b3d75
MJ
5785\f
5786/* Routines to lower OMP directives into OMP-GIMPLE. */
726a989a 5787
629b3d75
MJ
5788/* If ctx is a worksharing context inside of a cancellable parallel
5789 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5790 and conditional branch to parallel's cancel_label to handle
5791 cancellation in the implicit barrier. */
953ff289
DN
5792
5793static void
629b3d75 5794maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
953ff289 5795{
629b3d75
MJ
5796 gimple *omp_return = gimple_seq_last_stmt (*body);
5797 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5798 if (gimple_omp_return_nowait_p (omp_return))
5799 return;
5800 if (ctx->outer
5801 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5802 && ctx->outer->cancellable)
50674e96 5803 {
629b3d75
MJ
5804 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5805 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5806 tree lhs = create_tmp_var (c_bool_type);
5807 gimple_omp_return_set_lhs (omp_return, lhs);
5808 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5809 gimple *g = gimple_build_cond (NE_EXPR, lhs,
5810 fold_convert (c_bool_type,
5811 boolean_false_node),
5812 ctx->outer->cancel_label, fallthru_label);
5813 gimple_seq_add_stmt (body, g);
5814 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
50674e96 5815 }
629b3d75 5816}
953ff289 5817
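/* Illustrative sketch: for a cancellable parallel the implicit barrier
   of a non-nowait worksharing region is given a result and checked,
   roughly

       .res = <implicit barrier>;   <-- lhs added to GIMPLE_OMP_RETURN;
                                        typically GOMP_barrier_cancel ()
                                        after expansion
       if (.res != 0) goto <parallel's cancel_label>;
     fallthru:;

   so a cancellation noticed while waiting at the barrier jumps to the
   parallel's cancellation exit instead of continuing.  */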
629b3d75
MJ
5818/* Lower the OpenMP sections directive in the current statement in GSI_P.
5819 CTX is the enclosing OMP context for the current statement. */
953ff289 5820
629b3d75
MJ
5821static void
5822lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5823{
5824 tree block, control;
5825 gimple_stmt_iterator tgsi;
5826 gomp_sections *stmt;
5827 gimple *t;
5828 gbind *new_stmt, *bind;
5829 gimple_seq ilist, dlist, olist, new_body;
953ff289 5830
629b3d75 5831 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
953ff289 5832
629b3d75 5833 push_gimplify_context ();
acf0174b 5834
629b3d75
MJ
5835 dlist = NULL;
5836 ilist = NULL;
5837 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5838 &ilist, &dlist, ctx, NULL);
953ff289 5839
629b3d75
MJ
5840 new_body = gimple_omp_body (stmt);
5841 gimple_omp_set_body (stmt, NULL);
5842 tgsi = gsi_start (new_body);
5843 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
953ff289 5844 {
629b3d75
MJ
5845 omp_context *sctx;
5846 gimple *sec_start;
50674e96 5847
629b3d75
MJ
5848 sec_start = gsi_stmt (tgsi);
5849 sctx = maybe_lookup_ctx (sec_start);
5850 gcc_assert (sctx);
5851
5852 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5853 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5854 GSI_CONTINUE_LINKING);
5855 gimple_omp_set_body (sec_start, NULL);
5856
5857 if (gsi_one_before_end_p (tgsi))
50674e96 5858 {
629b3d75
MJ
5859 gimple_seq l = NULL;
5860 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5861 &l, ctx);
5862 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5863 gimple_omp_section_set_last (sec_start);
5864 }
917948d3 5865
629b3d75
MJ
5866 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5867 GSI_CONTINUE_LINKING);
5868 }
50674e96 5869
629b3d75
MJ
5870 block = make_node (BLOCK);
5871 bind = gimple_build_bind (NULL, new_body, block);
50674e96 5872
629b3d75
MJ
5873 olist = NULL;
5874 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
50674e96 5875
629b3d75
MJ
5876 block = make_node (BLOCK);
5877 new_stmt = gimple_build_bind (NULL, NULL, block);
5878 gsi_replace (gsi_p, new_stmt, true);
50674e96 5879
629b3d75
MJ
5880 pop_gimplify_context (new_stmt);
5881 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5882 BLOCK_VARS (block) = gimple_bind_vars (bind);
5883 if (BLOCK_VARS (block))
5884 TREE_USED (block) = 1;
50674e96 5885
629b3d75
MJ
5886 new_body = NULL;
5887 gimple_seq_add_seq (&new_body, ilist);
5888 gimple_seq_add_stmt (&new_body, stmt);
5889 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5890 gimple_seq_add_stmt (&new_body, bind);
50674e96 5891
629b3d75
MJ
5892 control = create_tmp_var (unsigned_type_node, ".section");
5893 t = gimple_build_omp_continue (control, control);
5894 gimple_omp_sections_set_control (stmt, control);
5895 gimple_seq_add_stmt (&new_body, t);
50674e96 5896
629b3d75
MJ
5897 gimple_seq_add_seq (&new_body, olist);
5898 if (ctx->cancellable)
5899 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5900 gimple_seq_add_seq (&new_body, dlist);
917948d3 5901
629b3d75 5902 new_body = maybe_catch_exception (new_body);
50674e96 5903
01914336
MJ
5904 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5905 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5906 t = gimple_build_omp_return (nowait);
629b3d75
MJ
5907 gimple_seq_add_stmt (&new_body, t);
5908 maybe_add_implicit_barrier_cancel (ctx, &new_body);
953ff289 5909
629b3d75 5910 gimple_bind_set_body (new_stmt, new_body);
953ff289
DN
5911}
5912
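/* Illustrative sketch of the lowered layout built above (much
   simplified; clause handling omitted):

       <ilist: privatization/firstprivate setup>
       GIMPLE_OMP_SECTIONS <.section control var>
       GIMPLE_OMP_SECTIONS_SWITCH
       {
         GIMPLE_OMP_SECTION { body of section 1 } GIMPLE_OMP_RETURN
         ...
         GIMPLE_OMP_SECTION { last body + lastprivate } GIMPLE_OMP_RETURN
       }
       GIMPLE_OMP_CONTINUE (.section, .section)
       <olist: reductions>  <dlist: destructors>
       GIMPLE_OMP_RETURN (nowait?)

   which pass_expand_omp later turns into GOMP_sections_start/next
   calls and a switch over the section numbers.  */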
9a771876 5913
629b3d75
MJ
5914/* A subroutine of lower_omp_single. Expand the simple form of
5915 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
9a771876 5916
629b3d75
MJ
5917 if (GOMP_single_start ())
5918 BODY;
5919 [ GOMP_barrier (); ] -> unless 'nowait' is present.
9a771876 5920
629b3d75
MJ
5921 FIXME. It may be better to delay expanding the logic of this until
5922 pass_expand_omp. The expanded logic may make the job more difficult
5923 to a synchronization analysis pass. */
a68ab351
JJ
5924
5925static void
629b3d75 5926lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
a68ab351 5927{
629b3d75
MJ
5928 location_t loc = gimple_location (single_stmt);
5929 tree tlabel = create_artificial_label (loc);
5930 tree flabel = create_artificial_label (loc);
5931 gimple *call, *cond;
5932 tree lhs, decl;
20906c66 5933
629b3d75
MJ
5934 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
5935 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
5936 call = gimple_build_call (decl, 0);
5937 gimple_call_set_lhs (call, lhs);
5938 gimple_seq_add_stmt (pre_p, call);
a68ab351 5939
629b3d75
MJ
5940 cond = gimple_build_cond (EQ_EXPR, lhs,
5941 fold_convert_loc (loc, TREE_TYPE (lhs),
5942 boolean_true_node),
5943 tlabel, flabel);
5944 gimple_seq_add_stmt (pre_p, cond);
5945 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5946 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5947 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
a68ab351
JJ
5948}
5949
5950
629b3d75
MJ
5951/* A subroutine of lower_omp_single. Expand the simple form of
5952 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
953ff289 5953
629b3d75 5954 #pragma omp single copyprivate (a, b, c)
953ff289 5955
629b3d75 5956 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
953ff289 5957
629b3d75
MJ
5958 {
5959 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5960 {
5961 BODY;
5962 copyout.a = a;
5963 copyout.b = b;
5964 copyout.c = c;
5965 GOMP_single_copy_end (&copyout);
5966 }
5967 else
5968 {
5969 a = copyout_p->a;
5970 b = copyout_p->b;
5971 c = copyout_p->c;
5972 }
5973 GOMP_barrier ();
5974 }
726a989a 5975
629b3d75
MJ
5976 FIXME. It may be better to delay expanding the logic of this until
5977 pass_expand_omp. The expanded logic may make the job more difficult
5978 to a synchronization analysis pass. */
953ff289 5979
629b3d75
MJ
5980static void
5981lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
5982 omp_context *ctx)
5983{
5984 tree ptr_type, t, l0, l1, l2, bfn_decl;
5985 gimple_seq copyin_seq;
5986 location_t loc = gimple_location (single_stmt);
953ff289 5987
629b3d75 5988 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
953ff289 5989
629b3d75
MJ
5990 ptr_type = build_pointer_type (ctx->record_type);
5991 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
953ff289 5992
629b3d75
MJ
5993 l0 = create_artificial_label (loc);
5994 l1 = create_artificial_label (loc);
5995 l2 = create_artificial_label (loc);
953ff289 5996
629b3d75
MJ
5997 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
5998 t = build_call_expr_loc (loc, bfn_decl, 0);
5999 t = fold_convert_loc (loc, ptr_type, t);
6000 gimplify_assign (ctx->receiver_decl, t, pre_p);
953ff289 6001
629b3d75
MJ
6002 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6003 build_int_cst (ptr_type, 0));
6004 t = build3 (COND_EXPR, void_type_node, t,
6005 build_and_jump (&l0), build_and_jump (&l1));
6006 gimplify_and_add (t, pre_p);
953ff289 6007
629b3d75 6008 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
953ff289 6009
629b3d75 6010 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
953ff289 6011
629b3d75
MJ
6012 copyin_seq = NULL;
6013 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6014 &copyin_seq, ctx);
953ff289 6015
629b3d75
MJ
6016 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6017 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6018 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6019 gimplify_and_add (t, pre_p);
2aee3e57 6020
629b3d75
MJ
6021 t = build_and_jump (&l2);
6022 gimplify_and_add (t, pre_p);
953ff289 6023
629b3d75 6024 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
953ff289 6025
629b3d75 6026 gimple_seq_add_seq (pre_p, copyin_seq);
777f7f9a 6027
629b3d75 6028 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
777f7f9a 6029}
50674e96 6030
629b3d75
MJ
6031
6032/* Expand code for an OpenMP single directive. */
2b4cf991
JJ
6033
6034static void
629b3d75 6035lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
2b4cf991 6036{
629b3d75 6037 tree block;
629b3d75
MJ
6038 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6039 gbind *bind;
6040 gimple_seq bind_body, bind_body_tail = NULL, dlist;
2b4cf991 6041
629b3d75 6042 push_gimplify_context ();
2b4cf991 6043
629b3d75
MJ
6044 block = make_node (BLOCK);
6045 bind = gimple_build_bind (NULL, NULL, block);
6046 gsi_replace (gsi_p, bind, true);
6047 bind_body = NULL;
6048 dlist = NULL;
6049 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6050 &bind_body, &dlist, ctx, NULL);
6051 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
2b4cf991 6052
629b3d75 6053 gimple_seq_add_stmt (&bind_body, single_stmt);
2b4cf991 6054
629b3d75
MJ
6055 if (ctx->record_type)
6056 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6057 else
6058 lower_omp_single_simple (single_stmt, &bind_body);
2b4cf991 6059
629b3d75 6060 gimple_omp_set_body (single_stmt, NULL);
2b4cf991 6061
629b3d75 6062 gimple_seq_add_seq (&bind_body, dlist);
5a0f4dd3 6063
629b3d75 6064 bind_body = maybe_catch_exception (bind_body);
5a0f4dd3 6065
01914336
MJ
6066 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6067 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6068 gimple *g = gimple_build_omp_return (nowait);
6069 gimple_seq_add_stmt (&bind_body_tail, g);
629b3d75
MJ
6070 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6071 if (ctx->record_type)
6072 {
6073 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6074 tree clobber = build_constructor (ctx->record_type, NULL);
6075 TREE_THIS_VOLATILE (clobber) = 1;
6076 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6077 clobber), GSI_SAME_STMT);
6078 }
6079 gimple_seq_add_seq (&bind_body, bind_body_tail);
6080 gimple_bind_set_body (bind, bind_body);
5a0f4dd3 6081
629b3d75 6082 pop_gimplify_context (bind);
5a0f4dd3 6083
629b3d75
MJ
6084 gimple_bind_append_vars (bind, ctx->block_vars);
6085 BLOCK_VARS (block) = ctx->block_vars;
6086 if (BLOCK_VARS (block))
6087 TREE_USED (block) = 1;
5a0f4dd3
JJ
6088}
6089
74bf76ed 6090
629b3d75 6091/* Expand code for an OpenMP master directive. */
953ff289
DN
6092
6093static void
629b3d75 6094lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 6095{
629b3d75
MJ
6096 tree block, lab = NULL, x, bfn_decl;
6097 gimple *stmt = gsi_stmt (*gsi_p);
6098 gbind *bind;
6099 location_t loc = gimple_location (stmt);
6100 gimple_seq tseq;
50674e96 6101
629b3d75 6102 push_gimplify_context ();
50674e96 6103
629b3d75
MJ
6104 block = make_node (BLOCK);
6105 bind = gimple_build_bind (NULL, NULL, block);
6106 gsi_replace (gsi_p, bind, true);
6107 gimple_bind_add_stmt (bind, stmt);
50674e96 6108
629b3d75
MJ
6109 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6110 x = build_call_expr_loc (loc, bfn_decl, 0);
6111 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6112 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6113 tseq = NULL;
6114 gimplify_and_add (x, &tseq);
6115 gimple_bind_add_seq (bind, tseq);
9a771876 6116
629b3d75
MJ
6117 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6118 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6119 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6120 gimple_omp_set_body (stmt, NULL);
b357f682 6121
629b3d75 6122 gimple_bind_add_stmt (bind, gimple_build_label (lab));
99819c63 6123
629b3d75 6124 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
e01d41e5 6125
629b3d75 6126 pop_gimplify_context (bind);
b8698a0f 6127
629b3d75
MJ
6128 gimple_bind_append_vars (bind, ctx->block_vars);
6129 BLOCK_VARS (block) = ctx->block_vars;
953ff289
DN
6130}
6131
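/* Illustrative sketch: the master construct lowers to roughly

       if (omp_get_thread_num () != 0) goto lab;
       ... body ...
     lab:
       GIMPLE_OMP_RETURN (nowait);

   i.e. only the thread with id 0 executes the body and no barrier is
   implied.  */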
e4834818 6132
629b3d75 6133/* Expand code for an OpenMP taskgroup directive. */
e4834818 6134
629b3d75
MJ
6135static void
6136lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
e4834818 6137{
629b3d75
MJ
6138 gimple *stmt = gsi_stmt (*gsi_p);
6139 gcall *x;
6140 gbind *bind;
6141 tree block = make_node (BLOCK);
e4834818 6142
629b3d75
MJ
6143 bind = gimple_build_bind (NULL, NULL, block);
6144 gsi_replace (gsi_p, bind, true);
6145 gimple_bind_add_stmt (bind, stmt);
e4834818 6146
629b3d75
MJ
6147 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6148 0);
6149 gimple_bind_add_stmt (bind, x);
e4834818 6150
629b3d75
MJ
6151 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6152 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6153 gimple_omp_set_body (stmt, NULL);
e4834818 6154
629b3d75 6155 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
e4834818 6156
629b3d75
MJ
6157 gimple_bind_append_vars (bind, ctx->block_vars);
6158 BLOCK_VARS (block) = ctx->block_vars;
e4834818
NS
6159}
6160
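/* Illustrative sketch: the taskgroup region becomes roughly

       GOMP_taskgroup_start ();
       ... body ...
       GIMPLE_OMP_RETURN;

   where the matching GOMP_taskgroup_end () call is assumed to be
   produced for the OMP_RETURN when the region is expanded, so tasks
   spawned in the body are waited for before leaving the region.  */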
50674e96 6161
629b3d75 6162/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
74bf76ed
JJ
6163
6164static void
629b3d75
MJ
6165lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6166 omp_context *ctx)
74bf76ed 6167{
629b3d75
MJ
6168 struct omp_for_data fd;
6169 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6170 return;
74bf76ed 6171
629b3d75
MJ
6172 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6173 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6174 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6175 if (!fd.ordered)
6176 return;
acf0174b 6177
629b3d75
MJ
6178 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6179 tree c = gimple_omp_ordered_clauses (ord_stmt);
6180 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6181 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
74bf76ed 6182 {
629b3d75
MJ
6183 /* Merge depend clauses from multiple adjacent
6184 #pragma omp ordered depend(sink:...) constructs
6185 into one #pragma omp ordered depend(sink:...), so that
6186 we can optimize them together. */
6187 gimple_stmt_iterator gsi = *gsi_p;
6188 gsi_next (&gsi);
6189 while (!gsi_end_p (gsi))
74bf76ed 6190 {
629b3d75
MJ
6191 gimple *stmt = gsi_stmt (gsi);
6192 if (is_gimple_debug (stmt)
6193 || gimple_code (stmt) == GIMPLE_NOP)
74bf76ed 6194 {
629b3d75
MJ
6195 gsi_next (&gsi);
6196 continue;
74bf76ed 6197 }
629b3d75
MJ
6198 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6199 break;
6200 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6201 c = gimple_omp_ordered_clauses (ord_stmt2);
6202 if (c == NULL_TREE
6203 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6204 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6205 break;
6206 while (*list_p)
6207 list_p = &OMP_CLAUSE_CHAIN (*list_p);
6208 *list_p = c;
6209 gsi_remove (&gsi, true);
74bf76ed
JJ
6210 }
6211 }
74bf76ed 6212
629b3d75
MJ
6213 /* Canonicalize sink dependence clauses into one folded clause if
6214 possible.
74bf76ed 6215
629b3d75
MJ
6216 The basic algorithm is to create a sink vector whose first
6217 element is the GCD of all the first elements, and whose remaining
6218 elements are the minimum of the subsequent columns.
74bf76ed 6219
629b3d75
MJ
6220 We ignore dependence vectors whose first element is zero because
6221 such dependencies are known to be executed by the same thread.
acf0174b 6222
629b3d75
MJ
6223 We take into account the direction of the loop, so a minimum
6224 becomes a maximum if the loop is iterating forwards. We also
6225 ignore sink clauses where the loop direction is unknown, or where
6226 the offsets are clearly invalid because they are not a multiple
6227 of the loop increment.
6228
6229 For example:
6230
6231 #pragma omp for ordered(2)
6232 for (i=0; i < N; ++i)
6233 for (j=0; j < M; ++j)
acf0174b 6234 {
629b3d75
MJ
6235 #pragma omp ordered \
6236 depend(sink:i-8,j-2) \
6237 depend(sink:i,j-1) \ // Completely ignored because i+0.
6238 depend(sink:i-4,j-3) \
6239 depend(sink:i-6,j-4)
6240 #pragma omp ordered depend(source)
acf0174b 6241 }
acf0174b 6242
629b3d75 6243 Folded clause is:
74bf76ed 6244
629b3d75
MJ
6245 depend(sink:-gcd(8,4,6),-min(2,3,4))
6246 -or-
6247 depend(sink:-2,-2)
6248 */
74bf76ed 6249
629b3d75
MJ
6250 /* FIXME: Computing GCD's where the first element is zero is
6251 non-trivial in the presence of collapsed loops. Do this later. */
6252 if (fd.collapse > 1)
6253 return;
74bf76ed 6254
629b3d75 6255 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
c3684b7b
MS
6256
6257 /* wide_int is not a POD so it must be default-constructed. */
6258 for (unsigned i = 0; i != 2 * len - 1; ++i)
6259 new (static_cast<void*>(folded_deps + i)) wide_int ();
6260
629b3d75
MJ
6261 tree folded_dep = NULL_TREE;
6262 /* TRUE if the first dimension's offset is negative. */
6263 bool neg_offset_p = false;
74bf76ed 6264
629b3d75
MJ
6265 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6266 unsigned int i;
6267 while ((c = *list_p) != NULL)
74bf76ed 6268 {
629b3d75 6269 bool remove = false;
74bf76ed 6270
629b3d75
MJ
6271 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6272 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6273 goto next_ordered_clause;
74bf76ed 6274
629b3d75
MJ
6275 tree vec;
6276 for (vec = OMP_CLAUSE_DECL (c), i = 0;
6277 vec && TREE_CODE (vec) == TREE_LIST;
6278 vec = TREE_CHAIN (vec), ++i)
74bf76ed 6279 {
629b3d75 6280 gcc_assert (i < len);
74bf76ed 6281
629b3d75
MJ
6282 /* omp_extract_for_data has canonicalized the condition. */
6283 gcc_assert (fd.loops[i].cond_code == LT_EXPR
6284 || fd.loops[i].cond_code == GT_EXPR);
6285 bool forward = fd.loops[i].cond_code == LT_EXPR;
6286 bool maybe_lexically_later = true;
953ff289 6287
629b3d75
MJ
6288 /* While the committee makes up its mind, bail if we have any
6289 non-constant steps. */
6290 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6291 goto lower_omp_ordered_ret;
953ff289 6292
629b3d75
MJ
6293 tree itype = TREE_TYPE (TREE_VALUE (vec));
6294 if (POINTER_TYPE_P (itype))
6295 itype = sizetype;
8e6cdc90 6296 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
629b3d75
MJ
6297 TYPE_PRECISION (itype),
6298 TYPE_SIGN (itype));
a68ab351 6299
629b3d75 6300 /* Ignore invalid offsets that are not multiples of the step. */
8e6cdc90
RS
6301 if (!wi::multiple_of_p (wi::abs (offset),
6302 wi::abs (wi::to_wide (fd.loops[i].step)),
6303 UNSIGNED))
b4c3a85b 6304 {
629b3d75
MJ
6305 warning_at (OMP_CLAUSE_LOCATION (c), 0,
6306 "ignoring sink clause with offset that is not "
6307 "a multiple of the loop step");
6308 remove = true;
6309 goto next_ordered_clause;
b4c3a85b 6310 }
d9a6bd32 6311
629b3d75
MJ
6312 /* Calculate the first dimension. The first dimension of
6313 the folded dependency vector is the GCD of the first
6314 elements, while ignoring any first elements whose offset
6315 is 0. */
6316 if (i == 0)
b4c3a85b 6317 {
629b3d75
MJ
6318 /* Ignore dependence vectors whose first dimension is 0. */
6319 if (offset == 0)
b4c3a85b 6320 {
629b3d75
MJ
6321 remove = true;
6322 goto next_ordered_clause;
b4c3a85b 6323 }
d9a6bd32 6324 else
629b3d75
MJ
6325 {
6326 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6327 {
6328 error_at (OMP_CLAUSE_LOCATION (c),
6329 "first offset must be in opposite direction "
6330 "of loop iterations");
6331 goto lower_omp_ordered_ret;
6332 }
6333 if (forward)
6334 offset = -offset;
6335 neg_offset_p = forward;
6336 /* Initialize the first time around. */
6337 if (folded_dep == NULL_TREE)
6338 {
6339 folded_dep = c;
6340 folded_deps[0] = offset;
6341 }
6342 else
6343 folded_deps[0] = wi::gcd (folded_deps[0],
6344 offset, UNSIGNED);
6345 }
d9a6bd32 6346 }
629b3d75 6347 /* Calculate minimum for the remaining dimensions. */
d9a6bd32 6348 else
d9a6bd32 6349 {
629b3d75
MJ
6350 folded_deps[len + i - 1] = offset;
6351 if (folded_dep == c)
6352 folded_deps[i] = offset;
6353 else if (maybe_lexically_later
6354 && !wi::eq_p (folded_deps[i], offset))
6355 {
6356 if (forward ^ wi::gts_p (folded_deps[i], offset))
6357 {
6358 unsigned int j;
6359 folded_dep = c;
6360 for (j = 1; j <= i; j++)
6361 folded_deps[j] = folded_deps[len + j - 1];
6362 }
6363 else
6364 maybe_lexically_later = false;
6365 }
d9a6bd32 6366 }
d9a6bd32 6367 }
629b3d75 6368 gcc_assert (i == len);
d9a6bd32 6369
629b3d75
MJ
6370 remove = true;
6371
6372 next_ordered_clause:
6373 if (remove)
6374 *list_p = OMP_CLAUSE_CHAIN (c);
d9a6bd32 6375 else
629b3d75 6376 list_p = &OMP_CLAUSE_CHAIN (c);
d9a6bd32 6377 }
d9a6bd32 6378
629b3d75 6379 if (folded_dep)
d9a6bd32 6380 {
629b3d75
MJ
6381 if (neg_offset_p)
6382 folded_deps[0] = -folded_deps[0];
d9a6bd32 6383
629b3d75
MJ
6384 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6385 if (POINTER_TYPE_P (itype))
6386 itype = sizetype;
6387
6388 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6389 = wide_int_to_tree (itype, folded_deps[0]);
6390 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6391 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
d9a6bd32
JJ
6392 }
6393
629b3d75 6394 lower_omp_ordered_ret:
d9a6bd32 6395
629b3d75
MJ
 6396 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
 6397 while we want a nop instead if we remove all clauses. */
6398 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6399 gsi_replace (gsi_p, gimple_build_nop (), true);
d9a6bd32
JJ
6400}
6401
6402
629b3d75 6403/* Expand code for an OpenMP ordered directive. */
953ff289 6404
777f7f9a 6405static void
629b3d75 6406lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 6407{
629b3d75
MJ
6408 tree block;
6409 gimple *stmt = gsi_stmt (*gsi_p), *g;
6410 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6411 gcall *x;
6412 gbind *bind;
6413 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6414 OMP_CLAUSE_SIMD);
6415 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6416 loop. */
6417 bool maybe_simt
6418 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6419 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6420 OMP_CLAUSE_THREADS);
d9a6bd32 6421
629b3d75
MJ
6422 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6423 OMP_CLAUSE_DEPEND))
d9a6bd32 6424 {
629b3d75
MJ
 6425 /* FIXME: This needs to be moved to the expansion to verify various
6426 conditions only testable on cfg with dominators computed, and also
6427 all the depend clauses to be merged still might need to be available
6428 for the runtime checks. */
6429 if (0)
6430 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6431 return;
a68ab351 6432 }
d9a6bd32 6433
629b3d75
MJ
6434 push_gimplify_context ();
6435
6436 block = make_node (BLOCK);
6437 bind = gimple_build_bind (NULL, NULL, block);
6438 gsi_replace (gsi_p, bind, true);
6439 gimple_bind_add_stmt (bind, stmt);
d9a6bd32 6440
629b3d75 6441 if (simd)
917948d3 6442 {
629b3d75
MJ
6443 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6444 build_int_cst (NULL_TREE, threads));
6445 cfun->has_simduid_loops = true;
917948d3
ZD
6446 }
6447 else
629b3d75
MJ
6448 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6449 0);
6450 gimple_bind_add_stmt (bind, x);
6451
6452 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6453 if (maybe_simt)
953ff289 6454 {
629b3d75
MJ
6455 counter = create_tmp_var (integer_type_node);
6456 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6457 gimple_call_set_lhs (g, counter);
6458 gimple_bind_add_stmt (bind, g);
d9a6bd32 6459
629b3d75
MJ
6460 body = create_artificial_label (UNKNOWN_LOCATION);
6461 test = create_artificial_label (UNKNOWN_LOCATION);
6462 gimple_bind_add_stmt (bind, gimple_build_label (body));
953ff289 6463
629b3d75
MJ
6464 tree simt_pred = create_tmp_var (integer_type_node);
6465 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6466 gimple_call_set_lhs (g, simt_pred);
6467 gimple_bind_add_stmt (bind, g);
d9a6bd32 6468
629b3d75
MJ
6469 tree t = create_artificial_label (UNKNOWN_LOCATION);
6470 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6471 gimple_bind_add_stmt (bind, g);
74bf76ed 6472
629b3d75 6473 gimple_bind_add_stmt (bind, gimple_build_label (t));
acf0174b 6474 }
629b3d75
MJ
6475 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6476 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6477 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6478 gimple_omp_set_body (stmt, NULL);
acf0174b 6479
629b3d75 6480 if (maybe_simt)
d9a6bd32 6481 {
629b3d75
MJ
6482 gimple_bind_add_stmt (bind, gimple_build_label (test));
6483 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6484 gimple_bind_add_stmt (bind, g);
50674e96 6485
629b3d75
MJ
6486 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6487 tree nonneg = create_tmp_var (integer_type_node);
6488 gimple_seq tseq = NULL;
6489 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6490 gimple_bind_add_seq (bind, tseq);
d9a6bd32 6491
629b3d75
MJ
6492 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6493 gimple_call_set_lhs (g, nonneg);
6494 gimple_bind_add_stmt (bind, g);
d9a6bd32 6495
629b3d75
MJ
6496 tree end = create_artificial_label (UNKNOWN_LOCATION);
6497 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6498 gimple_bind_add_stmt (bind, g);
50674e96 6499
629b3d75 6500 gimple_bind_add_stmt (bind, gimple_build_label (end));
e5c95afe 6501 }
629b3d75
MJ
6502 if (simd)
6503 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6504 build_int_cst (NULL_TREE, threads));
777f7f9a 6505 else
629b3d75
MJ
6506 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6507 0);
6508 gimple_bind_add_stmt (bind, x);
917948d3 6509
629b3d75 6510 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
917948d3 6511
629b3d75 6512 pop_gimplify_context (bind);
917948d3 6513
629b3d75
MJ
6514 gimple_bind_append_vars (bind, ctx->block_vars);
6515 BLOCK_VARS (block) = gimple_bind_vars (bind);
6516}
56102c7f 6517
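/* Illustrative sketch: a plain "#pragma omp ordered" block (no depend
   clause, not inside a simd loop) lowers to roughly

       GOMP_ordered_start ();
       ... body ...
       GOMP_ordered_end ();
       GIMPLE_OMP_RETURN;

   while inside a simd loop the calls are replaced by the
   GOMP_SIMD_ORDERED_START/END internal functions, plus the SIMT
   lane-serialization loop above when offloading might be involved.  */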
56102c7f 6518
629b3d75
MJ
6519/* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
6520 substitution of a couple of function calls. But in the NAMED case,
6521 requires that languages coordinate a symbol name. It is therefore
6522 best put here in common code. */
56102c7f 6523
629b3d75 6524static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
56102c7f 6525
629b3d75
MJ
6526static void
6527lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6528{
6529 tree block;
6530 tree name, lock, unlock;
6531 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6532 gbind *bind;
6533 location_t loc = gimple_location (stmt);
6534 gimple_seq tbody;
56102c7f 6535
629b3d75
MJ
6536 name = gimple_omp_critical_name (stmt);
6537 if (name)
6538 {
6539 tree decl;
56102c7f 6540
629b3d75
MJ
6541 if (!critical_name_mutexes)
6542 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
56102c7f 6543
629b3d75
MJ
6544 tree *n = critical_name_mutexes->get (name);
6545 if (n == NULL)
74bf76ed 6546 {
629b3d75 6547 char *new_str;
953ff289 6548
629b3d75 6549 decl = create_tmp_var_raw (ptr_type_node);
953ff289 6550
629b3d75
MJ
6551 new_str = ACONCAT ((".gomp_critical_user_",
6552 IDENTIFIER_POINTER (name), NULL));
6553 DECL_NAME (decl) = get_identifier (new_str);
6554 TREE_PUBLIC (decl) = 1;
6555 TREE_STATIC (decl) = 1;
6556 DECL_COMMON (decl) = 1;
6557 DECL_ARTIFICIAL (decl) = 1;
6558 DECL_IGNORED_P (decl) = 1;
953ff289 6559
629b3d75 6560 varpool_node::finalize_decl (decl);
953ff289 6561
629b3d75
MJ
6562 critical_name_mutexes->put (name, decl);
6563 }
6564 else
6565 decl = *n;
953ff289 6566
629b3d75
MJ
6567 /* If '#pragma omp critical' is inside offloaded region or
6568 inside function marked as offloadable, the symbol must be
6569 marked as offloadable too. */
6570 omp_context *octx;
6571 if (cgraph_node::get (current_function_decl)->offloadable)
6572 varpool_node::get_create (decl)->offloadable = 1;
6573 else
6574 for (octx = ctx->outer; octx; octx = octx->outer)
6575 if (is_gimple_omp_offloaded (octx->stmt))
6576 {
6577 varpool_node::get_create (decl)->offloadable = 1;
6578 break;
6579 }
777f7f9a 6580
629b3d75 6581 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
01914336
MJ
6582 lock = build_call_expr_loc (loc, lock, 1,
6583 build_fold_addr_expr_loc (loc, decl));
777f7f9a 6584
629b3d75
MJ
6585 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6586 unlock = build_call_expr_loc (loc, unlock, 1,
6587 build_fold_addr_expr_loc (loc, decl));
acf0174b 6588 }
acf0174b 6589 else
5a0f4dd3 6590 {
629b3d75
MJ
6591 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6592 lock = build_call_expr_loc (loc, lock, 0);
5a0f4dd3 6593
629b3d75
MJ
6594 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6595 unlock = build_call_expr_loc (loc, unlock, 0);
acf0174b 6596 }
953ff289 6597
629b3d75 6598 push_gimplify_context ();
fb79f500 6599
629b3d75
MJ
6600 block = make_node (BLOCK);
6601 bind = gimple_build_bind (NULL, NULL, block);
6602 gsi_replace (gsi_p, bind, true);
6603 gimple_bind_add_stmt (bind, stmt);
fb79f500 6604
629b3d75
MJ
6605 tbody = gimple_bind_body (bind);
6606 gimplify_and_add (lock, &tbody);
6607 gimple_bind_set_body (bind, tbody);
fb79f500 6608
629b3d75
MJ
6609 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6610 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6611 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6612 gimple_omp_set_body (stmt, NULL);
953ff289 6613
629b3d75
MJ
6614 tbody = gimple_bind_body (bind);
6615 gimplify_and_add (unlock, &tbody);
6616 gimple_bind_set_body (bind, tbody);
953ff289 6617
629b3d75 6618 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
917948d3 6619
629b3d75
MJ
6620 pop_gimplify_context (bind);
6621 gimple_bind_append_vars (bind, ctx->block_vars);
6622 BLOCK_VARS (block) = gimple_bind_vars (bind);
6623}
50674e96 6624
629b3d75
MJ
6625/* A subroutine of lower_omp_for. Generate code to emit the predicate
6626 for a lastprivate clause. Given a loop control predicate of (V
6627 cond N2), we gate the clause on (!(V cond N2)). The lowered form
 6628 is appended to *DLIST; iterator initialization is appended to
6629 *BODY_P. */
50674e96 6630
629b3d75
MJ
6631static void
6632lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6633 gimple_seq *dlist, struct omp_context *ctx)
6634{
6635 tree clauses, cond, vinit;
6636 enum tree_code cond_code;
6637 gimple_seq stmts;
953ff289 6638
629b3d75
MJ
6639 cond_code = fd->loop.cond_code;
6640 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
acf0174b 6641
629b3d75
MJ
 6642 /* When possible, use a strict equality expression.  This can let
 6643 VRP-type optimizations deduce the value and remove a copy.  */
6644 if (tree_fits_shwi_p (fd->loop.step))
acf0174b 6645 {
629b3d75
MJ
6646 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6647 if (step == 1 || step == -1)
6648 cond_code = EQ_EXPR;
acf0174b 6649 }
629b3d75
MJ
6650
6651 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6652 || gimple_omp_for_grid_phony (fd->for_stmt))
6653 cond = omp_grid_lastprivate_predicate (fd);
a68ab351 6654 else
acf0174b 6655 {
629b3d75
MJ
6656 tree n2 = fd->loop.n2;
6657 if (fd->collapse > 1
6658 && TREE_CODE (n2) != INTEGER_CST
6659 && gimple_omp_for_combined_into_p (fd->for_stmt))
d9a6bd32 6660 {
629b3d75
MJ
6661 struct omp_context *taskreg_ctx = NULL;
6662 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
d9a6bd32 6663 {
629b3d75
MJ
6664 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6665 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6666 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
d9a6bd32 6667 {
629b3d75
MJ
6668 if (gimple_omp_for_combined_into_p (gfor))
6669 {
6670 gcc_assert (ctx->outer->outer
6671 && is_parallel_ctx (ctx->outer->outer));
6672 taskreg_ctx = ctx->outer->outer;
6673 }
6674 else
6675 {
6676 struct omp_for_data outer_fd;
6677 omp_extract_for_data (gfor, &outer_fd, NULL);
6678 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6679 }
d9a6bd32 6680 }
629b3d75
MJ
6681 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6682 taskreg_ctx = ctx->outer->outer;
6683 }
6684 else if (is_taskreg_ctx (ctx->outer))
6685 taskreg_ctx = ctx->outer;
6686 if (taskreg_ctx)
6687 {
6688 int i;
6689 tree taskreg_clauses
6690 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6691 tree innerc = omp_find_clause (taskreg_clauses,
6692 OMP_CLAUSE__LOOPTEMP_);
6693 gcc_assert (innerc);
6694 for (i = 0; i < fd->collapse; i++)
6695 {
6696 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6697 OMP_CLAUSE__LOOPTEMP_);
6698 gcc_assert (innerc);
6699 }
6700 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6701 OMP_CLAUSE__LOOPTEMP_);
6702 if (innerc)
6703 n2 = fold_convert (TREE_TYPE (n2),
6704 lookup_decl (OMP_CLAUSE_DECL (innerc),
6705 taskreg_ctx));
d9a6bd32 6706 }
acf0174b 6707 }
629b3d75 6708 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
acf0174b 6709 }
50674e96 6710
629b3d75
MJ
6711 clauses = gimple_omp_for_clauses (fd->for_stmt);
6712 stmts = NULL;
6713 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6714 if (!gimple_seq_empty_p (stmts))
acf0174b 6715 {
629b3d75
MJ
6716 gimple_seq_add_seq (&stmts, *dlist);
6717 *dlist = stmts;
6093bc06 6718
629b3d75
MJ
6719 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
6720 vinit = fd->loop.n1;
6721 if (cond_code == EQ_EXPR
6722 && tree_fits_shwi_p (fd->loop.n2)
6723 && ! integer_zerop (fd->loop.n2))
6724 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6725 else
6726 vinit = unshare_expr (vinit);
e67d7a1e 6727
629b3d75
MJ
6728 /* Initialize the iterator variable, so that threads that don't execute
6729 any iterations don't execute the lastprivate clauses by accident. */
6730 gimplify_assign (fd->loop.v, vinit, body_p);
acf0174b 6731 }
953ff289
DN
6732}
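/* Illustrative sketch (not part of this file): a loop whose lastprivate
   copy-back is gated by the predicate built above.  With a unit step the
   guard !(i < n) is narrowed to i == n, which VRP-style passes can fold;
   threads that receive no iterations still have the iteration variable
   pre-initialized, so the guard cannot fire for them by accident.  */
int
last_element (int n, const int *a)
{
  int i, last;

  if (n <= 0)
    return -1;
#pragma omp parallel for lastprivate (last)
  for (i = 0; i < n; i++)
    last = a[i];
  return last;   /* Value stored in the sequentially last iteration.  */
}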
6733
1b96e9a4 6734
629b3d75 6735/* Lower code for an OMP loop directive. */
50674e96 6736
629b3d75
MJ
6737static void
6738lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6739{
6740 tree *rhs_p, block;
6741 struct omp_for_data fd, *fdp = NULL;
6742 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6743 gbind *new_stmt;
6744 gimple_seq omp_for_body, body, dlist;
6745 gimple_seq oacc_head = NULL, oacc_tail = NULL;
6746 size_t i;
953ff289 6747
629b3d75 6748 push_gimplify_context ();
953ff289 6749
629b3d75 6750 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
953ff289 6751
629b3d75
MJ
6752 block = make_node (BLOCK);
6753 new_stmt = gimple_build_bind (NULL, NULL, block);
6754 /* Replace at gsi right away, so that 'stmt' is no member
6755 of a sequence anymore as we're going to add to a different
6756 one below. */
6757 gsi_replace (gsi_p, new_stmt, true);
953ff289 6758
629b3d75
MJ
 6759 /* Move declarations of temporaries in the loop body before we make
6760 it go away. */
6761 omp_for_body = gimple_omp_body (stmt);
6762 if (!gimple_seq_empty_p (omp_for_body)
6763 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
acf0174b 6764 {
629b3d75
MJ
6765 gbind *inner_bind
6766 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6767 tree vars = gimple_bind_vars (inner_bind);
6768 gimple_bind_append_vars (new_stmt, vars);
6769 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
 6770 keep them on the inner_bind and its block.  */
6771 gimple_bind_set_vars (inner_bind, NULL_TREE);
6772 if (gimple_bind_block (inner_bind))
6773 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
acf0174b 6774 }
50674e96 6775
629b3d75 6776 if (gimple_omp_for_combined_into_p (stmt))
5a0f4dd3 6777 {
629b3d75
MJ
6778 omp_extract_for_data (stmt, &fd, NULL);
6779 fdp = &fd;
6780
6781 /* We need two temporaries with fd.loop.v type (istart/iend)
6782 and then (fd.collapse - 1) temporaries with the same
6783 type for count2 ... countN-1 vars if not constant. */
6784 size_t count = 2;
6785 tree type = fd.iter_type;
6786 if (fd.collapse > 1
6787 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6788 count += fd.collapse - 1;
6789 bool taskreg_for
6790 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6791 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6792 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6e6cf7b0 6793 tree simtc = NULL;
629b3d75
MJ
6794 tree clauses = *pc;
6795 if (taskreg_for)
6796 outerc
6797 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6798 OMP_CLAUSE__LOOPTEMP_);
6e6cf7b0
JJ
6799 if (ctx->simt_stmt)
6800 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6801 OMP_CLAUSE__LOOPTEMP_);
629b3d75 6802 for (i = 0; i < count; i++)
5a0f4dd3 6803 {
629b3d75
MJ
6804 tree temp;
6805 if (taskreg_for)
6806 {
6807 gcc_assert (outerc);
6808 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6809 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6810 OMP_CLAUSE__LOOPTEMP_);
6811 }
6812 else
5a0f4dd3 6813 {
6e6cf7b0
JJ
6814 /* If there are 2 adjacent SIMD stmts, one with _simt_
6815 clause, another without, make sure they have the same
6816 decls in _looptemp_ clauses, because the outer stmt
6817 they are combined into will look up just one inner_stmt. */
6818 if (ctx->simt_stmt)
6819 temp = OMP_CLAUSE_DECL (simtc);
6820 else
6821 temp = create_tmp_var (type);
629b3d75 6822 insert_decl_map (&ctx->outer->cb, temp, temp);
5a0f4dd3 6823 }
629b3d75
MJ
6824 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6825 OMP_CLAUSE_DECL (*pc) = temp;
6826 pc = &OMP_CLAUSE_CHAIN (*pc);
6e6cf7b0
JJ
6827 if (ctx->simt_stmt)
6828 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6829 OMP_CLAUSE__LOOPTEMP_);
5a0f4dd3 6830 }
629b3d75 6831 *pc = clauses;
5a0f4dd3
JJ
6832 }
6833
629b3d75
MJ
6834 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
6835 dlist = NULL;
6836 body = NULL;
6837 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6838 fdp);
6839 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
917948d3 6840
629b3d75 6841 lower_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289 6842
629b3d75
MJ
6843 /* Lower the header expressions. At this point, we can assume that
6844 the header is of the form:
50674e96 6845
629b3d75 6846 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
917948d3 6847
629b3d75
MJ
6848 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6849 using the .omp_data_s mapping, if needed. */
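  /* For example (illustrative): in

         #pragma omp for
         for (i = 0; i < n; i += 4)

     the bound N is not gimple-invariant, so it is replaced below by a
     formal temporary initialized from its remapped location (e.g. the
     .omp_data_s receiver structure when N comes from an enclosing
     parallel); the GIMPLE_OMP_FOR header then refers only to invariants
     and temporaries.  */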
6850 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6851 {
6852 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6853 if (!is_gimple_min_invariant (*rhs_p))
6854 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
0fe4bc78
JJ
6855 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6856 recompute_tree_invariant_for_addr_expr (*rhs_p);
50674e96 6857
629b3d75
MJ
6858 rhs_p = gimple_omp_for_final_ptr (stmt, i);
6859 if (!is_gimple_min_invariant (*rhs_p))
6860 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
0fe4bc78
JJ
6861 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6862 recompute_tree_invariant_for_addr_expr (*rhs_p);
d9a6bd32 6863
629b3d75
MJ
6864 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6865 if (!is_gimple_min_invariant (*rhs_p))
6866 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
6867 }
953ff289 6868
629b3d75
MJ
6869 /* Once lowered, extract the bounds and clauses. */
6870 omp_extract_for_data (stmt, &fd, NULL);
953ff289 6871
629b3d75
MJ
6872 if (is_gimple_omp_oacc (ctx->stmt)
6873 && !ctx_in_oacc_kernels_region (ctx))
6874 lower_oacc_head_tail (gimple_location (stmt),
6875 gimple_omp_for_clauses (stmt),
6876 &oacc_head, &oacc_tail, ctx);
953ff289 6877
01914336 6878 /* Add OpenACC partitioning and reduction markers just before the loop. */
629b3d75
MJ
6879 if (oacc_head)
6880 gimple_seq_add_seq (&body, oacc_head);
01914336 6881
629b3d75 6882 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
acf0174b 6883
629b3d75
MJ
6884 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6885 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
d9a6bd32
JJ
6886 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6887 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6888 {
629b3d75
MJ
6889 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6890 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6891 OMP_CLAUSE_LINEAR_STEP (c)
6892 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6893 ctx);
d9a6bd32 6894 }
acf0174b 6895
629b3d75
MJ
6896 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6897 && gimple_omp_for_grid_phony (stmt));
6898 if (!phony_loop)
6899 gimple_seq_add_stmt (&body, stmt);
6900 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6901
6902 if (!phony_loop)
6903 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6904 fd.loop.v));
917948d3 6905
629b3d75
MJ
6906 /* After the loop, add exit clauses. */
6907 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
b8698a0f 6908
629b3d75
MJ
6909 if (ctx->cancellable)
6910 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
50674e96 6911
629b3d75 6912 gimple_seq_add_seq (&body, dlist);
953ff289 6913
629b3d75 6914 body = maybe_catch_exception (body);
953ff289 6915
629b3d75 6916 if (!phony_loop)
acf0174b 6917 {
629b3d75
MJ
6918 /* Region exit marker goes at the end of the loop body. */
6919 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6920 maybe_add_implicit_barrier_cancel (ctx, &body);
acf0174b 6921 }
953ff289 6922
629b3d75
MJ
6923 /* Add OpenACC joining and reduction markers just after the loop. */
6924 if (oacc_tail)
6925 gimple_seq_add_seq (&body, oacc_tail);
917948d3 6926
629b3d75 6927 pop_gimplify_context (new_stmt);
917948d3 6928
629b3d75 6929 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6724f8a6 6930 maybe_remove_omp_member_access_dummy_vars (new_stmt);
629b3d75
MJ
6931 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
6932 if (BLOCK_VARS (block))
6933 TREE_USED (block) = 1;
917948d3 6934
629b3d75
MJ
6935 gimple_bind_set_body (new_stmt, body);
6936 gimple_omp_set_body (stmt, NULL);
6937 gimple_omp_for_set_pre_body (stmt, NULL);
6938}
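/* Illustrative summary (not part of this file) of the sequence built by
   lower_omp_for above for a non-phony loop; the new bind ends up holding,
   in order,

       <rec-input clauses>          privatization, firstprivate copies
       <lowered pre-body and header temporaries>
       GIMPLE_OMP_FOR               header now refers only to invariants/temps
       <lowered loop body>
       GIMPLE_OMP_CONTINUE (V, V)
       <reduction clauses>  <dlist/lastprivate>
       GIMPLE_OMP_RETURN            nowait if the clause was present

   bracketed by the OpenACC head/tail markers when inside an offloaded
   OpenACC construct and wrapped in an EH cleanup where needed.  The actual
   iteration and scheduling code is generated later by pass_expand_omp.  */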
17720e84 6939
629b3d75
MJ
6940/* Callback for walk_stmts. Check if the current statement only contains
6941 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
917948d3 6942
629b3d75
MJ
6943static tree
6944check_combined_parallel (gimple_stmt_iterator *gsi_p,
6945 bool *handled_ops_p,
6946 struct walk_stmt_info *wi)
6947{
6948 int *info = (int *) wi->info;
6949 gimple *stmt = gsi_stmt (*gsi_p);
917948d3 6950
629b3d75
MJ
6951 *handled_ops_p = true;
6952 switch (gimple_code (stmt))
acf0174b 6953 {
629b3d75 6954 WALK_SUBSTMTS;
8cba6b95 6955
65f4b875
AO
6956 case GIMPLE_DEBUG:
6957 break;
629b3d75
MJ
6958 case GIMPLE_OMP_FOR:
6959 case GIMPLE_OMP_SECTIONS:
6960 *info = *info == 0 ? 1 : -1;
6961 break;
6962 default:
6963 *info = -1;
6964 break;
acf0174b 6965 }
629b3d75 6966 return NULL;
953ff289
DN
6967}
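/* Illustrative example (not part of this file): the walker above lets

       #pragma omp parallel
       {
         #pragma omp for
         for (i = 0; i < n; i++)
           a[i] = i;
       }

   be treated like the combined "#pragma omp parallel for" form.  The
   parallel body contains exactly one GIMPLE_OMP_FOR (ws_num == 1), so
   lower_omp_taskreg marks the parallel as combined, which in turn lets
   later expansion use the combined parallel-loop runtime entry points.
   Any other statement at that level sets the count to -1 and keeps the
   constructs separate.  */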
6968
629b3d75
MJ
6969struct omp_taskcopy_context
6970{
6971 /* This field must be at the beginning, as we do "inheritance": Some
6972 callback functions for tree-inline.c (e.g., omp_copy_decl)
6973 receive a copy_body_data pointer that is up-casted to an
6974 omp_context pointer. */
6975 copy_body_data cb;
6976 omp_context *ctx;
6977};
9a771876 6978
629b3d75
MJ
6979static tree
6980task_copyfn_copy_decl (tree var, copy_body_data *cb)
6981{
6982 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
9a771876 6983
629b3d75
MJ
6984 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6985 return create_tmp_var (TREE_TYPE (var));
9a771876 6986
629b3d75
MJ
6987 return var;
6988}
9a771876 6989
629b3d75
MJ
6990static tree
6991task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
9a771876 6992{
629b3d75 6993 tree name, new_fields = NULL, type, f;
9a771876 6994
629b3d75
MJ
6995 type = lang_hooks.types.make_type (RECORD_TYPE);
6996 name = DECL_NAME (TYPE_NAME (orig_type));
6997 name = build_decl (gimple_location (tcctx->ctx->stmt),
6998 TYPE_DECL, name, type);
6999 TYPE_NAME (type) = name;
9a771876 7000
629b3d75 7001 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
9a771876 7002 {
629b3d75
MJ
7003 tree new_f = copy_node (f);
7004 DECL_CONTEXT (new_f) = type;
7005 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7006 TREE_CHAIN (new_f) = new_fields;
7007 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7008 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7009 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7010 &tcctx->cb, NULL);
7011 new_fields = new_f;
7012 tcctx->cb.decl_map->put (f, new_f);
9a771876 7013 }
629b3d75
MJ
7014 TYPE_FIELDS (type) = nreverse (new_fields);
7015 layout_type (type);
7016 return type;
7017}
9a771876 7018
629b3d75 7019/* Create the task copy function (copyfn): it copies shared variable pointers and firstprivate data into the task's argument record.  */
9a771876 7020
629b3d75
MJ
7021static void
7022create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7023{
7024 struct function *child_cfun;
7025 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7026 tree record_type, srecord_type, bind, list;
7027 bool record_needs_remap = false, srecord_needs_remap = false;
7028 splay_tree_node n;
7029 struct omp_taskcopy_context tcctx;
7030 location_t loc = gimple_location (task_stmt);
a3bccfa1 7031 size_t looptempno = 0;
9a771876 7032
629b3d75
MJ
7033 child_fn = gimple_omp_task_copy_fn (task_stmt);
7034 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7035 gcc_assert (child_cfun->cfg == NULL);
7036 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
9a771876 7037
629b3d75
MJ
7038 /* Reset DECL_CONTEXT on function arguments. */
7039 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7040 DECL_CONTEXT (t) = child_fn;
9a771876 7041
629b3d75
MJ
7042 /* Populate the function. */
7043 push_gimplify_context ();
7044 push_cfun (child_cfun);
9a771876 7045
629b3d75
MJ
7046 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7047 TREE_SIDE_EFFECTS (bind) = 1;
7048 list = NULL;
7049 DECL_SAVED_TREE (child_fn) = bind;
7050 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
9a771876 7051
629b3d75
MJ
7052 /* Remap src and dst argument types if needed. */
7053 record_type = ctx->record_type;
7054 srecord_type = ctx->srecord_type;
7055 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7056 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7057 {
7058 record_needs_remap = true;
7059 break;
7060 }
7061 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7062 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7063 {
7064 srecord_needs_remap = true;
7065 break;
7066 }
9a771876 7067
629b3d75 7068 if (record_needs_remap || srecord_needs_remap)
9a771876 7069 {
629b3d75
MJ
7070 memset (&tcctx, '\0', sizeof (tcctx));
7071 tcctx.cb.src_fn = ctx->cb.src_fn;
7072 tcctx.cb.dst_fn = child_fn;
7073 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7074 gcc_checking_assert (tcctx.cb.src_node);
7075 tcctx.cb.dst_node = tcctx.cb.src_node;
7076 tcctx.cb.src_cfun = ctx->cb.src_cfun;
7077 tcctx.cb.copy_decl = task_copyfn_copy_decl;
7078 tcctx.cb.eh_lp_nr = 0;
7079 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7080 tcctx.cb.decl_map = new hash_map<tree, tree>;
7081 tcctx.ctx = ctx;
9a771876 7082
629b3d75
MJ
7083 if (record_needs_remap)
7084 record_type = task_copyfn_remap_type (&tcctx, record_type);
7085 if (srecord_needs_remap)
7086 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
9a771876
JJ
7087 }
7088 else
629b3d75 7089 tcctx.cb.decl_map = NULL;
9a771876 7090
629b3d75
MJ
7091 arg = DECL_ARGUMENTS (child_fn);
7092 TREE_TYPE (arg) = build_pointer_type (record_type);
7093 sarg = DECL_CHAIN (arg);
7094 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
9a771876 7095
629b3d75
MJ
7096 /* First pass: initialize temporaries used in record_type and srecord_type
7097 sizes and field offsets. */
7098 if (tcctx.cb.decl_map)
7099 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7100 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7101 {
7102 tree *p;
9a771876 7103
629b3d75
MJ
7104 decl = OMP_CLAUSE_DECL (c);
7105 p = tcctx.cb.decl_map->get (decl);
7106 if (p == NULL)
7107 continue;
7108 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7109 sf = (tree) n->value;
7110 sf = *tcctx.cb.decl_map->get (sf);
7111 src = build_simple_mem_ref_loc (loc, sarg);
7112 src = omp_build_component_ref (src, sf);
7113 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7114 append_to_statement_list (t, &list);
7115 }
9a771876 7116
629b3d75
MJ
7117 /* Second pass: copy shared var pointers and copy construct non-VLA
7118 firstprivate vars. */
7119 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7120 switch (OMP_CLAUSE_CODE (c))
7121 {
7122 splay_tree_key key;
7123 case OMP_CLAUSE_SHARED:
7124 decl = OMP_CLAUSE_DECL (c);
7125 key = (splay_tree_key) decl;
7126 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7127 key = (splay_tree_key) &DECL_UID (decl);
7128 n = splay_tree_lookup (ctx->field_map, key);
7129 if (n == NULL)
7130 break;
7131 f = (tree) n->value;
7132 if (tcctx.cb.decl_map)
7133 f = *tcctx.cb.decl_map->get (f);
7134 n = splay_tree_lookup (ctx->sfield_map, key);
7135 sf = (tree) n->value;
7136 if (tcctx.cb.decl_map)
7137 sf = *tcctx.cb.decl_map->get (sf);
7138 src = build_simple_mem_ref_loc (loc, sarg);
7139 src = omp_build_component_ref (src, sf);
7140 dst = build_simple_mem_ref_loc (loc, arg);
7141 dst = omp_build_component_ref (dst, f);
7142 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7143 append_to_statement_list (t, &list);
7144 break;
a3bccfa1
JJ
7145 case OMP_CLAUSE__LOOPTEMP_:
 7146 /* Fields for the first two _looptemp_ clauses are initialized by
 7147 GOMP_taskloop*; the rest are handled like firstprivate.  */
7148 if (looptempno < 2)
7149 {
7150 looptempno++;
7151 break;
7152 }
7153 /* FALLTHRU */
629b3d75
MJ
7154 case OMP_CLAUSE_FIRSTPRIVATE:
7155 decl = OMP_CLAUSE_DECL (c);
7156 if (is_variable_sized (decl))
7157 break;
7158 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7159 if (n == NULL)
7160 break;
7161 f = (tree) n->value;
7162 if (tcctx.cb.decl_map)
7163 f = *tcctx.cb.decl_map->get (f);
7164 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7165 if (n != NULL)
7166 {
7167 sf = (tree) n->value;
7168 if (tcctx.cb.decl_map)
7169 sf = *tcctx.cb.decl_map->get (sf);
7170 src = build_simple_mem_ref_loc (loc, sarg);
7171 src = omp_build_component_ref (src, sf);
7172 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7173 src = build_simple_mem_ref_loc (loc, src);
7174 }
7175 else
7176 src = decl;
7177 dst = build_simple_mem_ref_loc (loc, arg);
7178 dst = omp_build_component_ref (dst, f);
a3bccfa1
JJ
7179 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__LOOPTEMP_)
7180 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7181 else
7182 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
629b3d75
MJ
7183 append_to_statement_list (t, &list);
7184 break;
7185 case OMP_CLAUSE_PRIVATE:
7186 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7187 break;
7188 decl = OMP_CLAUSE_DECL (c);
7189 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7190 f = (tree) n->value;
7191 if (tcctx.cb.decl_map)
7192 f = *tcctx.cb.decl_map->get (f);
7193 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7194 if (n != NULL)
7195 {
7196 sf = (tree) n->value;
7197 if (tcctx.cb.decl_map)
7198 sf = *tcctx.cb.decl_map->get (sf);
7199 src = build_simple_mem_ref_loc (loc, sarg);
7200 src = omp_build_component_ref (src, sf);
7201 if (use_pointer_for_field (decl, NULL))
7202 src = build_simple_mem_ref_loc (loc, src);
7203 }
7204 else
7205 src = decl;
7206 dst = build_simple_mem_ref_loc (loc, arg);
7207 dst = omp_build_component_ref (dst, f);
7208 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7209 append_to_statement_list (t, &list);
7210 break;
7211 default:
7212 break;
7213 }
74bf76ed 7214
629b3d75
MJ
7215 /* Last pass: handle VLA firstprivates. */
7216 if (tcctx.cb.decl_map)
7217 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7218 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7219 {
7220 tree ind, ptr, df;
74bf76ed 7221
629b3d75
MJ
7222 decl = OMP_CLAUSE_DECL (c);
7223 if (!is_variable_sized (decl))
7224 continue;
7225 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7226 if (n == NULL)
7227 continue;
7228 f = (tree) n->value;
7229 f = *tcctx.cb.decl_map->get (f);
7230 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7231 ind = DECL_VALUE_EXPR (decl);
7232 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7233 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7234 n = splay_tree_lookup (ctx->sfield_map,
7235 (splay_tree_key) TREE_OPERAND (ind, 0));
7236 sf = (tree) n->value;
7237 sf = *tcctx.cb.decl_map->get (sf);
7238 src = build_simple_mem_ref_loc (loc, sarg);
7239 src = omp_build_component_ref (src, sf);
7240 src = build_simple_mem_ref_loc (loc, src);
7241 dst = build_simple_mem_ref_loc (loc, arg);
7242 dst = omp_build_component_ref (dst, f);
7243 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7244 append_to_statement_list (t, &list);
7245 n = splay_tree_lookup (ctx->field_map,
7246 (splay_tree_key) TREE_OPERAND (ind, 0));
7247 df = (tree) n->value;
7248 df = *tcctx.cb.decl_map->get (df);
7249 ptr = build_simple_mem_ref_loc (loc, arg);
7250 ptr = omp_build_component_ref (ptr, df);
7251 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7252 build_fold_addr_expr_loc (loc, dst));
7253 append_to_statement_list (t, &list);
7254 }
74bf76ed 7255
629b3d75
MJ
7256 t = build1 (RETURN_EXPR, void_type_node, NULL);
7257 append_to_statement_list (t, &list);
74bf76ed 7258
629b3d75
MJ
7259 if (tcctx.cb.decl_map)
7260 delete tcctx.cb.decl_map;
7261 pop_gimplify_context (NULL);
7262 BIND_EXPR_BODY (bind) = list;
7263 pop_cfun ();
7264}
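/* Illustrative example (not part of this file): a task that needs the copy
   function generated above.  The VLA is firstprivate, so its bytes must be
   copied from the sender record (*SARG, filled in by the parent) into the
   task's own record (*ARG), and the pointer behind A's DECL_VALUE_EXPR is
   rewired to that copy; simple firstprivate scalars can often be handled
   by a plain block copy in the runtime instead.  CONSUME is a hypothetical
   external consumer, and N is assumed positive.  */
extern void consume (long);

void
task_over_vla (int n, int seed)
{
  int a[n];

  for (int i = 0; i < n; i++)
    a[i] = seed + i;

#pragma omp task firstprivate (a, n)
  {
    long sum = 0;
    for (int i = 0; i < n; i++)
      sum += a[i];
    consume (sum);
  }
}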
74bf76ed
JJ
7265
7266static void
629b3d75 7267lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
74bf76ed 7268{
629b3d75
MJ
7269 tree c, clauses;
7270 gimple *g;
7271 size_t n_in = 0, n_out = 0, idx = 2, i;
7272
7273 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7274 gcc_assert (clauses);
7275 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7276 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7277 switch (OMP_CLAUSE_DEPEND_KIND (c))
7278 {
7279 case OMP_CLAUSE_DEPEND_IN:
7280 n_in++;
7281 break;
7282 case OMP_CLAUSE_DEPEND_OUT:
7283 case OMP_CLAUSE_DEPEND_INOUT:
7284 n_out++;
7285 break;
7286 case OMP_CLAUSE_DEPEND_SOURCE:
7287 case OMP_CLAUSE_DEPEND_SINK:
7288 /* FALLTHRU */
7289 default:
7290 gcc_unreachable ();
7291 }
7292 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7293 tree array = create_tmp_var (type);
7294 TREE_ADDRESSABLE (array) = 1;
7295 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7296 NULL_TREE);
7297 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7298 gimple_seq_add_stmt (iseq, g);
7299 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7300 NULL_TREE);
7301 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7302 gimple_seq_add_stmt (iseq, g);
7303 for (i = 0; i < 2; i++)
74bf76ed 7304 {
629b3d75
MJ
7305 if ((i ? n_in : n_out) == 0)
7306 continue;
7307 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7308 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7309 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7310 {
7311 tree t = OMP_CLAUSE_DECL (c);
7312 t = fold_convert (ptr_type_node, t);
7313 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7314 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7315 NULL_TREE, NULL_TREE);
7316 g = gimple_build_assign (r, t);
7317 gimple_seq_add_stmt (iseq, g);
7318 }
74bf76ed 7319 }
629b3d75
MJ
7320 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7321 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7322 OMP_CLAUSE_CHAIN (c) = *pclauses;
7323 *pclauses = c;
7324 tree clobber = build_constructor (type, NULL);
7325 TREE_THIS_VOLATILE (clobber) = 1;
7326 g = gimple_build_assign (array, clobber);
7327 gimple_seq_add_stmt (oseq, g);
7328}
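/* Illustrative example (not part of this file) of the depend array built
   above.  For

       #pragma omp task depend (out: x) depend (in: y, z)

   the lowering creates a local array of N_IN + N_OUT + 2 pointers,

       deps[0] = 3;      total number of depend addresses
       deps[1] = 1;      number of out/inout addresses, which come first
       deps[2] = &x;     out/inout entries
       deps[3] = &y;     in entries follow
       deps[4] = &z;

   hands its address to the runtime through an artificial OMP_CLAUSE_DEPEND,
   and clobbers the array once the construct is done with it.  */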
7329
7330/* Lower the OpenMP parallel or task directive in the current statement
7331 in GSI_P. CTX holds context information for the directive. */
74bf76ed 7332
629b3d75
MJ
7333static void
7334lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7335{
7336 tree clauses;
7337 tree child_fn, t;
7338 gimple *stmt = gsi_stmt (*gsi_p);
7339 gbind *par_bind, *bind, *dep_bind = NULL;
7340 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7341 location_t loc = gimple_location (stmt);
74bf76ed 7342
629b3d75
MJ
7343 clauses = gimple_omp_taskreg_clauses (stmt);
7344 par_bind
7345 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7346 par_body = gimple_bind_body (par_bind);
7347 child_fn = ctx->cb.dst_fn;
7348 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7349 && !gimple_omp_parallel_combined_p (stmt))
74bf76ed 7350 {
629b3d75
MJ
7351 struct walk_stmt_info wi;
7352 int ws_num = 0;
74bf76ed 7353
629b3d75
MJ
7354 memset (&wi, 0, sizeof (wi));
7355 wi.info = &ws_num;
7356 wi.val_only = true;
7357 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7358 if (ws_num == 1)
7359 gimple_omp_parallel_set_combined_p (stmt, true);
74bf76ed 7360 }
629b3d75
MJ
7361 gimple_seq dep_ilist = NULL;
7362 gimple_seq dep_olist = NULL;
7363 if (gimple_code (stmt) == GIMPLE_OMP_TASK
7364 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
acf0174b 7365 {
629b3d75
MJ
7366 push_gimplify_context ();
7367 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7368 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7369 &dep_ilist, &dep_olist);
9669b00b 7370 }
9669b00b 7371
629b3d75
MJ
7372 if (ctx->srecord_type)
7373 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
9669b00b 7374
629b3d75 7375 push_gimplify_context ();
74bf76ed 7376
629b3d75
MJ
7377 par_olist = NULL;
7378 par_ilist = NULL;
7379 par_rlist = NULL;
7380 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7381 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7382 if (phony_construct && ctx->record_type)
9669b00b 7383 {
629b3d75
MJ
7384 gcc_checking_assert (!ctx->receiver_decl);
7385 ctx->receiver_decl = create_tmp_var
7386 (build_reference_type (ctx->record_type), ".omp_rec");
9669b00b 7387 }
629b3d75
MJ
7388 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7389 lower_omp (&par_body, ctx);
7390 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7391 lower_reduction_clauses (clauses, &par_rlist, ctx);
9669b00b 7392
629b3d75
MJ
7393 /* Declare all the variables created by mapping and the variables
7394 declared in the scope of the parallel body. */
7395 record_vars_into (ctx->block_vars, child_fn);
6724f8a6 7396 maybe_remove_omp_member_access_dummy_vars (par_bind);
629b3d75 7397 record_vars_into (gimple_bind_vars (par_bind), child_fn);
74bf76ed 7398
629b3d75 7399 if (ctx->record_type)
74bf76ed 7400 {
629b3d75
MJ
7401 ctx->sender_decl
7402 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7403 : ctx->record_type, ".omp_data_o");
7404 DECL_NAMELESS (ctx->sender_decl) = 1;
7405 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7406 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
74bf76ed 7407 }
74bf76ed 7408
629b3d75
MJ
7409 olist = NULL;
7410 ilist = NULL;
7411 lower_send_clauses (clauses, &ilist, &olist, ctx);
7412 lower_send_shared_vars (&ilist, &olist, ctx);
9669b00b 7413
629b3d75 7414 if (ctx->record_type)
74bf76ed 7415 {
629b3d75
MJ
7416 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7417 TREE_THIS_VOLATILE (clobber) = 1;
7418 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7419 clobber));
d9a6bd32 7420 }
d9a6bd32 7421
629b3d75
MJ
7422 /* Once all the expansions are done, sequence all the different
7423 fragments inside gimple_omp_body. */
d9a6bd32 7424
629b3d75 7425 new_body = NULL;
d9a6bd32 7426
629b3d75 7427 if (ctx->record_type)
d9a6bd32 7428 {
629b3d75
MJ
7429 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7430 /* fixup_child_record_type might have changed receiver_decl's type. */
7431 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7432 gimple_seq_add_stmt (&new_body,
7433 gimple_build_assign (ctx->receiver_decl, t));
d9a6bd32
JJ
7434 }
7435
629b3d75
MJ
7436 gimple_seq_add_seq (&new_body, par_ilist);
7437 gimple_seq_add_seq (&new_body, par_body);
7438 gimple_seq_add_seq (&new_body, par_rlist);
7439 if (ctx->cancellable)
7440 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7441 gimple_seq_add_seq (&new_body, par_olist);
7442 new_body = maybe_catch_exception (new_body);
7443 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7444 gimple_seq_add_stmt (&new_body,
7445 gimple_build_omp_continue (integer_zero_node,
7446 integer_zero_node));
7447 if (!phony_construct)
d9a6bd32 7448 {
629b3d75
MJ
7449 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7450 gimple_omp_set_body (stmt, new_body);
d9a6bd32
JJ
7451 }
7452
629b3d75
MJ
7453 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7454 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7455 gimple_bind_add_seq (bind, ilist);
7456 if (!phony_construct)
7457 gimple_bind_add_stmt (bind, stmt);
d9a6bd32 7458 else
629b3d75
MJ
7459 gimple_bind_add_seq (bind, new_body);
7460 gimple_bind_add_seq (bind, olist);
d9a6bd32 7461
629b3d75
MJ
7462 pop_gimplify_context (NULL);
7463
7464 if (dep_bind)
d9a6bd32 7465 {
629b3d75
MJ
7466 gimple_bind_add_seq (dep_bind, dep_ilist);
7467 gimple_bind_add_stmt (dep_bind, bind);
7468 gimple_bind_add_seq (dep_bind, dep_olist);
7469 pop_gimplify_context (dep_bind);
d9a6bd32 7470 }
d9a6bd32
JJ
7471}
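/* Illustrative sketch (not part of this file) of the marshalling set up by
   lower_omp_taskreg above.  For

       int x = 1, y = 2;
       #pragma omp parallel shared (x) firstprivate (y)
         x += y;

   the parent fills a local ".omp_data_o" record (address of X, value of Y)
   and the outlined child reads it back through the matching receiver
   (conventionally ".omp_data_i"), roughly

       .omp_data_o.x = &x;
       .omp_data_o.y = y;
       GOMP_parallel (child_fn, &.omp_data_o, 0, 0);

   with X accessed as *.omp_data_i->x and Y as .omp_data_i->y inside
   CHILD_FN.  The sender record is clobbered afterwards so its stack slot
   can be reused.  Only the sends/receives are generated here; the
   GOMP_parallel call itself is emitted later by pass_expand_omp.  */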
7472
629b3d75
MJ
7473/* Lower the GIMPLE_OMP_TARGET in the current statement
7474 in GSI_P. CTX holds context information for the directive. */
d9a6bd32
JJ
7475
7476static void
629b3d75 7477lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
d9a6bd32 7478{
629b3d75
MJ
7479 tree clauses;
7480 tree child_fn, t, c;
7481 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7482 gbind *tgt_bind, *bind, *dep_bind = NULL;
7483 gimple_seq tgt_body, olist, ilist, fplist, new_body;
7484 location_t loc = gimple_location (stmt);
7485 bool offloaded, data_region;
7486 unsigned int map_cnt = 0;
d9a6bd32 7487
629b3d75
MJ
7488 offloaded = is_gimple_omp_offloaded (stmt);
7489 switch (gimple_omp_target_kind (stmt))
d9a6bd32 7490 {
629b3d75
MJ
7491 case GF_OMP_TARGET_KIND_REGION:
7492 case GF_OMP_TARGET_KIND_UPDATE:
7493 case GF_OMP_TARGET_KIND_ENTER_DATA:
7494 case GF_OMP_TARGET_KIND_EXIT_DATA:
7495 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7496 case GF_OMP_TARGET_KIND_OACC_KERNELS:
7497 case GF_OMP_TARGET_KIND_OACC_UPDATE:
7498 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7499 case GF_OMP_TARGET_KIND_OACC_DECLARE:
7500 data_region = false;
7501 break;
7502 case GF_OMP_TARGET_KIND_DATA:
7503 case GF_OMP_TARGET_KIND_OACC_DATA:
7504 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7505 data_region = true;
7506 break;
7507 default:
7508 gcc_unreachable ();
74bf76ed 7509 }
74bf76ed 7510
629b3d75 7511 clauses = gimple_omp_target_clauses (stmt);
d9a6bd32 7512
629b3d75
MJ
7513 gimple_seq dep_ilist = NULL;
7514 gimple_seq dep_olist = NULL;
7515 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
d9a6bd32 7516 {
629b3d75
MJ
7517 push_gimplify_context ();
7518 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7519 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7520 &dep_ilist, &dep_olist);
d9a6bd32 7521 }
953ff289 7522
629b3d75
MJ
7523 tgt_bind = NULL;
7524 tgt_body = NULL;
7525 if (offloaded)
e4834818 7526 {
629b3d75
MJ
7527 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7528 tgt_body = gimple_bind_body (tgt_bind);
e4834818 7529 }
629b3d75
MJ
7530 else if (data_region)
7531 tgt_body = gimple_omp_body (stmt);
7532 child_fn = ctx->cb.dst_fn;
e4834818 7533
629b3d75
MJ
7534 push_gimplify_context ();
7535 fplist = NULL;
e4834818 7536
629b3d75
MJ
7537 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7538 switch (OMP_CLAUSE_CODE (c))
7539 {
7540 tree var, x;
e4834818 7541
629b3d75
MJ
7542 default:
7543 break;
7544 case OMP_CLAUSE_MAP:
7545#if CHECKING_P
7546 /* First check what we're prepared to handle in the following. */
7547 switch (OMP_CLAUSE_MAP_KIND (c))
7548 {
7549 case GOMP_MAP_ALLOC:
7550 case GOMP_MAP_TO:
7551 case GOMP_MAP_FROM:
7552 case GOMP_MAP_TOFROM:
7553 case GOMP_MAP_POINTER:
7554 case GOMP_MAP_TO_PSET:
7555 case GOMP_MAP_DELETE:
7556 case GOMP_MAP_RELEASE:
7557 case GOMP_MAP_ALWAYS_TO:
7558 case GOMP_MAP_ALWAYS_FROM:
7559 case GOMP_MAP_ALWAYS_TOFROM:
7560 case GOMP_MAP_FIRSTPRIVATE_POINTER:
7561 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7562 case GOMP_MAP_STRUCT:
7563 case GOMP_MAP_ALWAYS_POINTER:
7564 break;
7565 case GOMP_MAP_FORCE_ALLOC:
7566 case GOMP_MAP_FORCE_TO:
7567 case GOMP_MAP_FORCE_FROM:
7568 case GOMP_MAP_FORCE_TOFROM:
7569 case GOMP_MAP_FORCE_PRESENT:
7570 case GOMP_MAP_FORCE_DEVICEPTR:
7571 case GOMP_MAP_DEVICE_RESIDENT:
7572 case GOMP_MAP_LINK:
7573 gcc_assert (is_gimple_omp_oacc (stmt));
7574 break;
7575 default:
7576 gcc_unreachable ();
7577 }
7578#endif
7579 /* FALLTHRU */
7580 case OMP_CLAUSE_TO:
7581 case OMP_CLAUSE_FROM:
7582 oacc_firstprivate:
7583 var = OMP_CLAUSE_DECL (c);
7584 if (!DECL_P (var))
7585 {
7586 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7587 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7588 && (OMP_CLAUSE_MAP_KIND (c)
7589 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
7590 map_cnt++;
7591 continue;
7592 }
e4834818 7593
629b3d75
MJ
7594 if (DECL_SIZE (var)
7595 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7596 {
7597 tree var2 = DECL_VALUE_EXPR (var);
7598 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7599 var2 = TREE_OPERAND (var2, 0);
7600 gcc_assert (DECL_P (var2));
7601 var = var2;
7602 }
e4834818 7603
629b3d75
MJ
7604 if (offloaded
7605 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7606 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7607 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7608 {
7609 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7610 {
7611 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7612 && varpool_node::get_create (var)->offloadable)
7613 continue;
e4834818 7614
629b3d75
MJ
7615 tree type = build_pointer_type (TREE_TYPE (var));
7616 tree new_var = lookup_decl (var, ctx);
7617 x = create_tmp_var_raw (type, get_name (new_var));
7618 gimple_add_tmp_var (x);
7619 x = build_simple_mem_ref (x);
7620 SET_DECL_VALUE_EXPR (new_var, x);
7621 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7622 }
7623 continue;
7624 }
e4834818 7625
629b3d75
MJ
7626 if (!maybe_lookup_field (var, ctx))
7627 continue;
e4834818 7628
629b3d75
MJ
7629 /* Don't remap oacc parallel reduction variables, because the
7630 intermediate result must be local to each gang. */
7631 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7632 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7633 {
7634 x = build_receiver_ref (var, true, ctx);
7635 tree new_var = lookup_decl (var, ctx);
e4834818 7636
629b3d75
MJ
7637 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7638 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7639 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7640 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7641 x = build_simple_mem_ref (x);
7642 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7643 {
7644 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
bd1cab35
CLT
7645 if (omp_is_reference (new_var)
7646 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
629b3d75
MJ
7647 {
7648 /* Create a local object to hold the instance
7649 value. */
7650 tree type = TREE_TYPE (TREE_TYPE (new_var));
7651 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7652 tree inst = create_tmp_var (type, id);
7653 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7654 x = build_fold_addr_expr (inst);
7655 }
7656 gimplify_assign (new_var, x, &fplist);
7657 }
7658 else if (DECL_P (new_var))
7659 {
7660 SET_DECL_VALUE_EXPR (new_var, x);
7661 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7662 }
7663 else
7664 gcc_unreachable ();
7665 }
7666 map_cnt++;
7667 break;
e4834818 7668
629b3d75
MJ
7669 case OMP_CLAUSE_FIRSTPRIVATE:
7670 if (is_oacc_parallel (ctx))
7671 goto oacc_firstprivate;
7672 map_cnt++;
7673 var = OMP_CLAUSE_DECL (c);
7674 if (!omp_is_reference (var)
7675 && !is_gimple_reg_type (TREE_TYPE (var)))
7676 {
7677 tree new_var = lookup_decl (var, ctx);
7678 if (is_variable_sized (var))
7679 {
7680 tree pvar = DECL_VALUE_EXPR (var);
7681 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7682 pvar = TREE_OPERAND (pvar, 0);
7683 gcc_assert (DECL_P (pvar));
7684 tree new_pvar = lookup_decl (pvar, ctx);
7685 x = build_fold_indirect_ref (new_pvar);
7686 TREE_THIS_NOTRAP (x) = 1;
7687 }
7688 else
7689 x = build_receiver_ref (var, true, ctx);
7690 SET_DECL_VALUE_EXPR (new_var, x);
7691 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7692 }
7693 break;
e4834818 7694
629b3d75
MJ
7695 case OMP_CLAUSE_PRIVATE:
7696 if (is_gimple_omp_oacc (ctx->stmt))
7697 break;
7698 var = OMP_CLAUSE_DECL (c);
7699 if (is_variable_sized (var))
7700 {
7701 tree new_var = lookup_decl (var, ctx);
7702 tree pvar = DECL_VALUE_EXPR (var);
7703 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7704 pvar = TREE_OPERAND (pvar, 0);
7705 gcc_assert (DECL_P (pvar));
7706 tree new_pvar = lookup_decl (pvar, ctx);
7707 x = build_fold_indirect_ref (new_pvar);
7708 TREE_THIS_NOTRAP (x) = 1;
7709 SET_DECL_VALUE_EXPR (new_var, x);
7710 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7711 }
7712 break;
e4834818 7713
629b3d75
MJ
7714 case OMP_CLAUSE_USE_DEVICE_PTR:
7715 case OMP_CLAUSE_IS_DEVICE_PTR:
7716 var = OMP_CLAUSE_DECL (c);
7717 map_cnt++;
7718 if (is_variable_sized (var))
7719 {
7720 tree new_var = lookup_decl (var, ctx);
7721 tree pvar = DECL_VALUE_EXPR (var);
7722 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7723 pvar = TREE_OPERAND (pvar, 0);
7724 gcc_assert (DECL_P (pvar));
7725 tree new_pvar = lookup_decl (pvar, ctx);
7726 x = build_fold_indirect_ref (new_pvar);
7727 TREE_THIS_NOTRAP (x) = 1;
7728 SET_DECL_VALUE_EXPR (new_var, x);
7729 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7730 }
7731 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7732 {
7733 tree new_var = lookup_decl (var, ctx);
7734 tree type = build_pointer_type (TREE_TYPE (var));
7735 x = create_tmp_var_raw (type, get_name (new_var));
7736 gimple_add_tmp_var (x);
7737 x = build_simple_mem_ref (x);
7738 SET_DECL_VALUE_EXPR (new_var, x);
7739 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7740 }
7741 else
7742 {
7743 tree new_var = lookup_decl (var, ctx);
7744 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7745 gimple_add_tmp_var (x);
7746 SET_DECL_VALUE_EXPR (new_var, x);
7747 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7748 }
7749 break;
7750 }
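  /* Illustrative note: at this point MAP_CNT counts one slot per mapped
     entity.  For, e.g.,

         int b[256];  struct S s;
         #pragma omp target map (to: b) map (tofrom: s)

     two slots are reserved; each becomes one element of the
     .omp_data_arr / .omp_data_sizes / .omp_data_kinds triplet built below
     (host address, size in bytes, and map kind with the alignment packed
     into the upper bits).  */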
e4834818 7751
629b3d75 7752 if (offloaded)
e4834818 7753 {
629b3d75
MJ
7754 target_nesting_level++;
7755 lower_omp (&tgt_body, ctx);
7756 target_nesting_level--;
e4834818 7757 }
629b3d75
MJ
7758 else if (data_region)
7759 lower_omp (&tgt_body, ctx);
e4834818 7760
629b3d75 7761 if (offloaded)
e4834818 7762 {
629b3d75
MJ
7763 /* Declare all the variables created by mapping and the variables
7764 declared in the scope of the target body. */
7765 record_vars_into (ctx->block_vars, child_fn);
6724f8a6 7766 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
629b3d75 7767 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
e4834818
NS
7768 }
7769
629b3d75
MJ
7770 olist = NULL;
7771 ilist = NULL;
7772 if (ctx->record_type)
e4834818 7773 {
629b3d75
MJ
7774 ctx->sender_decl
7775 = create_tmp_var (ctx->record_type, ".omp_data_arr");
7776 DECL_NAMELESS (ctx->sender_decl) = 1;
7777 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7778 t = make_tree_vec (3);
7779 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7780 TREE_VEC_ELT (t, 1)
7781 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7782 ".omp_data_sizes");
7783 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7784 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7785 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7786 tree tkind_type = short_unsigned_type_node;
7787 int talign_shift = 8;
7788 TREE_VEC_ELT (t, 2)
7789 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7790 ".omp_data_kinds");
7791 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7792 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7793 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7794 gimple_omp_target_set_data_arg (stmt, t);
953ff289 7795
629b3d75
MJ
7796 vec<constructor_elt, va_gc> *vsize;
7797 vec<constructor_elt, va_gc> *vkind;
7798 vec_alloc (vsize, map_cnt);
7799 vec_alloc (vkind, map_cnt);
7800 unsigned int map_idx = 0;
953ff289 7801
629b3d75
MJ
7802 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7803 switch (OMP_CLAUSE_CODE (c))
953ff289 7804 {
629b3d75
MJ
7805 tree ovar, nc, s, purpose, var, x, type;
7806 unsigned int talign;
953ff289 7807
629b3d75
MJ
7808 default:
7809 break;
953ff289 7810
629b3d75
MJ
7811 case OMP_CLAUSE_MAP:
7812 case OMP_CLAUSE_TO:
7813 case OMP_CLAUSE_FROM:
7814 oacc_firstprivate_map:
7815 nc = c;
7816 ovar = OMP_CLAUSE_DECL (c);
7817 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7818 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7819 || (OMP_CLAUSE_MAP_KIND (c)
7820 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7821 break;
7822 if (!DECL_P (ovar))
c34938a8 7823 {
629b3d75
MJ
7824 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7825 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7826 {
7827 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7828 == get_base_address (ovar));
7829 nc = OMP_CLAUSE_CHAIN (c);
7830 ovar = OMP_CLAUSE_DECL (nc);
7831 }
7832 else
7833 {
7834 tree x = build_sender_ref (ovar, ctx);
7835 tree v
7836 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7837 gimplify_assign (x, v, &ilist);
7838 nc = NULL_TREE;
7839 }
7840 }
7841 else
7842 {
7843 if (DECL_SIZE (ovar)
7844 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7845 {
7846 tree ovar2 = DECL_VALUE_EXPR (ovar);
7847 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7848 ovar2 = TREE_OPERAND (ovar2, 0);
7849 gcc_assert (DECL_P (ovar2));
7850 ovar = ovar2;
7851 }
7852 if (!maybe_lookup_field (ovar, ctx))
7853 continue;
c34938a8 7854 }
777f7f9a 7855
629b3d75
MJ
7856 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7857 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7858 talign = DECL_ALIGN_UNIT (ovar);
7859 if (nc)
7860 {
7861 var = lookup_decl_in_outer_ctx (ovar, ctx);
7862 x = build_sender_ref (ovar, ctx);
777f7f9a 7863
629b3d75
MJ
7864 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7865 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7866 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7867 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7868 {
7869 gcc_assert (offloaded);
7870 tree avar
7871 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7872 mark_addressable (avar);
7873 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7874 talign = DECL_ALIGN_UNIT (avar);
7875 avar = build_fold_addr_expr (avar);
7876 gimplify_assign (x, avar, &ilist);
7877 }
7878 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7879 {
7880 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7881 if (!omp_is_reference (var))
7882 {
7883 if (is_gimple_reg (var)
7884 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7885 TREE_NO_WARNING (var) = 1;
7886 var = build_fold_addr_expr (var);
7887 }
7888 else
7889 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7890 gimplify_assign (x, var, &ilist);
7891 }
7892 else if (is_gimple_reg (var))
7893 {
7894 gcc_assert (offloaded);
7895 tree avar = create_tmp_var (TREE_TYPE (var));
7896 mark_addressable (avar);
7897 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7898 if (GOMP_MAP_COPY_TO_P (map_kind)
7899 || map_kind == GOMP_MAP_POINTER
7900 || map_kind == GOMP_MAP_TO_PSET
7901 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7902 {
7903 /* If we need to initialize a temporary
7904 with VAR because it is not addressable, and
7905 the variable hasn't been initialized yet, then
7906 we'll get a warning for the store to avar.
 7907 Don't warn in that case; the mapping might
7908 be implicit. */
7909 TREE_NO_WARNING (var) = 1;
7910 gimplify_assign (avar, var, &ilist);
7911 }
7912 avar = build_fold_addr_expr (avar);
7913 gimplify_assign (x, avar, &ilist);
7914 if ((GOMP_MAP_COPY_FROM_P (map_kind)
7915 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7916 && !TYPE_READONLY (TREE_TYPE (var)))
7917 {
7918 x = unshare_expr (x);
7919 x = build_simple_mem_ref (x);
7920 gimplify_assign (var, x, &olist);
7921 }
7922 }
7923 else
7924 {
7925 var = build_fold_addr_expr (var);
7926 gimplify_assign (x, var, &ilist);
7927 }
7928 }
7929 s = NULL_TREE;
7930 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7931 {
7932 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7933 s = TREE_TYPE (ovar);
7934 if (TREE_CODE (s) == REFERENCE_TYPE)
7935 s = TREE_TYPE (s);
7936 s = TYPE_SIZE_UNIT (s);
7937 }
7938 else
7939 s = OMP_CLAUSE_SIZE (c);
7940 if (s == NULL_TREE)
7941 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7942 s = fold_convert (size_type_node, s);
7943 purpose = size_int (map_idx++);
7944 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
7945 if (TREE_CODE (s) != INTEGER_CST)
7946 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
777f7f9a 7947
629b3d75
MJ
7948 unsigned HOST_WIDE_INT tkind, tkind_zero;
7949 switch (OMP_CLAUSE_CODE (c))
7950 {
7951 case OMP_CLAUSE_MAP:
7952 tkind = OMP_CLAUSE_MAP_KIND (c);
7953 tkind_zero = tkind;
7954 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
7955 switch (tkind)
7956 {
7957 case GOMP_MAP_ALLOC:
7958 case GOMP_MAP_TO:
7959 case GOMP_MAP_FROM:
7960 case GOMP_MAP_TOFROM:
7961 case GOMP_MAP_ALWAYS_TO:
7962 case GOMP_MAP_ALWAYS_FROM:
7963 case GOMP_MAP_ALWAYS_TOFROM:
7964 case GOMP_MAP_RELEASE:
7965 case GOMP_MAP_FORCE_TO:
7966 case GOMP_MAP_FORCE_FROM:
7967 case GOMP_MAP_FORCE_TOFROM:
7968 case GOMP_MAP_FORCE_PRESENT:
7969 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
7970 break;
7971 case GOMP_MAP_DELETE:
7972 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
7973 default:
7974 break;
7975 }
7976 if (tkind_zero != tkind)
7977 {
7978 if (integer_zerop (s))
7979 tkind = tkind_zero;
7980 else if (integer_nonzerop (s))
7981 tkind_zero = tkind;
7982 }
7983 break;
7984 case OMP_CLAUSE_FIRSTPRIVATE:
7985 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7986 tkind = GOMP_MAP_TO;
7987 tkind_zero = tkind;
7988 break;
7989 case OMP_CLAUSE_TO:
7990 tkind = GOMP_MAP_TO;
7991 tkind_zero = tkind;
7992 break;
7993 case OMP_CLAUSE_FROM:
7994 tkind = GOMP_MAP_FROM;
7995 tkind_zero = tkind;
7996 break;
7997 default:
7998 gcc_unreachable ();
7999 }
8000 gcc_checking_assert (tkind
8001 < (HOST_WIDE_INT_C (1U) << talign_shift));
8002 gcc_checking_assert (tkind_zero
8003 < (HOST_WIDE_INT_C (1U) << talign_shift));
8004 talign = ceil_log2 (talign);
8005 tkind |= talign << talign_shift;
8006 tkind_zero |= talign << talign_shift;
8007 gcc_checking_assert (tkind
8008 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8009 gcc_checking_assert (tkind_zero
8010 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8011 if (tkind == tkind_zero)
8012 x = build_int_cstu (tkind_type, tkind);
8013 else
8014 {
8015 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8016 x = build3 (COND_EXPR, tkind_type,
8017 fold_build2 (EQ_EXPR, boolean_type_node,
8018 unshare_expr (s), size_zero_node),
8019 build_int_cstu (tkind_type, tkind_zero),
8020 build_int_cstu (tkind_type, tkind));
8021 }
8022 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8023 if (nc && nc != c)
8024 c = nc;
8025 break;
05409788 8026
629b3d75
MJ
8027 case OMP_CLAUSE_FIRSTPRIVATE:
8028 if (is_oacc_parallel (ctx))
8029 goto oacc_firstprivate_map;
8030 ovar = OMP_CLAUSE_DECL (c);
8031 if (omp_is_reference (ovar))
8032 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8033 else
8034 talign = DECL_ALIGN_UNIT (ovar);
8035 var = lookup_decl_in_outer_ctx (ovar, ctx);
8036 x = build_sender_ref (ovar, ctx);
8037 tkind = GOMP_MAP_FIRSTPRIVATE;
8038 type = TREE_TYPE (ovar);
8039 if (omp_is_reference (ovar))
8040 type = TREE_TYPE (type);
8041 if ((INTEGRAL_TYPE_P (type)
8042 && TYPE_PRECISION (type) <= POINTER_SIZE)
8043 || TREE_CODE (type) == POINTER_TYPE)
8044 {
8045 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8046 tree t = var;
8047 if (omp_is_reference (var))
8048 t = build_simple_mem_ref (var);
8049 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8050 TREE_NO_WARNING (var) = 1;
8051 if (TREE_CODE (type) != POINTER_TYPE)
8052 t = fold_convert (pointer_sized_int_node, t);
8053 t = fold_convert (TREE_TYPE (x), t);
8054 gimplify_assign (x, t, &ilist);
8055 }
8056 else if (omp_is_reference (var))
8057 gimplify_assign (x, var, &ilist);
8058 else if (is_gimple_reg (var))
8059 {
8060 tree avar = create_tmp_var (TREE_TYPE (var));
8061 mark_addressable (avar);
8062 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8063 TREE_NO_WARNING (var) = 1;
8064 gimplify_assign (avar, var, &ilist);
8065 avar = build_fold_addr_expr (avar);
8066 gimplify_assign (x, avar, &ilist);
8067 }
8068 else
8069 {
8070 var = build_fold_addr_expr (var);
8071 gimplify_assign (x, var, &ilist);
8072 }
8073 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8074 s = size_int (0);
8075 else if (omp_is_reference (ovar))
8076 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8077 else
8078 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8079 s = fold_convert (size_type_node, s);
8080 purpose = size_int (map_idx++);
8081 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8082 if (TREE_CODE (s) != INTEGER_CST)
8083 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
05409788 8084
629b3d75
MJ
8085 gcc_checking_assert (tkind
8086 < (HOST_WIDE_INT_C (1U) << talign_shift));
8087 talign = ceil_log2 (talign);
8088 tkind |= talign << talign_shift;
8089 gcc_checking_assert (tkind
8090 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8091 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8092 build_int_cstu (tkind_type, tkind));
8093 break;
05409788 8094
629b3d75
MJ
8095 case OMP_CLAUSE_USE_DEVICE_PTR:
8096 case OMP_CLAUSE_IS_DEVICE_PTR:
8097 ovar = OMP_CLAUSE_DECL (c);
8098 var = lookup_decl_in_outer_ctx (ovar, ctx);
8099 x = build_sender_ref (ovar, ctx);
8100 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8101 tkind = GOMP_MAP_USE_DEVICE_PTR;
8102 else
8103 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8104 type = TREE_TYPE (ovar);
8105 if (TREE_CODE (type) == ARRAY_TYPE)
8106 var = build_fold_addr_expr (var);
8107 else
8108 {
8109 if (omp_is_reference (ovar))
8110 {
8111 type = TREE_TYPE (type);
8112 if (TREE_CODE (type) != ARRAY_TYPE)
8113 var = build_simple_mem_ref (var);
8114 var = fold_convert (TREE_TYPE (x), var);
8115 }
8116 }
8117 gimplify_assign (x, var, &ilist);
8118 s = size_int (0);
8119 purpose = size_int (map_idx++);
8120 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8121 gcc_checking_assert (tkind
8122 < (HOST_WIDE_INT_C (1U) << talign_shift));
8123 gcc_checking_assert (tkind
8124 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8125 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8126 build_int_cstu (tkind_type, tkind));
8127 break;
8128 }
05409788 8129
629b3d75 8130 gcc_assert (map_idx == map_cnt);
20906c66 8131
629b3d75
MJ
8132 DECL_INITIAL (TREE_VEC_ELT (t, 1))
8133 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8134 DECL_INITIAL (TREE_VEC_ELT (t, 2))
8135 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8136 for (int i = 1; i <= 2; i++)
8137 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8138 {
8139 gimple_seq initlist = NULL;
8140 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8141 TREE_VEC_ELT (t, i)),
8142 &initlist, true, NULL_TREE);
8143 gimple_seq_add_seq (&ilist, initlist);
20906c66 8144
629b3d75
MJ
8145 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8146 NULL);
8147 TREE_THIS_VOLATILE (clobber) = 1;
8148 gimple_seq_add_stmt (&olist,
8149 gimple_build_assign (TREE_VEC_ELT (t, i),
8150 clobber));
8151 }
05409788 8152
629b3d75
MJ
8153 tree clobber = build_constructor (ctx->record_type, NULL);
8154 TREE_THIS_VOLATILE (clobber) = 1;
8155 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8156 clobber));
8157 }
8158
8159 /* Once all the expansions are done, sequence all the different
8160 fragments inside gimple_omp_body. */
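/* The fragments are appended in this order: the receiver_decl setup for
   offloaded regions, the firstprivate fixups collected in FPLIST, the
   per-clause initializations built by the two clause scans below, the
   OpenACC reduction fork/join sequences, the offloaded body itself, and
   finally an OMP_RETURN.  */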
8161
8162 new_body = NULL;
8163
8164 if (offloaded
8165 && ctx->record_type)
8166 {
8167 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8168 /* fixup_child_record_type might have changed receiver_decl's type. */
8169 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8170 gimple_seq_add_stmt (&new_body,
8171 gimple_build_assign (ctx->receiver_decl, t));
8172 }
8173 gimple_seq_add_seq (&new_body, fplist);
8174
8175 if (offloaded || data_region)
8176 {
8177 tree prev = NULL_TREE;
8178 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8179 switch (OMP_CLAUSE_CODE (c))
8180 {
8181 tree var, x;
8182 default:
8183 break;
8184 case OMP_CLAUSE_FIRSTPRIVATE:
8185 if (is_gimple_omp_oacc (ctx->stmt))
8186 break;
8187 var = OMP_CLAUSE_DECL (c);
8188 if (omp_is_reference (var)
8189 || is_gimple_reg_type (TREE_TYPE (var)))
8190 {
8191 tree new_var = lookup_decl (var, ctx);
8192 tree type;
8193 type = TREE_TYPE (var);
8194 if (omp_is_reference (var))
8195 type = TREE_TYPE (type);
8196 if ((INTEGRAL_TYPE_P (type)
8197 && TYPE_PRECISION (type) <= POINTER_SIZE)
8198 || TREE_CODE (type) == POINTER_TYPE)
8199 {
8200 x = build_receiver_ref (var, false, ctx);
8201 if (TREE_CODE (type) != POINTER_TYPE)
8202 x = fold_convert (pointer_sized_int_node, x);
8203 x = fold_convert (type, x);
8204 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8205 fb_rvalue);
8206 if (omp_is_reference (var))
8207 {
8208 tree v = create_tmp_var_raw (type, get_name (var));
8209 gimple_add_tmp_var (v);
8210 TREE_ADDRESSABLE (v) = 1;
8211 gimple_seq_add_stmt (&new_body,
8212 gimple_build_assign (v, x));
8213 x = build_fold_addr_expr (v);
8214 }
8215 gimple_seq_add_stmt (&new_body,
8216 gimple_build_assign (new_var, x));
8217 }
8218 else
8219 {
8220 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8221 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8222 fb_rvalue);
8223 gimple_seq_add_stmt (&new_body,
8224 gimple_build_assign (new_var, x));
8225 }
8226 }
8227 else if (is_variable_sized (var))
8228 {
8229 tree pvar = DECL_VALUE_EXPR (var);
8230 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8231 pvar = TREE_OPERAND (pvar, 0);
8232 gcc_assert (DECL_P (pvar));
8233 tree new_var = lookup_decl (pvar, ctx);
8234 x = build_receiver_ref (var, false, ctx);
8235 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8236 gimple_seq_add_stmt (&new_body,
8237 gimple_build_assign (new_var, x));
8238 }
8239 break;
8240 case OMP_CLAUSE_PRIVATE:
8241 if (is_gimple_omp_oacc (ctx->stmt))
8242 break;
8243 var = OMP_CLAUSE_DECL (c);
8244 if (omp_is_reference (var))
8245 {
8246 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8247 tree new_var = lookup_decl (var, ctx);
8248 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8249 if (TREE_CONSTANT (x))
8250 {
8251 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8252 get_name (var));
8253 gimple_add_tmp_var (x);
8254 TREE_ADDRESSABLE (x) = 1;
8255 x = build_fold_addr_expr_loc (clause_loc, x);
8256 }
8257 else
8258 break;
8259
8260 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8261 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8262 gimple_seq_add_stmt (&new_body,
8263 gimple_build_assign (new_var, x));
8264 }
8265 break;
8266 case OMP_CLAUSE_USE_DEVICE_PTR:
8267 case OMP_CLAUSE_IS_DEVICE_PTR:
8268 var = OMP_CLAUSE_DECL (c);
8269 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8270 x = build_sender_ref (var, ctx);
8271 else
8272 x = build_receiver_ref (var, false, ctx);
8273 if (is_variable_sized (var))
8274 {
8275 tree pvar = DECL_VALUE_EXPR (var);
8276 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8277 pvar = TREE_OPERAND (pvar, 0);
8278 gcc_assert (DECL_P (pvar));
8279 tree new_var = lookup_decl (pvar, ctx);
8280 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8281 gimple_seq_add_stmt (&new_body,
8282 gimple_build_assign (new_var, x));
8283 }
8284 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8285 {
8286 tree new_var = lookup_decl (var, ctx);
8287 new_var = DECL_VALUE_EXPR (new_var);
8288 gcc_assert (TREE_CODE (new_var) == MEM_REF);
8289 new_var = TREE_OPERAND (new_var, 0);
8290 gcc_assert (DECL_P (new_var));
8291 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8292 gimple_seq_add_stmt (&new_body,
8293 gimple_build_assign (new_var, x));
8294 }
8295 else
8296 {
8297 tree type = TREE_TYPE (var);
8298 tree new_var = lookup_decl (var, ctx);
8299 if (omp_is_reference (var))
8300 {
8301 type = TREE_TYPE (type);
8302 if (TREE_CODE (type) != ARRAY_TYPE)
8303 {
8304 tree v = create_tmp_var_raw (type, get_name (var));
8305 gimple_add_tmp_var (v);
8306 TREE_ADDRESSABLE (v) = 1;
8307 x = fold_convert (type, x);
8308 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8309 fb_rvalue);
8310 gimple_seq_add_stmt (&new_body,
8311 gimple_build_assign (v, x));
8312 x = build_fold_addr_expr (v);
8313 }
8314 }
8315 new_var = DECL_VALUE_EXPR (new_var);
8316 x = fold_convert (TREE_TYPE (new_var), x);
8317 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8318 gimple_seq_add_stmt (&new_body,
8319 gimple_build_assign (new_var, x));
8320 }
8321 break;
8322 }
8323 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
8324 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
8325 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
8326 or references to VLAs. */
8327 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8328 switch (OMP_CLAUSE_CODE (c))
8329 {
8330 tree var;
8331 default:
8332 break;
8333 case OMP_CLAUSE_MAP:
8334 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8335 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8336 {
8337 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8338 poly_int64 offset = 0;
8339 gcc_assert (prev);
8340 var = OMP_CLAUSE_DECL (c);
8341 if (DECL_P (var)
8342 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8343 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8344 ctx))
8345 && varpool_node::get_create (var)->offloadable)
8346 break;
8347 if (TREE_CODE (var) == INDIRECT_REF
8348 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8349 var = TREE_OPERAND (var, 0);
8350 if (TREE_CODE (var) == COMPONENT_REF)
8351 {
8352 var = get_addr_base_and_unit_offset (var, &offset);
8353 gcc_assert (var != NULL_TREE && DECL_P (var));
8354 }
8355 else if (DECL_SIZE (var)
8356 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8357 {
8358 tree var2 = DECL_VALUE_EXPR (var);
8359 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8360 var2 = TREE_OPERAND (var2, 0);
8361 gcc_assert (DECL_P (var2));
8362 var = var2;
8363 }
8364 tree new_var = lookup_decl (var, ctx), x;
8365 tree type = TREE_TYPE (new_var);
8366 bool is_ref;
8367 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8368 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8369 == COMPONENT_REF))
8370 {
8371 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8372 is_ref = true;
8373 new_var = build2 (MEM_REF, type,
8374 build_fold_addr_expr (new_var),
8375 build_int_cst (build_pointer_type (type),
8376 offset));
8377 }
8378 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8379 {
8380 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8381 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8382 new_var = build2 (MEM_REF, type,
8383 build_fold_addr_expr (new_var),
8384 build_int_cst (build_pointer_type (type),
8385 offset));
8386 }
8387 else
8388 is_ref = omp_is_reference (var);
8389 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8390 is_ref = false;
8391 bool ref_to_array = false;
8392 if (is_ref)
8393 {
8394 type = TREE_TYPE (type);
8395 if (TREE_CODE (type) == ARRAY_TYPE)
8396 {
8397 type = build_pointer_type (type);
8398 ref_to_array = true;
8399 }
8400 }
8401 else if (TREE_CODE (type) == ARRAY_TYPE)
8402 {
8403 tree decl2 = DECL_VALUE_EXPR (new_var);
8404 gcc_assert (TREE_CODE (decl2) == MEM_REF);
8405 decl2 = TREE_OPERAND (decl2, 0);
8406 gcc_assert (DECL_P (decl2));
8407 new_var = decl2;
8408 type = TREE_TYPE (new_var);
8409 }
8410 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8411 x = fold_convert_loc (clause_loc, type, x);
8412 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8413 {
8414 tree bias = OMP_CLAUSE_SIZE (c);
8415 if (DECL_P (bias))
8416 bias = lookup_decl (bias, ctx);
8417 bias = fold_convert_loc (clause_loc, sizetype, bias);
8418 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8419 bias);
8420 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8421 TREE_TYPE (x), x, bias);
8422 }
8423 if (ref_to_array)
8424 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8425 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8426 if (is_ref && !ref_to_array)
8427 {
8428 tree t = create_tmp_var_raw (type, get_name (var));
8429 gimple_add_tmp_var (t);
8430 TREE_ADDRESSABLE (t) = 1;
8431 gimple_seq_add_stmt (&new_body,
8432 gimple_build_assign (t, x));
8433 x = build_fold_addr_expr_loc (clause_loc, t);
8434 }
8435 gimple_seq_add_stmt (&new_body,
8436 gimple_build_assign (new_var, x));
8437 prev = NULL_TREE;
8438 }
8439 else if (OMP_CLAUSE_CHAIN (c)
8440 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8441 == OMP_CLAUSE_MAP
8442 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8443 == GOMP_MAP_FIRSTPRIVATE_POINTER
8444 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8445 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8446 prev = c;
8447 break;
8448 case OMP_CLAUSE_PRIVATE:
8449 var = OMP_CLAUSE_DECL (c);
8450 if (is_variable_sized (var))
8451 {
8452 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8453 tree new_var = lookup_decl (var, ctx);
8454 tree pvar = DECL_VALUE_EXPR (var);
8455 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8456 pvar = TREE_OPERAND (pvar, 0);
8457 gcc_assert (DECL_P (pvar));
8458 tree new_pvar = lookup_decl (pvar, ctx);
8459 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8460 tree al = size_int (DECL_ALIGN (var));
8461 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8462 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8463 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8464 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8465 gimple_seq_add_stmt (&new_body,
8466 gimple_build_assign (new_pvar, x));
8467 }
8468 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8469 {
8470 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8471 tree new_var = lookup_decl (var, ctx);
8472 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8473 if (TREE_CONSTANT (x))
8474 break;
8475 else
8476 {
8477 tree atmp
8478 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8479 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8480 tree al = size_int (TYPE_ALIGN (rtype));
8481 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8482 }
8483
8484 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8485 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8486 gimple_seq_add_stmt (&new_body,
8487 gimple_build_assign (new_var, x));
8488 }
8489 break;
8490 }
8491
8492 gimple_seq fork_seq = NULL;
8493 gimple_seq join_seq = NULL;
8494
8495 if (is_oacc_parallel (ctx))
8496 {
8497 /* If there are reductions on the offloaded region itself, treat
8498 them as a dummy GANG loop. */
8499 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8500
8501 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8502 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8503 }
8504
8505 gimple_seq_add_seq (&new_body, fork_seq);
8506 gimple_seq_add_seq (&new_body, tgt_body);
8507 gimple_seq_add_seq (&new_body, join_seq);
8508
8509 if (offloaded)
8510 new_body = maybe_catch_exception (new_body);
8511
8512 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8513 gimple_omp_set_body (stmt, new_body);
8514 }
8515
8516 bind = gimple_build_bind (NULL, NULL,
8517 tgt_bind ? gimple_bind_block (tgt_bind)
8518 : NULL_TREE);
8519 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8520 gimple_bind_add_seq (bind, ilist);
8521 gimple_bind_add_stmt (bind, stmt);
8522 gimple_bind_add_seq (bind, olist);
8523
8524 pop_gimplify_context (NULL);
8525
8526 if (dep_bind)
8527 {
8528 gimple_bind_add_seq (dep_bind, dep_ilist);
8529 gimple_bind_add_stmt (dep_bind, bind);
8530 gimple_bind_add_seq (dep_bind, dep_olist);
8531 pop_gimplify_context (dep_bind);
8532 }
8533}
8534
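/* As a rough illustration (example only): a directive such as

     #pragma omp teams num_teams (4) thread_limit (16)

   is lowered by the function below into a call to the libgomp entry point
   GOMP_teams (4, 16) followed by the lowered body of the teams region;
   a clause that is absent is passed as 0 so the runtime may choose.  */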
8535 /* Expand code for an OpenMP teams directive. */
8536
8537 static void
8538 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8539 {
8540 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8541 push_gimplify_context ();
8542
8543 tree block = make_node (BLOCK);
8544 gbind *bind = gimple_build_bind (NULL, NULL, block);
8545 gsi_replace (gsi_p, bind, true);
8546 gimple_seq bind_body = NULL;
8547 gimple_seq dlist = NULL;
8548 gimple_seq olist = NULL;
8549
8550 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8551 OMP_CLAUSE_NUM_TEAMS);
8552 if (num_teams == NULL_TREE)
8553 num_teams = build_int_cst (unsigned_type_node, 0);
8554 else
8555 {
8556 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8557 num_teams = fold_convert (unsigned_type_node, num_teams);
8558 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8559 }
8560 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8561 OMP_CLAUSE_THREAD_LIMIT);
8562 if (thread_limit == NULL_TREE)
8563 thread_limit = build_int_cst (unsigned_type_node, 0);
8564 else
8565 {
8566 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8567 thread_limit = fold_convert (unsigned_type_node, thread_limit);
8568 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8569 fb_rvalue);
8570 }
8571
8572 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8573 &bind_body, &dlist, ctx, NULL);
8574 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8575 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8576 if (!gimple_omp_teams_grid_phony (teams_stmt))
8577 {
8578 gimple_seq_add_stmt (&bind_body, teams_stmt);
8579 location_t loc = gimple_location (teams_stmt);
8580 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8581 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8582 gimple_set_location (call, loc);
8583 gimple_seq_add_stmt (&bind_body, call);
8584 }
8585
8586 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8587 gimple_omp_set_body (teams_stmt, NULL);
8588 gimple_seq_add_seq (&bind_body, olist);
8589 gimple_seq_add_seq (&bind_body, dlist);
8590 if (!gimple_omp_teams_grid_phony (teams_stmt))
8591 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8592 gimple_bind_set_body (bind, bind_body);
8593
8594 pop_gimplify_context (bind);
8595
8596 gimple_bind_append_vars (bind, ctx->block_vars);
8597 BLOCK_VARS (block) = ctx->block_vars;
8598 if (BLOCK_VARS (block))
8599 TREE_USED (block) = 1;
8600}
8601
8602 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
8603
8604static void
8605lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8606 {
8607 gimple *stmt = gsi_stmt (*gsi_p);
8608 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8609 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8610 gimple_build_omp_return (false));
8611}
8612
8613
8614/* Callback for lower_omp_1. Return non-NULL if *tp needs to be
8615 regimplified. If DATA is non-NULL, lower_omp_1 is outside
8616 of OMP context, but with task_shared_vars set. */
8617
8618static tree
8619lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8620 void *data)
8621 {
8622 tree t = *tp;
8623
8624 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
8625 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8626 return t;
8627
8628 if (task_shared_vars
8629 && DECL_P (t)
8630 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8631 return t;
8632
8633 /* If a global variable has been privatized, TREE_CONSTANT on
8634 ADDR_EXPR might be wrong. */
8635 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8636 recompute_tree_invariant_for_addr_expr (t);
8637
8638 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8639 return NULL_TREE;
8640}
8641
8642/* Data to be communicated between lower_omp_regimplify_operands and
8643 lower_omp_regimplify_operands_p. */
9bd46bc9 8644
629b3d75 8645struct lower_omp_regimplify_operands_data
9bd46bc9 8646{
629b3d75
MJ
8647 omp_context *ctx;
8648 vec<tree> *decls;
8649};
9bd46bc9 8650
629b3d75
MJ
8651/* Helper function for lower_omp_regimplify_operands. Find
8652 omp_member_access_dummy_var vars and adjust temporarily their
8653 DECL_VALUE_EXPRs if needed. */
9bd46bc9 8654
629b3d75
MJ
8655static tree
8656lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8657 void *data)
9bd46bc9 8658{
629b3d75
MJ
8659 tree t = omp_member_access_dummy_var (*tp);
8660 if (t)
8661 {
8662 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8663 lower_omp_regimplify_operands_data *ldata
8664 = (lower_omp_regimplify_operands_data *) wi->info;
8665 tree o = maybe_lookup_decl (t, ldata->ctx);
8666 if (o != t)
8667 {
8668 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8669 ldata->decls->safe_push (*tp);
8670 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8671 SET_DECL_VALUE_EXPR (*tp, v);
8672 }
8673 }
8674 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8675 return NULL_TREE;
8676}
8677
8678/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8679 of omp_member_access_dummy_var vars during regimplification. */
8680
8681static void
8682lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8683 gimple_stmt_iterator *gsi_p)
8684 {
8685 auto_vec<tree, 10> decls;
8686 if (ctx)
8687 {
8688 struct walk_stmt_info wi;
8689 memset (&wi, '\0', sizeof (wi));
8690 struct lower_omp_regimplify_operands_data data;
8691 data.ctx = ctx;
8692 data.decls = &decls;
8693 wi.info = &data;
8694 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8695 }
8696 gimple_regimplify_operands (stmt, gsi_p);
8697 while (!decls.is_empty ())
8698 {
8699 tree t = decls.pop ();
8700 tree v = decls.pop ();
8701 SET_DECL_VALUE_EXPR (t, v);
8702 }
8703}
8704
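/* Lower the single statement at *GSI_P inside OMP context CTX (CTX may be
   NULL when only task_shared_vars handling is required), dispatching to the
   construct-specific lowering routines above and regimplifying operands
   that refer to remapped variables.  */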
8705 static void
8706 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8707 {
8708 gimple *stmt = gsi_stmt (*gsi_p);
8709 struct walk_stmt_info wi;
8710 gcall *call_stmt;
8711
8712 if (gimple_has_location (stmt))
8713 input_location = gimple_location (stmt);
8714
8715 if (task_shared_vars)
8716 memset (&wi, '\0', sizeof (wi));
8717
8718 /* If we have issued syntax errors, avoid doing any heavy lifting.
8719 Just replace the OMP directives with a NOP to avoid
8720 confusing RTL expansion. */
8721 if (seen_error () && is_gimple_omp (stmt))
8722 {
8723 gsi_replace (gsi_p, gimple_build_nop (), true);
8724 return;
8725 }
8726
8727 switch (gimple_code (stmt))
8728 {
8729 case GIMPLE_COND:
8730 {
8731 gcond *cond_stmt = as_a <gcond *> (stmt);
8732 if ((ctx || task_shared_vars)
8733 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8734 lower_omp_regimplify_p,
8735 ctx ? NULL : &wi, NULL)
8736 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8737 lower_omp_regimplify_p,
8738 ctx ? NULL : &wi, NULL)))
8739 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8740 }
8741 break;
8742 case GIMPLE_CATCH:
8743 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8744 break;
8745 case GIMPLE_EH_FILTER:
8746 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8747 break;
8748 case GIMPLE_TRY:
8749 lower_omp (gimple_try_eval_ptr (stmt), ctx);
8750 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8751 break;
8752 case GIMPLE_TRANSACTION:
8753 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8754 ctx);
8755 break;
8756 case GIMPLE_BIND:
8757 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8758 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
8759 break;
8760 case GIMPLE_OMP_PARALLEL:
8761 case GIMPLE_OMP_TASK:
8762 ctx = maybe_lookup_ctx (stmt);
8763 gcc_assert (ctx);
8764 if (ctx->cancellable)
8765 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8766 lower_omp_taskreg (gsi_p, ctx);
8767 break;
8768 case GIMPLE_OMP_FOR:
8769 ctx = maybe_lookup_ctx (stmt);
8770 gcc_assert (ctx);
8771 if (ctx->cancellable)
8772 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8773 lower_omp_for (gsi_p, ctx);
8774 break;
8775 case GIMPLE_OMP_SECTIONS:
8776 ctx = maybe_lookup_ctx (stmt);
8777 gcc_assert (ctx);
8778 if (ctx->cancellable)
8779 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8780 lower_omp_sections (gsi_p, ctx);
8781 break;
8782 case GIMPLE_OMP_SINGLE:
8783 ctx = maybe_lookup_ctx (stmt);
8784 gcc_assert (ctx);
8785 lower_omp_single (gsi_p, ctx);
8786 break;
8787 case GIMPLE_OMP_MASTER:
8788 ctx = maybe_lookup_ctx (stmt);
8789 gcc_assert (ctx);
8790 lower_omp_master (gsi_p, ctx);
8791 break;
8792 case GIMPLE_OMP_TASKGROUP:
8793 ctx = maybe_lookup_ctx (stmt);
8794 gcc_assert (ctx);
8795 lower_omp_taskgroup (gsi_p, ctx);
8796 break;
8797 case GIMPLE_OMP_ORDERED:
8798 ctx = maybe_lookup_ctx (stmt);
8799 gcc_assert (ctx);
8800 lower_omp_ordered (gsi_p, ctx);
8801 break;
8802 case GIMPLE_OMP_CRITICAL:
8803 ctx = maybe_lookup_ctx (stmt);
8804 gcc_assert (ctx);
8805 lower_omp_critical (gsi_p, ctx);
8806 break;
8807 case GIMPLE_OMP_ATOMIC_LOAD:
8808 if ((ctx || task_shared_vars)
8809 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8810 as_a <gomp_atomic_load *> (stmt)),
8811 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8812 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8813 break;
8814 case GIMPLE_OMP_TARGET:
8815 ctx = maybe_lookup_ctx (stmt);
8816 gcc_assert (ctx);
8817 lower_omp_target (gsi_p, ctx);
8818 break;
8819 case GIMPLE_OMP_TEAMS:
8820 ctx = maybe_lookup_ctx (stmt);
8821 gcc_assert (ctx);
8822 lower_omp_teams (gsi_p, ctx);
8823 break;
8824 case GIMPLE_OMP_GRID_BODY:
8825 ctx = maybe_lookup_ctx (stmt);
8826 gcc_assert (ctx);
8827 lower_omp_grid_body (gsi_p, ctx);
8828 break;
8829 case GIMPLE_CALL:
8830 tree fndecl;
8831 call_stmt = as_a <gcall *> (stmt);
8832 fndecl = gimple_call_fndecl (call_stmt);
8833 if (fndecl
8834 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8835 switch (DECL_FUNCTION_CODE (fndecl))
8836 {
8837 case BUILT_IN_GOMP_BARRIER:
8838 if (ctx == NULL)
8839 break;
8840 /* FALLTHRU */
8841 case BUILT_IN_GOMP_CANCEL:
8842 case BUILT_IN_GOMP_CANCELLATION_POINT:
8843 omp_context *cctx;
8844 cctx = ctx;
8845 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8846 cctx = cctx->outer;
8847 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8848 if (!cctx->cancellable)
8849 {
8850 if (DECL_FUNCTION_CODE (fndecl)
8851 == BUILT_IN_GOMP_CANCELLATION_POINT)
8852 {
8853 stmt = gimple_build_nop ();
8854 gsi_replace (gsi_p, stmt, false);
8855 }
8856 break;
8857 }
8858 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8859 {
8860 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8861 gimple_call_set_fndecl (call_stmt, fndecl);
8862 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8863 }
8864 tree lhs;
8865 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8866 gimple_call_set_lhs (call_stmt, lhs);
8867 tree fallthru_label;
8868 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8869 gimple *g;
8870 g = gimple_build_label (fallthru_label);
8871 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8872 g = gimple_build_cond (NE_EXPR, lhs,
8873 fold_convert (TREE_TYPE (lhs),
8874 boolean_false_node),
8875 cctx->cancel_label, fallthru_label);
8876 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8877 break;
8878 default:
8879 break;
8880 }
8881 /* FALLTHRU */
8882 default:
8883 if ((ctx || task_shared_vars)
8884 && walk_gimple_op (stmt, lower_omp_regimplify_p,
8885 ctx ? NULL : &wi))
8886 {
8887 /* Just remove clobbers, this should happen only if we have
8888 "privatized" local addressable variables in SIMD regions,
8889 the clobber isn't needed in that case and gimplifying address
8890 of the ARRAY_REF into a pointer and creating MEM_REF based
8891 clobber would create worse code than we get with the clobber
8892 dropped. */
8893 if (gimple_clobber_p (stmt))
8894 {
8895 gsi_replace (gsi_p, gimple_build_nop (), true);
8896 break;
8897 }
8898 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8899 }
8900 break;
8901 }
8902}
8903
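/* Lower all OMP constructs in the statement sequence *BODY within context
   CTX, then fold statements inside offloading and taskreg regions that
   gimplification deliberately left unfolded.  */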
8904 static void
8905 lower_omp (gimple_seq *body, omp_context *ctx)
8906 {
8907 location_t saved_location = input_location;
8908 gimple_stmt_iterator gsi;
8909 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8910 lower_omp_1 (&gsi, ctx);
8911 /* During gimplification, we haven't folded statments inside offloading
8912 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
8913 if (target_nesting_level || taskreg_nesting_level)
8914 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8915 fold_stmt (&gsi);
8916 input_location = saved_location;
8917}
8918
8919 /* Main entry point. */
8920
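/* Scan the current function for OMP constructs, building an omp_context for
   each, lower them all, and release the contexts again.  The pass always
   runs so that PROP_gimple_lomp is provided even when there is nothing to
   lower.  */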
8921static unsigned int
8922execute_lower_omp (void)
9bd46bc9 8923{
629b3d75
MJ
8924 gimple_seq body;
8925 int i;
8926 omp_context *ctx;
8927
8928 /* This pass always runs, to provide PROP_gimple_lomp.
8929 But often, there is nothing to do. */
8930 if (flag_openacc == 0 && flag_openmp == 0
8931 && flag_openmp_simd == 0)
8932 return 0;
8933
8934 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8935 delete_omp_context);
8936
8937 body = gimple_body (current_function_decl);
8938
8939 if (hsa_gen_requested_p ())
8940 omp_grid_gridify_all_targets (&body);
8941
8942 scan_omp (&body, NULL);
8943 gcc_assert (taskreg_nesting_level == 0);
8944 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
8945 finish_taskreg_scan (ctx);
8946 taskreg_contexts.release ();
8947
8948 if (all_contexts->root)
8949 {
8950 if (task_shared_vars)
8951 push_gimplify_context ();
8952 lower_omp (&body, NULL);
8953 if (task_shared_vars)
8954 pop_gimplify_context (NULL);
8955 }
8956
8957 if (all_contexts)
8958 {
8959 splay_tree_delete (all_contexts);
8960 all_contexts = NULL;
8961 }
8962 BITMAP_FREE (task_shared_vars);
8963
8964 /* If current function is a method, remove artificial dummy VAR_DECL created
8965 for non-static data member privatization, they aren't needed for
8966 debuginfo nor anything else, have been already replaced everywhere in the
8967 IL and cause problems with LTO. */
8968 if (DECL_ARGUMENTS (current_function_decl)
8969 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
8970 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
8971 == POINTER_TYPE))
8972 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
8973 return 0;
8974}
8975
8976 namespace {
8977
8978 const pass_data pass_data_lower_omp =
8979 {
8980 GIMPLE_PASS, /* type */
8981 "omplower", /* name */
8982 OPTGROUP_OMP, /* optinfo_flags */
8983 TV_NONE, /* tv_id */
8984 PROP_gimple_any, /* properties_required */
8985 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
8986 0, /* properties_destroyed */
8987 0, /* todo_flags_start */
8988 0, /* todo_flags_finish */
8989};
8990
8991class pass_lower_omp : public gimple_opt_pass
8992{
8993public:
8994 pass_lower_omp (gcc::context *ctxt)
8995 : gimple_opt_pass (pass_data_lower_omp, ctxt)
8996 {}
8997
8998 /* opt_pass methods: */
8999 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9000
9001 }; // class pass_lower_omp
9002
9003 } // anon namespace
9004
9005gimple_opt_pass *
9006make_pass_lower_omp (gcc::context *ctxt)
9007{
9008 return new pass_lower_omp (ctxt);
9009 }
9010\f
9011/* The following is a utility to diagnose structured block violations.
9012 It is not part of the "omplower" pass, as that's invoked too late. It
9013 should be invoked by the respective front ends after gimplification. */
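/* As an illustrative example only: a jump that leaves a parallel region,
   such as

     #pragma omp parallel
     {
       ...
       goto fail;
     }
     fail:;

   is reported below as "invalid branch to/from OpenMP structured block".  */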
9014
9015 static splay_tree all_labels;
9016
9017/* Check for mismatched contexts and generate an error if needed. Return
9018 true if an error is detected. */
9019
9020static bool
9021diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9022 gimple *branch_ctx, gimple *label_ctx)
9023{
9024 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9025 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9026
9027 if (label_ctx == branch_ctx)
9028 return false;
9029
9030 const char* kind = NULL;
9031
9032 if (flag_openacc)
9033 {
9034 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9035 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9036 {
9037 gcc_checking_assert (kind == NULL);
9038 kind = "OpenACC";
9039 }
9040 }
9041 if (kind == NULL)
9042 {
9043 gcc_checking_assert (flag_openmp || flag_openmp_simd);
9044 kind = "OpenMP";
9045 }
9046
9047 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9048 so we could traverse it and issue a correct "exit" or "enter" error
9049 message upon a structured block violation.
9050
9051 We built the context by building a list with tree_cons'ing, but there is
9052 no easy counterpart in gimple tuples. It seems like far too much work
9053 for issuing exit/enter error messages. If someone really misses the
9054 distinct error message... patches welcome. */
9055
9056#if 0
9057 /* Try to avoid confusing the user by producing and error message
9058 with correct "exit" or "enter" verbiage. We prefer "exit"
9059 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
9060 if (branch_ctx == NULL)
9061 exit_p = false;
9062 else
9063 {
9064 while (label_ctx)
9065 {
9066 if (TREE_VALUE (label_ctx) == branch_ctx)
9067 {
9068 exit_p = false;
9069 break;
9070 }
9071 label_ctx = TREE_CHAIN (label_ctx);
9072 }
9073 }
9074
9075 if (exit_p)
9076 error ("invalid exit from %s structured block", kind);
9077 else
9078 error ("invalid entry to %s structured block", kind);
9079#endif
9080
9081 /* If it's obvious we have an invalid entry, be specific about the error. */
9082 if (branch_ctx == NULL)
9083 error ("invalid entry to %s structured block", kind);
9084 else
9085 {
9086 /* Otherwise, be vague and lazy, but efficient. */
9087 error ("invalid branch to/from %s structured block", kind);
9088 }
9089
9090 gsi_replace (gsi_p, gimple_build_nop (), false);
9091 return true;
9092}
9093
9094/* Pass 1: Create a minimal tree of structured blocks, and record
9095 where each label is found. */
9096
9097static tree
9098diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9099 struct walk_stmt_info *wi)
9100 {
9101 gimple *context = (gimple *) wi->info;
9102 gimple *inner_context;
9103 gimple *stmt = gsi_stmt (*gsi_p);
9104
9105 *handled_ops_p = true;
9106
9107 switch (gimple_code (stmt))
9108 {
9109 WALK_SUBSTMTS;
9110
9111 case GIMPLE_OMP_PARALLEL:
9112 case GIMPLE_OMP_TASK:
9113 case GIMPLE_OMP_SECTIONS:
9114 case GIMPLE_OMP_SINGLE:
9115 case GIMPLE_OMP_SECTION:
9116 case GIMPLE_OMP_MASTER:
9117 case GIMPLE_OMP_ORDERED:
9118 case GIMPLE_OMP_CRITICAL:
9119 case GIMPLE_OMP_TARGET:
9120 case GIMPLE_OMP_TEAMS:
9121 case GIMPLE_OMP_TASKGROUP:
9122 /* The minimal context here is just the current OMP construct. */
9123 inner_context = stmt;
9124 wi->info = inner_context;
9125 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9126 wi->info = context;
9127 break;
9128
9129 case GIMPLE_OMP_FOR:
9130 inner_context = stmt;
9131 wi->info = inner_context;
9132 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9133 walk them. */
9134 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9135 diagnose_sb_1, NULL, wi);
9136 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9137 wi->info = context;
9138 break;
9139
9140 case GIMPLE_LABEL:
9141 splay_tree_insert (all_labels,
9142 (splay_tree_key) gimple_label_label (
9143 as_a <glabel *> (stmt)),
9144 (splay_tree_value) context);
9145 break;
9146
9147 default:
9148 break;
9149 }
9150
9151 return NULL_TREE;
9152}
9153
9154/* Pass 2: Check each branch and see if its context differs from that of
9155 the destination label's context. */
9156
9157static tree
9158diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9159 struct walk_stmt_info *wi)
9160 {
9161 gimple *context = (gimple *) wi->info;
9162 splay_tree_node n;
9163 gimple *stmt = gsi_stmt (*gsi_p);
9164
9165 *handled_ops_p = true;
9166
9167 switch (gimple_code (stmt))
9168 {
9169 WALK_SUBSTMTS;
9170
9171 case GIMPLE_OMP_PARALLEL:
9172 case GIMPLE_OMP_TASK:
9173 case GIMPLE_OMP_SECTIONS:
9174 case GIMPLE_OMP_SINGLE:
9175 case GIMPLE_OMP_SECTION:
9176 case GIMPLE_OMP_MASTER:
9177 case GIMPLE_OMP_ORDERED:
9178 case GIMPLE_OMP_CRITICAL:
9179 case GIMPLE_OMP_TARGET:
9180 case GIMPLE_OMP_TEAMS:
9181 case GIMPLE_OMP_TASKGROUP:
9182 wi->info = stmt;
9183 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9184 wi->info = context;
9185 break;
9186
9187 case GIMPLE_OMP_FOR:
9188 wi->info = stmt;
9189 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9190 walk them. */
9191 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9192 diagnose_sb_2, NULL, wi);
9193 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9194 wi->info = context;
9195 break;
9196
9197 case GIMPLE_COND:
9198 {
9199 gcond *cond_stmt = as_a <gcond *> (stmt);
9200 tree lab = gimple_cond_true_label (cond_stmt);
9201 if (lab)
9202 {
9203 n = splay_tree_lookup (all_labels,
9204 (splay_tree_key) lab);
9205 diagnose_sb_0 (gsi_p, context,
9206 n ? (gimple *) n->value : NULL);
9207 }
9208 lab = gimple_cond_false_label (cond_stmt);
9209 if (lab)
9210 {
9211 n = splay_tree_lookup (all_labels,
9212 (splay_tree_key) lab);
9213 diagnose_sb_0 (gsi_p, context,
9214 n ? (gimple *) n->value : NULL);
9215 }
9216 }
9217 break;
9218
9219 case GIMPLE_GOTO:
9220 {
9221 tree lab = gimple_goto_dest (stmt);
9222 if (TREE_CODE (lab) != LABEL_DECL)
9223 break;
9224
9225 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9226 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9227 }
9228 break;
9229
9230 case GIMPLE_SWITCH:
9231 {
9232 gswitch *switch_stmt = as_a <gswitch *> (stmt);
9233 unsigned int i;
9234 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9235 {
9236 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9237 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9238 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9239 break;
9240 }
9241 }
9242 break;
9243
9244 case GIMPLE_RETURN:
9245 diagnose_sb_0 (gsi_p, context, NULL);
9246 break;
9247
9248 default:
9249 break;
9250 }
9251
9252 return NULL_TREE;
9253}
9254
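/* Entry point for the diagnostic: walk the function body twice, first
   recording the OMP context of every label (diagnose_sb_1), then checking
   each branch against the context of its destination label
   (diagnose_sb_2).  */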
9255static unsigned int
9256diagnose_omp_structured_block_errors (void)
9257 {
9258 struct walk_stmt_info wi;
9259 gimple_seq body = gimple_body (current_function_decl);
9260
9261 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9262
9263 memset (&wi, 0, sizeof (wi));
9264 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9265
9266 memset (&wi, 0, sizeof (wi));
9267 wi.want_locations = true;
9268 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9269
9270 gimple_set_body (current_function_decl, body);
9271
9272 splay_tree_delete (all_labels);
9273 all_labels = NULL;
9274
9275 return 0;
9276}
9277
9278namespace {
9279
9280 const pass_data pass_data_diagnose_omp_blocks =
9281{
9282 GIMPLE_PASS, /* type */
9283 "*diagnose_omp_blocks", /* name */
9284 OPTGROUP_OMP, /* optinfo_flags */
9285 TV_NONE, /* tv_id */
9286 PROP_gimple_any, /* properties_required */
9287 0, /* properties_provided */
9288 0, /* properties_destroyed */
9289 0, /* todo_flags_start */
9290 0, /* todo_flags_finish */
9291};
9292
9293 class pass_diagnose_omp_blocks : public gimple_opt_pass
9294{
9295public:
9296 pass_diagnose_omp_blocks (gcc::context *ctxt)
9297 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9298 {}
9299
9300 /* opt_pass methods: */
9301 virtual bool gate (function *)
9302 {
9303 return flag_openacc || flag_openmp || flag_openmp_simd;
9304 }
9305 virtual unsigned int execute (function *)
9306 {
9307 return diagnose_omp_structured_block_errors ();
9308 }
9309
9310 }; // class pass_diagnose_omp_blocks
9311
9312} // anon namespace
9313
9314gimple_opt_pass *
9315 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9316 {
9317 return new pass_diagnose_omp_blocks (ctxt);
9318 }
9319\f
9320
9321 #include "gt-omp-low.h"