/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
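
/* As a rough sketch of the overall transformation: a construct like

       #pragma omp parallel shared(n)
       body;

   is outlined into a child function (see create_omp_child_function) that
   receives its data through a single ".omp_data_i" pointer argument, while
   the construct itself becomes, approximately,

       .omp_data_o.n = n;
       GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);

   The exact libgomp entry point, its flags and the layout of the data
   record depend on the construct and clauses used; this only illustrates
   the general shape of the lowering.  */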

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* And a hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
        v = TREE_OPERAND (v, 0);
        continue;
      case PARM_DECL:
        if (DECL_CONTEXT (v) == current_function_decl
            && DECL_ARTIFICIAL (v)
            && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
          return v;
        return NULL_TREE;
      default:
        return NULL_TREE;
      }
}
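
/* For example, given a C++ member function that names a non-static data
   member in a data-sharing clause, roughly:

       struct S
       {
         int m;
         void f ()
         {
           #pragma omp parallel firstprivate (m)
           use (m);
         }
       };

   the front end refers to M through an artificial VAR_DECL whose
   DECL_VALUE_EXPR is a COMPONENT_REF like this->m; the routine above walks
   such an expression back to the artificial "this" PARM_DECL.  */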

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
         && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
         && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form
   allows for the variable form to not have been entered, otherwise we
   assert that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (TREE_ADDRESSABLE (decl))
        return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
         for these.  */
      if (TREE_READONLY (decl)
          || ((TREE_CODE (decl) == RESULT_DECL
               || TREE_CODE (decl) == PARM_DECL)
              && DECL_BY_REFERENCE (decl)))
        return false;

      /* Disallow copy-in/out in nested parallel if
         decl is shared in outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
              break;

          if (up)
            {
              tree c;

              for (c = gimple_omp_taskreg_clauses (up->stmt);
                   c; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                    && OMP_CLAUSE_DECL (c) == decl)
                  break;

              if (c)
                goto maybe_mark_addressable_and_ret;
            }
        }

      /* For tasks avoid using copy-in/out.  As tasks can be
         deferred or executed in different thread, when GOMP_task
         returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
        {
          tree outer;
        maybe_mark_addressable_and_ret:
          outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}
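
/* The result is used when laying out the sender/receiver record: a "true"
   return means the field holds the variable's address and the child code
   dereferences it, while "false" means the value itself is copied into the
   record (copy-in) and, where required, copied back out.  */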

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
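
/* E.g. for a by-value field this builds an expression of the shape
   (*.omp_data_i).var, and for a by-ref field *(*.omp_data_i).var, where
   .omp_data_i is the child function's receiver argument.  */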

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
                     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
            && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
           || (code == OMP_CLAUSE_PRIVATE
               && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
         even private vars in its linear etc. clauses.
         Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
         to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
        x = lookup_decl (var, outer);
      else if (outer)
        x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
        x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
        = splay_tree_lookup (outer->field_map,
                             (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
        {
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
            x = var;
          else
            x = lookup_decl (var, outer);
        }
      else
        {
          tree field = (tree) n->value;
          /* If the receiver record type was remapped in the child function,
             remap the field into the new record type.  */
          x = maybe_lookup_field (field, outer);
          if (x != NULL)
            field = x;

          x = build_simple_mem_ref (outer->receiver_decl);
          x = omp_build_component_ref (x, field);
          if (use_pointer_for_field (var, outer))
            x = build_simple_mem_ref (x);
        }
    }
  else if (outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
        {
          outer = outer->outer;
          gcc_assert (outer
                      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
        }
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
        {
          x = DECL_VALUE_EXPR (var);
          tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
          if (o != t)
            x = unshare_and_remap (x, t, o);
          else
            x = unshare_expr (x);
        }
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */
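/* Roughly, as the body below shows, MASK selects how the field is
   installed: bit 0 adds it to CTX->RECORD_TYPE / FIELD_MAP, bit 1 to the
   task SRECORD_TYPE / SFIELD_MAP, bit 2 turns an array into a pointer to
   pointer, and bit 3 keys the maps off &DECL_UID (VAR) rather than VAR
   itself.  BY_REF makes the field a pointer to VAR's type.  */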

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
              || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
                      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (DECL_SOURCE_LOCATION (var),
                               FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
            {
              sfield = build_decl (DECL_SOURCE_LOCATION (t),
                                   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
        return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
                         TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          DECL_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          decl = OMP_CLAUSE_DECL (c);
          /* Ignore shared directives in teams construct inside of
             target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
              if (is_global_var (odecl))
                break;
              insert_decl_map (&ctx->cb, decl, odecl);
              break;
            }
          gcc_assert (is_taskreg_ctx (ctx));
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              use_pointer_for_field (decl, ctx);
              break;
            }
          by_ref = use_pointer_for_field (decl, NULL);
          if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || omp_is_reference (decl))
            {
              by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;

        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) == MEM_REF)
            {
              tree t = TREE_OPERAND (decl, 0);
              if (TREE_CODE (t) == POINTER_PLUS_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == INDIRECT_REF
                  || TREE_CODE (t) == ADDR_EXPR)
                t = TREE_OPERAND (t, 0);
              install_var_local (t, ctx);
              if (is_taskreg_ctx (ctx)
                  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
                      || (is_task_ctx (ctx)
                          && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
                              || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                                  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
                                      == POINTER_TYPE)))))
                  && !is_variable_sized (t)
                  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
                      || (!OMP_CLAUSE_REDUCTION_TASK (c)
                          && !is_task_ctx (ctx))))
                {
                  by_ref = use_pointer_for_field (t, NULL);
                  if (is_task_ctx (ctx)
                      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
                    {
                      install_var_field (t, false, 1, ctx);
                      install_var_field (t, by_ref, 2, ctx);
                    }
                  else
                    install_var_field (t, by_ref, 3, ctx);
                }
              break;
            }
          if (is_task_ctx (ctx)
              || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
                  && OMP_CLAUSE_REDUCTION_TASK (c)
                  && is_parallel_ctx (ctx)))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
                {
                  by_ref = use_pointer_for_field (decl, ctx);
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
                    install_var_field (decl, by_ref, 3, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
              && OMP_CLAUSE_REDUCTION_TASK (c))
            {
              install_var_local (decl, ctx);
              break;
            }
          goto do_private;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
               || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
              && is_gimple_omp_offloaded (ctx->stmt))
            {
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                install_var_field (decl, !omp_is_reference (decl), 3, ctx);
              else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                install_var_field (decl, true, 3, ctx);
              else
                install_var_field (decl, false, 3, ctx);
            }
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                install_var_field (decl, false, 1, ctx);
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || omp_is_reference (decl)))
                {
                  install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_USE_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_field (decl, true, 3, ctx);
          else
            install_var_field (decl, false, 3, ctx);
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              install_var_local (decl2, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          goto do_private;

        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          install_var_field (decl, false, 3, ctx);
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;

        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_MAP:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
          decl = OMP_CLAUSE_DECL (c);
          /* Global variables with "omp declare target" attribute
             don't need to be copied, the receiver side will use them
             directly.  However, global variables with "omp declare target link"
             attribute need to be copied.  Or when ALWAYS modifier is used.  */
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable
              && !lookup_attribute ("omp declare target link",
                                    DECL_ATTRIBUTES (decl)))
            break;
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
            {
              /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
                 not offloaded; there is nothing to map for those.  */
              if (!is_gimple_omp_offloaded (ctx->stmt)
                  && !POINTER_TYPE_P (TREE_TYPE (decl))
                  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
                break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                  || (OMP_CLAUSE_MAP_KIND (c)
                      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
            {
              if (TREE_CODE (decl) == COMPONENT_REF
                  || (TREE_CODE (decl) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
                      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
                          == REFERENCE_TYPE)))
                break;
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (DECL_P (decl))
            {
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_field (decl2, true, 3, ctx);
                  install_var_local (decl2, ctx);
                  install_var_local (decl, ctx);
                }
              else
                {
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                    install_var_field (decl, true, 7, ctx);
                  else
                    install_var_field (decl, true, 3, ctx);
                  if (is_gimple_omp_offloaded (ctx->stmt)
                      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
                    install_var_local (decl, ctx);
                }
            }
          else
            {
              tree base = get_base_address (decl);
              tree nc = OMP_CLAUSE_CHAIN (c);
              if (DECL_P (base)
                  && nc != NULL_TREE
                  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_DECL (nc) == base
                  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
                  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
                {
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
                }
              else
                {
                  if (ctx->outer)
                    {
                      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
                      decl = OMP_CLAUSE_DECL (c);
                    }
                  gcc_assert (!splay_tree_lookup (ctx->field_map,
                                                  (splay_tree_key) decl));
                  tree field
                    = build_decl (OMP_CLAUSE_LOCATION (c),
                                  FIELD_DECL, NULL_TREE, ptr_type_node);
                  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
                  insert_field_into_struct (ctx->record_type, field);
                  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
                                     (splay_tree_value) field);
                }
            }
          break;

        case OMP_CLAUSE__GRIDDIM_:
          if (ctx->outer)
            {
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
            }
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
        case OMP_CLAUSE_TASK_REDUCTION:
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (decl)
              && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_LINEAR:
        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (is_variable_sized (decl))
            {
              if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
                   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
                  && is_gimple_omp_offloaded (ctx->stmt))
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                  fixup_remapped_decl (decl2, ctx, false);
                }
              install_var_local (decl, ctx);
            }
          fixup_remapped_decl (decl, ctx,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                               && OMP_CLAUSE_PRIVATE_DEBUG (c));
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
              && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) != MEM_REF)
            {
              if (is_variable_sized (decl))
                install_var_local (decl, ctx);
              fixup_remapped_decl (decl, ctx, false);
            }
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_TASK_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_SHARED:
          /* Ignore shared directives in teams construct inside of
             target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
                                                                 ctx->outer)))
                break;
              bool by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 11, ctx);
              break;
            }
          fixup_remapped_decl (decl, ctx, false);
          break;

        case OMP_CLAUSE_MAP:
          if (!is_gimple_omp_offloaded (ctx->stmt))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable)
            break;
          if (DECL_P (decl))
            {
              if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
                  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
                  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
                {
                  tree new_decl = lookup_decl (decl, ctx);
                  TREE_TYPE (new_decl)
                    = remap_type (TREE_TYPE (decl), &ctx->cb);
                }
              else if (DECL_SIZE (decl)
                       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  fixup_remapped_decl (decl2, ctx, false);
                  fixup_remapped_decl (decl, ctx, true);
                }
              else
                fixup_remapped_decl (decl, ctx, false);
            }
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_ALIGNED:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__GRIDDIM_:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  gcc_checking_assert (!scan_array_reductions
                       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
             || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
             || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
            && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
          }
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
                                       task_copy ? "_omp_cpyfn" : "_omp_fn");
}
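
/* E.g. for a function foo containing parallel regions, the outlined child
   functions end up with names like foo._omp_fn.0, foo._omp_fn.1, ..., and
   task copy functions with names like foo._omp_cpyfn.N.  */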

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
                                     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
                       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
        a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
        if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
          *p = TREE_CHAIN (*p);
        else
          {
            tree chain = TREE_CHAIN (*p);
            *p = copy_node (*p);
            p = &TREE_CHAIN (*p);
            *p = chain;
          }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
        g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
                            DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
                                 ? "omp target entrypoint"
                                 : "omp declare target");
      DECL_ATTRIBUTES (decl)
        = tree_cons (get_identifier (target_attr),
                     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
                  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
                  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
                      PARM_DECL, get_identifier (".omp_data_o"),
                      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }
b8698a0f 1749 /* Allocate memory for the function structure. The call to
50674e96 1750 allocate_struct_function clobbers CFUN, so we need to restore
953ff289 1751 it afterward. */
db2960f4 1752 push_struct_function (decl);
726a989a 1753 cfun->function_end_locus = gimple_location (ctx->stmt);
381cdae4 1754 init_tree_ssa (cfun);
db2960f4 1755 pop_cfun ();
953ff289
DN
1756}
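/* As an illustrative sketch (function and variable names here are assumed,
   not taken from the sources): given

       void foo (void)
       {
       #pragma omp parallel
	 x++;
       }

   the decl created above is the outlined child function, conventionally
   named along the lines of `foo._omp_fn.0 (void *.omp_data_i)', through
   whose single pointer argument the shared-data record is passed; only
   task copy functions get the extra `.omp_data_o' argument.  */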
1757
acf0174b
JJ
1758/* Callback for walk_gimple_seq. Check if combined parallel
 1759   contains a gimple_omp_for_combined_into_p OMP_FOR. */
1760
629b3d75
MJ
1761tree
1762omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1763 bool *handled_ops_p,
1764 struct walk_stmt_info *wi)
acf0174b 1765{
355fe088 1766 gimple *stmt = gsi_stmt (*gsi_p);
acf0174b
JJ
1767
1768 *handled_ops_p = true;
1769 switch (gimple_code (stmt))
1770 {
1771 WALK_SUBSTMTS;
1772
1773 case GIMPLE_OMP_FOR:
1774 if (gimple_omp_for_combined_into_p (stmt)
d9a6bd32
JJ
1775 && gimple_omp_for_kind (stmt)
1776 == *(const enum gf_mask *) (wi->info))
acf0174b
JJ
1777 {
1778 wi->info = stmt;
1779 return integer_zero_node;
1780 }
1781 break;
1782 default:
1783 break;
1784 }
1785 return NULL;
1786}
1787
28567c40 1788/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
d9a6bd32
JJ
1789
1790static void
1791add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1792 omp_context *outer_ctx)
1793{
1794 struct walk_stmt_info wi;
1795
1796 memset (&wi, 0, sizeof (wi));
1797 wi.val_only = true;
1798 wi.info = (void *) &msk;
629b3d75 1799 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
d9a6bd32
JJ
1800 if (wi.info != (void *) &msk)
1801 {
1802 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1803 struct omp_for_data fd;
629b3d75 1804 omp_extract_for_data (for_stmt, &fd, NULL);
d9a6bd32
JJ
1805 /* We need two temporaries with fd.loop.v type (istart/iend)
1806 and then (fd.collapse - 1) temporaries with the same
1807 type for count2 ... countN-1 vars if not constant. */
1808 size_t count = 2, i;
1809 tree type = fd.iter_type;
1810 if (fd.collapse > 1
1811 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1812 {
1813 count += fd.collapse - 1;
e01d41e5 1814 /* If there are lastprivate clauses on the inner
d9a6bd32
JJ
 1815	     GIMPLE_OMP_FOR, add one more temporary for the total number
1816 of iterations (product of count1 ... countN-1). */
629b3d75 1817 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
e01d41e5
JJ
1818 OMP_CLAUSE_LASTPRIVATE))
1819 count++;
1820 else if (msk == GF_OMP_FOR_KIND_FOR
629b3d75 1821 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
e01d41e5 1822 OMP_CLAUSE_LASTPRIVATE))
d9a6bd32
JJ
1823 count++;
1824 }
1825 for (i = 0; i < count; i++)
1826 {
1827 tree temp = create_tmp_var (type);
1828 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1829 insert_decl_map (&outer_ctx->cb, temp, temp);
1830 OMP_CLAUSE_DECL (c) = temp;
1831 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1832 gimple_omp_taskreg_set_clauses (stmt, c);
1833 }
1834 }
28567c40
JJ
1835 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1836 && omp_find_clause (gimple_omp_task_clauses (stmt),
1837 OMP_CLAUSE_REDUCTION))
1838 {
1839 tree type = build_pointer_type (pointer_sized_int_node);
1840 tree temp = create_tmp_var (type);
1841 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1842 insert_decl_map (&outer_ctx->cb, temp, temp);
1843 OMP_CLAUSE_DECL (c) = temp;
1844 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1845 gimple_omp_task_set_clauses (stmt, c);
1846 }
d9a6bd32
JJ
1847}
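/* A rough sketch of when the above fires (user code invented for
   illustration):

       #pragma omp parallel for collapse(2) lastprivate(k)
       for (i = 0; i < n; i++)
	 for (j = 0; j < m; j++)
	   k = i * m + j;

   The inner GIMPLE_OMP_FOR is combined into the parallel, so _looptemp_
   clauses are prepended to the parallel: two for istart/iend, one per
   extra collapsed dimension when the iteration count is not constant,
   and one more for the total iteration count because of the lastprivate
   clause.  For a taskloop with a task reduction, an additional
   _reductemp_ clause is created as well.  */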
1848
953ff289
DN
1849/* Scan an OpenMP parallel directive. */
1850
1851static void
726a989a 1852scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
953ff289
DN
1853{
1854 omp_context *ctx;
1855 tree name;
538dd0b7 1856 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
953ff289
DN
1857
1858 /* Ignore parallel directives with empty bodies, unless there
1859 are copyin clauses. */
1860 if (optimize > 0
726a989a 1861 && empty_body_p (gimple_omp_body (stmt))
629b3d75 1862 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
726a989a 1863 OMP_CLAUSE_COPYIN) == NULL)
953ff289 1864 {
726a989a 1865 gsi_replace (gsi, gimple_build_nop (), false);
953ff289
DN
1866 return;
1867 }
1868
acf0174b 1869 if (gimple_omp_parallel_combined_p (stmt))
d9a6bd32 1870 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
28567c40
JJ
1871 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1872 OMP_CLAUSE_REDUCTION);
1873 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1874 if (OMP_CLAUSE_REDUCTION_TASK (c))
1875 {
1876 tree type = build_pointer_type (pointer_sized_int_node);
1877 tree temp = create_tmp_var (type);
1878 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1879 if (outer_ctx)
1880 insert_decl_map (&outer_ctx->cb, temp, temp);
1881 OMP_CLAUSE_DECL (c) = temp;
1882 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1883 gimple_omp_parallel_set_clauses (stmt, c);
1884 break;
1885 }
1886 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1887 break;
acf0174b 1888
726a989a 1889 ctx = new_omp_context (stmt, outer_ctx);
5771c391 1890 taskreg_contexts.safe_push (ctx);
a68ab351 1891 if (taskreg_nesting_level > 1)
50674e96 1892 ctx->is_nested = true;
953ff289 1893 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
953ff289 1894 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
953ff289 1895 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
1896 name = build_decl (gimple_location (stmt),
1897 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
1898 DECL_ARTIFICIAL (name) = 1;
1899 DECL_NAMELESS (name) = 1;
953ff289 1900 TYPE_NAME (ctx->record_type) = name;
f7484978 1901 TYPE_ARTIFICIAL (ctx->record_type) = 1;
b2b40051
MJ
1902 if (!gimple_omp_parallel_grid_phony (stmt))
1903 {
1904 create_omp_child_function (ctx, false);
1905 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1906 }
953ff289 1907
726a989a 1908 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
26127932 1909 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
1910
1911 if (TYPE_FIELDS (ctx->record_type) == NULL)
1912 ctx->record_type = ctx->receiver_decl = NULL;
953ff289
DN
1913}
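/* For example (hypothetical user code), a parallel construct with a task
   reduction modifier such as

       #pragma omp parallel reduction(task, +: sum)
       { ... }

   has OMP_CLAUSE_REDUCTION_TASK set on the reduction clause, so
   scan_omp_parallel prepends an artificial _reductemp_ clause; the
   pointer-sized temporary it carries is what GOMP_parallel_reductions
   later expects at the start of the shared-data record (see
   finish_taskreg_scan).  */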
1914
a68ab351
JJ
1915/* Scan an OpenMP task directive. */
1916
1917static void
726a989a 1918scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
a68ab351
JJ
1919{
1920 omp_context *ctx;
726a989a 1921 tree name, t;
538dd0b7 1922 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
a68ab351 1923
fbc698e0
JJ
 1924  /* Ignore task directives with empty bodies, unless they have a depend
1925 clause. */
a68ab351 1926 if (optimize > 0
28567c40 1927 && gimple_omp_body (stmt)
fbc698e0
JJ
1928 && empty_body_p (gimple_omp_body (stmt))
1929 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
a68ab351 1930 {
726a989a 1931 gsi_replace (gsi, gimple_build_nop (), false);
a68ab351
JJ
1932 return;
1933 }
1934
d9a6bd32
JJ
1935 if (gimple_omp_task_taskloop_p (stmt))
1936 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1937
726a989a 1938 ctx = new_omp_context (stmt, outer_ctx);
28567c40
JJ
1939
1940 if (gimple_omp_task_taskwait_p (stmt))
1941 {
1942 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1943 return;
1944 }
1945
5771c391 1946 taskreg_contexts.safe_push (ctx);
a68ab351
JJ
1947 if (taskreg_nesting_level > 1)
1948 ctx->is_nested = true;
1949 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
a68ab351
JJ
1950 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1951 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
1952 name = build_decl (gimple_location (stmt),
1953 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
1954 DECL_ARTIFICIAL (name) = 1;
1955 DECL_NAMELESS (name) = 1;
a68ab351 1956 TYPE_NAME (ctx->record_type) = name;
f7484978 1957 TYPE_ARTIFICIAL (ctx->record_type) = 1;
a68ab351 1958 create_omp_child_function (ctx, false);
726a989a 1959 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
a68ab351 1960
726a989a 1961 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
a68ab351
JJ
1962
1963 if (ctx->srecord_type)
1964 {
1965 name = create_tmp_var_name (".omp_data_a");
c2255bc4
AH
1966 name = build_decl (gimple_location (stmt),
1967 TYPE_DECL, name, ctx->srecord_type);
cd3f04c8
JJ
1968 DECL_ARTIFICIAL (name) = 1;
1969 DECL_NAMELESS (name) = 1;
a68ab351 1970 TYPE_NAME (ctx->srecord_type) = name;
f7484978 1971 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
a68ab351
JJ
1972 create_omp_child_function (ctx, true);
1973 }
1974
26127932 1975 scan_omp (gimple_omp_body_ptr (stmt), ctx);
a68ab351
JJ
1976
1977 if (TYPE_FIELDS (ctx->record_type) == NULL)
1978 {
1979 ctx->record_type = ctx->receiver_decl = NULL;
726a989a
RB
1980 t = build_int_cst (long_integer_type_node, 0);
1981 gimple_omp_task_set_arg_size (stmt, t);
1982 t = build_int_cst (long_integer_type_node, 1);
1983 gimple_omp_task_set_arg_align (stmt, t);
a68ab351 1984 }
5771c391
JJ
1985}
1986
655e5265
JJ
1987/* Helper function for finish_taskreg_scan, called through walk_tree.
 1988   If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
1989 tree, replace it in the expression. */
1990
1991static tree
1992finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1993{
1994 if (VAR_P (*tp))
1995 {
1996 omp_context *ctx = (omp_context *) data;
1997 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1998 if (t != *tp)
1999 {
2000 if (DECL_HAS_VALUE_EXPR_P (t))
2001 t = unshare_expr (DECL_VALUE_EXPR (t));
2002 *tp = t;
2003 }
2004 *walk_subtrees = 0;
2005 }
2006 else if (IS_TYPE_OR_DECL_P (*tp))
2007 *walk_subtrees = 0;
2008 return NULL_TREE;
2009}
5771c391
JJ
2010
2011/* If any decls have been made addressable during scan_omp,
2012 adjust their fields if needed, and layout record types
2013 of parallel/task constructs. */
2014
2015static void
2016finish_taskreg_scan (omp_context *ctx)
2017{
2018 if (ctx->record_type == NULL_TREE)
2019 return;
2020
2021 /* If any task_shared_vars were needed, verify all
28567c40 2022 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
5771c391
JJ
 2023     statements to check whether use_pointer_for_field has changed
 2024     because of that.  If it did, update field types now.  */
2025 if (task_shared_vars)
2026 {
2027 tree c;
2028
2029 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2030 c; c = OMP_CLAUSE_CHAIN (c))
d9a6bd32
JJ
2031 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2032 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5771c391
JJ
2033 {
2034 tree decl = OMP_CLAUSE_DECL (c);
2035
2036 /* Global variables don't need to be copied,
2037 the receiver side will use them directly. */
2038 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2039 continue;
2040 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2041 || !use_pointer_for_field (decl, ctx))
2042 continue;
2043 tree field = lookup_field (decl, ctx);
2044 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2045 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2046 continue;
2047 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2048 TREE_THIS_VOLATILE (field) = 0;
2049 DECL_USER_ALIGN (field) = 0;
fe37c7af 2050 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
5771c391 2051 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
fe37c7af 2052 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
5771c391
JJ
2053 if (ctx->srecord_type)
2054 {
2055 tree sfield = lookup_sfield (decl, ctx);
2056 TREE_TYPE (sfield) = TREE_TYPE (field);
2057 TREE_THIS_VOLATILE (sfield) = 0;
2058 DECL_USER_ALIGN (sfield) = 0;
fe37c7af 2059 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
5771c391 2060 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
fe37c7af 2061 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
5771c391
JJ
2062 }
2063 }
2064 }
2065
2066 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
28567c40
JJ
2067 {
2068 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2069 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2070 if (c)
2071 {
2072 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2073 expects to find it at the start of data. */
2074 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2075 tree *p = &TYPE_FIELDS (ctx->record_type);
2076 while (*p)
2077 if (*p == f)
2078 {
2079 *p = DECL_CHAIN (*p);
2080 break;
2081 }
2082 else
2083 p = &DECL_CHAIN (*p);
2084 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2085 TYPE_FIELDS (ctx->record_type) = f;
2086 }
2087 layout_type (ctx->record_type);
2088 fixup_child_record_type (ctx);
2089 }
2090 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
5771c391
JJ
2091 {
2092 layout_type (ctx->record_type);
2093 fixup_child_record_type (ctx);
2094 }
a68ab351
JJ
2095 else
2096 {
5771c391 2097 location_t loc = gimple_location (ctx->stmt);
a68ab351
JJ
2098 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2099 /* Move VLA fields to the end. */
2100 p = &TYPE_FIELDS (ctx->record_type);
2101 while (*p)
2102 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2103 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2104 {
2105 *q = *p;
2106 *p = TREE_CHAIN (*p);
2107 TREE_CHAIN (*q) = NULL_TREE;
2108 q = &TREE_CHAIN (*q);
2109 }
2110 else
910ad8de 2111 p = &DECL_CHAIN (*p);
a68ab351 2112 *p = vla_fields;
d9a6bd32
JJ
2113 if (gimple_omp_task_taskloop_p (ctx->stmt))
2114 {
 2115	  /* Move the fields corresponding to the first and second _looptemp_
 2116	     clauses first.  These are filled by GOMP_taskloop
2117 and thus need to be in specific positions. */
28567c40
JJ
2118 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2119 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
629b3d75 2120 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
d9a6bd32 2121 OMP_CLAUSE__LOOPTEMP_);
28567c40 2122 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
d9a6bd32
JJ
2123 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2124 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
28567c40 2125 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
d9a6bd32
JJ
2126 p = &TYPE_FIELDS (ctx->record_type);
2127 while (*p)
28567c40 2128 if (*p == f1 || *p == f2 || *p == f3)
d9a6bd32
JJ
2129 *p = DECL_CHAIN (*p);
2130 else
2131 p = &DECL_CHAIN (*p);
2132 DECL_CHAIN (f1) = f2;
28567c40
JJ
2133 if (c3)
2134 {
2135 DECL_CHAIN (f2) = f3;
2136 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2137 }
2138 else
2139 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
d9a6bd32
JJ
2140 TYPE_FIELDS (ctx->record_type) = f1;
2141 if (ctx->srecord_type)
2142 {
2143 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2144 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
28567c40
JJ
2145 if (c3)
2146 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
d9a6bd32
JJ
2147 p = &TYPE_FIELDS (ctx->srecord_type);
2148 while (*p)
28567c40 2149 if (*p == f1 || *p == f2 || *p == f3)
d9a6bd32
JJ
2150 *p = DECL_CHAIN (*p);
2151 else
2152 p = &DECL_CHAIN (*p);
2153 DECL_CHAIN (f1) = f2;
2154 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
28567c40
JJ
2155 if (c3)
2156 {
2157 DECL_CHAIN (f2) = f3;
2158 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2159 }
2160 else
2161 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
d9a6bd32
JJ
2162 TYPE_FIELDS (ctx->srecord_type) = f1;
2163 }
2164 }
a68ab351
JJ
2165 layout_type (ctx->record_type);
2166 fixup_child_record_type (ctx);
2167 if (ctx->srecord_type)
2168 layout_type (ctx->srecord_type);
5771c391
JJ
2169 tree t = fold_convert_loc (loc, long_integer_type_node,
2170 TYPE_SIZE_UNIT (ctx->record_type));
655e5265
JJ
2171 if (TREE_CODE (t) != INTEGER_CST)
2172 {
2173 t = unshare_expr (t);
2174 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2175 }
5771c391 2176 gimple_omp_task_set_arg_size (ctx->stmt, t);
726a989a 2177 t = build_int_cst (long_integer_type_node,
a68ab351 2178 TYPE_ALIGN_UNIT (ctx->record_type));
5771c391 2179 gimple_omp_task_set_arg_align (ctx->stmt, t);
a68ab351
JJ
2180 }
2181}
2182
e4834818 2183/* Find the enclosing offload context. */
953ff289 2184
41dbbb37
TS
2185static omp_context *
2186enclosing_target_ctx (omp_context *ctx)
2187{
e4834818
NS
2188 for (; ctx; ctx = ctx->outer)
2189 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2190 break;
2191
41dbbb37
TS
2192 return ctx;
2193}
2194
e4834818
NS
2195/* Return true if ctx is part of an oacc kernels region. */
2196
41dbbb37 2197static bool
e4834818 2198ctx_in_oacc_kernels_region (omp_context *ctx)
41dbbb37 2199{
e4834818
NS
 2200  for (; ctx != NULL; ctx = ctx->outer)
2201 {
2202 gimple *stmt = ctx->stmt;
2203 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2204 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2205 return true;
2206 }
2207
2208 return false;
2209}
2210
 2211/* Check the parallelism clauses inside a kernels region.
2212 Until kernels handling moves to use the same loop indirection
2213 scheme as parallel, we need to do this checking early. */
2214
2215static unsigned
2216check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2217{
2218 bool checking = true;
2219 unsigned outer_mask = 0;
2220 unsigned this_mask = 0;
2221 bool has_seq = false, has_auto = false;
2222
2223 if (ctx->outer)
2224 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2225 if (!stmt)
2226 {
2227 checking = false;
2228 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2229 return outer_mask;
2230 stmt = as_a <gomp_for *> (ctx->stmt);
2231 }
2232
2233 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2234 {
2235 switch (OMP_CLAUSE_CODE (c))
2236 {
2237 case OMP_CLAUSE_GANG:
2238 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2239 break;
2240 case OMP_CLAUSE_WORKER:
2241 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2242 break;
2243 case OMP_CLAUSE_VECTOR:
2244 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2245 break;
2246 case OMP_CLAUSE_SEQ:
2247 has_seq = true;
2248 break;
2249 case OMP_CLAUSE_AUTO:
2250 has_auto = true;
2251 break;
2252 default:
2253 break;
2254 }
2255 }
2256
2257 if (checking)
2258 {
2259 if (has_seq && (this_mask || has_auto))
2260 error_at (gimple_location (stmt), "%<seq%> overrides other"
2261 " OpenACC loop specifiers");
2262 else if (has_auto && this_mask)
2263 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2264 " OpenACC loop specifiers");
2265
2266 if (this_mask & outer_mask)
2267 error_at (gimple_location (stmt), "inner loop uses same"
2268 " OpenACC parallelism as containing loop");
2269 }
2270
2271 return outer_mask | this_mask;
41dbbb37
TS
2272}
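/* A hypothetical example of what the check above rejects inside an OpenACC
   kernels region:

       #pragma acc kernels
       #pragma acc loop gang
       for (i = 0; i < n; i++)
	 {
       #pragma acc loop gang       // error: inner loop uses same OpenACC
	   for (j = 0; j < m; j++)  //        parallelism as containing loop
	     ...
	 }

   Likewise `seq' combined with gang/worker/vector or `auto', and `auto'
   combined with gang/worker/vector, are diagnosed on the same loop.  */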
2273
2274/* Scan a GIMPLE_OMP_FOR. */
953ff289 2275
6e6cf7b0 2276static omp_context *
538dd0b7 2277scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
953ff289 2278{
50674e96 2279 omp_context *ctx;
726a989a 2280 size_t i;
41dbbb37
TS
2281 tree clauses = gimple_omp_for_clauses (stmt);
2282
50674e96 2283 ctx = new_omp_context (stmt, outer_ctx);
953ff289 2284
41dbbb37
TS
2285 if (is_gimple_omp_oacc (stmt))
2286 {
e4834818
NS
2287 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2288
2289 if (!tgt || is_oacc_parallel (tgt))
2290 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2291 {
2292 char const *check = NULL;
2293
2294 switch (OMP_CLAUSE_CODE (c))
2295 {
2296 case OMP_CLAUSE_GANG:
2297 check = "gang";
2298 break;
2299
2300 case OMP_CLAUSE_WORKER:
2301 check = "worker";
2302 break;
2303
2304 case OMP_CLAUSE_VECTOR:
2305 check = "vector";
2306 break;
2307
2308 default:
2309 break;
2310 }
2311
2312 if (check && OMP_CLAUSE_OPERAND (c, 0))
2313 error_at (gimple_location (stmt),
2314 "argument not permitted on %qs clause in"
2315 " OpenACC %<parallel%>", check);
2316 }
2317
2318 if (tgt && is_oacc_kernels (tgt))
2319 {
28567c40 2320 /* Strip out reductions, as they are not handled yet. */
e4834818
NS
2321 tree *prev_ptr = &clauses;
2322
2323 while (tree probe = *prev_ptr)
41dbbb37 2324 {
e4834818
NS
2325 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2326
2327 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2328 *prev_ptr = *next_ptr;
2329 else
2330 prev_ptr = next_ptr;
41dbbb37 2331 }
e4834818
NS
2332
2333 gimple_omp_for_set_clauses (stmt, clauses);
2334 check_oacc_kernel_gwv (stmt, ctx);
41dbbb37
TS
2335 }
2336 }
2337
2338 scan_sharing_clauses (clauses, ctx);
953ff289 2339
26127932 2340 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
726a989a 2341 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
a68ab351 2342 {
726a989a
RB
2343 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2344 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2345 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2346 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
a68ab351 2347 }
26127932 2348 scan_omp (gimple_omp_body_ptr (stmt), ctx);
6e6cf7b0 2349 return ctx;
953ff289
DN
2350}
2351
6c7509bc
JJ
2352/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2353
2354static void
2355scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2356 omp_context *outer_ctx)
2357{
2358 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2359 gsi_replace (gsi, bind, false);
2360 gimple_seq seq = NULL;
2361 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2362 tree cond = create_tmp_var_raw (integer_type_node);
2363 DECL_CONTEXT (cond) = current_function_decl;
2364 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2365 gimple_bind_set_vars (bind, cond);
2366 gimple_call_set_lhs (g, cond);
2367 gimple_seq_add_stmt (&seq, g);
2368 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2369 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2370 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2371 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2372 gimple_seq_add_stmt (&seq, g);
2373 g = gimple_build_label (lab1);
2374 gimple_seq_add_stmt (&seq, g);
2375 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2376 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2377 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2378 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2379 gimple_omp_for_set_clauses (new_stmt, clause);
2380 gimple_seq_add_stmt (&seq, new_stmt);
2381 g = gimple_build_goto (lab3);
2382 gimple_seq_add_stmt (&seq, g);
2383 g = gimple_build_label (lab2);
2384 gimple_seq_add_stmt (&seq, g);
2385 gimple_seq_add_stmt (&seq, stmt);
2386 g = gimple_build_label (lab3);
2387 gimple_seq_add_stmt (&seq, g);
2388 gimple_bind_set_body (bind, seq);
2389 update_stmt (bind);
2390 scan_omp_for (new_stmt, outer_ctx);
6e6cf7b0 2391 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
6c7509bc
JJ
2392}
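/* The net effect of scan_omp_simd, as a rough sketch (label and variable
   names here are invented):

       cond = .GOMP_USE_SIMT ();
       if (cond != 0) goto lab1; else goto lab2;
       lab1:
	 #pragma omp simd _simt_ ...   // copy of the loop for SIMT targets
	 goto lab3;
       lab2:
	 #pragma omp simd ...          // the original loop
       lab3:
	 ;

   Both copies are then scanned as separate contexts, and the SIMT copy is
   remembered in the original loop's context via simt_stmt.  */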
2393
953ff289
DN
2394/* Scan an OpenMP sections directive. */
2395
2396static void
538dd0b7 2397scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
953ff289 2398{
953ff289
DN
2399 omp_context *ctx;
2400
2401 ctx = new_omp_context (stmt, outer_ctx);
726a989a 2402 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
26127932 2403 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2404}
2405
2406/* Scan an OpenMP single directive. */
2407
2408static void
538dd0b7 2409scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
953ff289 2410{
953ff289
DN
2411 omp_context *ctx;
2412 tree name;
2413
2414 ctx = new_omp_context (stmt, outer_ctx);
2415 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2416 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2417 name = create_tmp_var_name (".omp_copy_s");
c2255bc4
AH
2418 name = build_decl (gimple_location (stmt),
2419 TYPE_DECL, name, ctx->record_type);
953ff289
DN
2420 TYPE_NAME (ctx->record_type) = name;
2421
726a989a 2422 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
26127932 2423 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2424
2425 if (TYPE_FIELDS (ctx->record_type) == NULL)
2426 ctx->record_type = NULL;
2427 else
2428 layout_type (ctx->record_type);
2429}
2430
41dbbb37 2431/* Scan a GIMPLE_OMP_TARGET. */
acf0174b
JJ
2432
2433static void
538dd0b7 2434scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
acf0174b
JJ
2435{
2436 omp_context *ctx;
2437 tree name;
41dbbb37
TS
2438 bool offloaded = is_gimple_omp_offloaded (stmt);
2439 tree clauses = gimple_omp_target_clauses (stmt);
acf0174b
JJ
2440
2441 ctx = new_omp_context (stmt, outer_ctx);
2442 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
acf0174b
JJ
2443 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2444 name = create_tmp_var_name (".omp_data_t");
2445 name = build_decl (gimple_location (stmt),
2446 TYPE_DECL, name, ctx->record_type);
2447 DECL_ARTIFICIAL (name) = 1;
2448 DECL_NAMELESS (name) = 1;
2449 TYPE_NAME (ctx->record_type) = name;
f7484978 2450 TYPE_ARTIFICIAL (ctx->record_type) = 1;
86938de6 2451
41dbbb37 2452 if (offloaded)
acf0174b
JJ
2453 {
2454 create_omp_child_function (ctx, false);
2455 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2456 }
2457
829c6349 2458 scan_sharing_clauses (clauses, ctx);
acf0174b
JJ
2459 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2460
2461 if (TYPE_FIELDS (ctx->record_type) == NULL)
2462 ctx->record_type = ctx->receiver_decl = NULL;
2463 else
2464 {
2465 TYPE_FIELDS (ctx->record_type)
2466 = nreverse (TYPE_FIELDS (ctx->record_type));
b2b29377
MM
2467 if (flag_checking)
2468 {
2469 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2470 for (tree field = TYPE_FIELDS (ctx->record_type);
2471 field;
2472 field = DECL_CHAIN (field))
2473 gcc_assert (DECL_ALIGN (field) == align);
2474 }
acf0174b 2475 layout_type (ctx->record_type);
41dbbb37 2476 if (offloaded)
acf0174b
JJ
2477 fixup_child_record_type (ctx);
2478 }
2479}
2480
2481/* Scan an OpenMP teams directive. */
2482
2483static void
538dd0b7 2484scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
acf0174b
JJ
2485{
2486 omp_context *ctx = new_omp_context (stmt, outer_ctx);
28567c40
JJ
2487
2488 if (!gimple_omp_teams_host (stmt))
2489 {
2490 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2491 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2492 return;
2493 }
2494 taskreg_contexts.safe_push (ctx);
2495 gcc_assert (taskreg_nesting_level == 1);
2496 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2497 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2498 tree name = create_tmp_var_name (".omp_data_s");
2499 name = build_decl (gimple_location (stmt),
2500 TYPE_DECL, name, ctx->record_type);
2501 DECL_ARTIFICIAL (name) = 1;
2502 DECL_NAMELESS (name) = 1;
2503 TYPE_NAME (ctx->record_type) = name;
2504 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2505 create_omp_child_function (ctx, false);
2506 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2507
acf0174b
JJ
2508 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2509 scan_omp (gimple_omp_body_ptr (stmt), ctx);
28567c40
JJ
2510
2511 if (TYPE_FIELDS (ctx->record_type) == NULL)
2512 ctx->record_type = ctx->receiver_decl = NULL;
acf0174b 2513}
953ff289 2514
41dbbb37 2515/* Check nesting restrictions. */
26127932 2516static bool
355fe088 2517check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
a6fc8e21 2518{
d9a6bd32
JJ
2519 tree c;
2520
b2b40051
MJ
2521 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2522 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2523 the original copy of its contents. */
2524 return true;
2525
41dbbb37
TS
2526 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2527 inside an OpenACC CTX. */
2528 if (!(is_gimple_omp (stmt)
640b7e74
TV
2529 && is_gimple_omp_oacc (stmt))
2530 /* Except for atomic codes that we share with OpenMP. */
2531 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2532 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2533 {
629b3d75 2534 if (oacc_get_fn_attrib (cfun->decl) != NULL)
640b7e74
TV
2535 {
2536 error_at (gimple_location (stmt),
2537 "non-OpenACC construct inside of OpenACC routine");
2538 return false;
2539 }
2540 else
2541 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2542 if (is_gimple_omp (octx->stmt)
2543 && is_gimple_omp_oacc (octx->stmt))
2544 {
2545 error_at (gimple_location (stmt),
2546 "non-OpenACC construct inside of OpenACC region");
2547 return false;
2548 }
41dbbb37
TS
2549 }
2550
74bf76ed
JJ
2551 if (ctx != NULL)
2552 {
2553 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 2554 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
74bf76ed 2555 {
d9a6bd32
JJ
2556 c = NULL_TREE;
2557 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2558 {
2559 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
629b3d75 2560 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
d9f4ea18 2561 {
629b3d75 2562 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
d9f4ea18
JJ
2563 && (ctx->outer == NULL
2564 || !gimple_omp_for_combined_into_p (ctx->stmt)
2565 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2566 || (gimple_omp_for_kind (ctx->outer->stmt)
2567 != GF_OMP_FOR_KIND_FOR)
2568 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2569 {
2570 error_at (gimple_location (stmt),
2571 "%<ordered simd threads%> must be closely "
2572 "nested inside of %<for simd%> region");
2573 return false;
2574 }
2575 return true;
2576 }
d9a6bd32 2577 }
28567c40
JJ
2578 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2579 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
2580 return true;
74bf76ed 2581 error_at (gimple_location (stmt),
d9a6bd32 2582 "OpenMP constructs other than %<#pragma omp ordered simd%>"
28567c40
JJ
2583 " or %<#pragma omp atomic%> may not be nested inside"
2584 " %<simd%> region");
74bf76ed
JJ
2585 return false;
2586 }
acf0174b
JJ
2587 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2588 {
2589 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
56b1c60e
MJ
2590 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2591 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
acf0174b
JJ
2592 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2593 {
2594 error_at (gimple_location (stmt),
d9f4ea18
JJ
2595 "only %<distribute%> or %<parallel%> regions are "
2596 "allowed to be strictly nested inside %<teams%> "
2597 "region");
acf0174b
JJ
2598 return false;
2599 }
2600 }
74bf76ed 2601 }
726a989a 2602 switch (gimple_code (stmt))
a6fc8e21 2603 {
726a989a 2604 case GIMPLE_OMP_FOR:
0aadce73 2605 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
74bf76ed 2606 return true;
acf0174b
JJ
2607 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2608 {
2609 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2610 {
2611 error_at (gimple_location (stmt),
d9f4ea18
JJ
2612 "%<distribute%> region must be strictly nested "
2613 "inside %<teams%> construct");
acf0174b
JJ
2614 return false;
2615 }
2616 return true;
2617 }
d9a6bd32
JJ
2618 /* We split taskloop into task and nested taskloop in it. */
2619 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2620 return true;
68d58afb
NS
2621 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2622 {
2623 bool ok = false;
01914336 2624
68d58afb
NS
2625 if (ctx)
2626 switch (gimple_code (ctx->stmt))
2627 {
2628 case GIMPLE_OMP_FOR:
2629 ok = (gimple_omp_for_kind (ctx->stmt)
2630 == GF_OMP_FOR_KIND_OACC_LOOP);
2631 break;
2632
2633 case GIMPLE_OMP_TARGET:
2634 switch (gimple_omp_target_kind (ctx->stmt))
2635 {
2636 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2637 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2638 ok = true;
2639 break;
2640
2641 default:
2642 break;
2643 }
2644
2645 default:
2646 break;
2647 }
629b3d75 2648 else if (oacc_get_fn_attrib (current_function_decl))
68d58afb
NS
2649 ok = true;
2650 if (!ok)
2651 {
2652 error_at (gimple_location (stmt),
2653 "OpenACC loop directive must be associated with"
2654 " an OpenACC compute region");
2655 return false;
2656 }
2657 }
acf0174b
JJ
2658 /* FALLTHRU */
2659 case GIMPLE_CALL:
2660 if (is_gimple_call (stmt)
2661 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2662 == BUILT_IN_GOMP_CANCEL
2663 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2664 == BUILT_IN_GOMP_CANCELLATION_POINT))
2665 {
2666 const char *bad = NULL;
2667 const char *kind = NULL;
d9f4ea18
JJ
2668 const char *construct
2669 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2670 == BUILT_IN_GOMP_CANCEL)
2671 ? "#pragma omp cancel"
2672 : "#pragma omp cancellation point";
acf0174b
JJ
2673 if (ctx == NULL)
2674 {
2675 error_at (gimple_location (stmt), "orphaned %qs construct",
d9f4ea18 2676 construct);
acf0174b
JJ
2677 return false;
2678 }
9541ffee 2679 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
9439e9a1 2680 ? tree_to_shwi (gimple_call_arg (stmt, 0))
acf0174b
JJ
2681 : 0)
2682 {
2683 case 1:
2684 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2685 bad = "#pragma omp parallel";
2686 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2687 == BUILT_IN_GOMP_CANCEL
2688 && !integer_zerop (gimple_call_arg (stmt, 1)))
2689 ctx->cancellable = true;
2690 kind = "parallel";
2691 break;
2692 case 2:
2693 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2694 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2695 bad = "#pragma omp for";
2696 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2697 == BUILT_IN_GOMP_CANCEL
2698 && !integer_zerop (gimple_call_arg (stmt, 1)))
2699 {
2700 ctx->cancellable = true;
629b3d75 2701 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
acf0174b
JJ
2702 OMP_CLAUSE_NOWAIT))
2703 warning_at (gimple_location (stmt), 0,
2704 "%<#pragma omp cancel for%> inside "
2705 "%<nowait%> for construct");
629b3d75 2706 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
acf0174b
JJ
2707 OMP_CLAUSE_ORDERED))
2708 warning_at (gimple_location (stmt), 0,
2709 "%<#pragma omp cancel for%> inside "
2710 "%<ordered%> for construct");
2711 }
2712 kind = "for";
2713 break;
2714 case 4:
2715 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2716 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2717 bad = "#pragma omp sections";
2718 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2719 == BUILT_IN_GOMP_CANCEL
2720 && !integer_zerop (gimple_call_arg (stmt, 1)))
2721 {
2722 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2723 {
2724 ctx->cancellable = true;
629b3d75 2725 if (omp_find_clause (gimple_omp_sections_clauses
acf0174b
JJ
2726 (ctx->stmt),
2727 OMP_CLAUSE_NOWAIT))
2728 warning_at (gimple_location (stmt), 0,
2729 "%<#pragma omp cancel sections%> inside "
2730 "%<nowait%> sections construct");
2731 }
2732 else
2733 {
2734 gcc_assert (ctx->outer
2735 && gimple_code (ctx->outer->stmt)
2736 == GIMPLE_OMP_SECTIONS);
2737 ctx->outer->cancellable = true;
629b3d75 2738 if (omp_find_clause (gimple_omp_sections_clauses
acf0174b
JJ
2739 (ctx->outer->stmt),
2740 OMP_CLAUSE_NOWAIT))
2741 warning_at (gimple_location (stmt), 0,
2742 "%<#pragma omp cancel sections%> inside "
2743 "%<nowait%> sections construct");
2744 }
2745 }
2746 kind = "sections";
2747 break;
2748 case 8:
daa8c1d7
JJ
2749 if (!is_task_ctx (ctx)
2750 && (!is_taskloop_ctx (ctx)
2751 || ctx->outer == NULL
2752 || !is_task_ctx (ctx->outer)))
acf0174b
JJ
2753 bad = "#pragma omp task";
2754 else
d9f4ea18
JJ
2755 {
2756 for (omp_context *octx = ctx->outer;
2757 octx; octx = octx->outer)
2758 {
2759 switch (gimple_code (octx->stmt))
2760 {
2761 case GIMPLE_OMP_TASKGROUP:
2762 break;
2763 case GIMPLE_OMP_TARGET:
2764 if (gimple_omp_target_kind (octx->stmt)
2765 != GF_OMP_TARGET_KIND_REGION)
2766 continue;
2767 /* FALLTHRU */
2768 case GIMPLE_OMP_PARALLEL:
2769 case GIMPLE_OMP_TEAMS:
2770 error_at (gimple_location (stmt),
2771 "%<%s taskgroup%> construct not closely "
2772 "nested inside of %<taskgroup%> region",
2773 construct);
2774 return false;
daa8c1d7
JJ
2775 case GIMPLE_OMP_TASK:
2776 if (gimple_omp_task_taskloop_p (octx->stmt)
2777 && octx->outer
2778 && is_taskloop_ctx (octx->outer))
2779 {
2780 tree clauses
2781 = gimple_omp_for_clauses (octx->outer->stmt);
2782 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2783 break;
2784 }
2785 continue;
d9f4ea18
JJ
2786 default:
2787 continue;
2788 }
2789 break;
2790 }
2791 ctx->cancellable = true;
2792 }
acf0174b
JJ
2793 kind = "taskgroup";
2794 break;
2795 default:
2796 error_at (gimple_location (stmt), "invalid arguments");
2797 return false;
2798 }
2799 if (bad)
2800 {
2801 error_at (gimple_location (stmt),
2802 "%<%s %s%> construct not closely nested inside of %qs",
d9f4ea18 2803 construct, kind, bad);
acf0174b
JJ
2804 return false;
2805 }
2806 }
74bf76ed 2807 /* FALLTHRU */
726a989a
RB
2808 case GIMPLE_OMP_SECTIONS:
2809 case GIMPLE_OMP_SINGLE:
a6fc8e21 2810 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2811 switch (gimple_code (ctx->stmt))
a6fc8e21 2812 {
726a989a 2813 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2814 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2815 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2816 break;
2817 /* FALLTHRU */
726a989a
RB
2818 case GIMPLE_OMP_SECTIONS:
2819 case GIMPLE_OMP_SINGLE:
2820 case GIMPLE_OMP_ORDERED:
2821 case GIMPLE_OMP_MASTER:
2822 case GIMPLE_OMP_TASK:
acf0174b 2823 case GIMPLE_OMP_CRITICAL:
726a989a 2824 if (is_gimple_call (stmt))
a68ab351 2825 {
acf0174b
JJ
2826 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2827 != BUILT_IN_GOMP_BARRIER)
2828 return true;
26127932
JJ
2829 error_at (gimple_location (stmt),
2830 "barrier region may not be closely nested inside "
d9f4ea18
JJ
2831 "of work-sharing, %<critical%>, %<ordered%>, "
2832 "%<master%>, explicit %<task%> or %<taskloop%> "
2833 "region");
26127932 2834 return false;
a68ab351 2835 }
26127932
JJ
2836 error_at (gimple_location (stmt),
2837 "work-sharing region may not be closely nested inside "
d9f4ea18
JJ
2838 "of work-sharing, %<critical%>, %<ordered%>, "
2839 "%<master%>, explicit %<task%> or %<taskloop%> region");
26127932 2840 return false;
726a989a 2841 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2842 case GIMPLE_OMP_TEAMS:
26127932 2843 return true;
d9f4ea18
JJ
2844 case GIMPLE_OMP_TARGET:
2845 if (gimple_omp_target_kind (ctx->stmt)
2846 == GF_OMP_TARGET_KIND_REGION)
2847 return true;
2848 break;
a6fc8e21
JJ
2849 default:
2850 break;
2851 }
2852 break;
726a989a 2853 case GIMPLE_OMP_MASTER:
a6fc8e21 2854 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2855 switch (gimple_code (ctx->stmt))
a6fc8e21 2856 {
726a989a 2857 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2858 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2859 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2860 break;
2861 /* FALLTHRU */
726a989a
RB
2862 case GIMPLE_OMP_SECTIONS:
2863 case GIMPLE_OMP_SINGLE:
2864 case GIMPLE_OMP_TASK:
26127932 2865 error_at (gimple_location (stmt),
d9f4ea18
JJ
2866 "%<master%> region may not be closely nested inside "
2867 "of work-sharing, explicit %<task%> or %<taskloop%> "
2868 "region");
26127932 2869 return false;
726a989a 2870 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2871 case GIMPLE_OMP_TEAMS:
26127932 2872 return true;
d9f4ea18
JJ
2873 case GIMPLE_OMP_TARGET:
2874 if (gimple_omp_target_kind (ctx->stmt)
2875 == GF_OMP_TARGET_KIND_REGION)
2876 return true;
2877 break;
a6fc8e21
JJ
2878 default:
2879 break;
2880 }
2881 break;
d9a6bd32
JJ
2882 case GIMPLE_OMP_TASK:
2883 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2884 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2885 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2886 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2887 {
2888 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2889 error_at (OMP_CLAUSE_LOCATION (c),
2890 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2891 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2892 return false;
2893 }
2894 break;
726a989a 2895 case GIMPLE_OMP_ORDERED:
d9a6bd32
JJ
2896 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2897 c; c = OMP_CLAUSE_CHAIN (c))
2898 {
2899 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2900 {
2901 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
d9f4ea18 2902 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
d9a6bd32
JJ
2903 continue;
2904 }
2905 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2906 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2907 || kind == OMP_CLAUSE_DEPEND_SINK)
2908 {
2909 tree oclause;
2910 /* Look for containing ordered(N) loop. */
2911 if (ctx == NULL
2912 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2913 || (oclause
629b3d75 2914 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
d9a6bd32
JJ
2915 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2916 {
2917 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2918 "%<ordered%> construct with %<depend%> clause "
2919 "must be closely nested inside an %<ordered%> "
2920 "loop");
d9a6bd32
JJ
2921 return false;
2922 }
2923 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2924 {
2925 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2926 "%<ordered%> construct with %<depend%> clause "
2927 "must be closely nested inside a loop with "
2928 "%<ordered%> clause with a parameter");
d9a6bd32
JJ
2929 return false;
2930 }
2931 }
2932 else
2933 {
2934 error_at (OMP_CLAUSE_LOCATION (c),
d9f4ea18
JJ
2935 "invalid depend kind in omp %<ordered%> %<depend%>");
2936 return false;
2937 }
2938 }
2939 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
629b3d75 2940 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
d9f4ea18
JJ
2941 {
2942 /* ordered simd must be closely nested inside of simd region,
2943 and simd region must not encounter constructs other than
2944 ordered simd, therefore ordered simd may be either orphaned,
 2945	     ordered simd, therefore ordered simd may be either orphaned,
 2946	     or ctx->stmt must be simd.  The latter case was already handled
 2947	     earlier.  */
2947 if (ctx != NULL)
2948 {
2949 error_at (gimple_location (stmt),
2950 "%<ordered%> %<simd%> must be closely nested inside "
2951 "%<simd%> region");
d9a6bd32
JJ
2952 return false;
2953 }
2954 }
a6fc8e21 2955 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2956 switch (gimple_code (ctx->stmt))
a6fc8e21 2957 {
726a989a
RB
2958 case GIMPLE_OMP_CRITICAL:
2959 case GIMPLE_OMP_TASK:
d9f4ea18
JJ
2960 case GIMPLE_OMP_ORDERED:
2961 ordered_in_taskloop:
26127932 2962 error_at (gimple_location (stmt),
d9f4ea18
JJ
2963 "%<ordered%> region may not be closely nested inside "
2964 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2965 "%<taskloop%> region");
26127932 2966 return false;
726a989a 2967 case GIMPLE_OMP_FOR:
d9f4ea18
JJ
2968 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2969 goto ordered_in_taskloop;
173670e2
JJ
2970 tree o;
2971 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2972 OMP_CLAUSE_ORDERED);
2973 if (o == NULL)
26127932
JJ
2974 {
2975 error_at (gimple_location (stmt),
d9f4ea18
JJ
2976 "%<ordered%> region must be closely nested inside "
2977 "a loop region with an %<ordered%> clause");
26127932
JJ
2978 return false;
2979 }
173670e2
JJ
2980 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
2981 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
2982 {
2983 error_at (gimple_location (stmt),
2984 "%<ordered%> region without %<depend%> clause may "
2985 "not be closely nested inside a loop region with "
2986 "an %<ordered%> clause with a parameter");
2987 return false;
2988 }
26127932 2989 return true;
d9f4ea18
JJ
2990 case GIMPLE_OMP_TARGET:
2991 if (gimple_omp_target_kind (ctx->stmt)
2992 != GF_OMP_TARGET_KIND_REGION)
2993 break;
2994 /* FALLTHRU */
726a989a 2995 case GIMPLE_OMP_PARALLEL:
d9f4ea18 2996 case GIMPLE_OMP_TEAMS:
acf0174b 2997 error_at (gimple_location (stmt),
d9f4ea18
JJ
2998 "%<ordered%> region must be closely nested inside "
2999 "a loop region with an %<ordered%> clause");
acf0174b 3000 return false;
a6fc8e21
JJ
3001 default:
3002 break;
3003 }
3004 break;
726a989a 3005 case GIMPLE_OMP_CRITICAL:
538dd0b7
DM
3006 {
3007 tree this_stmt_name
3008 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3009 for (; ctx != NULL; ctx = ctx->outer)
3010 if (gomp_critical *other_crit
3011 = dyn_cast <gomp_critical *> (ctx->stmt))
3012 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3013 {
3014 error_at (gimple_location (stmt),
d9f4ea18
JJ
3015 "%<critical%> region may not be nested inside "
3016 "a %<critical%> region with the same name");
538dd0b7
DM
3017 return false;
3018 }
3019 }
a6fc8e21 3020 break;
acf0174b 3021 case GIMPLE_OMP_TEAMS:
28567c40
JJ
3022 if (ctx == NULL)
3023 break;
3024 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3025 || (gimple_omp_target_kind (ctx->stmt)
3026 != GF_OMP_TARGET_KIND_REGION))
acf0174b 3027 {
28567c40
JJ
3028 /* Teams construct can appear either strictly nested inside of
3029 target construct with no intervening stmts, or can be encountered
 3030	     only by an initial task (so must not appear inside any OpenMP
 3031	     construct).  */
acf0174b 3032 error_at (gimple_location (stmt),
28567c40
JJ
3033 "%<teams%> construct must be closely nested inside of "
3034 "%<target%> construct or not nested in any OpenMP "
3035 "construct");
acf0174b
JJ
3036 return false;
3037 }
3038 break;
f014c653 3039 case GIMPLE_OMP_TARGET:
d9a6bd32
JJ
3040 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3041 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3042 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3043 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3044 {
3045 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3046 error_at (OMP_CLAUSE_LOCATION (c),
3047 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3048 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3049 return false;
3050 }
640b7e74 3051 if (is_gimple_omp_offloaded (stmt)
629b3d75 3052 && oacc_get_fn_attrib (cfun->decl) != NULL)
640b7e74
TV
3053 {
3054 error_at (gimple_location (stmt),
3055 "OpenACC region inside of OpenACC routine, nested "
3056 "parallelism not supported yet");
3057 return false;
3058 }
f014c653 3059 for (; ctx != NULL; ctx = ctx->outer)
41dbbb37
TS
3060 {
3061 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3062 {
3063 if (is_gimple_omp (stmt)
3064 && is_gimple_omp_oacc (stmt)
3065 && is_gimple_omp (ctx->stmt))
3066 {
3067 error_at (gimple_location (stmt),
3068 "OpenACC construct inside of non-OpenACC region");
3069 return false;
3070 }
3071 continue;
3072 }
3073
3074 const char *stmt_name, *ctx_stmt_name;
3075 switch (gimple_omp_target_kind (stmt))
3076 {
3077 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3078 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3079 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
d9a6bd32
JJ
3080 case GF_OMP_TARGET_KIND_ENTER_DATA:
3081 stmt_name = "target enter data"; break;
3082 case GF_OMP_TARGET_KIND_EXIT_DATA:
3083 stmt_name = "target exit data"; break;
41dbbb37
TS
3084 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3085 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3086 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3087 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
d9a6bd32
JJ
3088 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3089 stmt_name = "enter/exit data"; break;
37d5ad46
JB
3090 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3091 break;
41dbbb37
TS
3092 default: gcc_unreachable ();
3093 }
3094 switch (gimple_omp_target_kind (ctx->stmt))
3095 {
3096 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3097 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
d9a6bd32
JJ
3098 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3099 ctx_stmt_name = "parallel"; break;
3100 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3101 ctx_stmt_name = "kernels"; break;
41dbbb37 3102 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
37d5ad46
JB
3103 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3104 ctx_stmt_name = "host_data"; break;
41dbbb37
TS
3105 default: gcc_unreachable ();
3106 }
3107
3108 /* OpenACC/OpenMP mismatch? */
3109 if (is_gimple_omp_oacc (stmt)
3110 != is_gimple_omp_oacc (ctx->stmt))
3111 {
3112 error_at (gimple_location (stmt),
d9f4ea18 3113 "%s %qs construct inside of %s %qs region",
41dbbb37
TS
3114 (is_gimple_omp_oacc (stmt)
3115 ? "OpenACC" : "OpenMP"), stmt_name,
3116 (is_gimple_omp_oacc (ctx->stmt)
3117 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3118 return false;
3119 }
3120 if (is_gimple_omp_offloaded (ctx->stmt))
3121 {
3122 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3123 if (is_gimple_omp_oacc (ctx->stmt))
3124 {
3125 error_at (gimple_location (stmt),
d9f4ea18 3126 "%qs construct inside of %qs region",
41dbbb37
TS
3127 stmt_name, ctx_stmt_name);
3128 return false;
3129 }
3130 else
3131 {
41dbbb37 3132 warning_at (gimple_location (stmt), 0,
d9f4ea18 3133 "%qs construct inside of %qs region",
41dbbb37
TS
3134 stmt_name, ctx_stmt_name);
3135 }
3136 }
3137 }
f014c653 3138 break;
a6fc8e21
JJ
3139 default:
3140 break;
3141 }
26127932 3142 return true;
a6fc8e21
JJ
3143}
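/* One hypothetical example of a nesting violation diagnosed above:

       #pragma omp for
       for (i = 0; i < n; i++)
	 {
       #pragma omp barrier   // error: barrier region may not be closely
	   ...               //        nested inside of work-sharing, ... region
	 }

   whereas the same barrier placed directly inside a parallel (or teams)
   region is accepted, since the walk outward stops at GIMPLE_OMP_PARALLEL
   or GIMPLE_OMP_TEAMS and returns true.  */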
3144
3145
726a989a
RB
 3146/* Helper function for scan_omp.
3147
3148 Callback for walk_tree or operators in walk_gimple_stmt used to
41dbbb37 3149 scan for OMP directives in TP. */
953ff289
DN
3150
3151static tree
726a989a 3152scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
953ff289 3153{
d3bfe4de
KG
3154 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3155 omp_context *ctx = (omp_context *) wi->info;
953ff289
DN
3156 tree t = *tp;
3157
726a989a
RB
3158 switch (TREE_CODE (t))
3159 {
3160 case VAR_DECL:
3161 case PARM_DECL:
3162 case LABEL_DECL:
3163 case RESULT_DECL:
3164 if (ctx)
b2b40051
MJ
3165 {
3166 tree repl = remap_decl (t, &ctx->cb);
3167 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3168 *tp = repl;
3169 }
726a989a
RB
3170 break;
3171
3172 default:
3173 if (ctx && TYPE_P (t))
3174 *tp = remap_type (t, &ctx->cb);
3175 else if (!DECL_P (t))
a900ae6b
JJ
3176 {
3177 *walk_subtrees = 1;
3178 if (ctx)
70f34814
RG
3179 {
3180 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3181 if (tem != TREE_TYPE (t))
3182 {
3183 if (TREE_CODE (t) == INTEGER_CST)
8e6cdc90 3184 *tp = wide_int_to_tree (tem, wi::to_wide (t));
70f34814
RG
3185 else
3186 TREE_TYPE (t) = tem;
3187 }
3188 }
a900ae6b 3189 }
726a989a
RB
3190 break;
3191 }
3192
3193 return NULL_TREE;
3194}
3195
c02065fc
AH
3196/* Return true if FNDECL is a setjmp or a longjmp. */
3197
3198static bool
3199setjmp_or_longjmp_p (const_tree fndecl)
3200{
3d78e008
ML
3201 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3202 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
c02065fc
AH
3203 return true;
3204
3205 tree declname = DECL_NAME (fndecl);
3206 if (!declname)
3207 return false;
3208 const char *name = IDENTIFIER_POINTER (declname);
3209 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3210}
3211
726a989a
RB
3212
3213/* Helper function for scan_omp.
3214
41dbbb37 3215 Callback for walk_gimple_stmt used to scan for OMP directives in
726a989a
RB
3216 the current statement in GSI. */
3217
3218static tree
3219scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3220 struct walk_stmt_info *wi)
3221{
355fe088 3222 gimple *stmt = gsi_stmt (*gsi);
726a989a
RB
3223 omp_context *ctx = (omp_context *) wi->info;
3224
3225 if (gimple_has_location (stmt))
3226 input_location = gimple_location (stmt);
953ff289 3227
41dbbb37 3228 /* Check the nesting restrictions. */
acf0174b
JJ
3229 bool remove = false;
3230 if (is_gimple_omp (stmt))
3231 remove = !check_omp_nesting_restrictions (stmt, ctx);
3232 else if (is_gimple_call (stmt))
3233 {
3234 tree fndecl = gimple_call_fndecl (stmt);
c02065fc
AH
3235 if (fndecl)
3236 {
3237 if (setjmp_or_longjmp_p (fndecl)
3238 && ctx
3239 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 3240 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
c02065fc
AH
3241 {
3242 remove = true;
3243 error_at (gimple_location (stmt),
3244 "setjmp/longjmp inside simd construct");
3245 }
3246 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3247 switch (DECL_FUNCTION_CODE (fndecl))
3248 {
3249 case BUILT_IN_GOMP_BARRIER:
3250 case BUILT_IN_GOMP_CANCEL:
3251 case BUILT_IN_GOMP_CANCELLATION_POINT:
3252 case BUILT_IN_GOMP_TASKYIELD:
3253 case BUILT_IN_GOMP_TASKWAIT:
3254 case BUILT_IN_GOMP_TASKGROUP_START:
3255 case BUILT_IN_GOMP_TASKGROUP_END:
3256 remove = !check_omp_nesting_restrictions (stmt, ctx);
3257 break;
3258 default:
3259 break;
3260 }
3261 }
acf0174b
JJ
3262 }
3263 if (remove)
3264 {
3265 stmt = gimple_build_nop ();
3266 gsi_replace (gsi, stmt, false);
a68ab351 3267 }
a6fc8e21 3268
726a989a
RB
3269 *handled_ops_p = true;
3270
3271 switch (gimple_code (stmt))
953ff289 3272 {
726a989a 3273 case GIMPLE_OMP_PARALLEL:
a68ab351 3274 taskreg_nesting_level++;
726a989a 3275 scan_omp_parallel (gsi, ctx);
a68ab351
JJ
3276 taskreg_nesting_level--;
3277 break;
3278
726a989a 3279 case GIMPLE_OMP_TASK:
a68ab351 3280 taskreg_nesting_level++;
726a989a 3281 scan_omp_task (gsi, ctx);
a68ab351 3282 taskreg_nesting_level--;
953ff289
DN
3283 break;
3284
726a989a 3285 case GIMPLE_OMP_FOR:
6c7509bc
JJ
3286 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3287 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3288 && omp_maybe_offloaded_ctx (ctx)
3289 && omp_max_simt_vf ())
3290 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3291 else
3292 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
953ff289
DN
3293 break;
3294
726a989a 3295 case GIMPLE_OMP_SECTIONS:
538dd0b7 3296 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
953ff289
DN
3297 break;
3298
726a989a 3299 case GIMPLE_OMP_SINGLE:
538dd0b7 3300 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
953ff289
DN
3301 break;
3302
726a989a
RB
3303 case GIMPLE_OMP_SECTION:
3304 case GIMPLE_OMP_MASTER:
3305 case GIMPLE_OMP_ORDERED:
3306 case GIMPLE_OMP_CRITICAL:
b2b40051 3307 case GIMPLE_OMP_GRID_BODY:
726a989a 3308 ctx = new_omp_context (stmt, ctx);
26127932 3309 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
3310 break;
3311
28567c40
JJ
3312 case GIMPLE_OMP_TASKGROUP:
3313 ctx = new_omp_context (stmt, ctx);
3314 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3315 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3316 break;
3317
acf0174b 3318 case GIMPLE_OMP_TARGET:
538dd0b7 3319 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
acf0174b
JJ
3320 break;
3321
3322 case GIMPLE_OMP_TEAMS:
28567c40
JJ
3323 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3324 {
3325 taskreg_nesting_level++;
3326 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3327 taskreg_nesting_level--;
3328 }
3329 else
3330 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
acf0174b
JJ
3331 break;
3332
726a989a 3333 case GIMPLE_BIND:
953ff289
DN
3334 {
3335 tree var;
953ff289 3336
726a989a
RB
3337 *handled_ops_p = false;
3338 if (ctx)
538dd0b7
DM
3339 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3340 var ;
3341 var = DECL_CHAIN (var))
726a989a 3342 insert_decl_map (&ctx->cb, var, var);
953ff289
DN
3343 }
3344 break;
953ff289 3345 default:
726a989a 3346 *handled_ops_p = false;
953ff289
DN
3347 break;
3348 }
3349
3350 return NULL_TREE;
3351}
3352
3353
726a989a 3354/* Scan all the statements starting at the current statement. CTX
41dbbb37 3355 contains context information about the OMP directives and
726a989a 3356 clauses found during the scan. */
953ff289
DN
3357
3358static void
26127932 3359scan_omp (gimple_seq *body_p, omp_context *ctx)
953ff289
DN
3360{
3361 location_t saved_location;
3362 struct walk_stmt_info wi;
3363
3364 memset (&wi, 0, sizeof (wi));
953ff289 3365 wi.info = ctx;
953ff289
DN
3366 wi.want_locations = true;
3367
3368 saved_location = input_location;
26127932 3369 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
953ff289
DN
3370 input_location = saved_location;
3371}
3372\f
3373/* Re-gimplification and code generation routines. */
3374
6724f8a6
JJ
3375/* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3376 of BIND if in a method. */
3377
3378static void
3379maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3380{
3381 if (DECL_ARGUMENTS (current_function_decl)
3382 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3383 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3384 == POINTER_TYPE))
3385 {
3386 tree vars = gimple_bind_vars (bind);
3387 for (tree *pvar = &vars; *pvar; )
3388 if (omp_member_access_dummy_var (*pvar))
3389 *pvar = DECL_CHAIN (*pvar);
3390 else
3391 pvar = &DECL_CHAIN (*pvar);
3392 gimple_bind_set_vars (bind, vars);
3393 }
3394}
3395
3396/* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3397 block and its subblocks. */
3398
3399static void
3400remove_member_access_dummy_vars (tree block)
3401{
3402 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3403 if (omp_member_access_dummy_var (*pvar))
3404 *pvar = DECL_CHAIN (*pvar);
3405 else
3406 pvar = &DECL_CHAIN (*pvar);
3407
3408 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3409 remove_member_access_dummy_vars (block);
3410}
3411
953ff289
DN
3412/* If a context was created for STMT when it was scanned, return it. */
3413
3414static omp_context *
355fe088 3415maybe_lookup_ctx (gimple *stmt)
953ff289
DN
3416{
3417 splay_tree_node n;
3418 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3419 return n ? (omp_context *) n->value : NULL;
3420}
3421
50674e96
DN
3422
3423/* Find the mapping for DECL in CTX or the immediately enclosing
3424 context that has a mapping for DECL.
3425
3426 If CTX is a nested parallel directive, we may have to use the decl
3427 mappings created in CTX's parent context. Suppose that we have the
3428 following parallel nesting (variable UIDs shown for clarity):

3429
3430 iD.1562 = 0;
3431 #omp parallel shared(iD.1562) -> outer parallel
3432 iD.1562 = iD.1562 + 1;
3433
3434 #omp parallel shared (iD.1562) -> inner parallel
3435 iD.1562 = iD.1562 - 1;
3436
3437 Each parallel structure will create a distinct .omp_data_s structure
3438 for copying iD.1562 in/out of the directive:
3439
3440 outer parallel .omp_data_s.1.i -> iD.1562
3441 inner parallel .omp_data_s.2.i -> iD.1562
3442
3443 A shared variable mapping will produce a copy-out operation before
3444 the parallel directive and a copy-in operation after it. So, in
3445 this case we would have:
3446
3447 iD.1562 = 0;
3448 .omp_data_o.1.i = iD.1562;
3449 #omp parallel shared(iD.1562) -> outer parallel
3450 .omp_data_i.1 = &.omp_data_o.1
3451 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3452
3453 .omp_data_o.2.i = iD.1562; -> **
3454 #omp parallel shared(iD.1562) -> inner parallel
3455 .omp_data_i.2 = &.omp_data_o.2
3456 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3457
3458
3459 ** This is a problem. The symbol iD.1562 cannot be referenced
3460 inside the body of the outer parallel region. But since we are
3461 emitting this copy operation while expanding the inner parallel
3462 directive, we need to access the CTX structure of the outer
3463 parallel directive to get the correct mapping:
3464
3465 .omp_data_o.2.i = .omp_data_i.1->i
3466
3467 Since there may be other workshare or parallel directives enclosing
3468 the parallel directive, it may be necessary to walk up the context
3469 parent chain. This is not a problem in general because nested
3470 parallelism happens only rarely. */
3471
3472static tree
3473lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3474{
3475 tree t;
3476 omp_context *up;
3477
50674e96
DN
3478 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3479 t = maybe_lookup_decl (decl, up);
3480
d2dda7fe 3481 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
50674e96 3482
64964499 3483 return t ? t : decl;
50674e96
DN
3484}
3485
3486
8ca5b2a2
JJ
3487/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3488 in outer contexts. */
3489
3490static tree
3491maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3492{
3493 tree t = NULL;
3494 omp_context *up;
3495
d2dda7fe
JJ
3496 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3497 t = maybe_lookup_decl (decl, up);
8ca5b2a2
JJ
3498
3499 return t ? t : decl;
3500}
3501
3502
f2c9f71d 3503/* Construct the initialization value for reduction operation OP. */
953ff289
DN
3504
3505tree
f2c9f71d 3506omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
953ff289 3507{
f2c9f71d 3508 switch (op)
953ff289
DN
3509 {
3510 case PLUS_EXPR:
3511 case MINUS_EXPR:
3512 case BIT_IOR_EXPR:
3513 case BIT_XOR_EXPR:
3514 case TRUTH_OR_EXPR:
3515 case TRUTH_ORIF_EXPR:
3516 case TRUTH_XOR_EXPR:
3517 case NE_EXPR:
e8160c9a 3518 return build_zero_cst (type);
953ff289
DN
3519
3520 case MULT_EXPR:
3521 case TRUTH_AND_EXPR:
3522 case TRUTH_ANDIF_EXPR:
3523 case EQ_EXPR:
db3927fb 3524 return fold_convert_loc (loc, type, integer_one_node);
953ff289
DN
3525
3526 case BIT_AND_EXPR:
db3927fb 3527 return fold_convert_loc (loc, type, integer_minus_one_node);
953ff289
DN
3528
3529 case MAX_EXPR:
3530 if (SCALAR_FLOAT_TYPE_P (type))
3531 {
3532 REAL_VALUE_TYPE max, min;
3d3dbadd 3533 if (HONOR_INFINITIES (type))
953ff289
DN
3534 {
3535 real_inf (&max);
3536 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3537 }
3538 else
3539 real_maxval (&min, 1, TYPE_MODE (type));
3540 return build_real (type, min);
3541 }
3ff2d74e
TV
3542 else if (POINTER_TYPE_P (type))
3543 {
3544 wide_int min
3545 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3546 return wide_int_to_tree (type, min);
3547 }
953ff289
DN
3548 else
3549 {
3550 gcc_assert (INTEGRAL_TYPE_P (type));
3551 return TYPE_MIN_VALUE (type);
3552 }
3553
3554 case MIN_EXPR:
3555 if (SCALAR_FLOAT_TYPE_P (type))
3556 {
3557 REAL_VALUE_TYPE max;
3d3dbadd 3558 if (HONOR_INFINITIES (type))
953ff289
DN
3559 real_inf (&max);
3560 else
3561 real_maxval (&max, 0, TYPE_MODE (type));
3562 return build_real (type, max);
3563 }
3ff2d74e
TV
3564 else if (POINTER_TYPE_P (type))
3565 {
3566 wide_int max
3567 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3568 return wide_int_to_tree (type, max);
3569 }
953ff289
DN
3570 else
3571 {
3572 gcc_assert (INTEGRAL_TYPE_P (type));
3573 return TYPE_MAX_VALUE (type);
3574 }
3575
3576 default:
3577 gcc_unreachable ();
3578 }
3579}
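/* For example, the function above yields 0 for PLUS_EXPR and BIT_IOR_EXPR,
   1 for MULT_EXPR, -1 (all bits set) for BIT_AND_EXPR, and for MAX_EXPR
   (resp. MIN_EXPR) the minimum (resp. maximum) representable value, or
   -inf (resp. +inf) when the type honors infinities: in each case the
   identity element of the reduction.  */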
3580
f2c9f71d
TS
3581/* Construct the initialization value for reduction CLAUSE. */
3582
3583tree
3584omp_reduction_init (tree clause, tree type)
3585{
3586 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3587 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3588}
3589
acf0174b
JJ
3590/* Return alignment to be assumed for var in CLAUSE, which should be
3591 OMP_CLAUSE_ALIGNED. */
3592
3593static tree
3594omp_clause_aligned_alignment (tree clause)
3595{
3596 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3597 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3598
3599 /* Otherwise return implementation defined alignment. */
3600 unsigned int al = 1;
16d22000 3601 opt_scalar_mode mode_iter;
86e36728
RS
3602 auto_vector_sizes sizes;
3603 targetm.vectorize.autovectorize_vector_sizes (&sizes);
3604 poly_uint64 vs = 0;
3605 for (unsigned int i = 0; i < sizes.length (); ++i)
3606 vs = ordered_max (vs, sizes[i]);
acf0174b
JJ
3607 static enum mode_class classes[]
3608 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3609 for (int i = 0; i < 4; i += 2)
16d22000
RS
3610 /* The for loop above dictates that we only walk through scalar classes. */
3611 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
acf0174b 3612 {
16d22000
RS
3613 scalar_mode mode = mode_iter.require ();
3614 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
acf0174b
JJ
3615 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3616 continue;
86e36728
RS
3617 while (maybe_ne (vs, 0U)
3618 && known_lt (GET_MODE_SIZE (vmode), vs)
490d0f6c
RS
3619 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3620 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
01914336 3621
acf0174b
JJ
3622 tree type = lang_hooks.types.type_for_mode (mode, 1);
3623 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3624 continue;
cf098191
RS
3625 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3626 GET_MODE_SIZE (mode));
86e36728 3627 type = build_vector_type (type, nelts);
acf0174b
JJ
3628 if (TYPE_MODE (type) != vmode)
3629 continue;
3630 if (TYPE_ALIGN_UNIT (type) > al)
3631 al = TYPE_ALIGN_UNIT (type);
3632 }
3633 return build_int_cst (integer_type_node, al);
3634}
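/* Thus, absent an explicit alignment on the clause, the value returned is
   the largest TYPE_ALIGN_UNIT among the vector types built from the
   target's preferred SIMD modes for scalar integer and floating-point
   modes; e.g. a target whose widest preferred vectors are 256 bits wide
   would typically yield 32.  */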
3635
6943af07
AM
3636
3637/* This structure is part of the interface between lower_rec_simd_input_clauses
3638 and lower_rec_input_clauses. */
3639
3640struct omplow_simd_context {
9d2f08ab 3641 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
6943af07
AM
3642 tree idx;
3643 tree lane;
0c6b03b5
AM
3644 vec<tree, va_heap> simt_eargs;
3645 gimple_seq simt_dlist;
9d2f08ab 3646 poly_uint64_pod max_vf;
6943af07
AM
3647 bool is_simt;
3648};
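/* Roughly: MAX_VF caps the number of per-lane copies (1 disables the SIMD
   privatization below), IDX and LANE index the "omp simd array" copies,
   and for the SIMT case SIMT_EARGS collects the addresses of privatized
   variables while SIMT_DLIST collects their clobbering stores.  */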
3649
74bf76ed
JJ
3650/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3651 privatization. */
3652
3653static bool
6943af07
AM
3654lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3655 omplow_simd_context *sctx, tree &ivar, tree &lvar)
74bf76ed 3656{
9d2f08ab 3657 if (known_eq (sctx->max_vf, 0U))
74bf76ed 3658 {
6943af07 3659 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
9d2f08ab 3660 if (maybe_gt (sctx->max_vf, 1U))
74bf76ed 3661 {
629b3d75 3662 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
74bf76ed 3663 OMP_CLAUSE_SAFELEN);
9d2f08ab
RS
3664 if (c)
3665 {
3666 poly_uint64 safe_len;
3667 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3668 || maybe_lt (safe_len, 1U))
3669 sctx->max_vf = 1;
3670 else
3671 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3672 }
74bf76ed 3673 }
9d2f08ab 3674 if (maybe_gt (sctx->max_vf, 1U))
74bf76ed 3675 {
6943af07
AM
3676 sctx->idx = create_tmp_var (unsigned_type_node);
3677 sctx->lane = create_tmp_var (unsigned_type_node);
74bf76ed
JJ
3678 }
3679 }
9d2f08ab 3680 if (known_eq (sctx->max_vf, 1U))
74bf76ed
JJ
3681 return false;
3682
0c6b03b5
AM
3683 if (sctx->is_simt)
3684 {
3685 if (is_gimple_reg (new_var))
3686 {
3687 ivar = lvar = new_var;
3688 return true;
3689 }
3690 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3691 ivar = lvar = create_tmp_var (type);
3692 TREE_ADDRESSABLE (ivar) = 1;
3693 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3694 NULL, DECL_ATTRIBUTES (ivar));
3695 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3696 tree clobber = build_constructor (type, NULL);
3697 TREE_THIS_VOLATILE (clobber) = 1;
3698 gimple *g = gimple_build_assign (ivar, clobber);
3699 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3700 }
3701 else
3702 {
3703 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3704 tree avar = create_tmp_var_raw (atype);
3705 if (TREE_ADDRESSABLE (new_var))
3706 TREE_ADDRESSABLE (avar) = 1;
3707 DECL_ATTRIBUTES (avar)
3708 = tree_cons (get_identifier ("omp simd array"), NULL,
3709 DECL_ATTRIBUTES (avar));
3710 gimple_add_tmp_var (avar);
3711 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3712 NULL_TREE, NULL_TREE);
3713 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3714 NULL_TREE, NULL_TREE);
3715 }
acf0174b
JJ
3716 if (DECL_P (new_var))
3717 {
3718 SET_DECL_VALUE_EXPR (new_var, lvar);
3719 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3720 }
74bf76ed
JJ
3721 return true;
3722}
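/* Sketch of the non-SIMT path above, with D.arr a hypothetical name: a
   privatized scalar D gets a backing "omp simd array" D.arr[MAX_VF];
   LVAR becomes D.arr[lane], the copy the construct body itself references
   via DECL_VALUE_EXPR, while IVAR becomes D.arr[idx], the form the caller
   uses when iterating over all lanes (initialization, destruction and
   reduction merging).  */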
3723
decaaec8
JJ
3724/* Helper function of lower_rec_input_clauses. For a reference
3725 in a simd reduction, add the underlying variable it will reference. */
3726
3727static void
3728handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3729{
3730 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3731 if (TREE_CONSTANT (z))
3732 {
d9a6bd32
JJ
3733 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3734 get_name (new_vard));
decaaec8
JJ
3735 gimple_add_tmp_var (z);
3736 TREE_ADDRESSABLE (z) = 1;
3737 z = build_fold_addr_expr_loc (loc, z);
3738 gimplify_assign (new_vard, z, ilist);
3739 }
3740}
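/* I.e. when the pointed-to size is constant, the reference NEW_VARD gets a
   freshly created addressable backing object and is initialized with its
   address; otherwise nothing is emitted here.  */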
3741
28567c40
JJ
3742/* Helper function for lower_rec_input_clauses.  Emit into the ILIST
3743 sequence the code that computes (type) (tskred_temp[idx]).  */
3744
3745static tree
3746task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3747 unsigned idx)
3748{
3749 unsigned HOST_WIDE_INT sz
3750 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3751 tree r = build2 (MEM_REF, pointer_sized_int_node,
3752 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3753 idx * sz));
3754 tree v = create_tmp_var (pointer_sized_int_node);
3755 gimple *g = gimple_build_assign (v, r);
3756 gimple_seq_add_stmt (ilist, g);
3757 if (!useless_type_conversion_p (type, pointer_sized_int_node))
3758 {
3759 v = create_tmp_var (type);
3760 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3761 gimple_seq_add_stmt (ilist, g);
3762 }
3763 return v;
3764}
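/* The emitted sequence is essentially (V1, V2 being fresh temporaries):
     V1 = MEM <pointer-sized int> [tskred_temp + IDX * sizeof (void *)];
     V2 = (TYPE) V1;	only when TYPE needs a conversion
   and the temporary holding the final value is returned.  */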
3765
953ff289
DN
3766/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3767 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3768 private variables. Initialization statements go in ILIST, while calls
3769 to destructors go in DLIST. */
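/* For instance, a firstprivate (x) clause results in a copy-construction of
   the new private x from the original x being appended to ILIST and, for
   types with a non-trivial destructor, the matching destructor call being
   appended to DLIST.  */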
3770
3771static void
726a989a 3772lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
acf0174b 3773 omp_context *ctx, struct omp_for_data *fd)
953ff289 3774{
5039610b 3775 tree c, dtor, copyin_seq, x, ptr;
953ff289 3776 bool copyin_by_ref = false;
8ca5b2a2 3777 bool lastprivate_firstprivate = false;
acf0174b 3778 bool reduction_omp_orig_ref = false;
953ff289 3779 int pass;
74bf76ed 3780 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 3781 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
6943af07 3782 omplow_simd_context sctx = omplow_simd_context ();
0c6b03b5
AM
3783 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3784 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
9669b00b 3785 gimple_seq llist[3] = { };
953ff289 3786
953ff289 3787 copyin_seq = NULL;
6943af07 3788 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
953ff289 3789
74bf76ed
JJ
3790 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3791 with data sharing clauses referencing variable sized vars. That
3792 is unnecessarily hard to support and very unlikely to result in
3793 vectorized code anyway. */
3794 if (is_simd)
3795 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3796 switch (OMP_CLAUSE_CODE (c))
3797 {
da6f124d
JJ
3798 case OMP_CLAUSE_LINEAR:
3799 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6943af07 3800 sctx.max_vf = 1;
da6f124d 3801 /* FALLTHRU */
74bf76ed
JJ
3802 case OMP_CLAUSE_PRIVATE:
3803 case OMP_CLAUSE_FIRSTPRIVATE:
3804 case OMP_CLAUSE_LASTPRIVATE:
74bf76ed 3805 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
6943af07 3806 sctx.max_vf = 1;
74bf76ed 3807 break;
d9a6bd32 3808 case OMP_CLAUSE_REDUCTION:
28567c40 3809 case OMP_CLAUSE_IN_REDUCTION:
d9a6bd32
JJ
3810 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3811 || is_variable_sized (OMP_CLAUSE_DECL (c)))
6943af07 3812 sctx.max_vf = 1;
d9a6bd32 3813 break;
74bf76ed
JJ
3814 default:
3815 continue;
3816 }
3817
0c6b03b5 3818 /* Add a placeholder for simduid. */
9d2f08ab 3819 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
0c6b03b5
AM
3820 sctx.simt_eargs.safe_push (NULL_TREE);
3821
28567c40
JJ
3822 unsigned task_reduction_cnt = 0;
3823 unsigned task_reduction_cntorig = 0;
3824 unsigned task_reduction_cnt_full = 0;
3825 unsigned task_reduction_cntorig_full = 0;
3826 unsigned task_reduction_other_cnt = 0;
3827 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
3828 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
953ff289
DN
3829 /* Do all the fixed sized types in the first pass, and the variable sized
3830 types in the second pass. This makes sure that the scalar arguments to
b8698a0f 3831 the variable sized types are processed before we use them in the
28567c40
JJ
3832 variable sized operations. For task reductions we use 4 passes, in the
3833 first two we ignore them, in the third one gather arguments for
3834 GOMP_task_reduction_remap call and in the last pass actually handle
3835 the task reductions. */
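 /* That is: pass 0 handles the fixed-size clauses, pass 1 the variable-sized
 ones, and, when task reductions are present, pass 2 gathers the addresses
 passed to GOMP_task_reduction_remap while pass 3 emits the actual
 per-thread task-reduction setup.  */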
3836 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
3837 ? 4 : 2); ++pass)
3838 {
3839 if (pass == 2 && task_reduction_cnt)
3840 {
3841 tskred_atype
3842 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
3843 + task_reduction_cntorig);
3844 tskred_avar = create_tmp_var_raw (tskred_atype);
3845 gimple_add_tmp_var (tskred_avar);
3846 TREE_ADDRESSABLE (tskred_avar) = 1;
3847 task_reduction_cnt_full = task_reduction_cnt;
3848 task_reduction_cntorig_full = task_reduction_cntorig;
3849 }
3850 else if (pass == 3 && task_reduction_cnt)
3851 {
3852 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
3853 gimple *g
3854 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
3855 size_int (task_reduction_cntorig),
3856 build_fold_addr_expr (tskred_avar));
3857 gimple_seq_add_stmt (ilist, g);
3858 }
3859 if (pass == 3 && task_reduction_other_cnt)
3860 {
3861 /* For reduction clauses, build
3862 tskred_base = (void *) tskred_temp[2]
3863 + omp_get_thread_num () * tskred_temp[1]
3864 or if tskred_temp[1] is known to be constant, that constant
3865 directly. This is the start of the private reduction copy block
3866 for the current thread. */
3867 tree v = create_tmp_var (integer_type_node);
3868 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
3869 gimple *g = gimple_build_call (x, 0);
3870 gimple_call_set_lhs (g, v);
3871 gimple_seq_add_stmt (ilist, g);
3872 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
3873 tskred_temp = OMP_CLAUSE_DECL (c);
3874 if (is_taskreg_ctx (ctx))
3875 tskred_temp = lookup_decl (tskred_temp, ctx);
3876 tree v2 = create_tmp_var (sizetype);
3877 g = gimple_build_assign (v2, NOP_EXPR, v);
3878 gimple_seq_add_stmt (ilist, g);
3879 if (ctx->task_reductions[0])
3880 v = fold_convert (sizetype, ctx->task_reductions[0]);
3881 else
3882 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
3883 tree v3 = create_tmp_var (sizetype);
3884 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
3885 gimple_seq_add_stmt (ilist, g);
3886 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
3887 tskred_base = create_tmp_var (ptr_type_node);
3888 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
3889 gimple_seq_add_stmt (ilist, g);
3890 }
3891 task_reduction_cnt = 0;
3892 task_reduction_cntorig = 0;
3893 task_reduction_other_cnt = 0;
953ff289
DN
3894 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3895 {
aaf46ef9 3896 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
953ff289
DN
3897 tree var, new_var;
3898 bool by_ref;
db3927fb 3899 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
28567c40
JJ
3900 bool task_reduction_p = false;
3901 bool task_reduction_needs_orig_p = false;
3902 tree cond = NULL_TREE;
953ff289
DN
3903
3904 switch (c_kind)
3905 {
3906 case OMP_CLAUSE_PRIVATE:
3907 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3908 continue;
3909 break;
3910 case OMP_CLAUSE_SHARED:
28567c40
JJ
3911 /* Ignore shared directives in teams construct inside
3912 of target construct. */
3913 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
3914 && !is_host_teams_ctx (ctx))
acf0174b 3915 continue;
8ca5b2a2
JJ
3916 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3917 {
d9a6bd32
JJ
3918 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3919 || is_global_var (OMP_CLAUSE_DECL (c)));
8ca5b2a2
JJ
3920 continue;
3921 }
953ff289 3922 case OMP_CLAUSE_FIRSTPRIVATE:
953ff289 3923 case OMP_CLAUSE_COPYIN:
d9a6bd32 3924 break;
acf0174b 3925 case OMP_CLAUSE_LINEAR:
d9a6bd32
JJ
3926 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3927 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3928 lastprivate_firstprivate = true;
acf0174b 3929 break;
953ff289 3930 case OMP_CLAUSE_REDUCTION:
28567c40
JJ
3931 case OMP_CLAUSE_IN_REDUCTION:
3932 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
3933 {
3934 task_reduction_p = true;
3935 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
3936 {
3937 task_reduction_other_cnt++;
3938 if (pass == 2)
3939 continue;
3940 }
3941 else
3942 task_reduction_cnt++;
3943 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3944 {
3945 var = OMP_CLAUSE_DECL (c);
3946 /* If var is a global variable that isn't privatized
3947 in outer contexts, we don't need to look up the
3948 original address; it is always the address of the
3949 global variable itself. */
3950 if (!DECL_P (var)
3951 || omp_is_reference (var)
3952 || !is_global_var
3953 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
3954 {
3955 task_reduction_needs_orig_p = true;
3956 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
3957 task_reduction_cntorig++;
3958 }
3959 }
3960 }
3961 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
acf0174b 3962 reduction_omp_orig_ref = true;
953ff289 3963 break;
28567c40
JJ
3964 case OMP_CLAUSE__REDUCTEMP_:
3965 if (!is_taskreg_ctx (ctx))
3966 continue;
3967 /* FALLTHRU */
acf0174b 3968 case OMP_CLAUSE__LOOPTEMP_:
28567c40
JJ
3969 /* Handle _looptemp_/_reductemp_ clauses only on
3970 parallel/task. */
acf0174b
JJ
3971 if (fd)
3972 continue;
74bf76ed 3973 break;
077b0dfb 3974 case OMP_CLAUSE_LASTPRIVATE:
8ca5b2a2
JJ
3975 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3976 {
3977 lastprivate_firstprivate = true;
d9a6bd32 3978 if (pass != 0 || is_taskloop_ctx (ctx))
8ca5b2a2
JJ
3979 continue;
3980 }
92d28cbb
JJ
3981 /* Even without corresponding firstprivate, if
3982 decl is Fortran allocatable, it needs outer var
3983 reference. */
3984 else if (pass == 0
3985 && lang_hooks.decls.omp_private_outer_ref
3986 (OMP_CLAUSE_DECL (c)))
3987 lastprivate_firstprivate = true;
077b0dfb 3988 break;
acf0174b 3989 case OMP_CLAUSE_ALIGNED:
28567c40 3990 if (pass != 1)
acf0174b
JJ
3991 continue;
3992 var = OMP_CLAUSE_DECL (c);
3993 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3994 && !is_global_var (var))
3995 {
3996 new_var = maybe_lookup_decl (var, ctx);
3997 if (new_var == NULL_TREE)
3998 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3999 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
37e373c2
JJ
4000 tree alarg = omp_clause_aligned_alignment (c);
4001 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4002 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
acf0174b
JJ
4003 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4004 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4005 gimplify_and_add (x, ilist);
4006 }
4007 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4008 && is_global_var (var))
4009 {
4010 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4011 new_var = lookup_decl (var, ctx);
4012 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4013 t = build_fold_addr_expr_loc (clause_loc, t);
4014 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
37e373c2
JJ
4015 tree alarg = omp_clause_aligned_alignment (c);
4016 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4017 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
acf0174b 4018 t = fold_convert_loc (clause_loc, ptype, t);
b731b390 4019 x = create_tmp_var (ptype);
acf0174b
JJ
4020 t = build2 (MODIFY_EXPR, ptype, x, t);
4021 gimplify_and_add (t, ilist);
4022 t = build_simple_mem_ref_loc (clause_loc, x);
4023 SET_DECL_VALUE_EXPR (new_var, t);
4024 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4025 }
4026 continue;
953ff289
DN
4027 default:
4028 continue;
4029 }
4030
28567c40
JJ
4031 if (task_reduction_p != (pass >= 2))
4032 continue;
4033
953ff289 4034 new_var = var = OMP_CLAUSE_DECL (c);
28567c40
JJ
4035 if ((c_kind == OMP_CLAUSE_REDUCTION
4036 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4037 && TREE_CODE (var) == MEM_REF)
d9a6bd32
JJ
4038 {
4039 var = TREE_OPERAND (var, 0);
e01d41e5
JJ
4040 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4041 var = TREE_OPERAND (var, 0);
d9a6bd32
JJ
4042 if (TREE_CODE (var) == INDIRECT_REF
4043 || TREE_CODE (var) == ADDR_EXPR)
4044 var = TREE_OPERAND (var, 0);
4045 if (is_variable_sized (var))
4046 {
4047 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4048 var = DECL_VALUE_EXPR (var);
4049 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4050 var = TREE_OPERAND (var, 0);
4051 gcc_assert (DECL_P (var));
4052 }
4053 new_var = var;
4054 }
953ff289
DN
4055 if (c_kind != OMP_CLAUSE_COPYIN)
4056 new_var = lookup_decl (var, ctx);
4057
4058 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4059 {
4060 if (pass != 0)
4061 continue;
4062 }
d9a6bd32 4063 /* C/C++ array section reductions. */
28567c40
JJ
4064 else if ((c_kind == OMP_CLAUSE_REDUCTION
4065 || c_kind == OMP_CLAUSE_IN_REDUCTION)
d9a6bd32 4066 && var != OMP_CLAUSE_DECL (c))
953ff289
DN
4067 {
4068 if (pass == 0)
4069 continue;
4070
e01d41e5 4071 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
d9a6bd32 4072 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
28567c40 4073
e01d41e5
JJ
4074 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4075 {
4076 tree b = TREE_OPERAND (orig_var, 1);
4077 b = maybe_lookup_decl (b, ctx);
4078 if (b == NULL)
4079 {
4080 b = TREE_OPERAND (orig_var, 1);
4081 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4082 }
4083 if (integer_zerop (bias))
4084 bias = b;
4085 else
4086 {
4087 bias = fold_convert_loc (clause_loc,
4088 TREE_TYPE (b), bias);
4089 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4090 TREE_TYPE (b), b, bias);
4091 }
4092 orig_var = TREE_OPERAND (orig_var, 0);
4093 }
28567c40
JJ
4094 if (pass == 2)
4095 {
4096 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4097 if (is_global_var (out)
4098 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4099 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4100 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4101 != POINTER_TYPE)))
4102 x = var;
4103 else
4104 {
4105 bool by_ref = use_pointer_for_field (var, NULL);
4106 x = build_receiver_ref (var, by_ref, ctx);
4107 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4108 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4109 == POINTER_TYPE))
4110 x = build_fold_addr_expr (x);
4111 }
4112 if (TREE_CODE (orig_var) == INDIRECT_REF)
4113 x = build_simple_mem_ref (x);
4114 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4115 {
4116 if (var == TREE_OPERAND (orig_var, 0))
4117 x = build_fold_addr_expr (x);
4118 }
4119 bias = fold_convert (sizetype, bias);
4120 x = fold_convert (ptr_type_node, x);
4121 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4122 TREE_TYPE (x), x, bias);
4123 unsigned cnt = task_reduction_cnt - 1;
4124 if (!task_reduction_needs_orig_p)
4125 cnt += (task_reduction_cntorig_full
4126 - task_reduction_cntorig);
4127 else
4128 cnt = task_reduction_cntorig - 1;
4129 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4130 size_int (cnt), NULL_TREE, NULL_TREE);
4131 gimplify_assign (r, x, ilist);
4132 continue;
4133 }
4134
d9a6bd32
JJ
4135 if (TREE_CODE (orig_var) == INDIRECT_REF
4136 || TREE_CODE (orig_var) == ADDR_EXPR)
4137 orig_var = TREE_OPERAND (orig_var, 0);
4138 tree d = OMP_CLAUSE_DECL (c);
4139 tree type = TREE_TYPE (d);
4140 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4141 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4142 const char *name = get_name (orig_var);
28567c40
JJ
4143 if (pass == 3)
4144 {
4145 tree xv = create_tmp_var (ptr_type_node);
4146 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4147 {
4148 unsigned cnt = task_reduction_cnt - 1;
4149 if (!task_reduction_needs_orig_p)
4150 cnt += (task_reduction_cntorig_full
4151 - task_reduction_cntorig);
4152 else
4153 cnt = task_reduction_cntorig - 1;
4154 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4155 size_int (cnt), NULL_TREE, NULL_TREE);
4156
4157 gimple *g = gimple_build_assign (xv, x);
4158 gimple_seq_add_stmt (ilist, g);
4159 }
4160 else
4161 {
4162 unsigned int idx = *ctx->task_reduction_map->get (c);
4163 tree off;
4164 if (ctx->task_reductions[1 + idx])
4165 off = fold_convert (sizetype,
4166 ctx->task_reductions[1 + idx]);
4167 else
4168 off = task_reduction_read (ilist, tskred_temp, sizetype,
4169 7 + 3 * idx + 1);
4170 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4171 tskred_base, off);
4172 gimple_seq_add_stmt (ilist, g);
4173 }
4174 x = fold_convert (build_pointer_type (boolean_type_node),
4175 xv);
4176 if (TREE_CONSTANT (v))
4177 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4178 TYPE_SIZE_UNIT (type));
4179 else
4180 {
4181 tree t = maybe_lookup_decl (v, ctx);
4182 if (t)
4183 v = t;
4184 else
4185 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4186 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4187 fb_rvalue);
4188 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4189 TREE_TYPE (v), v,
4190 build_int_cst (TREE_TYPE (v), 1));
4191 t = fold_build2_loc (clause_loc, MULT_EXPR,
4192 TREE_TYPE (v), t,
4193 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4194 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4195 }
4196 cond = create_tmp_var (TREE_TYPE (x));
4197 gimplify_assign (cond, x, ilist);
4198 x = xv;
4199 }
4200 else if (TREE_CONSTANT (v))
a68ab351 4201 {
d9a6bd32
JJ
4202 x = create_tmp_var_raw (type, name);
4203 gimple_add_tmp_var (x);
4204 TREE_ADDRESSABLE (x) = 1;
4205 x = build_fold_addr_expr_loc (clause_loc, x);
4206 }
4207 else
4208 {
4209 tree atmp
4210 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4211 tree t = maybe_lookup_decl (v, ctx);
4212 if (t)
4213 v = t;
4214 else
4215 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4216 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4217 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4218 TREE_TYPE (v), v,
4219 build_int_cst (TREE_TYPE (v), 1));
4220 t = fold_build2_loc (clause_loc, MULT_EXPR,
4221 TREE_TYPE (v), t,
4222 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4223 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4224 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4225 }
4226
4227 tree ptype = build_pointer_type (TREE_TYPE (type));
4228 x = fold_convert_loc (clause_loc, ptype, x);
4229 tree y = create_tmp_var (ptype, name);
4230 gimplify_assign (y, x, ilist);
4231 x = y;
e01d41e5
JJ
4232 tree yb = y;
4233
4234 if (!integer_zerop (bias))
4235 {
48a78aee
JJ
4236 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4237 bias);
4238 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4239 x);
4240 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4241 pointer_sized_int_node, yb, bias);
4242 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
e01d41e5
JJ
4243 yb = create_tmp_var (ptype, name);
4244 gimplify_assign (yb, x, ilist);
4245 x = yb;
4246 }
4247
4248 d = TREE_OPERAND (d, 0);
4249 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4250 d = TREE_OPERAND (d, 0);
4251 if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32
JJ
4252 {
4253 if (orig_var != var)
4254 {
4255 gcc_assert (is_variable_sized (orig_var));
4256 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4257 x);
4258 gimplify_assign (new_var, x, ilist);
4259 tree new_orig_var = lookup_decl (orig_var, ctx);
4260 tree t = build_fold_indirect_ref (new_var);
4261 DECL_IGNORED_P (new_var) = 0;
28567c40 4262 TREE_THIS_NOTRAP (t) = 1;
d9a6bd32
JJ
4263 SET_DECL_VALUE_EXPR (new_orig_var, t);
4264 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4265 }
4266 else
4267 {
4268 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4269 build_int_cst (ptype, 0));
4270 SET_DECL_VALUE_EXPR (new_var, x);
4271 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4272 }
4273 }
4274 else
4275 {
4276 gcc_assert (orig_var == var);
e01d41e5 4277 if (TREE_CODE (d) == INDIRECT_REF)
d9a6bd32
JJ
4278 {
4279 x = create_tmp_var (ptype, name);
4280 TREE_ADDRESSABLE (x) = 1;
e01d41e5 4281 gimplify_assign (x, yb, ilist);
d9a6bd32
JJ
4282 x = build_fold_addr_expr_loc (clause_loc, x);
4283 }
4284 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4285 gimplify_assign (new_var, x, ilist);
4286 }
28567c40
JJ
4287 /* GOMP_taskgroup_reduction_register memsets the whole
4288 array to zero. If the initializer is zero, we don't
4289 need to initialize it again, just mark it as ever
4290 used unconditionally, i.e. cond = true. */
4291 if (cond
4292 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4293 && initializer_zerop (omp_reduction_init (c,
4294 TREE_TYPE (type))))
4295 {
4296 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4297 boolean_true_node);
4298 gimple_seq_add_stmt (ilist, g);
4299 continue;
4300 }
4301 tree end = create_artificial_label (UNKNOWN_LOCATION);
4302 if (cond)
4303 {
4304 gimple *g;
4305 if (!is_parallel_ctx (ctx))
4306 {
4307 tree condv = create_tmp_var (boolean_type_node);
4308 g = gimple_build_assign (condv,
4309 build_simple_mem_ref (cond));
4310 gimple_seq_add_stmt (ilist, g);
4311 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4312 g = gimple_build_cond (NE_EXPR, condv,
4313 boolean_false_node, end, lab1);
4314 gimple_seq_add_stmt (ilist, g);
4315 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4316 }
4317 g = gimple_build_assign (build_simple_mem_ref (cond),
4318 boolean_true_node);
4319 gimple_seq_add_stmt (ilist, g);
4320 }
4321
4322 tree y1 = create_tmp_var (ptype);
d9a6bd32
JJ
4323 gimplify_assign (y1, y, ilist);
4324 tree i2 = NULL_TREE, y2 = NULL_TREE;
4325 tree body2 = NULL_TREE, end2 = NULL_TREE;
4326 tree y3 = NULL_TREE, y4 = NULL_TREE;
28567c40 4327 if (task_reduction_needs_orig_p)
d9a6bd32 4328 {
28567c40
JJ
4329 y3 = create_tmp_var (ptype);
4330 tree ref;
4331 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4332 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4333 size_int (task_reduction_cnt_full
4334 + task_reduction_cntorig - 1),
4335 NULL_TREE, NULL_TREE);
4336 else
d9a6bd32 4337 {
28567c40
JJ
4338 unsigned int idx = *ctx->task_reduction_map->get (c);
4339 ref = task_reduction_read (ilist, tskred_temp, ptype,
4340 7 + 3 * idx);
d9a6bd32 4341 }
28567c40
JJ
4342 gimplify_assign (y3, ref, ilist);
4343 }
4344 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4345 {
4346 if (pass != 3)
d9a6bd32 4347 {
28567c40
JJ
4348 y2 = create_tmp_var (ptype);
4349 gimplify_assign (y2, y, ilist);
4350 }
4351 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4352 {
4353 tree ref = build_outer_var_ref (var, ctx);
4354 /* For references, build_outer_var_ref already performs this. */
4355 if (TREE_CODE (d) == INDIRECT_REF)
4356 gcc_assert (omp_is_reference (var));
4357 else if (TREE_CODE (d) == ADDR_EXPR)
4358 ref = build_fold_addr_expr (ref);
4359 else if (omp_is_reference (var))
4360 ref = build_fold_addr_expr (ref);
4361 ref = fold_convert_loc (clause_loc, ptype, ref);
4362 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4363 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4364 {
4365 y3 = create_tmp_var (ptype);
4366 gimplify_assign (y3, unshare_expr (ref), ilist);
4367 }
4368 if (is_simd)
4369 {
4370 y4 = create_tmp_var (ptype);
4371 gimplify_assign (y4, ref, dlist);
4372 }
d9a6bd32
JJ
4373 }
4374 }
28567c40 4375 tree i = create_tmp_var (TREE_TYPE (v));
d9a6bd32
JJ
4376 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4377 tree body = create_artificial_label (UNKNOWN_LOCATION);
d9a6bd32
JJ
4378 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4379 if (y2)
4380 {
28567c40 4381 i2 = create_tmp_var (TREE_TYPE (v));
d9a6bd32
JJ
4382 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4383 body2 = create_artificial_label (UNKNOWN_LOCATION);
4384 end2 = create_artificial_label (UNKNOWN_LOCATION);
4385 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4386 }
4387 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4388 {
4389 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4390 tree decl_placeholder
4391 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4392 SET_DECL_VALUE_EXPR (decl_placeholder,
4393 build_simple_mem_ref (y1));
4394 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4395 SET_DECL_VALUE_EXPR (placeholder,
4396 y3 ? build_simple_mem_ref (y3)
4397 : error_mark_node);
4398 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4399 x = lang_hooks.decls.omp_clause_default_ctor
4400 (c, build_simple_mem_ref (y1),
4401 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4402 if (x)
4403 gimplify_and_add (x, ilist);
4404 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4405 {
4406 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4407 lower_omp (&tseq, ctx);
4408 gimple_seq_add_seq (ilist, tseq);
4409 }
4410 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4411 if (is_simd)
4412 {
4413 SET_DECL_VALUE_EXPR (decl_placeholder,
4414 build_simple_mem_ref (y2));
4415 SET_DECL_VALUE_EXPR (placeholder,
4416 build_simple_mem_ref (y4));
4417 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4418 lower_omp (&tseq, ctx);
4419 gimple_seq_add_seq (dlist, tseq);
4420 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4421 }
4422 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4423 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
28567c40 4424 if (y2)
d9a6bd32 4425 {
28567c40
JJ
4426 x = lang_hooks.decls.omp_clause_dtor
4427 (c, build_simple_mem_ref (y2));
4428 if (x)
4429 {
4430 gimple_seq tseq = NULL;
4431 dtor = x;
4432 gimplify_stmt (&dtor, &tseq);
4433 gimple_seq_add_seq (dlist, tseq);
4434 }
d9a6bd32
JJ
4435 }
4436 }
4437 else
4438 {
4439 x = omp_reduction_init (c, TREE_TYPE (type));
4440 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4441
4442 /* reduction(-:var) sums up the partial results, so it
4443 acts identically to reduction(+:var). */
4444 if (code == MINUS_EXPR)
4445 code = PLUS_EXPR;
4446
4447 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4448 if (is_simd)
4449 {
4450 x = build2 (code, TREE_TYPE (type),
4451 build_simple_mem_ref (y4),
4452 build_simple_mem_ref (y2));
4453 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4454 }
4455 }
4456 gimple *g
4457 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4458 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4459 gimple_seq_add_stmt (ilist, g);
4460 if (y3)
4461 {
4462 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4463 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4464 gimple_seq_add_stmt (ilist, g);
4465 }
4466 g = gimple_build_assign (i, PLUS_EXPR, i,
4467 build_int_cst (TREE_TYPE (i), 1));
4468 gimple_seq_add_stmt (ilist, g);
4469 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4470 gimple_seq_add_stmt (ilist, g);
4471 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4472 if (y2)
4473 {
4474 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4475 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4476 gimple_seq_add_stmt (dlist, g);
4477 if (y4)
4478 {
4479 g = gimple_build_assign
4480 (y4, POINTER_PLUS_EXPR, y4,
4481 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4482 gimple_seq_add_stmt (dlist, g);
4483 }
4484 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4485 build_int_cst (TREE_TYPE (i2), 1));
4486 gimple_seq_add_stmt (dlist, g);
4487 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4488 gimple_seq_add_stmt (dlist, g);
4489 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4490 }
4491 continue;
4492 }
28567c40
JJ
4493 else if (pass == 2)
4494 {
4495 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4496 x = var;
4497 else
4498 {
4499 bool by_ref = use_pointer_for_field (var, ctx);
4500 x = build_receiver_ref (var, by_ref, ctx);
4501 }
4502 if (!omp_is_reference (var))
4503 x = build_fold_addr_expr (x);
4504 x = fold_convert (ptr_type_node, x);
4505 unsigned cnt = task_reduction_cnt - 1;
4506 if (!task_reduction_needs_orig_p)
4507 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4508 else
4509 cnt = task_reduction_cntorig - 1;
4510 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4511 size_int (cnt), NULL_TREE, NULL_TREE);
4512 gimplify_assign (r, x, ilist);
4513 continue;
4514 }
4515 else if (pass == 3)
4516 {
4517 tree type = TREE_TYPE (new_var);
4518 if (!omp_is_reference (var))
4519 type = build_pointer_type (type);
4520 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4521 {
4522 unsigned cnt = task_reduction_cnt - 1;
4523 if (!task_reduction_needs_orig_p)
4524 cnt += (task_reduction_cntorig_full
4525 - task_reduction_cntorig);
4526 else
4527 cnt = task_reduction_cntorig - 1;
4528 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4529 size_int (cnt), NULL_TREE, NULL_TREE);
4530 }
4531 else
4532 {
4533 unsigned int idx = *ctx->task_reduction_map->get (c);
4534 tree off;
4535 if (ctx->task_reductions[1 + idx])
4536 off = fold_convert (sizetype,
4537 ctx->task_reductions[1 + idx]);
4538 else
4539 off = task_reduction_read (ilist, tskred_temp, sizetype,
4540 7 + 3 * idx + 1);
4541 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4542 tskred_base, off);
4543 }
4544 x = fold_convert (type, x);
4545 tree t;
4546 if (omp_is_reference (var))
4547 {
4548 gimplify_assign (new_var, x, ilist);
4549 t = new_var;
4550 new_var = build_simple_mem_ref (new_var);
4551 }
4552 else
4553 {
4554 t = create_tmp_var (type);
4555 gimplify_assign (t, x, ilist);
4556 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4557 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4558 }
4559 t = fold_convert (build_pointer_type (boolean_type_node), t);
4560 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4561 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4562 cond = create_tmp_var (TREE_TYPE (t));
4563 gimplify_assign (cond, t, ilist);
4564 }
d9a6bd32
JJ
4565 else if (is_variable_sized (var))
4566 {
4567 /* For variable sized types, we need to allocate the
4568 actual storage here. Call alloca and store the
4569 result in the pointer decl that we created elsewhere. */
4570 if (pass == 0)
4571 continue;
4572
4573 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4574 {
4575 gcall *stmt;
4576 tree tmp, atmp;
4577
4578 ptr = DECL_VALUE_EXPR (new_var);
4579 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4580 ptr = TREE_OPERAND (ptr, 0);
a68ab351
JJ
4581 gcc_assert (DECL_P (ptr));
4582 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
726a989a
RB
4583
4584 /* void *tmp = __builtin_alloca */
d9a6bd32
JJ
4585 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4586 stmt = gimple_build_call (atmp, 2, x,
4587 size_int (DECL_ALIGN (var)));
b731b390 4588 tmp = create_tmp_var_raw (ptr_type_node);
726a989a
RB
4589 gimple_add_tmp_var (tmp);
4590 gimple_call_set_lhs (stmt, tmp);
4591
4592 gimple_seq_add_stmt (ilist, stmt);
4593
db3927fb 4594 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
726a989a 4595 gimplify_assign (ptr, x, ilist);
a68ab351 4596 }
953ff289 4597 }
28567c40
JJ
4598 else if (omp_is_reference (var)
4599 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4600 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
953ff289 4601 {
50674e96
DN
4602 /* For references that are being privatized for Fortran,
4603 allocate new backing storage for the new pointer
4604 variable. This allows us to avoid changing all the
4605 code that expects a pointer into code that expects
acf0174b 4606 a direct variable. */
953ff289
DN
4607 if (pass == 0)
4608 continue;
4609
4610 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
a68ab351
JJ
4611 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4612 {
4613 x = build_receiver_ref (var, false, ctx);
db3927fb 4614 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
4615 }
4616 else if (TREE_CONSTANT (x))
953ff289 4617 {
decaaec8
JJ
4618 /* For reduction in SIMD loop, defer adding the
4619 initialization of the reference, because if we decide
4620 to use SIMD array for it, the initialization could cause
4621 expansion ICE. */
4622 if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4ceffa27
JJ
4623 x = NULL_TREE;
4624 else
4625 {
4ceffa27 4626 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
d9a6bd32 4627 get_name (var));
4ceffa27
JJ
4628 gimple_add_tmp_var (x);
4629 TREE_ADDRESSABLE (x) = 1;
4630 x = build_fold_addr_expr_loc (clause_loc, x);
4631 }
953ff289
DN
4632 }
4633 else
4634 {
d9a6bd32
JJ
4635 tree atmp
4636 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4637 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4638 tree al = size_int (TYPE_ALIGN (rtype));
4639 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
953ff289
DN
4640 }
4641
4ceffa27
JJ
4642 if (x)
4643 {
4644 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4645 gimplify_assign (new_var, x, ilist);
4646 }
953ff289 4647
70f34814 4648 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289 4649 }
28567c40
JJ
4650 else if ((c_kind == OMP_CLAUSE_REDUCTION
4651 || c_kind == OMP_CLAUSE_IN_REDUCTION)
953ff289
DN
4652 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4653 {
4654 if (pass == 0)
4655 continue;
4656 }
4657 else if (pass != 0)
4658 continue;
4659
aaf46ef9 4660 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
4661 {
4662 case OMP_CLAUSE_SHARED:
28567c40
JJ
4663 /* Ignore shared directives in teams construct inside
4664 target construct. */
4665 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4666 && !is_host_teams_ctx (ctx))
acf0174b 4667 continue;
8ca5b2a2
JJ
4668 /* Shared global vars are just accessed directly. */
4669 if (is_global_var (new_var))
4670 break;
d9a6bd32
JJ
4671 /* For taskloop firstprivate/lastprivate, represented
4672 as firstprivate and shared clause on the task, new_var
4673 is the firstprivate var. */
4674 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4675 break;
953ff289
DN
4676 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4677 needs to be delayed until after fixup_child_record_type so
4678 that we get the correct type during the dereference. */
7c8f7639 4679 by_ref = use_pointer_for_field (var, ctx);
953ff289
DN
4680 x = build_receiver_ref (var, by_ref, ctx);
4681 SET_DECL_VALUE_EXPR (new_var, x);
4682 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4683
4684 /* ??? If VAR is not passed by reference, and the variable
4685 hasn't been initialized yet, then we'll get a warning for
4686 the store into the omp_data_s structure. Ideally, we'd be
b8698a0f 4687 able to notice this and not store anything at all, but
953ff289
DN
4688 we're generating code too early. Suppress the warning. */
4689 if (!by_ref)
4690 TREE_NO_WARNING (var) = 1;
4691 break;
4692
4693 case OMP_CLAUSE_LASTPRIVATE:
4694 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4695 break;
4696 /* FALLTHRU */
4697
4698 case OMP_CLAUSE_PRIVATE:
a68ab351
JJ
4699 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4700 x = build_outer_var_ref (var, ctx);
4701 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4702 {
4703 if (is_task_ctx (ctx))
4704 x = build_receiver_ref (var, false, ctx);
4705 else
c39dad64 4706 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
a68ab351
JJ
4707 }
4708 else
4709 x = NULL;
74bf76ed 4710 do_private:
acf0174b 4711 tree nx;
d9a6bd32
JJ
4712 nx = lang_hooks.decls.omp_clause_default_ctor
4713 (c, unshare_expr (new_var), x);
74bf76ed
JJ
4714 if (is_simd)
4715 {
4716 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
acf0174b 4717 if ((TREE_ADDRESSABLE (new_var) || nx || y
74bf76ed 4718 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
6943af07
AM
4719 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4720 ivar, lvar))
74bf76ed 4721 {
acf0174b 4722 if (nx)
74bf76ed
JJ
4723 x = lang_hooks.decls.omp_clause_default_ctor
4724 (c, unshare_expr (ivar), x);
acf0174b 4725 if (nx && x)
74bf76ed
JJ
4726 gimplify_and_add (x, &llist[0]);
4727 if (y)
4728 {
4729 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4730 if (y)
4731 {
4732 gimple_seq tseq = NULL;
4733
4734 dtor = y;
4735 gimplify_stmt (&dtor, &tseq);
4736 gimple_seq_add_seq (&llist[1], tseq);
4737 }
4738 }
4739 break;
4740 }
4741 }
acf0174b
JJ
4742 if (nx)
4743 gimplify_and_add (nx, ilist);
953ff289
DN
4744 /* FALLTHRU */
4745
4746 do_dtor:
4747 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4748 if (x)
4749 {
726a989a
RB
4750 gimple_seq tseq = NULL;
4751
953ff289 4752 dtor = x;
726a989a 4753 gimplify_stmt (&dtor, &tseq);
355a7673 4754 gimple_seq_add_seq (dlist, tseq);
953ff289
DN
4755 }
4756 break;
4757
74bf76ed
JJ
4758 case OMP_CLAUSE_LINEAR:
4759 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4760 goto do_firstprivate;
4761 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4762 x = NULL;
4763 else
4764 x = build_outer_var_ref (var, ctx);
4765 goto do_private;
4766
953ff289 4767 case OMP_CLAUSE_FIRSTPRIVATE:
a68ab351
JJ
4768 if (is_task_ctx (ctx))
4769 {
28567c40
JJ
4770 if ((omp_is_reference (var)
4771 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
4772 || is_variable_sized (var))
a68ab351
JJ
4773 goto do_dtor;
4774 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4775 ctx))
4776 || use_pointer_for_field (var, NULL))
4777 {
4778 x = build_receiver_ref (var, false, ctx);
4779 SET_DECL_VALUE_EXPR (new_var, x);
4780 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4781 goto do_dtor;
4782 }
4783 }
28567c40
JJ
4784 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
4785 && omp_is_reference (var))
4786 {
4787 x = build_outer_var_ref (var, ctx);
4788 gcc_assert (TREE_CODE (x) == MEM_REF
4789 && integer_zerop (TREE_OPERAND (x, 1)));
4790 x = TREE_OPERAND (x, 0);
4791 x = lang_hooks.decls.omp_clause_copy_ctor
4792 (c, unshare_expr (new_var), x);
4793 gimplify_and_add (x, ilist);
4794 goto do_dtor;
4795 }
74bf76ed 4796 do_firstprivate:
953ff289 4797 x = build_outer_var_ref (var, ctx);
74bf76ed
JJ
4798 if (is_simd)
4799 {
acf0174b
JJ
4800 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4801 && gimple_omp_for_combined_into_p (ctx->stmt))
4802 {
da6f124d
JJ
4803 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4804 tree stept = TREE_TYPE (t);
629b3d75 4805 tree ct = omp_find_clause (clauses,
da6f124d
JJ
4806 OMP_CLAUSE__LOOPTEMP_);
4807 gcc_assert (ct);
4808 tree l = OMP_CLAUSE_DECL (ct);
56ad0e38
JJ
4809 tree n1 = fd->loop.n1;
4810 tree step = fd->loop.step;
4811 tree itype = TREE_TYPE (l);
4812 if (POINTER_TYPE_P (itype))
4813 itype = signed_type_for (itype);
4814 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4815 if (TYPE_UNSIGNED (itype)
4816 && fd->loop.cond_code == GT_EXPR)
4817 l = fold_build2 (TRUNC_DIV_EXPR, itype,
4818 fold_build1 (NEGATE_EXPR, itype, l),
4819 fold_build1 (NEGATE_EXPR,
4820 itype, step));
4821 else
4822 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
acf0174b
JJ
4823 t = fold_build2 (MULT_EXPR, stept,
4824 fold_convert (stept, l), t);
da6f124d
JJ
4825
4826 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4827 {
4828 x = lang_hooks.decls.omp_clause_linear_ctor
4829 (c, new_var, x, t);
4830 gimplify_and_add (x, ilist);
4831 goto do_dtor;
4832 }
4833
acf0174b
JJ
4834 if (POINTER_TYPE_P (TREE_TYPE (x)))
4835 x = fold_build2 (POINTER_PLUS_EXPR,
4836 TREE_TYPE (x), x, t);
4837 else
4838 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4839 }
4840
74bf76ed
JJ
4841 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4842 || TREE_ADDRESSABLE (new_var))
6943af07
AM
4843 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4844 ivar, lvar))
74bf76ed
JJ
4845 {
4846 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4847 {
b731b390 4848 tree iv = create_tmp_var (TREE_TYPE (new_var));
74bf76ed
JJ
4849 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4850 gimplify_and_add (x, ilist);
4851 gimple_stmt_iterator gsi
4852 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
538dd0b7 4853 gassign *g
74bf76ed
JJ
4854 = gimple_build_assign (unshare_expr (lvar), iv);
4855 gsi_insert_before_without_update (&gsi, g,
4856 GSI_SAME_STMT);
da6f124d 4857 tree t = OMP_CLAUSE_LINEAR_STEP (c);
74bf76ed
JJ
4858 enum tree_code code = PLUS_EXPR;
4859 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4860 code = POINTER_PLUS_EXPR;
0d0e4a03 4861 g = gimple_build_assign (iv, code, iv, t);
74bf76ed
JJ
4862 gsi_insert_before_without_update (&gsi, g,
4863 GSI_SAME_STMT);
4864 break;
4865 }
4866 x = lang_hooks.decls.omp_clause_copy_ctor
4867 (c, unshare_expr (ivar), x);
4868 gimplify_and_add (x, &llist[0]);
4869 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4870 if (x)
4871 {
4872 gimple_seq tseq = NULL;
4873
4874 dtor = x;
4875 gimplify_stmt (&dtor, &tseq);
4876 gimple_seq_add_seq (&llist[1], tseq);
4877 }
4878 break;
4879 }
4880 }
d9a6bd32
JJ
4881 x = lang_hooks.decls.omp_clause_copy_ctor
4882 (c, unshare_expr (new_var), x);
953ff289
DN
4883 gimplify_and_add (x, ilist);
4884 goto do_dtor;
953ff289 4885
acf0174b 4886 case OMP_CLAUSE__LOOPTEMP_:
28567c40 4887 case OMP_CLAUSE__REDUCTEMP_:
d9a6bd32 4888 gcc_assert (is_taskreg_ctx (ctx));
acf0174b
JJ
4889 x = build_outer_var_ref (var, ctx);
4890 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4891 gimplify_and_add (x, ilist);
4892 break;
4893
953ff289 4894 case OMP_CLAUSE_COPYIN:
7c8f7639 4895 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
4896 x = build_receiver_ref (var, by_ref, ctx);
4897 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4898 append_to_statement_list (x, &copyin_seq);
4899 copyin_by_ref |= by_ref;
4900 break;
4901
4902 case OMP_CLAUSE_REDUCTION:
28567c40 4903 case OMP_CLAUSE_IN_REDUCTION:
e5014671
NS
4904 /* OpenACC reductions are initialized using the
4905 GOACC_REDUCTION internal function. */
4906 if (is_gimple_omp_oacc (ctx->stmt))
4907 break;
953ff289
DN
4908 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4909 {
a68ab351 4910 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
355fe088 4911 gimple *tseq;
28567c40
JJ
4912 tree ptype = TREE_TYPE (placeholder);
4913 if (cond)
4914 {
4915 x = error_mark_node;
4916 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
4917 && !task_reduction_needs_orig_p)
4918 x = var;
4919 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4920 {
4921 tree pptype = build_pointer_type (ptype);
4922 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4923 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4924 size_int (task_reduction_cnt_full
4925 + task_reduction_cntorig - 1),
4926 NULL_TREE, NULL_TREE);
4927 else
4928 {
4929 unsigned int idx
4930 = *ctx->task_reduction_map->get (c);
4931 x = task_reduction_read (ilist, tskred_temp,
4932 pptype, 7 + 3 * idx);
4933 }
4934 x = fold_convert (pptype, x);
4935 x = build_simple_mem_ref (x);
4936 }
4937 }
4938 else
4939 {
4940 x = build_outer_var_ref (var, ctx);
a68ab351 4941
28567c40
JJ
4942 if (omp_is_reference (var)
4943 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
4944 x = build_fold_addr_expr_loc (clause_loc, x);
4945 }
a68ab351
JJ
4946 SET_DECL_VALUE_EXPR (placeholder, x);
4947 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
acf0174b 4948 tree new_vard = new_var;
629b3d75 4949 if (omp_is_reference (var))
acf0174b
JJ
4950 {
4951 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4952 new_vard = TREE_OPERAND (new_var, 0);
4953 gcc_assert (DECL_P (new_vard));
4954 }
74bf76ed 4955 if (is_simd
6943af07
AM
4956 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4957 ivar, lvar))
74bf76ed 4958 {
acf0174b
JJ
4959 if (new_vard == new_var)
4960 {
4961 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4962 SET_DECL_VALUE_EXPR (new_var, ivar);
4963 }
4964 else
4965 {
4966 SET_DECL_VALUE_EXPR (new_vard,
4967 build_fold_addr_expr (ivar));
4968 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4969 }
4970 x = lang_hooks.decls.omp_clause_default_ctor
4971 (c, unshare_expr (ivar),
4972 build_outer_var_ref (var, ctx));
4973 if (x)
4974 gimplify_and_add (x, &llist[0]);
4975 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4976 {
4977 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4978 lower_omp (&tseq, ctx);
4979 gimple_seq_add_seq (&llist[0], tseq);
4980 }
4981 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4982 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4983 lower_omp (&tseq, ctx);
4984 gimple_seq_add_seq (&llist[1], tseq);
4985 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4986 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4987 if (new_vard == new_var)
4988 SET_DECL_VALUE_EXPR (new_var, lvar);
4989 else
4990 SET_DECL_VALUE_EXPR (new_vard,
4991 build_fold_addr_expr (lvar));
4992 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4993 if (x)
4994 {
4995 tseq = NULL;
4996 dtor = x;
4997 gimplify_stmt (&dtor, &tseq);
4998 gimple_seq_add_seq (&llist[1], tseq);
4999 }
5000 break;
5001 }
4ceffa27
JJ
5002 /* If this is a reference to constant size reduction var
5003 with placeholder, we haven't emitted the initializer
5004 for it because it is undesirable if SIMD arrays are used.
5005 But if they aren't used, we need to emit the deferred
5006 initialization now. */
629b3d75 5007 else if (omp_is_reference (var) && is_simd)
decaaec8 5008 handle_simd_reference (clause_loc, new_vard, ilist);
28567c40
JJ
5009
5010 tree lab2 = NULL_TREE;
5011 if (cond)
5012 {
5013 gimple *g;
5014 if (!is_parallel_ctx (ctx))
5015 {
5016 tree condv = create_tmp_var (boolean_type_node);
5017 tree m = build_simple_mem_ref (cond);
5018 g = gimple_build_assign (condv, m);
5019 gimple_seq_add_stmt (ilist, g);
5020 tree lab1
5021 = create_artificial_label (UNKNOWN_LOCATION);
5022 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5023 g = gimple_build_cond (NE_EXPR, condv,
5024 boolean_false_node,
5025 lab2, lab1);
5026 gimple_seq_add_stmt (ilist, g);
5027 gimple_seq_add_stmt (ilist,
5028 gimple_build_label (lab1));
5029 }
5030 g = gimple_build_assign (build_simple_mem_ref (cond),
5031 boolean_true_node);
5032 gimple_seq_add_stmt (ilist, g);
5033 }
acf0174b 5034 x = lang_hooks.decls.omp_clause_default_ctor
92d28cbb 5035 (c, unshare_expr (new_var),
28567c40
JJ
5036 cond ? NULL_TREE
5037 : build_outer_var_ref (var, ctx));
acf0174b
JJ
5038 if (x)
5039 gimplify_and_add (x, ilist);
5040 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5041 {
5042 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5043 lower_omp (&tseq, ctx);
5044 gimple_seq_add_seq (ilist, tseq);
5045 }
5046 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5047 if (is_simd)
5048 {
5049 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5050 lower_omp (&tseq, ctx);
5051 gimple_seq_add_seq (dlist, tseq);
5052 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5053 }
5054 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
28567c40
JJ
5055 if (cond)
5056 {
5057 if (lab2)
5058 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5059 break;
5060 }
acf0174b
JJ
5061 goto do_dtor;
5062 }
5063 else
5064 {
5065 x = omp_reduction_init (c, TREE_TYPE (new_var));
5066 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
e9792e1d
JJ
5067 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5068
28567c40
JJ
5069 if (cond)
5070 {
5071 gimple *g;
5072 tree lab2 = NULL_TREE;
5073 /* GOMP_taskgroup_reduction_register memsets the whole
5074 array to zero. If the initializer is zero, we don't
5075 need to initialize it again, just mark it as ever
5076 used unconditionally, i.e. cond = true. */
5077 if (initializer_zerop (x))
5078 {
5079 g = gimple_build_assign (build_simple_mem_ref (cond),
5080 boolean_true_node);
5081 gimple_seq_add_stmt (ilist, g);
5082 break;
5083 }
5084
5085 /* Otherwise, emit
5086 if (!cond) { cond = true; new_var = x; } */
5087 if (!is_parallel_ctx (ctx))
5088 {
5089 tree condv = create_tmp_var (boolean_type_node);
5090 tree m = build_simple_mem_ref (cond);
5091 g = gimple_build_assign (condv, m);
5092 gimple_seq_add_stmt (ilist, g);
5093 tree lab1
5094 = create_artificial_label (UNKNOWN_LOCATION);
5095 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5096 g = gimple_build_cond (NE_EXPR, condv,
5097 boolean_false_node,
5098 lab2, lab1);
5099 gimple_seq_add_stmt (ilist, g);
5100 gimple_seq_add_stmt (ilist,
5101 gimple_build_label (lab1));
5102 }
5103 g = gimple_build_assign (build_simple_mem_ref (cond),
5104 boolean_true_node);
5105 gimple_seq_add_stmt (ilist, g);
5106 gimplify_assign (new_var, x, ilist);
5107 if (lab2)
5108 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5109 break;
5110 }
5111
e9792e1d
JJ
5112 /* reduction(-:var) sums up the partial results, so it
5113 acts identically to reduction(+:var). */
5114 if (code == MINUS_EXPR)
5115 code = PLUS_EXPR;
5116
decaaec8 5117 tree new_vard = new_var;
629b3d75 5118 if (is_simd && omp_is_reference (var))
decaaec8
JJ
5119 {
5120 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5121 new_vard = TREE_OPERAND (new_var, 0);
5122 gcc_assert (DECL_P (new_vard));
5123 }
acf0174b 5124 if (is_simd
6943af07
AM
5125 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5126 ivar, lvar))
acf0174b 5127 {
acf0174b
JJ
5128 tree ref = build_outer_var_ref (var, ctx);
5129
5130 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5131
6943af07 5132 if (sctx.is_simt)
9669b00b
AM
5133 {
5134 if (!simt_lane)
5135 simt_lane = create_tmp_var (unsigned_type_node);
5136 x = build_call_expr_internal_loc
5137 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5138 TREE_TYPE (ivar), 2, ivar, simt_lane);
5139 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5140 gimplify_assign (ivar, x, &llist[2]);
5141 }
acf0174b 5142 x = build2 (code, TREE_TYPE (ref), ref, ivar);
74bf76ed
JJ
5143 ref = build_outer_var_ref (var, ctx);
5144 gimplify_assign (ref, x, &llist[1]);
decaaec8
JJ
5145
5146 if (new_vard != new_var)
5147 {
5148 SET_DECL_VALUE_EXPR (new_vard,
5149 build_fold_addr_expr (lvar));
5150 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5151 }
74bf76ed
JJ
5152 }
5153 else
5154 {
629b3d75 5155 if (omp_is_reference (var) && is_simd)
decaaec8 5156 handle_simd_reference (clause_loc, new_vard, ilist);
74bf76ed
JJ
5157 gimplify_assign (new_var, x, ilist);
5158 if (is_simd)
e9792e1d
JJ
5159 {
5160 tree ref = build_outer_var_ref (var, ctx);
5161
5162 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5163 ref = build_outer_var_ref (var, ctx);
5164 gimplify_assign (ref, x, dlist);
5165 }
74bf76ed 5166 }
953ff289
DN
5167 }
5168 break;
5169
5170 default:
5171 gcc_unreachable ();
5172 }
5173 }
5174 }
28567c40
JJ
5175 if (tskred_avar)
5176 {
5177 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5178 TREE_THIS_VOLATILE (clobber) = 1;
5179 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5180 }
953ff289 5181
9d2f08ab 5182 if (known_eq (sctx.max_vf, 1U))
0c6b03b5
AM
5183 sctx.is_simt = false;
5184
5185 if (sctx.lane || sctx.is_simt)
74bf76ed 5186 {
0c6b03b5 5187 uid = create_tmp_var (ptr_type_node, "simduid");
8928eff3
JJ
5188 /* Don't want uninit warnings on simduid, it is always uninitialized,
5189 but we use it not for the value, but for the DECL_UID only. */
5190 TREE_NO_WARNING (uid) = 1;
0c6b03b5
AM
5191 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5192 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5193 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5194 gimple_omp_for_set_clauses (ctx->stmt, c);
5195 }
5196 /* Emit calls denoting privatized variables and initializing a pointer to
5197 the structure that holds private variables as fields after the ompdevlow pass. */
5198 if (sctx.is_simt)
5199 {
5200 sctx.simt_eargs[0] = uid;
5201 gimple *g
5202 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5203 gimple_call_set_lhs (g, uid);
5204 gimple_seq_add_stmt (ilist, g);
5205 sctx.simt_eargs.release ();
5206
5207 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5208 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5209 gimple_call_set_lhs (g, simtrec);
5210 gimple_seq_add_stmt (ilist, g);
5211 }
5212 if (sctx.lane)
5213 {
355fe088 5214 gimple *g
74bf76ed 5215 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
6943af07 5216 gimple_call_set_lhs (g, sctx.lane);
74bf76ed
JJ
5217 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5218 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6943af07 5219 g = gimple_build_assign (sctx.lane, INTEGER_CST,
0d0e4a03 5220 build_int_cst (unsigned_type_node, 0));
74bf76ed 5221 gimple_seq_add_stmt (ilist, g);
9669b00b
AM
5222 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
5223 if (llist[2])
5224 {
5225 tree simt_vf = create_tmp_var (unsigned_type_node);
5226 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5227 gimple_call_set_lhs (g, simt_vf);
5228 gimple_seq_add_stmt (dlist, g);
5229
5230 tree t = build_int_cst (unsigned_type_node, 1);
5231 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5232 gimple_seq_add_stmt (dlist, g);
5233
5234 t = build_int_cst (unsigned_type_node, 0);
6943af07 5235 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
9669b00b
AM
5236 gimple_seq_add_stmt (dlist, g);
5237
5238 tree body = create_artificial_label (UNKNOWN_LOCATION);
5239 tree header = create_artificial_label (UNKNOWN_LOCATION);
5240 tree end = create_artificial_label (UNKNOWN_LOCATION);
5241 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5242 gimple_seq_add_stmt (dlist, gimple_build_label (body));
5243
5244 gimple_seq_add_seq (dlist, llist[2]);
5245
5246 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5247 gimple_seq_add_stmt (dlist, g);
5248
5249 gimple_seq_add_stmt (dlist, gimple_build_label (header));
5250 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5251 gimple_seq_add_stmt (dlist, g);
5252
5253 gimple_seq_add_stmt (dlist, gimple_build_label (end));
5254 }
74bf76ed
JJ
5255 for (int i = 0; i < 2; i++)
5256 if (llist[i])
5257 {
b731b390 5258 tree vf = create_tmp_var (unsigned_type_node);
74bf76ed
JJ
5259 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5260 gimple_call_set_lhs (g, vf);
5261 gimple_seq *seq = i == 0 ? ilist : dlist;
5262 gimple_seq_add_stmt (seq, g);
5263 tree t = build_int_cst (unsigned_type_node, 0);
6943af07 5264 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
74bf76ed
JJ
5265 gimple_seq_add_stmt (seq, g);
5266 tree body = create_artificial_label (UNKNOWN_LOCATION);
5267 tree header = create_artificial_label (UNKNOWN_LOCATION);
5268 tree end = create_artificial_label (UNKNOWN_LOCATION);
5269 gimple_seq_add_stmt (seq, gimple_build_goto (header));
5270 gimple_seq_add_stmt (seq, gimple_build_label (body));
5271 gimple_seq_add_seq (seq, llist[i]);
5272 t = build_int_cst (unsigned_type_node, 1);
6943af07 5273 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
74bf76ed
JJ
5274 gimple_seq_add_stmt (seq, g);
5275 gimple_seq_add_stmt (seq, gimple_build_label (header));
6943af07 5276 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
74bf76ed
JJ
5277 gimple_seq_add_stmt (seq, g);
5278 gimple_seq_add_stmt (seq, gimple_build_label (end));
5279 }
5280 }
0c6b03b5
AM
5281 if (sctx.is_simt)
5282 {
5283 gimple_seq_add_seq (dlist, sctx.simt_dlist);
5284 gimple *g
5285 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5286 gimple_seq_add_stmt (dlist, g);
5287 }
74bf76ed 5288
953ff289
DN
5289 /* The copyin sequence is not to be executed by the main thread, since
5290 that would result in self-copies. Perhaps not visible to scalars,
5291 but it certainly is to C++ operator=. */
5292 if (copyin_seq)
5293 {
e79983f4
MM
5294 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5295 0);
953ff289
DN
5296 x = build2 (NE_EXPR, boolean_type_node, x,
5297 build_int_cst (TREE_TYPE (x), 0));
5298 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5299 gimplify_and_add (x, ilist);
5300 }
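 /* As a simplified sketch (not the exact GIMPLE), for a threadprivate
    variable used with copyin:

      static int tp;
      #pragma omp threadprivate (tp)

      void
      f (void)
      {
        tp = 42;
      #pragma omp parallel copyin (tp)
        tp++;
      }

    the guarded sequence built above amounts to

      if (__builtin_omp_get_thread_num () != 0)
        tp = <the master thread's tp, read via the receiver struct>;

    so every thread except the master copies the master's value into its
    own threadprivate instance before the region body runs.  */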
5301
5302 /* If any copyin variable is passed by reference, we must ensure the
5303 master thread doesn't modify it before it is copied over in all
8ca5b2a2
JJ
5304 threads. Similarly for variables in both firstprivate and
5305 lastprivate clauses we need to ensure the lastprivate copying
acf0174b
JJ
5306 happens after firstprivate copying in all threads. And similarly
5307 for UDRs if the initializer expression refers to omp_orig. */
5308 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
74bf76ed
JJ
5309 {
5310 /* Don't add any barrier for #pragma omp simd or
5311 #pragma omp distribute. */
28567c40
JJ
5312 if (!is_task_ctx (ctx)
5313 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5314 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
629b3d75 5315 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
74bf76ed
JJ
5316 }
5317
5318 /* If max_vf is non-zero, then we can use only a vectorization factor
5319 up to the max_vf we chose. So stick it into the safelen clause. */
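 /* For instance, if the SIMD arrays above fixed sctx.max_vf at 16 and the
    loop either had no safelen clause or one larger than 16, this adds the
    equivalent of safelen(16) to the GIMPLE_OMP_FOR, so the vectorizer
    never picks a wider factor than the private arrays can hold.
    (Simplified description; the clause is attached directly to the
    statement, not to any source pragma.)  */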
9d2f08ab 5320 if (maybe_ne (sctx.max_vf, 0U))
74bf76ed 5321 {
629b3d75 5322 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
74bf76ed 5323 OMP_CLAUSE_SAFELEN);
9d2f08ab 5324 poly_uint64 safe_len;
74bf76ed 5325 if (c == NULL_TREE
9d2f08ab
RS
5326 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5327 && maybe_gt (safe_len, sctx.max_vf)))
74bf76ed
JJ
5328 {
5329 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5330 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6943af07 5331 sctx.max_vf);
74bf76ed
JJ
5332 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5333 gimple_omp_for_set_clauses (ctx->stmt, c);
5334 }
5335 }
953ff289
DN
5336}
5337
50674e96 5338
953ff289
DN
5339/* Generate code to implement the LASTPRIVATE clauses. This is used for
5340 both parallel and workshare constructs. PREDICATE may be NULL if it's
5341 always true. */
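
/* A minimal sketch of the kind of construct this handles (simplified):

     void
     f (int *a, int n, int *out)
     {
       int last = 0;
     #pragma omp parallel for lastprivate (last)
       for (int i = 0; i < n; i++)
         last = a[i];
       *out = last;
     }

   Every thread works on a private copy of LAST; the copy-out generated
   here runs only in the thread that executed the sequentially last
   iteration, which is what the PREDICATE guard below checks.  */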
5342
5343static void
726a989a 5344lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
acf0174b 5345 omp_context *ctx)
953ff289 5346{
74bf76ed 5347 tree x, c, label = NULL, orig_clauses = clauses;
a68ab351 5348 bool par_clauses = false;
9669b00b 5349 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
953ff289 5350
74bf76ed
JJ
5351 /* Early exit if there are no lastprivate or linear clauses. */
5352 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
5353 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
5354 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
5355 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
5356 break;
953ff289
DN
5357 if (clauses == NULL)
5358 {
5359 /* If this was a workshare clause, see if it had been combined
5360 with its parallel. In that case, look for the clauses on the
5361 parallel statement itself. */
5362 if (is_parallel_ctx (ctx))
5363 return;
5364
5365 ctx = ctx->outer;
5366 if (ctx == NULL || !is_parallel_ctx (ctx))
5367 return;
5368
629b3d75 5369 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
953ff289
DN
5370 OMP_CLAUSE_LASTPRIVATE);
5371 if (clauses == NULL)
5372 return;
a68ab351 5373 par_clauses = true;
953ff289
DN
5374 }
5375
9669b00b
AM
5376 bool maybe_simt = false;
5377 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5378 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5379 {
629b3d75
MJ
5380 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
5381 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
9669b00b
AM
5382 if (simduid)
5383 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
5384 }
5385
726a989a
RB
5386 if (predicate)
5387 {
538dd0b7 5388 gcond *stmt;
726a989a 5389 tree label_true, arm1, arm2;
56b1c60e 5390 enum tree_code pred_code = TREE_CODE (predicate);
726a989a 5391
c2255bc4
AH
5392 label = create_artificial_label (UNKNOWN_LOCATION);
5393 label_true = create_artificial_label (UNKNOWN_LOCATION);
56b1c60e
MJ
5394 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
5395 {
5396 arm1 = TREE_OPERAND (predicate, 0);
5397 arm2 = TREE_OPERAND (predicate, 1);
5398 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5399 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
5400 }
5401 else
5402 {
5403 arm1 = predicate;
5404 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5405 arm2 = boolean_false_node;
5406 pred_code = NE_EXPR;
5407 }
9669b00b
AM
5408 if (maybe_simt)
5409 {
56b1c60e 5410 c = build2 (pred_code, boolean_type_node, arm1, arm2);
9669b00b
AM
5411 c = fold_convert (integer_type_node, c);
5412 simtcond = create_tmp_var (integer_type_node);
5413 gimplify_assign (simtcond, c, stmt_list);
5414 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
5415 1, simtcond);
5416 c = create_tmp_var (integer_type_node);
5417 gimple_call_set_lhs (g, c);
5418 gimple_seq_add_stmt (stmt_list, g);
5419 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
5420 label_true, label);
5421 }
5422 else
56b1c60e 5423 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
726a989a
RB
5424 gimple_seq_add_stmt (stmt_list, stmt);
5425 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
5426 }
953ff289 5427
a68ab351 5428 for (c = clauses; c ;)
953ff289
DN
5429 {
5430 tree var, new_var;
db3927fb 5431 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5432
74bf76ed
JJ
5433 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5434 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5435 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
a68ab351
JJ
5436 {
5437 var = OMP_CLAUSE_DECL (c);
d9a6bd32
JJ
5438 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5439 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5440 && is_taskloop_ctx (ctx))
5441 {
5442 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
5443 new_var = lookup_decl (var, ctx->outer);
5444 }
5445 else
2187f2a2
JJ
5446 {
5447 new_var = lookup_decl (var, ctx);
5448 /* Avoid uninitialized warnings for lastprivate and
5449 for linear iterators. */
5450 if (predicate
5451 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5452 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
5453 TREE_NO_WARNING (new_var) = 1;
5454 }
953ff289 5455
2260d19d 5456 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
74bf76ed
JJ
5457 {
5458 tree val = DECL_VALUE_EXPR (new_var);
2260d19d 5459 if (TREE_CODE (val) == ARRAY_REF
74bf76ed
JJ
5460 && VAR_P (TREE_OPERAND (val, 0))
5461 && lookup_attribute ("omp simd array",
5462 DECL_ATTRIBUTES (TREE_OPERAND (val,
5463 0))))
5464 {
5465 if (lastlane == NULL)
5466 {
b731b390 5467 lastlane = create_tmp_var (unsigned_type_node);
538dd0b7 5468 gcall *g
74bf76ed
JJ
5469 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
5470 2, simduid,
5471 TREE_OPERAND (val, 1));
5472 gimple_call_set_lhs (g, lastlane);
5473 gimple_seq_add_stmt (stmt_list, g);
5474 }
5475 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
5476 TREE_OPERAND (val, 0), lastlane,
5477 NULL_TREE, NULL_TREE);
0c6b03b5 5478 }
2260d19d
AM
5479 }
5480 else if (maybe_simt)
5481 {
5482 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
5483 ? DECL_VALUE_EXPR (new_var)
5484 : new_var);
5485 if (simtlast == NULL)
0c6b03b5 5486 {
2260d19d
AM
5487 simtlast = create_tmp_var (unsigned_type_node);
5488 gcall *g = gimple_build_call_internal
5489 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
5490 gimple_call_set_lhs (g, simtlast);
5491 gimple_seq_add_stmt (stmt_list, g);
74bf76ed 5492 }
2260d19d
AM
5493 x = build_call_expr_internal_loc
5494 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
5495 TREE_TYPE (val), 2, val, simtlast);
5496 new_var = unshare_expr (new_var);
5497 gimplify_assign (new_var, x, stmt_list);
5498 new_var = unshare_expr (new_var);
74bf76ed
JJ
5499 }
5500
5501 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5502 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
726a989a 5503 {
355a7673 5504 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
726a989a
RB
5505 gimple_seq_add_seq (stmt_list,
5506 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
74bf76ed 5507 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
726a989a 5508 }
f7468577
JJ
5509 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5510 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
5511 {
5512 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
5513 gimple_seq_add_seq (stmt_list,
5514 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
5515 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
5516 }
953ff289 5517
d9a6bd32
JJ
5518 x = NULL_TREE;
5519 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5520 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
5521 {
5522 gcc_checking_assert (is_taskloop_ctx (ctx));
5523 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
5524 ctx->outer->outer);
5525 if (is_global_var (ovar))
5526 x = ovar;
5527 }
5528 if (!x)
c39dad64 5529 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
629b3d75 5530 if (omp_is_reference (var))
70f34814 5531 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
a68ab351 5532 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
726a989a 5533 gimplify_and_add (x, stmt_list);
a68ab351
JJ
5534 }
5535 c = OMP_CLAUSE_CHAIN (c);
5536 if (c == NULL && !par_clauses)
5537 {
5538 /* If this was a workshare clause, see if it had been combined
5539 with its parallel. In that case, continue looking for the
5540 clauses also on the parallel statement itself. */
5541 if (is_parallel_ctx (ctx))
5542 break;
5543
5544 ctx = ctx->outer;
5545 if (ctx == NULL || !is_parallel_ctx (ctx))
5546 break;
5547
629b3d75 5548 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
a68ab351
JJ
5549 OMP_CLAUSE_LASTPRIVATE);
5550 par_clauses = true;
5551 }
953ff289
DN
5552 }
5553
726a989a
RB
5554 if (label)
5555 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
953ff289
DN
5556}
5557
e5014671
NS
5558/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
5559 (which might be a placeholder). INNER is true if this is an inner
5560 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
5561 join markers. Generate the before-loop forking sequence in
5562 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
5563 general form of these sequences is
5564
5565 GOACC_REDUCTION_SETUP
5566 GOACC_FORK
5567 GOACC_REDUCTION_INIT
5568 ...
5569 GOACC_REDUCTION_FINI
5570 GOACC_JOIN
5571 GOACC_REDUCTION_TEARDOWN. */
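
/* A rough sketch (simplified, not the exact GIMPLE): for

     #pragma acc parallel loop reduction (+:sum)
     for (int i = 0; i < n; i++)
       sum += a[i];

   the scalar case is bracketed roughly as

     sum = GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, offset);
     GOACC_FORK
     sum = GOACC_REDUCTION (INIT, ref_to_res, sum, level, +, offset);
     ... loop body ...
     sum = GOACC_REDUCTION (FINI, ref_to_res, sum, level, +, offset);
     GOACC_JOIN
     sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, sum, level, +, offset);

   with the call arguments built as below.  */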
5572
41dbbb37 5573static void
e5014671
NS
5574lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
5575 gcall *fork, gcall *join, gimple_seq *fork_seq,
5576 gimple_seq *join_seq, omp_context *ctx)
41dbbb37 5577{
e5014671
NS
5578 gimple_seq before_fork = NULL;
5579 gimple_seq after_fork = NULL;
5580 gimple_seq before_join = NULL;
5581 gimple_seq after_join = NULL;
5582 tree init_code = NULL_TREE, fini_code = NULL_TREE,
5583 setup_code = NULL_TREE, teardown_code = NULL_TREE;
5584 unsigned offset = 0;
5585
5586 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5587 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5588 {
5589 tree orig = OMP_CLAUSE_DECL (c);
5590 tree var = maybe_lookup_decl (orig, ctx);
5591 tree ref_to_res = NULL_TREE;
c42cfb5c
CP
5592 tree incoming, outgoing, v1, v2, v3;
5593 bool is_private = false;
e5014671
NS
5594
5595 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
5596 if (rcode == MINUS_EXPR)
5597 rcode = PLUS_EXPR;
5598 else if (rcode == TRUTH_ANDIF_EXPR)
5599 rcode = BIT_AND_EXPR;
5600 else if (rcode == TRUTH_ORIF_EXPR)
5601 rcode = BIT_IOR_EXPR;
5602 tree op = build_int_cst (unsigned_type_node, rcode);
5603
5604 if (!var)
5605 var = orig;
e5014671
NS
5606
5607 incoming = outgoing = var;
01914336 5608
e5014671
NS
5609 if (!inner)
5610 {
5611 /* See if an outer construct also reduces this variable. */
5612 omp_context *outer = ctx;
41dbbb37 5613
e5014671
NS
5614 while (omp_context *probe = outer->outer)
5615 {
5616 enum gimple_code type = gimple_code (probe->stmt);
5617 tree cls;
41dbbb37 5618
e5014671
NS
5619 switch (type)
5620 {
5621 case GIMPLE_OMP_FOR:
5622 cls = gimple_omp_for_clauses (probe->stmt);
5623 break;
41dbbb37 5624
e5014671
NS
5625 case GIMPLE_OMP_TARGET:
5626 if (gimple_omp_target_kind (probe->stmt)
5627 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
5628 goto do_lookup;
41dbbb37 5629
e5014671
NS
5630 cls = gimple_omp_target_clauses (probe->stmt);
5631 break;
41dbbb37 5632
e5014671
NS
5633 default:
5634 goto do_lookup;
5635 }
01914336 5636
e5014671
NS
5637 outer = probe;
5638 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
5639 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
5640 && orig == OMP_CLAUSE_DECL (cls))
c42cfb5c
CP
5641 {
5642 incoming = outgoing = lookup_decl (orig, probe);
5643 goto has_outer_reduction;
5644 }
5645 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
5646 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
5647 && orig == OMP_CLAUSE_DECL (cls))
5648 {
5649 is_private = true;
5650 goto do_lookup;
5651 }
e5014671 5652 }
41dbbb37 5653
e5014671
NS
5654 do_lookup:
5655 /* This is the outermost construct with this reduction,
5656 see if there's a mapping for it. */
5657 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
c42cfb5c 5658 && maybe_lookup_field (orig, outer) && !is_private)
e5014671
NS
5659 {
5660 ref_to_res = build_receiver_ref (orig, false, outer);
629b3d75 5661 if (omp_is_reference (orig))
e5014671 5662 ref_to_res = build_simple_mem_ref (ref_to_res);
41dbbb37 5663
c42cfb5c
CP
5664 tree type = TREE_TYPE (var);
5665 if (POINTER_TYPE_P (type))
5666 type = TREE_TYPE (type);
5667
e5014671 5668 outgoing = var;
c42cfb5c 5669 incoming = omp_reduction_init_op (loc, rcode, type);
e5014671
NS
5670 }
5671 else
11c4c4ba
CLT
5672 {
5673 /* Try to look at enclosing contexts for reduction var,
5674 use original if no mapping found. */
5675 tree t = NULL_TREE;
5676 omp_context *c = ctx->outer;
5677 while (c && !t)
5678 {
5679 t = maybe_lookup_decl (orig, c);
5680 c = c->outer;
5681 }
5682 incoming = outgoing = (t ? t : orig);
5683 }
01914336 5684
e5014671
NS
5685 has_outer_reduction:;
5686 }
41dbbb37 5687
e5014671
NS
5688 if (!ref_to_res)
5689 ref_to_res = integer_zero_node;
41dbbb37 5690
01914336 5691 if (omp_is_reference (orig))
c42cfb5c
CP
5692 {
5693 tree type = TREE_TYPE (var);
5694 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5695
5696 if (!inner)
5697 {
5698 tree x = create_tmp_var (TREE_TYPE (type), id);
5699 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5700 }
5701
5702 v1 = create_tmp_var (type, id);
5703 v2 = create_tmp_var (type, id);
5704 v3 = create_tmp_var (type, id);
5705
5706 gimplify_assign (v1, var, fork_seq);
5707 gimplify_assign (v2, var, fork_seq);
5708 gimplify_assign (v3, var, fork_seq);
5709
5710 var = build_simple_mem_ref (var);
5711 v1 = build_simple_mem_ref (v1);
5712 v2 = build_simple_mem_ref (v2);
5713 v3 = build_simple_mem_ref (v3);
5714 outgoing = build_simple_mem_ref (outgoing);
5715
e387fc64 5716 if (!TREE_CONSTANT (incoming))
c42cfb5c
CP
5717 incoming = build_simple_mem_ref (incoming);
5718 }
5719 else
5720 v1 = v2 = v3 = var;
5721
e5014671 5722 /* Determine position in reduction buffer, which may be used
ef1d3b57
RS
5723 by target. The parser has ensured that this is not a
5724 variable-sized type. */
5725 fixed_size_mode mode
5726 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
e5014671
NS
5727 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5728 offset = (offset + align - 1) & ~(align - 1);
5729 tree off = build_int_cst (sizetype, offset);
5730 offset += GET_MODE_SIZE (mode);
41dbbb37 5731
e5014671
NS
5732 if (!init_code)
5733 {
5734 init_code = build_int_cst (integer_type_node,
5735 IFN_GOACC_REDUCTION_INIT);
5736 fini_code = build_int_cst (integer_type_node,
5737 IFN_GOACC_REDUCTION_FINI);
5738 setup_code = build_int_cst (integer_type_node,
5739 IFN_GOACC_REDUCTION_SETUP);
5740 teardown_code = build_int_cst (integer_type_node,
5741 IFN_GOACC_REDUCTION_TEARDOWN);
5742 }
5743
5744 tree setup_call
5745 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5746 TREE_TYPE (var), 6, setup_code,
5747 unshare_expr (ref_to_res),
5748 incoming, level, op, off);
5749 tree init_call
5750 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5751 TREE_TYPE (var), 6, init_code,
5752 unshare_expr (ref_to_res),
c42cfb5c 5753 v1, level, op, off);
e5014671
NS
5754 tree fini_call
5755 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5756 TREE_TYPE (var), 6, fini_code,
5757 unshare_expr (ref_to_res),
c42cfb5c 5758 v2, level, op, off);
e5014671
NS
5759 tree teardown_call
5760 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5761 TREE_TYPE (var), 6, teardown_code,
c42cfb5c 5762 ref_to_res, v3, level, op, off);
e5014671 5763
c42cfb5c
CP
5764 gimplify_assign (v1, setup_call, &before_fork);
5765 gimplify_assign (v2, init_call, &after_fork);
5766 gimplify_assign (v3, fini_call, &before_join);
e5014671
NS
5767 gimplify_assign (outgoing, teardown_call, &after_join);
5768 }
5769
5770 /* Now stitch things together. */
5771 gimple_seq_add_seq (fork_seq, before_fork);
5772 if (fork)
5773 gimple_seq_add_stmt (fork_seq, fork);
5774 gimple_seq_add_seq (fork_seq, after_fork);
5775
5776 gimple_seq_add_seq (join_seq, before_join);
5777 if (join)
5778 gimple_seq_add_stmt (join_seq, join);
5779 gimple_seq_add_seq (join_seq, after_join);
41dbbb37 5780}
50674e96 5781
953ff289
DN
5782/* Generate code to implement the REDUCTION clauses. */
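
/* A minimal sketch of the input this handles (simplified):

     int sum = 0;
   #pragma omp parallel reduction (+:sum)
     sum += 1;

   Each thread accumulates into a private copy of SUM; the code emitted
   here merges the private copies back into the shared variable.  A single
   scalar reduction can use one atomic update (OMP_ATOMIC below); anything
   more complicated is wrapped in GOMP_atomic_start/GOMP_atomic_end calls
   emitted at the end of this function.  */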
5783
5784static void
726a989a 5785lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
953ff289 5786{
726a989a 5787 gimple_seq sub_seq = NULL;
355fe088 5788 gimple *stmt;
374d0225 5789 tree x, c;
953ff289
DN
5790 int count = 0;
5791
e5014671
NS
5792 /* OpenACC loop reductions are handled elsewhere. */
5793 if (is_gimple_omp_oacc (ctx->stmt))
5794 return;
5795
74bf76ed
JJ
5796 /* SIMD reductions are handled in lower_rec_input_clauses. */
5797 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0aadce73 5798 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
74bf76ed
JJ
5799 return;
5800
953ff289
DN
5801 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
5802 update in that case, otherwise use a lock. */
5803 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
28567c40
JJ
5804 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5805 && !OMP_CLAUSE_REDUCTION_TASK (c))
953ff289 5806 {
d9a6bd32
JJ
5807 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5808 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
953ff289 5809 {
acf0174b 5810 /* Never use OMP_ATOMIC for array reductions or UDRs. */
953ff289
DN
5811 count = -1;
5812 break;
5813 }
5814 count++;
5815 }
5816
5817 if (count == 0)
5818 return;
5819
5820 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5821 {
d9a6bd32 5822 tree var, ref, new_var, orig_var;
953ff289 5823 enum tree_code code;
db3927fb 5824 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 5825
28567c40
JJ
5826 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5827 || OMP_CLAUSE_REDUCTION_TASK (c))
953ff289
DN
5828 continue;
5829
c24783c4 5830 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
d9a6bd32
JJ
5831 orig_var = var = OMP_CLAUSE_DECL (c);
5832 if (TREE_CODE (var) == MEM_REF)
5833 {
5834 var = TREE_OPERAND (var, 0);
e01d41e5
JJ
5835 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5836 var = TREE_OPERAND (var, 0);
c24783c4 5837 if (TREE_CODE (var) == ADDR_EXPR)
d9a6bd32 5838 var = TREE_OPERAND (var, 0);
c24783c4
JJ
5839 else
5840 {
5841 /* If this is a pointer or referenced based array
5842 section, the var could be private in the outer
5843 context e.g. on orphaned loop construct. Pretend this
5844 is private variable's outer reference. */
5845 ccode = OMP_CLAUSE_PRIVATE;
5846 if (TREE_CODE (var) == INDIRECT_REF)
5847 var = TREE_OPERAND (var, 0);
5848 }
d9a6bd32
JJ
5849 orig_var = var;
5850 if (is_variable_sized (var))
5851 {
5852 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5853 var = DECL_VALUE_EXPR (var);
5854 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5855 var = TREE_OPERAND (var, 0);
5856 gcc_assert (DECL_P (var));
5857 }
5858 }
953ff289 5859 new_var = lookup_decl (var, ctx);
629b3d75 5860 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
70f34814 5861 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
c24783c4 5862 ref = build_outer_var_ref (var, ctx, ccode);
953ff289 5863 code = OMP_CLAUSE_REDUCTION_CODE (c);
50674e96
DN
5864
5865 /* reduction(-:var) sums up the partial results, so it acts
5866 identically to reduction(+:var). */
953ff289
DN
5867 if (code == MINUS_EXPR)
5868 code = PLUS_EXPR;
5869
e5014671 5870 if (count == 1)
953ff289 5871 {
db3927fb 5872 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
5873
5874 addr = save_expr (addr);
5875 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
db3927fb 5876 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
953ff289 5877 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
28567c40 5878 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
726a989a 5879 gimplify_and_add (x, stmt_seqp);
953ff289
DN
5880 return;
5881 }
d9a6bd32
JJ
5882 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5883 {
5884 tree d = OMP_CLAUSE_DECL (c);
5885 tree type = TREE_TYPE (d);
5886 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
28567c40 5887 tree i = create_tmp_var (TREE_TYPE (v));
d9a6bd32 5888 tree ptype = build_pointer_type (TREE_TYPE (type));
e01d41e5
JJ
5889 tree bias = TREE_OPERAND (d, 1);
5890 d = TREE_OPERAND (d, 0);
5891 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5892 {
5893 tree b = TREE_OPERAND (d, 1);
5894 b = maybe_lookup_decl (b, ctx);
5895 if (b == NULL)
5896 {
5897 b = TREE_OPERAND (d, 1);
5898 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5899 }
5900 if (integer_zerop (bias))
5901 bias = b;
5902 else
5903 {
5904 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5905 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5906 TREE_TYPE (b), b, bias);
5907 }
5908 d = TREE_OPERAND (d, 0);
5909 }
d9a6bd32
JJ
5910 /* For ref build_outer_var_ref already performs this, so
5911 only new_var needs a dereference. */
e01d41e5 5912 if (TREE_CODE (d) == INDIRECT_REF)
d9a6bd32
JJ
5913 {
5914 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
629b3d75 5915 gcc_assert (omp_is_reference (var) && var == orig_var);
d9a6bd32 5916 }
e01d41e5 5917 else if (TREE_CODE (d) == ADDR_EXPR)
d9a6bd32
JJ
5918 {
5919 if (orig_var == var)
5920 {
5921 new_var = build_fold_addr_expr (new_var);
5922 ref = build_fold_addr_expr (ref);
5923 }
5924 }
5925 else
5926 {
5927 gcc_assert (orig_var == var);
629b3d75 5928 if (omp_is_reference (var))
d9a6bd32
JJ
5929 ref = build_fold_addr_expr (ref);
5930 }
5931 if (DECL_P (v))
5932 {
5933 tree t = maybe_lookup_decl (v, ctx);
5934 if (t)
5935 v = t;
5936 else
5937 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5938 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5939 }
e01d41e5
JJ
5940 if (!integer_zerop (bias))
5941 {
5942 bias = fold_convert_loc (clause_loc, sizetype, bias);
5943 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5944 TREE_TYPE (new_var), new_var,
5945 unshare_expr (bias));
5946 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5947 TREE_TYPE (ref), ref, bias);
5948 }
d9a6bd32
JJ
5949 new_var = fold_convert_loc (clause_loc, ptype, new_var);
5950 ref = fold_convert_loc (clause_loc, ptype, ref);
28567c40 5951 tree m = create_tmp_var (ptype);
d9a6bd32
JJ
5952 gimplify_assign (m, new_var, stmt_seqp);
5953 new_var = m;
28567c40 5954 m = create_tmp_var (ptype);
d9a6bd32
JJ
5955 gimplify_assign (m, ref, stmt_seqp);
5956 ref = m;
5957 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5958 tree body = create_artificial_label (UNKNOWN_LOCATION);
5959 tree end = create_artificial_label (UNKNOWN_LOCATION);
5960 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5961 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5962 tree out = build_simple_mem_ref_loc (clause_loc, ref);
5963 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5964 {
5965 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5966 tree decl_placeholder
5967 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5968 SET_DECL_VALUE_EXPR (placeholder, out);
5969 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5970 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5971 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5972 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5973 gimple_seq_add_seq (&sub_seq,
5974 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5975 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5976 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5977 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5978 }
5979 else
5980 {
5981 x = build2 (code, TREE_TYPE (out), out, priv);
5982 out = unshare_expr (out);
5983 gimplify_assign (out, x, &sub_seq);
5984 }
5985 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5986 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5987 gimple_seq_add_stmt (&sub_seq, g);
5988 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5989 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5990 gimple_seq_add_stmt (&sub_seq, g);
5991 g = gimple_build_assign (i, PLUS_EXPR, i,
5992 build_int_cst (TREE_TYPE (i), 1));
5993 gimple_seq_add_stmt (&sub_seq, g);
5994 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5995 gimple_seq_add_stmt (&sub_seq, g);
5996 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5997 }
41dbbb37 5998 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
953ff289
DN
5999 {
6000 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6001
629b3d75 6002 if (omp_is_reference (var)
acf0174b
JJ
6003 && !useless_type_conversion_p (TREE_TYPE (placeholder),
6004 TREE_TYPE (ref)))
db3927fb 6005 ref = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
6006 SET_DECL_VALUE_EXPR (placeholder, ref);
6007 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
355a7673 6008 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
726a989a
RB
6009 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6010 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
953ff289
DN
6011 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6012 }
6013 else
6014 {
6015 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6016 ref = build_outer_var_ref (var, ctx);
726a989a 6017 gimplify_assign (ref, x, &sub_seq);
953ff289
DN
6018 }
6019 }
6020
e79983f4
MM
6021 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
6022 0);
726a989a 6023 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289 6024
726a989a 6025 gimple_seq_add_seq (stmt_seqp, sub_seq);
953ff289 6026
e79983f4
MM
6027 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
6028 0);
726a989a 6029 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289
DN
6030}
6031
50674e96 6032
953ff289
DN
6033/* Generate code to implement the COPYPRIVATE clauses. */
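
/* A minimal sketch of the input this handles (simplified):

     int x;
   #pragma omp single copyprivate (x)
     x = 42;

   The thread that executes the single region stores X (or its address,
   when passed by reference) into the copyprivate record via SLIST; the
   other threads then load it back from that record and assign it to
   their own copies via RLIST.  */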
6034
6035static void
726a989a 6036lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
953ff289
DN
6037 omp_context *ctx)
6038{
6039 tree c;
6040
6041 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6042 {
78db7d92 6043 tree var, new_var, ref, x;
953ff289 6044 bool by_ref;
db3927fb 6045 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 6046
aaf46ef9 6047 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
953ff289
DN
6048 continue;
6049
6050 var = OMP_CLAUSE_DECL (c);
7c8f7639 6051 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
6052
6053 ref = build_sender_ref (var, ctx);
78db7d92
JJ
6054 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
6055 if (by_ref)
6056 {
6057 x = build_fold_addr_expr_loc (clause_loc, new_var);
6058 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
6059 }
726a989a 6060 gimplify_assign (ref, x, slist);
953ff289 6061
78db7d92
JJ
6062 ref = build_receiver_ref (var, false, ctx);
6063 if (by_ref)
6064 {
6065 ref = fold_convert_loc (clause_loc,
6066 build_pointer_type (TREE_TYPE (new_var)),
6067 ref);
6068 ref = build_fold_indirect_ref_loc (clause_loc, ref);
6069 }
629b3d75 6070 if (omp_is_reference (var))
953ff289 6071 {
78db7d92 6072 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
70f34814
RG
6073 ref = build_simple_mem_ref_loc (clause_loc, ref);
6074 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289 6075 }
78db7d92 6076 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
953ff289
DN
6077 gimplify_and_add (x, rlist);
6078 }
6079}
6080
50674e96 6081
953ff289
DN
6082/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6083 and REDUCTION from the sender (aka parent) side. */
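
/* A minimal sketch (simplified): for

     int x = 1;
   #pragma omp parallel firstprivate (x)
     x++;

   the parent thread fills in the argument block handed to the outlined
   child function before the region starts, roughly

     .omp_data_o.x = x;

   and for clauses that copy a value back out (e.g. lastprivate) it reads
   the field again afterwards via OLIST.  The name .omp_data_o is only
   schematic here.  */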
6084
6085static void
726a989a
RB
6086lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
6087 omp_context *ctx)
953ff289 6088{
d9a6bd32
JJ
6089 tree c, t;
6090 int ignored_looptemp = 0;
6091 bool is_taskloop = false;
6092
6093 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
6094 by GOMP_taskloop. */
6095 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
6096 {
6097 ignored_looptemp = 2;
6098 is_taskloop = true;
6099 }
953ff289
DN
6100
6101 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6102 {
50674e96 6103 tree val, ref, x, var;
953ff289 6104 bool by_ref, do_in = false, do_out = false;
db3927fb 6105 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 6106
aaf46ef9 6107 switch (OMP_CLAUSE_CODE (c))
953ff289 6108 {
a68ab351
JJ
6109 case OMP_CLAUSE_PRIVATE:
6110 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
6111 break;
6112 continue;
953ff289
DN
6113 case OMP_CLAUSE_FIRSTPRIVATE:
6114 case OMP_CLAUSE_COPYIN:
6115 case OMP_CLAUSE_LASTPRIVATE:
28567c40
JJ
6116 case OMP_CLAUSE_IN_REDUCTION:
6117 case OMP_CLAUSE__REDUCTEMP_:
6118 break;
953ff289 6119 case OMP_CLAUSE_REDUCTION:
28567c40
JJ
6120 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
6121 continue;
d9a6bd32
JJ
6122 break;
6123 case OMP_CLAUSE_SHARED:
6124 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6125 break;
6126 continue;
acf0174b 6127 case OMP_CLAUSE__LOOPTEMP_:
d9a6bd32
JJ
6128 if (ignored_looptemp)
6129 {
6130 ignored_looptemp--;
6131 continue;
6132 }
953ff289
DN
6133 break;
6134 default:
6135 continue;
6136 }
6137
d2dda7fe 6138 val = OMP_CLAUSE_DECL (c);
28567c40
JJ
6139 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6140 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
d9a6bd32
JJ
6141 && TREE_CODE (val) == MEM_REF)
6142 {
6143 val = TREE_OPERAND (val, 0);
e01d41e5
JJ
6144 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
6145 val = TREE_OPERAND (val, 0);
d9a6bd32
JJ
6146 if (TREE_CODE (val) == INDIRECT_REF
6147 || TREE_CODE (val) == ADDR_EXPR)
6148 val = TREE_OPERAND (val, 0);
6149 if (is_variable_sized (val))
6150 continue;
6151 }
6152
6153 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
6154 outer taskloop region. */
6155 omp_context *ctx_for_o = ctx;
6156 if (is_taskloop
6157 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
6158 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6159 ctx_for_o = ctx->outer;
6160
6161 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
50674e96 6162
8ca5b2a2 6163 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
28567c40
JJ
6164 && is_global_var (var)
6165 && (val == OMP_CLAUSE_DECL (c)
6166 || !is_task_ctx (ctx)
6167 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
6168 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
6169 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
6170 != POINTER_TYPE)))))
8ca5b2a2 6171 continue;
d9a6bd32
JJ
6172
6173 t = omp_member_access_dummy_var (var);
6174 if (t)
6175 {
6176 var = DECL_VALUE_EXPR (var);
6177 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
6178 if (o != t)
6179 var = unshare_and_remap (var, t, o);
6180 else
6181 var = unshare_expr (var);
6182 }
6183
6184 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
6185 {
6186 /* Handle taskloop firstprivate/lastprivate, where the
6187 lastprivate on GIMPLE_OMP_TASK is represented as
6188 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
6189 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
6190 x = omp_build_component_ref (ctx->sender_decl, f);
6191 if (use_pointer_for_field (val, ctx))
6192 var = build_fold_addr_expr (var);
6193 gimplify_assign (x, var, ilist);
6194 DECL_ABSTRACT_ORIGIN (f) = NULL;
6195 continue;
6196 }
6197
28567c40
JJ
6198 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6199 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
d9a6bd32
JJ
6200 || val == OMP_CLAUSE_DECL (c))
6201 && is_variable_sized (val))
953ff289 6202 continue;
7c8f7639 6203 by_ref = use_pointer_for_field (val, NULL);
953ff289 6204
aaf46ef9 6205 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
6206 {
6207 case OMP_CLAUSE_FIRSTPRIVATE:
ec35ea45
JJ
6208 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
6209 && !by_ref
6210 && is_task_ctx (ctx))
6211 TREE_NO_WARNING (var) = 1;
6212 do_in = true;
6213 break;
6214
6215 case OMP_CLAUSE_PRIVATE:
953ff289 6216 case OMP_CLAUSE_COPYIN:
acf0174b 6217 case OMP_CLAUSE__LOOPTEMP_:
28567c40 6218 case OMP_CLAUSE__REDUCTEMP_:
953ff289
DN
6219 do_in = true;
6220 break;
6221
6222 case OMP_CLAUSE_LASTPRIVATE:
629b3d75 6223 if (by_ref || omp_is_reference (val))
953ff289
DN
6224 {
6225 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
6226 continue;
6227 do_in = true;
6228 }
6229 else
a68ab351
JJ
6230 {
6231 do_out = true;
6232 if (lang_hooks.decls.omp_private_outer_ref (val))
6233 do_in = true;
6234 }
953ff289
DN
6235 break;
6236
6237 case OMP_CLAUSE_REDUCTION:
28567c40 6238 case OMP_CLAUSE_IN_REDUCTION:
953ff289 6239 do_in = true;
d9a6bd32 6240 if (val == OMP_CLAUSE_DECL (c))
28567c40
JJ
6241 {
6242 if (is_task_ctx (ctx))
6243 by_ref = use_pointer_for_field (val, ctx);
6244 else
6245 do_out = !(by_ref || omp_is_reference (val));
6246 }
d9a6bd32
JJ
6247 else
6248 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
953ff289
DN
6249 break;
6250
6251 default:
6252 gcc_unreachable ();
6253 }
6254
6255 if (do_in)
6256 {
6257 ref = build_sender_ref (val, ctx);
db3927fb 6258 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
726a989a 6259 gimplify_assign (ref, x, ilist);
a68ab351
JJ
6260 if (is_task_ctx (ctx))
6261 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
953ff289 6262 }
50674e96 6263
953ff289
DN
6264 if (do_out)
6265 {
6266 ref = build_sender_ref (val, ctx);
726a989a 6267 gimplify_assign (var, ref, olist);
953ff289
DN
6268 }
6269 }
6270}
6271
726a989a
RB
6272/* Generate code to implement SHARED from the sender (aka parent)
6273 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6274 list things that got automatically shared. */
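
/* A minimal sketch (simplified): for

     int s = 0;
   #pragma omp parallel shared (s)
     s++;

   the parent stores S, or its address when the field is a pointer, into
   the outgoing argument block before the region, and for by-value fields
   copies the possibly modified value back afterwards, roughly

     .omp_data_o.s = s;   ...region runs...   s = .omp_data_o.s;

   The name .omp_data_o is only schematic here.  */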
953ff289
DN
6275
6276static void
726a989a 6277lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
953ff289 6278{
d9a6bd32 6279 tree var, ovar, nvar, t, f, x, record_type;
953ff289
DN
6280
6281 if (ctx->record_type == NULL)
6282 return;
50674e96 6283
a68ab351 6284 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
910ad8de 6285 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
953ff289
DN
6286 {
6287 ovar = DECL_ABSTRACT_ORIGIN (f);
d9a6bd32
JJ
6288 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
6289 continue;
6290
953ff289
DN
6291 nvar = maybe_lookup_decl (ovar, ctx);
6292 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
6293 continue;
6294
50674e96
DN
6295 /* If CTX is a nested parallel directive, find the immediately
6296 enclosing parallel or workshare construct that contains a
6297 mapping for OVAR. */
d2dda7fe 6298 var = lookup_decl_in_outer_ctx (ovar, ctx);
50674e96 6299
d9a6bd32
JJ
6300 t = omp_member_access_dummy_var (var);
6301 if (t)
6302 {
6303 var = DECL_VALUE_EXPR (var);
6304 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
6305 if (o != t)
6306 var = unshare_and_remap (var, t, o);
6307 else
6308 var = unshare_expr (var);
6309 }
6310
7c8f7639 6311 if (use_pointer_for_field (ovar, ctx))
953ff289
DN
6312 {
6313 x = build_sender_ref (ovar, ctx);
50674e96 6314 var = build_fold_addr_expr (var);
726a989a 6315 gimplify_assign (x, var, ilist);
953ff289
DN
6316 }
6317 else
6318 {
6319 x = build_sender_ref (ovar, ctx);
726a989a 6320 gimplify_assign (x, var, ilist);
953ff289 6321
14e5b285
RG
6322 if (!TREE_READONLY (var)
6323 /* We don't need to receive a new reference to a result
6324 or parm decl. In fact we may not store to it as we will
6325 invalidate any pending RSO and generate wrong gimple
6326 during inlining. */
6327 && !((TREE_CODE (var) == RESULT_DECL
6328 || TREE_CODE (var) == PARM_DECL)
6329 && DECL_BY_REFERENCE (var)))
a68ab351
JJ
6330 {
6331 x = build_sender_ref (ovar, ctx);
726a989a 6332 gimplify_assign (var, x, olist);
a68ab351 6333 }
953ff289
DN
6334 }
6335 }
6336}
6337
e4834818
NS
6338/* Emit an OpenACC head marker call, encapsulating the partitioning and
6339 other information that must be processed by the target compiler.
6340 Return the maximum number of dimensions the associated loop might
6341 be partitioned over. */
6342
6343static unsigned
6344lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
6345 gimple_seq *seq, omp_context *ctx)
6346{
6347 unsigned levels = 0;
6348 unsigned tag = 0;
6349 tree gang_static = NULL_TREE;
6350 auto_vec<tree, 5> args;
6351
6352 args.quick_push (build_int_cst
6353 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
6354 args.quick_push (ddvar);
6355 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6356 {
6357 switch (OMP_CLAUSE_CODE (c))
6358 {
6359 case OMP_CLAUSE_GANG:
6360 tag |= OLF_DIM_GANG;
6361 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
6362 /* static:* is represented by -1, and we can ignore it, as
6363 scheduling is always static. */
6364 if (gang_static && integer_minus_onep (gang_static))
6365 gang_static = NULL_TREE;
6366 levels++;
6367 break;
6368
6369 case OMP_CLAUSE_WORKER:
6370 tag |= OLF_DIM_WORKER;
6371 levels++;
6372 break;
6373
6374 case OMP_CLAUSE_VECTOR:
6375 tag |= OLF_DIM_VECTOR;
6376 levels++;
6377 break;
6378
6379 case OMP_CLAUSE_SEQ:
6380 tag |= OLF_SEQ;
6381 break;
6382
6383 case OMP_CLAUSE_AUTO:
6384 tag |= OLF_AUTO;
6385 break;
6386
6387 case OMP_CLAUSE_INDEPENDENT:
6388 tag |= OLF_INDEPENDENT;
6389 break;
6390
02889d23
CLT
6391 case OMP_CLAUSE_TILE:
6392 tag |= OLF_TILE;
6393 break;
6394
e4834818
NS
6395 default:
6396 continue;
6397 }
6398 }
6399
6400 if (gang_static)
6401 {
6402 if (DECL_P (gang_static))
6403 gang_static = build_outer_var_ref (gang_static, ctx);
6404 tag |= OLF_GANG_STATIC;
6405 }
6406
6407 /* In a parallel region, loops are implicitly INDEPENDENT. */
6408 omp_context *tgt = enclosing_target_ctx (ctx);
6409 if (!tgt || is_oacc_parallel (tgt))
6410 tag |= OLF_INDEPENDENT;
6411
02889d23
CLT
6412 if (tag & OLF_TILE)
6413 /* Tiling could use all 3 levels. */
6414 levels = 3;
6415 else
6416 {
6417 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
6418 Ensure at least one level, or 2 for possible auto
6419 partitioning. */
6420 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
6421 << OLF_DIM_BASE) | OLF_SEQ));
6422
6423 if (levels < 1u + maybe_auto)
6424 levels = 1u + maybe_auto;
6425 }
e4834818
NS
6426
6427 args.quick_push (build_int_cst (integer_type_node, levels));
6428 args.quick_push (build_int_cst (integer_type_node, tag));
6429 if (gang_static)
6430 args.quick_push (gang_static);
6431
6432 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
6433 gimple_set_location (call, loc);
6434 gimple_set_lhs (call, ddvar);
6435 gimple_seq_add_stmt (seq, call);
6436
6437 return levels;
6438}
6439
6440/* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
6441 partitioning level of the enclosed region. */
6442
6443static void
6444lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
6445 tree tofollow, gimple_seq *seq)
6446{
6447 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
6448 : IFN_UNIQUE_OACC_TAIL_MARK);
6449 tree marker = build_int_cst (integer_type_node, marker_kind);
6450 int nargs = 2 + (tofollow != NULL_TREE);
6451 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
6452 marker, ddvar, tofollow);
6453 gimple_set_location (call, loc);
6454 gimple_set_lhs (call, ddvar);
6455 gimple_seq_add_stmt (seq, call);
6456}
6457
6458/* Generate the before and after OpenACC loop sequences. CLAUSES are
6459 the loop clauses, from which we extract reductions. Initialize
6460 HEAD and TAIL. */
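
/* As a rough sketch, for a loop partitioned over two levels, e.g.

     #pragma acc loop gang vector

   HEAD receives one IFN_UNIQUE (OACC_FORK) per level and TAIL the
   matching IFN_UNIQUE (OACC_JOIN) calls in reverse order, each level's
   reduction setup/init placed around its fork and the fini/teardown
   around its join, with OACC_HEAD_MARK/OACC_TAIL_MARK markers bracketing
   the whole thing.  (Simplified; see lower_oacc_head_mark and
   lower_oacc_loop_marker above for the exact calls.)  */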
6461
6462static void
6463lower_oacc_head_tail (location_t loc, tree clauses,
6464 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
6465{
6466 bool inner = false;
6467 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
6468 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
6469
6470 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
e4834818
NS
6471 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
6472 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
6473
4877b5a4 6474 gcc_assert (count);
e4834818
NS
6475 for (unsigned done = 1; count; count--, done++)
6476 {
6477 gimple_seq fork_seq = NULL;
6478 gimple_seq join_seq = NULL;
6479
6480 tree place = build_int_cst (integer_type_node, -1);
6481 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
6482 fork_kind, ddvar, place);
6483 gimple_set_location (fork, loc);
6484 gimple_set_lhs (fork, ddvar);
6485
6486 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
6487 join_kind, ddvar, place);
6488 gimple_set_location (join, loc);
6489 gimple_set_lhs (join, ddvar);
6490
6491 /* Mark the beginning of this level sequence. */
6492 if (inner)
6493 lower_oacc_loop_marker (loc, ddvar, true,
6494 build_int_cst (integer_type_node, count),
6495 &fork_seq);
6496 lower_oacc_loop_marker (loc, ddvar, false,
6497 build_int_cst (integer_type_node, done),
6498 &join_seq);
6499
e5014671
NS
6500 lower_oacc_reductions (loc, clauses, place, inner,
6501 fork, join, &fork_seq, &join_seq, ctx);
e4834818
NS
6502
6503 /* Append this level to head. */
6504 gimple_seq_add_seq (head, fork_seq);
6505 /* Prepend it to tail. */
6506 gimple_seq_add_seq (&join_seq, *tail);
6507 *tail = join_seq;
6508
6509 inner = true;
6510 }
6511
6512 /* Mark the end of the sequence. */
6513 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
6514 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
6515}
726a989a 6516
629b3d75
MJ
6517/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
6518 catch handler and return it. This prevents programs from violating the
6519 structured block semantics with throws. */
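
/* A minimal sketch (C++, simplified) of what this guards against:

     void
     f ()
     {
     #pragma omp parallel
       {
         throw 1;   /* May not escape the structured block.  */
       }
     }

   The body is wrapped in a GIMPLE_TRY / EH_MUST_NOT_THROW region, so an
   exception that would leave the construct reaches the language's
   protect-cleanup action (std::terminate for C++) or __builtin_trap
   instead of unwinding past the OMP region.  */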
726a989a 6520
629b3d75
MJ
6521static gimple_seq
6522maybe_catch_exception (gimple_seq body)
726a989a 6523{
629b3d75
MJ
6524 gimple *g;
6525 tree decl;
b2b40051 6526
629b3d75
MJ
6527 if (!flag_exceptions)
6528 return body;
b2b40051 6529
629b3d75
MJ
6530 if (lang_hooks.eh_protect_cleanup_actions != NULL)
6531 decl = lang_hooks.eh_protect_cleanup_actions ();
6532 else
6533 decl = builtin_decl_explicit (BUILT_IN_TRAP);
b2b40051 6534
629b3d75
MJ
6535 g = gimple_build_eh_must_not_throw (decl);
6536 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
6537 GIMPLE_TRY_CATCH);
b2b40051 6538
629b3d75 6539 return gimple_seq_alloc_with_stmt (g);
b2b40051
MJ
6540}
6541
629b3d75
MJ
6542\f
6543/* Routines to lower OMP directives into OMP-GIMPLE. */
726a989a 6544
629b3d75
MJ
6545/* If ctx is a worksharing context inside of a cancellable parallel
6546 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
6547 and conditional branch to parallel's cancel_label to handle
6548 cancellation in the implicit barrier. */
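
/* A minimal sketch (simplified) of the situation this handles:

   #pragma omp parallel
     {
       if (something_failed ())
         {
         #pragma omp cancel parallel
         }
     #pragma omp for
       for (int i = 0; i < n; i++)
         work (i);
     }

   Because the parallel region is cancellable, the implicit barrier at the
   end of the worksharing loop must also observe cancellation: the
   GIMPLE_OMP_RETURN gets a boolean result, and a branch to the parallel's
   cancel_label is emitted when that result is true.  (something_failed,
   work, i and n are placeholders.)  */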
953ff289
DN
6549
6550static void
28567c40
JJ
6551maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
6552 gimple_seq *body)
953ff289 6553{
629b3d75
MJ
6554 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
6555 if (gimple_omp_return_nowait_p (omp_return))
6556 return;
28567c40
JJ
6557 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
6558 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
6559 && outer->cancellable)
6560 {
6561 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
6562 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
6563 tree lhs = create_tmp_var (c_bool_type);
6564 gimple_omp_return_set_lhs (omp_return, lhs);
6565 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
6566 gimple *g = gimple_build_cond (NE_EXPR, lhs,
6567 fold_convert (c_bool_type,
6568 boolean_false_node),
6569 outer->cancel_label, fallthru_label);
6570 gimple_seq_add_stmt (body, g);
6571 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
6572 }
6573 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
6574 return;
6575}
6576
6577/* Find the first task_reduction or reduction clause or return NULL
6578 if there are none. */
6579
6580static inline tree
6581omp_task_reductions_find_first (tree clauses, enum tree_code code,
6582 enum omp_clause_code ccode)
6583{
6584 while (1)
6585 {
6586 clauses = omp_find_clause (clauses, ccode);
6587 if (clauses == NULL_TREE)
6588 return NULL_TREE;
6589 if (ccode != OMP_CLAUSE_REDUCTION
6590 || code == OMP_TASKLOOP
6591 || OMP_CLAUSE_REDUCTION_TASK (clauses))
6592 return clauses;
6593 clauses = OMP_CLAUSE_CHAIN (clauses);
50674e96 6594 }
629b3d75 6595}
953ff289 6596
28567c40
JJ
6597static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
6598 gimple_seq *, gimple_seq *);
6599
629b3d75
MJ
6600/* Lower the OpenMP sections directive in the current statement in GSI_P.
6601 CTX is the enclosing OMP context for the current statement. */
953ff289 6602
629b3d75
MJ
6603static void
6604lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6605{
6606 tree block, control;
6607 gimple_stmt_iterator tgsi;
6608 gomp_sections *stmt;
6609 gimple *t;
6610 gbind *new_stmt, *bind;
28567c40 6611 gimple_seq ilist, dlist, olist, tred_dlist = NULL, new_body;
953ff289 6612
629b3d75 6613 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
953ff289 6614
629b3d75 6615 push_gimplify_context ();
acf0174b 6616
629b3d75
MJ
6617 dlist = NULL;
6618 ilist = NULL;
28567c40
JJ
6619
6620 tree rclauses
6621 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
6622 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
6623 tree rtmp = NULL_TREE;
6624 if (rclauses)
6625 {
6626 tree type = build_pointer_type (pointer_sized_int_node);
6627 tree temp = create_tmp_var (type);
6628 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
6629 OMP_CLAUSE_DECL (c) = temp;
6630 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
6631 gimple_omp_sections_set_clauses (stmt, c);
6632 lower_omp_task_reductions (ctx, OMP_SECTIONS,
6633 gimple_omp_sections_clauses (stmt),
6634 &ilist, &tred_dlist);
6635 rclauses = c;
6636 rtmp = make_ssa_name (type);
6637 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
6638 }
6639
629b3d75
MJ
6640 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
6641 &ilist, &dlist, ctx, NULL);
953ff289 6642
629b3d75
MJ
6643 new_body = gimple_omp_body (stmt);
6644 gimple_omp_set_body (stmt, NULL);
6645 tgsi = gsi_start (new_body);
6646 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
953ff289 6647 {
629b3d75
MJ
6648 omp_context *sctx;
6649 gimple *sec_start;
50674e96 6650
629b3d75
MJ
6651 sec_start = gsi_stmt (tgsi);
6652 sctx = maybe_lookup_ctx (sec_start);
6653 gcc_assert (sctx);
6654
6655 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
6656 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
6657 GSI_CONTINUE_LINKING);
6658 gimple_omp_set_body (sec_start, NULL);
6659
6660 if (gsi_one_before_end_p (tgsi))
50674e96 6661 {
629b3d75
MJ
6662 gimple_seq l = NULL;
6663 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
6664 &l, ctx);
6665 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
6666 gimple_omp_section_set_last (sec_start);
6667 }
917948d3 6668
629b3d75
MJ
6669 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
6670 GSI_CONTINUE_LINKING);
6671 }
50674e96 6672
629b3d75
MJ
6673 block = make_node (BLOCK);
6674 bind = gimple_build_bind (NULL, new_body, block);
50674e96 6675
629b3d75
MJ
6676 olist = NULL;
6677 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
50674e96 6678
629b3d75
MJ
6679 block = make_node (BLOCK);
6680 new_stmt = gimple_build_bind (NULL, NULL, block);
6681 gsi_replace (gsi_p, new_stmt, true);
50674e96 6682
629b3d75
MJ
6683 pop_gimplify_context (new_stmt);
6684 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6685 BLOCK_VARS (block) = gimple_bind_vars (bind);
6686 if (BLOCK_VARS (block))
6687 TREE_USED (block) = 1;
50674e96 6688
629b3d75
MJ
6689 new_body = NULL;
6690 gimple_seq_add_seq (&new_body, ilist);
6691 gimple_seq_add_stmt (&new_body, stmt);
6692 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
6693 gimple_seq_add_stmt (&new_body, bind);
50674e96 6694
629b3d75
MJ
6695 control = create_tmp_var (unsigned_type_node, ".section");
6696 t = gimple_build_omp_continue (control, control);
6697 gimple_omp_sections_set_control (stmt, control);
6698 gimple_seq_add_stmt (&new_body, t);
50674e96 6699
629b3d75
MJ
6700 gimple_seq_add_seq (&new_body, olist);
6701 if (ctx->cancellable)
6702 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
6703 gimple_seq_add_seq (&new_body, dlist);
917948d3 6704
629b3d75 6705 new_body = maybe_catch_exception (new_body);
50674e96 6706
01914336
MJ
6707 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
6708 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6709 t = gimple_build_omp_return (nowait);
629b3d75 6710 gimple_seq_add_stmt (&new_body, t);
28567c40
JJ
6711 gimple_seq_add_seq (&new_body, tred_dlist);
6712 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
6713
6714 if (rclauses)
6715 OMP_CLAUSE_DECL (rclauses) = rtmp;
953ff289 6716
629b3d75 6717 gimple_bind_set_body (new_stmt, new_body);
953ff289
DN
6718}
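/* Illustrative sketch (added annotation): ignoring reductions, exception
   wrapping and cancellation, the bind built above roughly contains

     <ilist>                          privatization/initialization code
     GIMPLE_OMP_SECTIONS <clauses>    the original statement, body detached
     GIMPLE_OMP_SECTIONS_SWITCH
     {
       <section 1 body>  GIMPLE_OMP_RETURN
       ...
       <last section body>  <lastprivate code>  GIMPLE_OMP_RETURN
     }
     .section = GIMPLE_OMP_CONTINUE (.section, .section)
     <olist and dlist cleanup>
     GIMPLE_OMP_RETURN [nowait]       implicit barrier unless nowait  */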
6719
9a771876 6720
629b3d75
MJ
6721/* A subroutine of lower_omp_single. Expand the simple form of
6722 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
9a771876 6723
629b3d75
MJ
6724 if (GOMP_single_start ())
6725 BODY;
6726 [ GOMP_barrier (); ] -> unless 'nowait' is present.
9a771876 6727
629b3d75
MJ
6728 FIXME. It may be better to delay expanding the logic of this until
6729 pass_expand_omp. The expanded logic may make the job more difficult
 6730 for a synchronization analysis pass. */
a68ab351
JJ
6731
6732static void
629b3d75 6733lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
a68ab351 6734{
629b3d75
MJ
6735 location_t loc = gimple_location (single_stmt);
6736 tree tlabel = create_artificial_label (loc);
6737 tree flabel = create_artificial_label (loc);
6738 gimple *call, *cond;
6739 tree lhs, decl;
20906c66 6740
629b3d75
MJ
6741 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6742 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6743 call = gimple_build_call (decl, 0);
6744 gimple_call_set_lhs (call, lhs);
6745 gimple_seq_add_stmt (pre_p, call);
a68ab351 6746
629b3d75
MJ
6747 cond = gimple_build_cond (EQ_EXPR, lhs,
6748 fold_convert_loc (loc, TREE_TYPE (lhs),
6749 boolean_true_node),
6750 tlabel, flabel);
6751 gimple_seq_add_stmt (pre_p, cond);
6752 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6753 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6754 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
a68ab351
JJ
6755}
6756
6757
629b3d75
MJ
6758/* A subroutine of lower_omp_single. Expand the simple form of
6759 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
953ff289 6760
629b3d75 6761 #pragma omp single copyprivate (a, b, c)
953ff289 6762
629b3d75 6763 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
953ff289 6764
629b3d75
MJ
6765 {
6766 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6767 {
6768 BODY;
6769 copyout.a = a;
6770 copyout.b = b;
6771 copyout.c = c;
6772 GOMP_single_copy_end (&copyout);
6773 }
6774 else
6775 {
6776 a = copyout_p->a;
6777 b = copyout_p->b;
6778 c = copyout_p->c;
6779 }
6780 GOMP_barrier ();
6781 }
726a989a 6782
629b3d75
MJ
6783 FIXME. It may be better to delay expanding the logic of this until
6784 pass_expand_omp. The expanded logic may make the job more difficult
 6785 for a synchronization analysis pass. */
953ff289 6786
629b3d75
MJ
6787static void
6788lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6789 omp_context *ctx)
6790{
6791 tree ptr_type, t, l0, l1, l2, bfn_decl;
6792 gimple_seq copyin_seq;
6793 location_t loc = gimple_location (single_stmt);
953ff289 6794
629b3d75 6795 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
953ff289 6796
629b3d75
MJ
6797 ptr_type = build_pointer_type (ctx->record_type);
6798 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
953ff289 6799
629b3d75
MJ
6800 l0 = create_artificial_label (loc);
6801 l1 = create_artificial_label (loc);
6802 l2 = create_artificial_label (loc);
953ff289 6803
629b3d75
MJ
6804 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6805 t = build_call_expr_loc (loc, bfn_decl, 0);
6806 t = fold_convert_loc (loc, ptr_type, t);
6807 gimplify_assign (ctx->receiver_decl, t, pre_p);
953ff289 6808
629b3d75
MJ
6809 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6810 build_int_cst (ptr_type, 0));
6811 t = build3 (COND_EXPR, void_type_node, t,
6812 build_and_jump (&l0), build_and_jump (&l1));
6813 gimplify_and_add (t, pre_p);
953ff289 6814
629b3d75 6815 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
953ff289 6816
629b3d75 6817 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
953ff289 6818
629b3d75
MJ
6819 copyin_seq = NULL;
6820 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6821 &copyin_seq, ctx);
953ff289 6822
629b3d75
MJ
6823 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6824 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6825 t = build_call_expr_loc (loc, bfn_decl, 1, t);
6826 gimplify_and_add (t, pre_p);
2aee3e57 6827
629b3d75
MJ
6828 t = build_and_jump (&l2);
6829 gimplify_and_add (t, pre_p);
953ff289 6830
629b3d75 6831 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
953ff289 6832
629b3d75 6833 gimple_seq_add_seq (pre_p, copyin_seq);
777f7f9a 6834
629b3d75 6835 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
777f7f9a 6836}
50674e96 6837
629b3d75
MJ
6838
6839/* Expand code for an OpenMP single directive. */
2b4cf991
JJ
6840
6841static void
629b3d75 6842lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
2b4cf991 6843{
629b3d75 6844 tree block;
629b3d75
MJ
6845 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6846 gbind *bind;
6847 gimple_seq bind_body, bind_body_tail = NULL, dlist;
2b4cf991 6848
629b3d75 6849 push_gimplify_context ();
2b4cf991 6850
629b3d75
MJ
6851 block = make_node (BLOCK);
6852 bind = gimple_build_bind (NULL, NULL, block);
6853 gsi_replace (gsi_p, bind, true);
6854 bind_body = NULL;
6855 dlist = NULL;
6856 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6857 &bind_body, &dlist, ctx, NULL);
6858 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
2b4cf991 6859
629b3d75 6860 gimple_seq_add_stmt (&bind_body, single_stmt);
2b4cf991 6861
629b3d75
MJ
6862 if (ctx->record_type)
6863 lower_omp_single_copy (single_stmt, &bind_body, ctx);
6864 else
6865 lower_omp_single_simple (single_stmt, &bind_body);
2b4cf991 6866
629b3d75 6867 gimple_omp_set_body (single_stmt, NULL);
2b4cf991 6868
629b3d75 6869 gimple_seq_add_seq (&bind_body, dlist);
5a0f4dd3 6870
629b3d75 6871 bind_body = maybe_catch_exception (bind_body);
5a0f4dd3 6872
01914336
MJ
6873 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6874 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6875 gimple *g = gimple_build_omp_return (nowait);
6876 gimple_seq_add_stmt (&bind_body_tail, g);
28567c40 6877 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
629b3d75
MJ
6878 if (ctx->record_type)
6879 {
6880 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6881 tree clobber = build_constructor (ctx->record_type, NULL);
6882 TREE_THIS_VOLATILE (clobber) = 1;
6883 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6884 clobber), GSI_SAME_STMT);
6885 }
6886 gimple_seq_add_seq (&bind_body, bind_body_tail);
6887 gimple_bind_set_body (bind, bind_body);
5a0f4dd3 6888
629b3d75 6889 pop_gimplify_context (bind);
5a0f4dd3 6890
629b3d75
MJ
6891 gimple_bind_append_vars (bind, ctx->block_vars);
6892 BLOCK_VARS (block) = ctx->block_vars;
6893 if (BLOCK_VARS (block))
6894 TREE_USED (block) = 1;
5a0f4dd3
JJ
6895}
6896
74bf76ed 6897
629b3d75 6898/* Expand code for an OpenMP master directive. */
953ff289
DN
6899
6900static void
629b3d75 6901lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 6902{
629b3d75
MJ
6903 tree block, lab = NULL, x, bfn_decl;
6904 gimple *stmt = gsi_stmt (*gsi_p);
6905 gbind *bind;
6906 location_t loc = gimple_location (stmt);
6907 gimple_seq tseq;
50674e96 6908
629b3d75 6909 push_gimplify_context ();
50674e96 6910
629b3d75
MJ
6911 block = make_node (BLOCK);
6912 bind = gimple_build_bind (NULL, NULL, block);
6913 gsi_replace (gsi_p, bind, true);
6914 gimple_bind_add_stmt (bind, stmt);
50674e96 6915
629b3d75
MJ
6916 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6917 x = build_call_expr_loc (loc, bfn_decl, 0);
6918 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6919 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6920 tseq = NULL;
6921 gimplify_and_add (x, &tseq);
6922 gimple_bind_add_seq (bind, tseq);
9a771876 6923
629b3d75
MJ
6924 lower_omp (gimple_omp_body_ptr (stmt), ctx);
6925 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6926 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6927 gimple_omp_set_body (stmt, NULL);
b357f682 6928
629b3d75 6929 gimple_bind_add_stmt (bind, gimple_build_label (lab));
99819c63 6930
629b3d75 6931 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
e01d41e5 6932
629b3d75 6933 pop_gimplify_context (bind);
b8698a0f 6934
629b3d75
MJ
6935 gimple_bind_append_vars (bind, ctx->block_vars);
6936 BLOCK_VARS (block) = ctx->block_vars;
953ff289
DN
6937}
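/* Illustrative sketch (added annotation): the bind built above lowers the
   master construct roughly as

     if (omp_get_thread_num () == 0)
       <BODY, wrapped by maybe_catch_exception>;
     <lab>:
     GIMPLE_OMP_RETURN (nowait)

   so only the master thread executes the body and no barrier is implied.  */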
6938
28567c40
JJ
 6939/* Helper function for lower_omp_task_reductions. For a specific PASS,
 6940 find the next clause that should be processed, or return false
 6941 if all have been processed already. */
6942
6943static inline bool
6944omp_task_reduction_iterate (int pass, enum tree_code code,
6945 enum omp_clause_code ccode, tree *c, tree *decl,
6946 tree *type, tree *next)
6947{
6948 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
6949 {
6950 if (ccode == OMP_CLAUSE_REDUCTION
6951 && code != OMP_TASKLOOP
6952 && !OMP_CLAUSE_REDUCTION_TASK (*c))
6953 continue;
6954 *decl = OMP_CLAUSE_DECL (*c);
6955 *type = TREE_TYPE (*decl);
6956 if (TREE_CODE (*decl) == MEM_REF)
6957 {
6958 if (pass != 1)
6959 continue;
6960 }
6961 else
6962 {
6963 if (omp_is_reference (*decl))
6964 *type = TREE_TYPE (*type);
6965 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
6966 continue;
6967 }
6968 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
6969 return true;
6970 }
6971 *decl = NULL_TREE;
6972 *type = NULL_TREE;
6973 *next = NULL_TREE;
6974 return false;
6975}
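/* Note (added annotation): pass 0 handles reductions on plain decls with a
   constant-sized type, while pass 1 handles MEM_REF (array section)
   reductions and variable-sized types; this matches the order in which
   lower_omp_task_reductions lays out the per-thread record fields.  */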
6976
 6977/* Lower task_reduction and reduction clauses (the latter, unless CODE is
 6978 OMP_TASKGROUP, only with the task modifier). Register the mapping of those
 6979 in the START sequence, and reduce and unregister them in the END sequence. */
6980
6981static void
6982lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
6983 gimple_seq *start, gimple_seq *end)
6984{
6985 enum omp_clause_code ccode
6986 = (code == OMP_TASKGROUP
6987 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
6988 tree cancellable = NULL_TREE;
6989 clauses = omp_task_reductions_find_first (clauses, code, ccode);
6990 if (clauses == NULL_TREE)
6991 return;
6992 if (code == OMP_FOR || code == OMP_SECTIONS)
6993 {
6994 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
6995 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
6996 && outer->cancellable)
6997 {
6998 cancellable = error_mark_node;
6999 break;
7000 }
7001 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7002 break;
7003 }
7004 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
7005 tree *last = &TYPE_FIELDS (record_type);
7006 unsigned cnt = 0;
7007 if (cancellable)
7008 {
7009 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7010 ptr_type_node);
7011 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7012 integer_type_node);
7013 *last = field;
7014 DECL_CHAIN (field) = ifield;
7015 last = &DECL_CHAIN (ifield);
a4983b7a
JJ
7016 DECL_CONTEXT (field) = record_type;
7017 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7018 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7019 DECL_CONTEXT (ifield) = record_type;
7020 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
7021 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
28567c40
JJ
7022 }
7023 for (int pass = 0; pass < 2; pass++)
7024 {
7025 tree decl, type, next;
7026 for (tree c = clauses;
7027 omp_task_reduction_iterate (pass, code, ccode,
7028 &c, &decl, &type, &next); c = next)
7029 {
7030 ++cnt;
7031 tree new_type = type;
7032 if (ctx->outer)
7033 new_type = remap_type (type, &ctx->outer->cb);
7034 tree field
7035 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
7036 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
7037 new_type);
7038 if (DECL_P (decl) && type == TREE_TYPE (decl))
7039 {
7040 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
7041 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
7042 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
7043 }
7044 else
7045 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
7046 DECL_CONTEXT (field) = record_type;
a4983b7a
JJ
7047 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7048 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
28567c40
JJ
7049 *last = field;
7050 last = &DECL_CHAIN (field);
7051 tree bfield
7052 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
7053 boolean_type_node);
7054 DECL_CONTEXT (bfield) = record_type;
a4983b7a
JJ
7055 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
7056 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
28567c40
JJ
7057 *last = bfield;
7058 last = &DECL_CHAIN (bfield);
7059 }
7060 }
7061 *last = NULL_TREE;
7062 layout_type (record_type);
7063
7064 /* Build up an array which registers with the runtime all the reductions
7065 and deregisters them at the end. Format documented in libgomp/task.c. */
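  /* Roughly, the slots written below are (see libgomp/task.c for the
     authoritative format; slots not listed are left for the runtime):
       [0]            number of reductions (CNT)
       [1]            per-thread chunk size: the record size rounded up to a
                      cache line (64 bytes assumed here)
       [2]            required alignment (at least a cache line)
       [3]            -1,  [4]  0
       [7 + 3*i]      address of the i-th reduction's original variable
       [7 + 3*i + 1]  byte offset of its field within the per-thread record  */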
7066 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
7067 tree avar = create_tmp_var_raw (atype);
7068 gimple_add_tmp_var (avar);
7069 TREE_ADDRESSABLE (avar) = 1;
7070 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
7071 NULL_TREE, NULL_TREE);
7072 tree t = build_int_cst (pointer_sized_int_node, cnt);
7073 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7074 gimple_seq seq = NULL;
7075 tree sz = fold_convert (pointer_sized_int_node,
7076 TYPE_SIZE_UNIT (record_type));
7077 int cachesz = 64;
7078 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
7079 build_int_cst (pointer_sized_int_node, cachesz - 1));
7080 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
7081 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
7082 ctx->task_reductions.create (1 + cnt);
7083 ctx->task_reduction_map = new hash_map<tree, unsigned>;
7084 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
7085 ? sz : NULL_TREE);
7086 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
7087 gimple_seq_add_seq (start, seq);
7088 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
7089 NULL_TREE, NULL_TREE);
7090 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
7091 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7092 NULL_TREE, NULL_TREE);
7093 t = build_int_cst (pointer_sized_int_node,
7094 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
7095 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7096 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
7097 NULL_TREE, NULL_TREE);
7098 t = build_int_cst (pointer_sized_int_node, -1);
7099 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7100 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
7101 NULL_TREE, NULL_TREE);
7102 t = build_int_cst (pointer_sized_int_node, 0);
7103 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7104
 7105 /* In END, build a loop that iterates from 0 up to (but not including)
 7106 omp_get_num_threads () and, for each task reduction, checks a bool placed
 7107 right after the private variable within that thread's chunk; if the bool
 7108 is clear, the variable hasn't been initialized and thus isn't going to be
 7109 reduced or destructed; otherwise, reduce and destruct it. */
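  /* Roughly, and ignoring the cancellation special cases, the loop built
     into END below has the shape

       idx = 0;
       data = <per-thread data base, read back from the registration array>;
     lab1:
       ptr = (<record type> *) data;
       for each reduction clause:
         if (<bool field following the private copy>)
           { <merge the private copy into the original>; <destruct it>; }
       data += <chunk size>;  idx += 1;
       if (idx != <number of threads>) goto lab1;
     lab2:
       <unregister the reductions with the runtime>;  */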
7110 tree idx = create_tmp_var (size_type_node);
7111 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
7112 tree num_thr_sz = create_tmp_var (size_type_node);
7113 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7114 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
7115 tree lab3 = NULL_TREE;
7116 gimple *g;
7117 if (code == OMP_FOR || code == OMP_SECTIONS)
7118 {
 7119 /* For worksharing constructs, perform the reduction loop only in the
 7120 master thread, with the exception of cancelled implicit barriers - then
 7121 only handle the current thread. */
7122 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7123 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7124 tree thr_num = create_tmp_var (integer_type_node);
7125 g = gimple_build_call (t, 0);
7126 gimple_call_set_lhs (g, thr_num);
7127 gimple_seq_add_stmt (end, g);
7128 if (cancellable)
7129 {
7130 tree c;
7131 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7132 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
7133 lab3 = create_artificial_label (UNKNOWN_LOCATION);
7134 if (code == OMP_FOR)
7135 c = gimple_omp_for_clauses (ctx->stmt);
829a4338 7136 else /* if (code == OMP_SECTIONS) */
28567c40
JJ
7137 c = gimple_omp_sections_clauses (ctx->stmt);
7138 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
7139 cancellable = c;
7140 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
7141 lab5, lab6);
7142 gimple_seq_add_stmt (end, g);
7143 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7144 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
7145 gimple_seq_add_stmt (end, g);
7146 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
7147 build_one_cst (TREE_TYPE (idx)));
7148 gimple_seq_add_stmt (end, g);
7149 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
7150 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7151 }
7152 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
7153 gimple_seq_add_stmt (end, g);
7154 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7155 }
7156 if (code != OMP_PARALLEL)
7157 {
7158 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
7159 tree num_thr = create_tmp_var (integer_type_node);
7160 g = gimple_build_call (t, 0);
7161 gimple_call_set_lhs (g, num_thr);
7162 gimple_seq_add_stmt (end, g);
7163 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
7164 gimple_seq_add_stmt (end, g);
7165 if (cancellable)
7166 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7167 }
7168 else
7169 {
7170 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7171 OMP_CLAUSE__REDUCTEMP_);
7172 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
7173 t = fold_convert (size_type_node, t);
7174 gimplify_assign (num_thr_sz, t, end);
7175 }
7176 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7177 NULL_TREE, NULL_TREE);
7178 tree data = create_tmp_var (pointer_sized_int_node);
7179 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
7180 gimple_seq_add_stmt (end, gimple_build_label (lab1));
7181 tree ptr;
7182 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
7183 ptr = create_tmp_var (build_pointer_type (record_type));
7184 else
7185 ptr = create_tmp_var (ptr_type_node);
7186 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
7187
7188 tree field = TYPE_FIELDS (record_type);
7189 cnt = 0;
7190 if (cancellable)
7191 field = DECL_CHAIN (DECL_CHAIN (field));
7192 for (int pass = 0; pass < 2; pass++)
7193 {
7194 tree decl, type, next;
7195 for (tree c = clauses;
7196 omp_task_reduction_iterate (pass, code, ccode,
7197 &c, &decl, &type, &next); c = next)
7198 {
7199 tree var = decl, ref;
7200 if (TREE_CODE (decl) == MEM_REF)
7201 {
7202 var = TREE_OPERAND (var, 0);
7203 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7204 var = TREE_OPERAND (var, 0);
7205 tree v = var;
7206 if (TREE_CODE (var) == ADDR_EXPR)
7207 var = TREE_OPERAND (var, 0);
7208 else if (TREE_CODE (var) == INDIRECT_REF)
7209 var = TREE_OPERAND (var, 0);
7210 tree orig_var = var;
7211 if (is_variable_sized (var))
7212 {
7213 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7214 var = DECL_VALUE_EXPR (var);
7215 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7216 var = TREE_OPERAND (var, 0);
7217 gcc_assert (DECL_P (var));
7218 }
7219 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7220 if (orig_var != var)
7221 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
7222 else if (TREE_CODE (v) == ADDR_EXPR)
7223 t = build_fold_addr_expr (t);
7224 else if (TREE_CODE (v) == INDIRECT_REF)
7225 t = build_fold_indirect_ref (t);
7226 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
7227 {
7228 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
7229 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7230 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
7231 }
7232 if (!integer_zerop (TREE_OPERAND (decl, 1)))
7233 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
7234 fold_convert (size_type_node,
7235 TREE_OPERAND (decl, 1)));
7236 }
7237 else
7238 {
7239 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7240 if (!omp_is_reference (decl))
7241 t = build_fold_addr_expr (t);
7242 }
7243 t = fold_convert (pointer_sized_int_node, t);
7244 seq = NULL;
7245 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7246 gimple_seq_add_seq (start, seq);
7247 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7248 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7249 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7250 t = unshare_expr (byte_position (field));
7251 t = fold_convert (pointer_sized_int_node, t);
7252 ctx->task_reduction_map->put (c, cnt);
7253 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
7254 ? t : NULL_TREE);
7255 seq = NULL;
7256 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7257 gimple_seq_add_seq (start, seq);
7258 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7259 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
7260 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7261
7262 tree bfield = DECL_CHAIN (field);
7263 tree cond;
7264 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
 7265 /* In parallel or worksharing constructs, all threads unconditionally
7266 initialize all their task reduction private variables. */
7267 cond = boolean_true_node;
7268 else if (TREE_TYPE (ptr) == ptr_type_node)
7269 {
7270 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7271 unshare_expr (byte_position (bfield)));
7272 seq = NULL;
7273 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
7274 gimple_seq_add_seq (end, seq);
7275 tree pbool = build_pointer_type (TREE_TYPE (bfield));
7276 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
7277 build_int_cst (pbool, 0));
7278 }
7279 else
7280 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
7281 build_simple_mem_ref (ptr), bfield, NULL_TREE);
7282 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
7283 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7284 tree condv = create_tmp_var (boolean_type_node);
7285 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
7286 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
7287 lab3, lab4);
7288 gimple_seq_add_stmt (end, g);
7289 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7290 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
7291 {
 7292 /* If this reduction doesn't need destruction and the parallel
7293 has been cancelled, there is nothing to do for this
7294 reduction, so jump around the merge operation. */
7295 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7296 g = gimple_build_cond (NE_EXPR, cancellable,
7297 build_zero_cst (TREE_TYPE (cancellable)),
7298 lab4, lab5);
7299 gimple_seq_add_stmt (end, g);
7300 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7301 }
7302
7303 tree new_var;
7304 if (TREE_TYPE (ptr) == ptr_type_node)
7305 {
7306 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7307 unshare_expr (byte_position (field)));
7308 seq = NULL;
7309 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
7310 gimple_seq_add_seq (end, seq);
7311 tree pbool = build_pointer_type (TREE_TYPE (field));
7312 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
7313 build_int_cst (pbool, 0));
7314 }
7315 else
7316 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
7317 build_simple_mem_ref (ptr), field, NULL_TREE);
7318
7319 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7320 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
7321 ref = build_simple_mem_ref (ref);
7322 /* reduction(-:var) sums up the partial results, so it acts
7323 identically to reduction(+:var). */
7324 if (rcode == MINUS_EXPR)
7325 rcode = PLUS_EXPR;
7326 if (TREE_CODE (decl) == MEM_REF)
7327 {
7328 tree type = TREE_TYPE (new_var);
7329 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7330 tree i = create_tmp_var (TREE_TYPE (v));
7331 tree ptype = build_pointer_type (TREE_TYPE (type));
7332 if (DECL_P (v))
7333 {
7334 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7335 tree vv = create_tmp_var (TREE_TYPE (v));
7336 gimplify_assign (vv, v, start);
7337 v = vv;
7338 }
7339 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7340 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7341 new_var = build_fold_addr_expr (new_var);
7342 new_var = fold_convert (ptype, new_var);
7343 ref = fold_convert (ptype, ref);
7344 tree m = create_tmp_var (ptype);
7345 gimplify_assign (m, new_var, end);
7346 new_var = m;
7347 m = create_tmp_var (ptype);
7348 gimplify_assign (m, ref, end);
7349 ref = m;
7350 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
7351 tree body = create_artificial_label (UNKNOWN_LOCATION);
7352 tree endl = create_artificial_label (UNKNOWN_LOCATION);
7353 gimple_seq_add_stmt (end, gimple_build_label (body));
7354 tree priv = build_simple_mem_ref (new_var);
7355 tree out = build_simple_mem_ref (ref);
7356 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7357 {
7358 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7359 tree decl_placeholder
7360 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7361 tree lab6 = NULL_TREE;
7362 if (cancellable)
7363 {
 7364 /* If this reduction needs destruction and the parallel
7365 has been cancelled, jump around the merge operation
7366 to the destruction. */
7367 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7368 lab6 = create_artificial_label (UNKNOWN_LOCATION);
7369 tree zero = build_zero_cst (TREE_TYPE (cancellable));
7370 g = gimple_build_cond (NE_EXPR, cancellable, zero,
7371 lab6, lab5);
7372 gimple_seq_add_stmt (end, g);
7373 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7374 }
7375 SET_DECL_VALUE_EXPR (placeholder, out);
7376 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7377 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7378 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7379 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7380 gimple_seq_add_seq (end,
7381 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7382 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7383 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7384 {
7385 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7386 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7387 }
7388 if (cancellable)
7389 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7390 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
7391 if (x)
7392 {
7393 gimple_seq tseq = NULL;
7394 gimplify_stmt (&x, &tseq);
7395 gimple_seq_add_seq (end, tseq);
7396 }
7397 }
7398 else
7399 {
7400 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
7401 out = unshare_expr (out);
7402 gimplify_assign (out, x, end);
7403 }
7404 gimple *g
7405 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7406 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7407 gimple_seq_add_stmt (end, g);
7408 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7409 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7410 gimple_seq_add_stmt (end, g);
7411 g = gimple_build_assign (i, PLUS_EXPR, i,
7412 build_int_cst (TREE_TYPE (i), 1));
7413 gimple_seq_add_stmt (end, g);
7414 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
7415 gimple_seq_add_stmt (end, g);
7416 gimple_seq_add_stmt (end, gimple_build_label (endl));
7417 }
7418 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7419 {
7420 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7421 tree oldv = NULL_TREE;
7422 tree lab6 = NULL_TREE;
7423 if (cancellable)
7424 {
 7425 /* If this reduction needs destruction and the parallel
7426 has been cancelled, jump around the merge operation
7427 to the destruction. */
7428 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7429 lab6 = create_artificial_label (UNKNOWN_LOCATION);
7430 tree zero = build_zero_cst (TREE_TYPE (cancellable));
7431 g = gimple_build_cond (NE_EXPR, cancellable, zero,
7432 lab6, lab5);
7433 gimple_seq_add_stmt (end, g);
7434 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7435 }
7436 if (omp_is_reference (decl)
7437 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7438 TREE_TYPE (ref)))
7439 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
7440 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
7441 tree refv = create_tmp_var (TREE_TYPE (ref));
7442 gimplify_assign (refv, ref, end);
7443 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
7444 SET_DECL_VALUE_EXPR (placeholder, ref);
7445 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7446 tree d = maybe_lookup_decl (decl, ctx);
7447 gcc_assert (d);
7448 if (DECL_HAS_VALUE_EXPR_P (d))
7449 oldv = DECL_VALUE_EXPR (d);
7450 if (omp_is_reference (var))
7451 {
7452 tree v = fold_convert (TREE_TYPE (d),
7453 build_fold_addr_expr (new_var));
7454 SET_DECL_VALUE_EXPR (d, v);
7455 }
7456 else
7457 SET_DECL_VALUE_EXPR (d, new_var);
7458 DECL_HAS_VALUE_EXPR_P (d) = 1;
7459 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7460 if (oldv)
7461 SET_DECL_VALUE_EXPR (d, oldv);
7462 else
7463 {
7464 SET_DECL_VALUE_EXPR (d, NULL_TREE);
7465 DECL_HAS_VALUE_EXPR_P (d) = 0;
7466 }
7467 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7468 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7469 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7470 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7471 if (cancellable)
7472 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7473 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
7474 if (x)
7475 {
7476 gimple_seq tseq = NULL;
7477 gimplify_stmt (&x, &tseq);
7478 gimple_seq_add_seq (end, tseq);
7479 }
7480 }
7481 else
7482 {
7483 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
7484 ref = unshare_expr (ref);
7485 gimplify_assign (ref, x, end);
7486 }
7487 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7488 ++cnt;
7489 field = DECL_CHAIN (bfield);
7490 }
7491 }
7492
7493 if (code == OMP_TASKGROUP)
7494 {
7495 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
7496 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
7497 gimple_seq_add_stmt (start, g);
7498 }
7499 else
7500 {
7501 tree c;
7502 if (code == OMP_FOR)
7503 c = gimple_omp_for_clauses (ctx->stmt);
7504 else if (code == OMP_SECTIONS)
7505 c = gimple_omp_sections_clauses (ctx->stmt);
7506 else
7507 c = gimple_omp_taskreg_clauses (ctx->stmt);
7508 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
7509 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
7510 build_fold_addr_expr (avar));
7511 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
7512 }
7513
7514 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
7515 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
7516 size_one_node));
7517 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
7518 gimple_seq_add_stmt (end, g);
7519 gimple_seq_add_stmt (end, gimple_build_label (lab2));
7520 if (code == OMP_FOR || code == OMP_SECTIONS)
7521 {
7522 enum built_in_function bfn
7523 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
7524 t = builtin_decl_explicit (bfn);
7525 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
7526 tree arg;
7527 if (cancellable)
7528 {
7529 arg = create_tmp_var (c_bool_type);
7530 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
7531 cancellable));
7532 }
7533 else
7534 arg = build_int_cst (c_bool_type, 0);
7535 g = gimple_build_call (t, 1, arg);
7536 }
7537 else
7538 {
7539 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
7540 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
7541 }
7542 gimple_seq_add_stmt (end, g);
7543 t = build_constructor (atype, NULL);
7544 TREE_THIS_VOLATILE (t) = 1;
7545 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
7546}
e4834818 7547
629b3d75 7548/* Expand code for an OpenMP taskgroup directive. */
e4834818 7549
629b3d75
MJ
7550static void
7551lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
e4834818 7552{
629b3d75
MJ
7553 gimple *stmt = gsi_stmt (*gsi_p);
7554 gcall *x;
7555 gbind *bind;
28567c40 7556 gimple_seq dseq = NULL;
629b3d75 7557 tree block = make_node (BLOCK);
e4834818 7558
629b3d75
MJ
7559 bind = gimple_build_bind (NULL, NULL, block);
7560 gsi_replace (gsi_p, bind, true);
7561 gimple_bind_add_stmt (bind, stmt);
e4834818 7562
28567c40
JJ
7563 push_gimplify_context ();
7564
629b3d75
MJ
7565 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
7566 0);
7567 gimple_bind_add_stmt (bind, x);
e4834818 7568
28567c40
JJ
7569 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
7570 gimple_omp_taskgroup_clauses (stmt),
7571 gimple_bind_body_ptr (bind), &dseq);
7572
629b3d75
MJ
7573 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7574 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7575 gimple_omp_set_body (stmt, NULL);
e4834818 7576
629b3d75 7577 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
28567c40
JJ
7578 gimple_bind_add_seq (bind, dseq);
7579
7580 pop_gimplify_context (bind);
e4834818 7581
629b3d75
MJ
7582 gimple_bind_append_vars (bind, ctx->block_vars);
7583 BLOCK_VARS (block) = ctx->block_vars;
e4834818
NS
7584}
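/* Illustrative sketch (added annotation): the bind built above contains,
   in order, roughly

     GIMPLE_OMP_TASKGROUP             the original statement
     GOMP_taskgroup_start ();
     <task reduction registration, if any clauses>
     <lowered BODY>
     GIMPLE_OMP_RETURN
     <task reduction deregistration (DSEQ)>  */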
7585
50674e96 7586
629b3d75 7587/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
74bf76ed
JJ
7588
7589static void
629b3d75
MJ
7590lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
7591 omp_context *ctx)
74bf76ed 7592{
629b3d75
MJ
7593 struct omp_for_data fd;
7594 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
7595 return;
74bf76ed 7596
629b3d75
MJ
7597 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
7598 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
7599 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
7600 if (!fd.ordered)
7601 return;
acf0174b 7602
629b3d75
MJ
7603 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
7604 tree c = gimple_omp_ordered_clauses (ord_stmt);
7605 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7606 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
74bf76ed 7607 {
629b3d75
MJ
7608 /* Merge depend clauses from multiple adjacent
7609 #pragma omp ordered depend(sink:...) constructs
7610 into one #pragma omp ordered depend(sink:...), so that
7611 we can optimize them together. */
7612 gimple_stmt_iterator gsi = *gsi_p;
7613 gsi_next (&gsi);
7614 while (!gsi_end_p (gsi))
74bf76ed 7615 {
629b3d75
MJ
7616 gimple *stmt = gsi_stmt (gsi);
7617 if (is_gimple_debug (stmt)
7618 || gimple_code (stmt) == GIMPLE_NOP)
74bf76ed 7619 {
629b3d75
MJ
7620 gsi_next (&gsi);
7621 continue;
74bf76ed 7622 }
629b3d75
MJ
7623 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
7624 break;
7625 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
7626 c = gimple_omp_ordered_clauses (ord_stmt2);
7627 if (c == NULL_TREE
7628 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
7629 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
7630 break;
7631 while (*list_p)
7632 list_p = &OMP_CLAUSE_CHAIN (*list_p);
7633 *list_p = c;
7634 gsi_remove (&gsi, true);
74bf76ed
JJ
7635 }
7636 }
74bf76ed 7637
629b3d75
MJ
7638 /* Canonicalize sink dependence clauses into one folded clause if
7639 possible.
74bf76ed 7640
629b3d75
MJ
7641 The basic algorithm is to create a sink vector whose first
7642 element is the GCD of all the first elements, and whose remaining
7643 elements are the minimum of the subsequent columns.
74bf76ed 7644
629b3d75
MJ
7645 We ignore dependence vectors whose first element is zero because
7646 such dependencies are known to be executed by the same thread.
acf0174b 7647
629b3d75
MJ
7648 We take into account the direction of the loop, so a minimum
7649 becomes a maximum if the loop is iterating forwards. We also
7650 ignore sink clauses where the loop direction is unknown, or where
7651 the offsets are clearly invalid because they are not a multiple
7652 of the loop increment.
7653
7654 For example:
7655
7656 #pragma omp for ordered(2)
7657 for (i=0; i < N; ++i)
7658 for (j=0; j < M; ++j)
acf0174b 7659 {
629b3d75
MJ
7660 #pragma omp ordered \
7661 depend(sink:i-8,j-2) \
7662 depend(sink:i,j-1) \ // Completely ignored because i+0.
7663 depend(sink:i-4,j-3) \
7664 depend(sink:i-6,j-4)
7665 #pragma omp ordered depend(source)
acf0174b 7666 }
acf0174b 7667
629b3d75 7668 Folded clause is:
74bf76ed 7669
629b3d75
MJ
7670 depend(sink:-gcd(8,4,6),-min(2,3,4))
7671 -or-
7672 depend(sink:-2,-2)
7673 */
74bf76ed 7674
629b3d75
MJ
7675 /* FIXME: Computing GCD's where the first element is zero is
7676 non-trivial in the presence of collapsed loops. Do this later. */
7677 if (fd.collapse > 1)
7678 return;
74bf76ed 7679
629b3d75 7680 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
c3684b7b
MS
7681
7682 /* wide_int is not a POD so it must be default-constructed. */
7683 for (unsigned i = 0; i != 2 * len - 1; ++i)
7684 new (static_cast<void*>(folded_deps + i)) wide_int ();
7685
629b3d75
MJ
7686 tree folded_dep = NULL_TREE;
7687 /* TRUE if the first dimension's offset is negative. */
7688 bool neg_offset_p = false;
74bf76ed 7689
629b3d75
MJ
7690 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
7691 unsigned int i;
7692 while ((c = *list_p) != NULL)
74bf76ed 7693 {
629b3d75 7694 bool remove = false;
74bf76ed 7695
629b3d75
MJ
7696 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
7697 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
7698 goto next_ordered_clause;
74bf76ed 7699
629b3d75
MJ
7700 tree vec;
7701 for (vec = OMP_CLAUSE_DECL (c), i = 0;
7702 vec && TREE_CODE (vec) == TREE_LIST;
7703 vec = TREE_CHAIN (vec), ++i)
74bf76ed 7704 {
629b3d75 7705 gcc_assert (i < len);
74bf76ed 7706
629b3d75
MJ
7707 /* omp_extract_for_data has canonicalized the condition. */
7708 gcc_assert (fd.loops[i].cond_code == LT_EXPR
7709 || fd.loops[i].cond_code == GT_EXPR);
7710 bool forward = fd.loops[i].cond_code == LT_EXPR;
7711 bool maybe_lexically_later = true;
953ff289 7712
629b3d75
MJ
7713 /* While the committee makes up its mind, bail if we have any
7714 non-constant steps. */
7715 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
7716 goto lower_omp_ordered_ret;
953ff289 7717
629b3d75
MJ
7718 tree itype = TREE_TYPE (TREE_VALUE (vec));
7719 if (POINTER_TYPE_P (itype))
7720 itype = sizetype;
8e6cdc90 7721 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
629b3d75
MJ
7722 TYPE_PRECISION (itype),
7723 TYPE_SIGN (itype));
a68ab351 7724
629b3d75 7725 /* Ignore invalid offsets that are not multiples of the step. */
8e6cdc90
RS
7726 if (!wi::multiple_of_p (wi::abs (offset),
7727 wi::abs (wi::to_wide (fd.loops[i].step)),
7728 UNSIGNED))
b4c3a85b 7729 {
629b3d75
MJ
7730 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7731 "ignoring sink clause with offset that is not "
7732 "a multiple of the loop step");
7733 remove = true;
7734 goto next_ordered_clause;
b4c3a85b 7735 }
d9a6bd32 7736
629b3d75
MJ
7737 /* Calculate the first dimension. The first dimension of
7738 the folded dependency vector is the GCD of the first
7739 elements, while ignoring any first elements whose offset
7740 is 0. */
7741 if (i == 0)
b4c3a85b 7742 {
629b3d75
MJ
7743 /* Ignore dependence vectors whose first dimension is 0. */
7744 if (offset == 0)
b4c3a85b 7745 {
629b3d75
MJ
7746 remove = true;
7747 goto next_ordered_clause;
b4c3a85b 7748 }
d9a6bd32 7749 else
629b3d75
MJ
7750 {
7751 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
7752 {
7753 error_at (OMP_CLAUSE_LOCATION (c),
7754 "first offset must be in opposite direction "
7755 "of loop iterations");
7756 goto lower_omp_ordered_ret;
7757 }
7758 if (forward)
7759 offset = -offset;
7760 neg_offset_p = forward;
7761 /* Initialize the first time around. */
7762 if (folded_dep == NULL_TREE)
7763 {
7764 folded_dep = c;
7765 folded_deps[0] = offset;
7766 }
7767 else
7768 folded_deps[0] = wi::gcd (folded_deps[0],
7769 offset, UNSIGNED);
7770 }
d9a6bd32 7771 }
629b3d75 7772 /* Calculate minimum for the remaining dimensions. */
d9a6bd32 7773 else
d9a6bd32 7774 {
629b3d75
MJ
7775 folded_deps[len + i - 1] = offset;
7776 if (folded_dep == c)
7777 folded_deps[i] = offset;
7778 else if (maybe_lexically_later
7779 && !wi::eq_p (folded_deps[i], offset))
7780 {
7781 if (forward ^ wi::gts_p (folded_deps[i], offset))
7782 {
7783 unsigned int j;
7784 folded_dep = c;
7785 for (j = 1; j <= i; j++)
7786 folded_deps[j] = folded_deps[len + j - 1];
7787 }
7788 else
7789 maybe_lexically_later = false;
7790 }
d9a6bd32 7791 }
d9a6bd32 7792 }
629b3d75 7793 gcc_assert (i == len);
d9a6bd32 7794
629b3d75
MJ
7795 remove = true;
7796
7797 next_ordered_clause:
7798 if (remove)
7799 *list_p = OMP_CLAUSE_CHAIN (c);
d9a6bd32 7800 else
629b3d75 7801 list_p = &OMP_CLAUSE_CHAIN (c);
d9a6bd32 7802 }
d9a6bd32 7803
629b3d75 7804 if (folded_dep)
d9a6bd32 7805 {
629b3d75
MJ
7806 if (neg_offset_p)
7807 folded_deps[0] = -folded_deps[0];
d9a6bd32 7808
629b3d75
MJ
7809 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
7810 if (POINTER_TYPE_P (itype))
7811 itype = sizetype;
7812
7813 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
7814 = wide_int_to_tree (itype, folded_deps[0]);
7815 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
7816 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
d9a6bd32
JJ
7817 }
7818
629b3d75 7819 lower_omp_ordered_ret:
d9a6bd32 7820
629b3d75
MJ
 7821 /* Ordered without clauses is #pragma omp ordered threads, while we want
 7822 a nop instead if we remove all clauses. */
7823 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
7824 gsi_replace (gsi_p, gimple_build_nop (), true);
d9a6bd32
JJ
7825}
7826
7827
629b3d75 7828/* Expand code for an OpenMP ordered directive. */
953ff289 7829
777f7f9a 7830static void
629b3d75 7831lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 7832{
629b3d75
MJ
7833 tree block;
7834 gimple *stmt = gsi_stmt (*gsi_p), *g;
7835 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
7836 gcall *x;
7837 gbind *bind;
7838 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
7839 OMP_CLAUSE_SIMD);
7840 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
7841 loop. */
7842 bool maybe_simt
7843 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
7844 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
7845 OMP_CLAUSE_THREADS);
d9a6bd32 7846
629b3d75
MJ
7847 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
7848 OMP_CLAUSE_DEPEND))
d9a6bd32 7849 {
629b3d75
MJ
 7850 /* FIXME: This needs to be moved to the expansion, to verify various
 7851 conditions only testable on a cfg with dominators computed; also,
 7852 all the depend clauses to be merged still might need to be available
 7853 for the runtime checks. */
7854 if (0)
7855 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
7856 return;
a68ab351 7857 }
d9a6bd32 7858
629b3d75
MJ
7859 push_gimplify_context ();
7860
7861 block = make_node (BLOCK);
7862 bind = gimple_build_bind (NULL, NULL, block);
7863 gsi_replace (gsi_p, bind, true);
7864 gimple_bind_add_stmt (bind, stmt);
d9a6bd32 7865
629b3d75 7866 if (simd)
917948d3 7867 {
629b3d75
MJ
7868 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
7869 build_int_cst (NULL_TREE, threads));
7870 cfun->has_simduid_loops = true;
917948d3
ZD
7871 }
7872 else
629b3d75
MJ
7873 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
7874 0);
7875 gimple_bind_add_stmt (bind, x);
7876
7877 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
7878 if (maybe_simt)
953ff289 7879 {
629b3d75
MJ
7880 counter = create_tmp_var (integer_type_node);
7881 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
7882 gimple_call_set_lhs (g, counter);
7883 gimple_bind_add_stmt (bind, g);
d9a6bd32 7884
629b3d75
MJ
7885 body = create_artificial_label (UNKNOWN_LOCATION);
7886 test = create_artificial_label (UNKNOWN_LOCATION);
7887 gimple_bind_add_stmt (bind, gimple_build_label (body));
953ff289 7888
629b3d75
MJ
7889 tree simt_pred = create_tmp_var (integer_type_node);
7890 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
7891 gimple_call_set_lhs (g, simt_pred);
7892 gimple_bind_add_stmt (bind, g);
d9a6bd32 7893
629b3d75
MJ
7894 tree t = create_artificial_label (UNKNOWN_LOCATION);
7895 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
7896 gimple_bind_add_stmt (bind, g);
74bf76ed 7897
629b3d75 7898 gimple_bind_add_stmt (bind, gimple_build_label (t));
acf0174b 7899 }
629b3d75
MJ
7900 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7901 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7902 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7903 gimple_omp_set_body (stmt, NULL);
acf0174b 7904
629b3d75 7905 if (maybe_simt)
d9a6bd32 7906 {
629b3d75
MJ
7907 gimple_bind_add_stmt (bind, gimple_build_label (test));
7908 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
7909 gimple_bind_add_stmt (bind, g);
50674e96 7910
629b3d75
MJ
7911 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
7912 tree nonneg = create_tmp_var (integer_type_node);
7913 gimple_seq tseq = NULL;
7914 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
7915 gimple_bind_add_seq (bind, tseq);
d9a6bd32 7916
629b3d75
MJ
7917 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
7918 gimple_call_set_lhs (g, nonneg);
7919 gimple_bind_add_stmt (bind, g);
d9a6bd32 7920
629b3d75
MJ
7921 tree end = create_artificial_label (UNKNOWN_LOCATION);
7922 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
7923 gimple_bind_add_stmt (bind, g);
50674e96 7924
629b3d75 7925 gimple_bind_add_stmt (bind, gimple_build_label (end));
e5c95afe 7926 }
629b3d75
MJ
7927 if (simd)
7928 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
7929 build_int_cst (NULL_TREE, threads));
777f7f9a 7930 else
629b3d75
MJ
7931 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
7932 0);
7933 gimple_bind_add_stmt (bind, x);
917948d3 7934
629b3d75 7935 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
917948d3 7936
629b3d75 7937 pop_gimplify_context (bind);
917948d3 7938
629b3d75
MJ
7939 gimple_bind_append_vars (bind, ctx->block_vars);
7940 BLOCK_VARS (block) = gimple_bind_vars (bind);
7941}
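/* Illustrative sketch (added annotation): without the simd clause the
   ordered region is lowered above roughly as

     GOMP_ordered_start ();
     <BODY, wrapped by maybe_catch_exception>;
     GOMP_ordered_end ();
     GIMPLE_OMP_RETURN

   With the simd clause the library calls are replaced by the
   IFN_GOMP_SIMD_ORDERED_START/IFN_GOMP_SIMD_ORDERED_END internal functions,
   and for possibly-SIMT offload targets a per-lane loop driven by
   IFN_GOMP_SIMT_* internal calls is wrapped around the body.  */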
56102c7f 7942
56102c7f 7943
629b3d75
MJ
7944/* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
7945 substitution of a couple of function calls. But in the NAMED case,
 7946 it requires that languages coordinate a symbol name. It is therefore
7947 best put here in common code. */
56102c7f 7948
629b3d75 7949static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
56102c7f 7950
629b3d75
MJ
7951static void
7952lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7953{
7954 tree block;
7955 tree name, lock, unlock;
7956 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
7957 gbind *bind;
7958 location_t loc = gimple_location (stmt);
7959 gimple_seq tbody;
56102c7f 7960
629b3d75
MJ
7961 name = gimple_omp_critical_name (stmt);
7962 if (name)
7963 {
7964 tree decl;
56102c7f 7965
629b3d75
MJ
7966 if (!critical_name_mutexes)
7967 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
56102c7f 7968
629b3d75
MJ
7969 tree *n = critical_name_mutexes->get (name);
7970 if (n == NULL)
74bf76ed 7971 {
629b3d75 7972 char *new_str;
953ff289 7973
629b3d75 7974 decl = create_tmp_var_raw (ptr_type_node);
953ff289 7975
629b3d75
MJ
7976 new_str = ACONCAT ((".gomp_critical_user_",
7977 IDENTIFIER_POINTER (name), NULL));
7978 DECL_NAME (decl) = get_identifier (new_str);
7979 TREE_PUBLIC (decl) = 1;
7980 TREE_STATIC (decl) = 1;
7981 DECL_COMMON (decl) = 1;
7982 DECL_ARTIFICIAL (decl) = 1;
7983 DECL_IGNORED_P (decl) = 1;
953ff289 7984
629b3d75 7985 varpool_node::finalize_decl (decl);
953ff289 7986
629b3d75
MJ
7987 critical_name_mutexes->put (name, decl);
7988 }
7989 else
7990 decl = *n;
953ff289 7991
629b3d75
MJ
 7992 /* If '#pragma omp critical' is inside an offloaded region or
 7993 inside a function marked as offloadable, the symbol must be
7994 marked as offloadable too. */
7995 omp_context *octx;
7996 if (cgraph_node::get (current_function_decl)->offloadable)
7997 varpool_node::get_create (decl)->offloadable = 1;
7998 else
7999 for (octx = ctx->outer; octx; octx = octx->outer)
8000 if (is_gimple_omp_offloaded (octx->stmt))
8001 {
8002 varpool_node::get_create (decl)->offloadable = 1;
8003 break;
8004 }
777f7f9a 8005
629b3d75 8006 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
01914336
MJ
8007 lock = build_call_expr_loc (loc, lock, 1,
8008 build_fold_addr_expr_loc (loc, decl));
777f7f9a 8009
629b3d75
MJ
8010 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
8011 unlock = build_call_expr_loc (loc, unlock, 1,
8012 build_fold_addr_expr_loc (loc, decl));
acf0174b 8013 }
acf0174b 8014 else
5a0f4dd3 8015 {
629b3d75
MJ
8016 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
8017 lock = build_call_expr_loc (loc, lock, 0);
5a0f4dd3 8018
629b3d75
MJ
8019 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
8020 unlock = build_call_expr_loc (loc, unlock, 0);
acf0174b 8021 }
953ff289 8022
629b3d75 8023 push_gimplify_context ();
fb79f500 8024
629b3d75
MJ
8025 block = make_node (BLOCK);
8026 bind = gimple_build_bind (NULL, NULL, block);
8027 gsi_replace (gsi_p, bind, true);
8028 gimple_bind_add_stmt (bind, stmt);
fb79f500 8029
629b3d75
MJ
8030 tbody = gimple_bind_body (bind);
8031 gimplify_and_add (lock, &tbody);
8032 gimple_bind_set_body (bind, tbody);
fb79f500 8033
629b3d75
MJ
8034 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8035 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8036 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8037 gimple_omp_set_body (stmt, NULL);
953ff289 8038
629b3d75
MJ
8039 tbody = gimple_bind_body (bind);
8040 gimplify_and_add (unlock, &tbody);
8041 gimple_bind_set_body (bind, tbody);
953ff289 8042
629b3d75 8043 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
917948d3 8044
629b3d75
MJ
8045 pop_gimplify_context (bind);
8046 gimple_bind_append_vars (bind, ctx->block_vars);
8047 BLOCK_VARS (block) = gimple_bind_vars (bind);
8048}
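/* Illustrative sketch (added annotation): a named critical such as

     #pragma omp critical (foo)
     BODY;

   is lowered above roughly as

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     <BODY, wrapped by maybe_catch_exception>;
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   while the unnamed form uses GOMP_critical_start/GOMP_critical_end with
   no argument.  */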
50674e96 8049
629b3d75
MJ
8050/* A subroutine of lower_omp_for. Generate code to emit the predicate
8051 for a lastprivate clause. Given a loop control predicate of (V
8052 cond N2), we gate the clause on (!(V cond N2)). The lowered form
 8053 is appended to *DLIST; iterator initialization is appended to
8054 *BODY_P. */
50674e96 8055
629b3d75
MJ
8056static void
8057lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
8058 gimple_seq *dlist, struct omp_context *ctx)
8059{
8060 tree clauses, cond, vinit;
8061 enum tree_code cond_code;
8062 gimple_seq stmts;
953ff289 8063
629b3d75
MJ
8064 cond_code = fd->loop.cond_code;
8065 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
acf0174b 8066
629b3d75
MJ
8067 /* When possible, use a strict equality expression. This can let VRP
8068 type optimizations deduce the value and remove a copy. */
8069 if (tree_fits_shwi_p (fd->loop.step))
acf0174b 8070 {
629b3d75
MJ
8071 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
8072 if (step == 1 || step == -1)
8073 cond_code = EQ_EXPR;
acf0174b 8074 }
629b3d75
MJ
8075
8076 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
8077 || gimple_omp_for_grid_phony (fd->for_stmt))
8078 cond = omp_grid_lastprivate_predicate (fd);
a68ab351 8079 else
acf0174b 8080 {
629b3d75
MJ
8081 tree n2 = fd->loop.n2;
8082 if (fd->collapse > 1
8083 && TREE_CODE (n2) != INTEGER_CST
8084 && gimple_omp_for_combined_into_p (fd->for_stmt))
d9a6bd32 8085 {
629b3d75
MJ
8086 struct omp_context *taskreg_ctx = NULL;
8087 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
d9a6bd32 8088 {
629b3d75
MJ
8089 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
8090 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
8091 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
d9a6bd32 8092 {
629b3d75
MJ
8093 if (gimple_omp_for_combined_into_p (gfor))
8094 {
8095 gcc_assert (ctx->outer->outer
8096 && is_parallel_ctx (ctx->outer->outer));
8097 taskreg_ctx = ctx->outer->outer;
8098 }
8099 else
8100 {
8101 struct omp_for_data outer_fd;
8102 omp_extract_for_data (gfor, &outer_fd, NULL);
8103 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
8104 }
d9a6bd32 8105 }
629b3d75
MJ
8106 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
8107 taskreg_ctx = ctx->outer->outer;
8108 }
8109 else if (is_taskreg_ctx (ctx->outer))
8110 taskreg_ctx = ctx->outer;
8111 if (taskreg_ctx)
8112 {
8113 int i;
8114 tree taskreg_clauses
8115 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
8116 tree innerc = omp_find_clause (taskreg_clauses,
8117 OMP_CLAUSE__LOOPTEMP_);
8118 gcc_assert (innerc);
8119 for (i = 0; i < fd->collapse; i++)
8120 {
8121 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
8122 OMP_CLAUSE__LOOPTEMP_);
8123 gcc_assert (innerc);
8124 }
8125 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
8126 OMP_CLAUSE__LOOPTEMP_);
8127 if (innerc)
8128 n2 = fold_convert (TREE_TYPE (n2),
8129 lookup_decl (OMP_CLAUSE_DECL (innerc),
8130 taskreg_ctx));
d9a6bd32 8131 }
acf0174b 8132 }
629b3d75 8133 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
acf0174b 8134 }
50674e96 8135
629b3d75
MJ
8136 clauses = gimple_omp_for_clauses (fd->for_stmt);
8137 stmts = NULL;
8138 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
8139 if (!gimple_seq_empty_p (stmts))
acf0174b 8140 {
629b3d75
MJ
8141 gimple_seq_add_seq (&stmts, *dlist);
8142 *dlist = stmts;
6093bc06 8143
629b3d75
MJ
8144 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
8145 vinit = fd->loop.n1;
8146 if (cond_code == EQ_EXPR
8147 && tree_fits_shwi_p (fd->loop.n2)
8148 && ! integer_zerop (fd->loop.n2))
8149 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
8150 else
8151 vinit = unshare_expr (vinit);
e67d7a1e 8152
629b3d75
MJ
8153 /* Initialize the iterator variable, so that threads that don't execute
8154 any iterations don't execute the lastprivate clauses by accident. */
8155 gimplify_assign (fd->loop.v, vinit, body_p);
acf0174b 8156 }
953ff289
DN
8157}
8158
1b96e9a4 8159
629b3d75 8160/* Lower code for an OMP loop directive. */
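/* Roughly, the lowered sequence assembled below has this shape (a sketch
   only, several pieces are optional):

     <task-reduction setup and lower_rec_input_clauses output>
     <lowered pre-body and bound temporaries>
     GIMPLE_OMP_FOR <clauses>
       <lowered loop body>
     GIMPLE_OMP_CONTINUE (V, V)
     <lastprivate / reduction copy-back, dlist>
     GIMPLE_OMP_RETURN
     <task-reduction teardown>

   all wrapped in a GIMPLE_BIND that collects ctx->block_vars.  */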
50674e96 8161
629b3d75
MJ
8162static void
8163lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8164{
8165 tree *rhs_p, block;
8166 struct omp_for_data fd, *fdp = NULL;
8167 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
8168 gbind *new_stmt;
28567c40
JJ
8169 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
8170 gimple_seq cnt_list = NULL;
629b3d75
MJ
8171 gimple_seq oacc_head = NULL, oacc_tail = NULL;
8172 size_t i;
953ff289 8173
629b3d75 8174 push_gimplify_context ();
953ff289 8175
629b3d75 8176 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
953ff289 8177
629b3d75
MJ
8178 block = make_node (BLOCK);
8179 new_stmt = gimple_build_bind (NULL, NULL, block);
 8180 /* Replace at gsi right away, so that 'stmt' is no longer a member
 8181 of a sequence, as we're going to add it to a different
 8182 one below. */
8183 gsi_replace (gsi_p, new_stmt, true);
953ff289 8184
629b3d75
MJ
 8185 /* Move declarations of temporaries in the loop body before we make
 8186 the body go away. */
8187 omp_for_body = gimple_omp_body (stmt);
8188 if (!gimple_seq_empty_p (omp_for_body)
8189 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
acf0174b 8190 {
629b3d75
MJ
8191 gbind *inner_bind
8192 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
8193 tree vars = gimple_bind_vars (inner_bind);
8194 gimple_bind_append_vars (new_stmt, vars);
8195 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
 8196 keep them on the inner_bind and its block. */
8197 gimple_bind_set_vars (inner_bind, NULL_TREE);
8198 if (gimple_bind_block (inner_bind))
8199 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
acf0174b 8200 }
50674e96 8201
629b3d75 8202 if (gimple_omp_for_combined_into_p (stmt))
5a0f4dd3 8203 {
629b3d75
MJ
8204 omp_extract_for_data (stmt, &fd, NULL);
8205 fdp = &fd;
8206
8207 /* We need two temporaries with fd.loop.v type (istart/iend)
8208 and then (fd.collapse - 1) temporaries with the same
8209 type for count2 ... countN-1 vars if not constant. */
8210 size_t count = 2;
8211 tree type = fd.iter_type;
8212 if (fd.collapse > 1
8213 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
8214 count += fd.collapse - 1;
8215 bool taskreg_for
8216 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
8217 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
8218 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6e6cf7b0 8219 tree simtc = NULL;
629b3d75
MJ
8220 tree clauses = *pc;
8221 if (taskreg_for)
8222 outerc
8223 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
8224 OMP_CLAUSE__LOOPTEMP_);
6e6cf7b0
JJ
8225 if (ctx->simt_stmt)
8226 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
8227 OMP_CLAUSE__LOOPTEMP_);
629b3d75 8228 for (i = 0; i < count; i++)
5a0f4dd3 8229 {
629b3d75
MJ
8230 tree temp;
8231 if (taskreg_for)
8232 {
8233 gcc_assert (outerc);
8234 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
8235 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
8236 OMP_CLAUSE__LOOPTEMP_);
8237 }
8238 else
5a0f4dd3 8239 {
6e6cf7b0
JJ
8240 /* If there are 2 adjacent SIMD stmts, one with _simt_
8241 clause, another without, make sure they have the same
8242 decls in _looptemp_ clauses, because the outer stmt
8243 they are combined into will look up just one inner_stmt. */
8244 if (ctx->simt_stmt)
8245 temp = OMP_CLAUSE_DECL (simtc);
8246 else
8247 temp = create_tmp_var (type);
629b3d75 8248 insert_decl_map (&ctx->outer->cb, temp, temp);
5a0f4dd3 8249 }
629b3d75
MJ
8250 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
8251 OMP_CLAUSE_DECL (*pc) = temp;
8252 pc = &OMP_CLAUSE_CHAIN (*pc);
6e6cf7b0
JJ
8253 if (ctx->simt_stmt)
8254 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
8255 OMP_CLAUSE__LOOPTEMP_);
5a0f4dd3 8256 }
629b3d75 8257 *pc = clauses;
5a0f4dd3
JJ
8258 }
8259
629b3d75
MJ
8260 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
8261 dlist = NULL;
8262 body = NULL;
28567c40
JJ
8263 tree rclauses
8264 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
8265 OMP_CLAUSE_REDUCTION);
8266 tree rtmp = NULL_TREE;
8267 if (rclauses)
8268 {
8269 tree type = build_pointer_type (pointer_sized_int_node);
8270 tree temp = create_tmp_var (type);
8271 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8272 OMP_CLAUSE_DECL (c) = temp;
8273 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
8274 gimple_omp_for_set_clauses (stmt, c);
8275 lower_omp_task_reductions (ctx, OMP_FOR,
8276 gimple_omp_for_clauses (stmt),
8277 &tred_ilist, &tred_dlist);
8278 rclauses = c;
8279 rtmp = make_ssa_name (type);
8280 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
8281 }
8282
629b3d75
MJ
8283 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
8284 fdp);
28567c40
JJ
8285 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
8286 gimple_omp_for_pre_body (stmt));
917948d3 8287
629b3d75 8288 lower_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289 8289
629b3d75
MJ
8290 /* Lower the header expressions. At this point, we can assume that
8291 the header is of the form:
50674e96 8292
629b3d75 8293 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
917948d3 8294
629b3d75
MJ
8295 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
8296 using the .omp_data_s mapping, if needed. */
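 /* For example (identifiers below are illustrative only), a header like

      #pragma omp for
      for (i = p->lo; i < p->hi; i += p->step)

    has non-invariant VAL1/VAL2/VAL3; each such operand is replaced by a
    formal temporary whose initialization is collected in CNT_LIST and
    later emitted before the construct (or into the task-reduction init
    sequence).  */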
8297 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
8298 {
8299 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
8300 if (!is_gimple_min_invariant (*rhs_p))
28567c40 8301 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
0fe4bc78
JJ
8302 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
8303 recompute_tree_invariant_for_addr_expr (*rhs_p);
50674e96 8304
629b3d75
MJ
8305 rhs_p = gimple_omp_for_final_ptr (stmt, i);
8306 if (!is_gimple_min_invariant (*rhs_p))
28567c40 8307 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
0fe4bc78
JJ
8308 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
8309 recompute_tree_invariant_for_addr_expr (*rhs_p);
d9a6bd32 8310
629b3d75
MJ
8311 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
8312 if (!is_gimple_min_invariant (*rhs_p))
28567c40 8313 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
629b3d75 8314 }
28567c40
JJ
8315 if (rclauses)
8316 gimple_seq_add_seq (&tred_ilist, cnt_list);
8317 else
8318 gimple_seq_add_seq (&body, cnt_list);
953ff289 8319
629b3d75
MJ
8320 /* Once lowered, extract the bounds and clauses. */
8321 omp_extract_for_data (stmt, &fd, NULL);
953ff289 8322
629b3d75
MJ
8323 if (is_gimple_omp_oacc (ctx->stmt)
8324 && !ctx_in_oacc_kernels_region (ctx))
8325 lower_oacc_head_tail (gimple_location (stmt),
8326 gimple_omp_for_clauses (stmt),
8327 &oacc_head, &oacc_tail, ctx);
953ff289 8328
01914336 8329 /* Add OpenACC partitioning and reduction markers just before the loop. */
629b3d75
MJ
8330 if (oacc_head)
8331 gimple_seq_add_seq (&body, oacc_head);
01914336 8332
629b3d75 8333 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
acf0174b 8334
629b3d75
MJ
8335 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
8336 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
d9a6bd32
JJ
8337 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8338 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8339 {
629b3d75
MJ
8340 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
8341 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
8342 OMP_CLAUSE_LINEAR_STEP (c)
8343 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
8344 ctx);
d9a6bd32 8345 }
acf0174b 8346
629b3d75
MJ
8347 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
8348 && gimple_omp_for_grid_phony (stmt));
8349 if (!phony_loop)
8350 gimple_seq_add_stmt (&body, stmt);
8351 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
8352
8353 if (!phony_loop)
8354 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
8355 fd.loop.v));
917948d3 8356
629b3d75
MJ
8357 /* After the loop, add exit clauses. */
8358 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
b8698a0f 8359
629b3d75
MJ
8360 if (ctx->cancellable)
8361 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
50674e96 8362
629b3d75 8363 gimple_seq_add_seq (&body, dlist);
953ff289 8364
28567c40
JJ
8365 if (rclauses)
8366 {
8367 gimple_seq_add_seq (&tred_ilist, body);
8368 body = tred_ilist;
8369 }
8370
629b3d75 8371 body = maybe_catch_exception (body);
953ff289 8372
629b3d75 8373 if (!phony_loop)
acf0174b 8374 {
629b3d75 8375 /* Region exit marker goes at the end of the loop body. */
28567c40
JJ
8376 gimple *g = gimple_build_omp_return (fd.have_nowait);
8377 gimple_seq_add_stmt (&body, g);
8378
8379 gimple_seq_add_seq (&body, tred_dlist);
8380
8381 maybe_add_implicit_barrier_cancel (ctx, g, &body);
8382
8383 if (rclauses)
8384 OMP_CLAUSE_DECL (rclauses) = rtmp;
acf0174b 8385 }
953ff289 8386
629b3d75
MJ
8387 /* Add OpenACC joining and reduction markers just after the loop. */
8388 if (oacc_tail)
8389 gimple_seq_add_seq (&body, oacc_tail);
917948d3 8390
629b3d75 8391 pop_gimplify_context (new_stmt);
917948d3 8392
629b3d75 8393 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6724f8a6 8394 maybe_remove_omp_member_access_dummy_vars (new_stmt);
629b3d75
MJ
8395 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
8396 if (BLOCK_VARS (block))
8397 TREE_USED (block) = 1;
917948d3 8398
629b3d75
MJ
8399 gimple_bind_set_body (new_stmt, body);
8400 gimple_omp_set_body (stmt, NULL);
8401 gimple_omp_for_set_pre_body (stmt, NULL);
8402}
17720e84 8403
629b3d75
MJ
8404/* Callback for walk_stmts. Check if the current statement only contains
8405 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
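/* For instance, a region of the form

     #pragma omp parallel
     {
       #pragma omp for
       for (...) ...
     }

   yields a single GIMPLE_OMP_FOR in the walked body, so *INFO ends up as 1
   and the caller marks the parallel as combined; any other statement (or a
   second worksharing construct) forces *INFO to -1.  (Illustration only.)  */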
917948d3 8406
629b3d75
MJ
8407static tree
8408check_combined_parallel (gimple_stmt_iterator *gsi_p,
8409 bool *handled_ops_p,
8410 struct walk_stmt_info *wi)
8411{
8412 int *info = (int *) wi->info;
8413 gimple *stmt = gsi_stmt (*gsi_p);
917948d3 8414
629b3d75
MJ
8415 *handled_ops_p = true;
8416 switch (gimple_code (stmt))
acf0174b 8417 {
629b3d75 8418 WALK_SUBSTMTS;
8cba6b95 8419
65f4b875
AO
8420 case GIMPLE_DEBUG:
8421 break;
629b3d75
MJ
8422 case GIMPLE_OMP_FOR:
8423 case GIMPLE_OMP_SECTIONS:
8424 *info = *info == 0 ? 1 : -1;
8425 break;
8426 default:
8427 *info = -1;
8428 break;
acf0174b 8429 }
629b3d75 8430 return NULL;
953ff289
DN
8431}
8432
629b3d75
MJ
8433struct omp_taskcopy_context
8434{
8435 /* This field must be at the beginning, as we do "inheritance": Some
8436 callback functions for tree-inline.c (e.g., omp_copy_decl)
8437 receive a copy_body_data pointer that is up-casted to an
8438 omp_context pointer. */
8439 copy_body_data cb;
8440 omp_context *ctx;
8441};
9a771876 8442
629b3d75
MJ
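/* copy_body_data callback used while building the task copyfn: variables
   that live in the task's shared-field record (sfield_map) get a fresh
   temporary in the child function; everything else is returned unchanged.  */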
8443static tree
8444task_copyfn_copy_decl (tree var, copy_body_data *cb)
8445{
8446 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
9a771876 8447
629b3d75
MJ
8448 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
8449 return create_tmp_var (TREE_TYPE (var));
9a771876 8450
629b3d75
MJ
8451 return var;
8452}
9a771876 8453
629b3d75
MJ
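/* Build a copy of ORIG_TYPE (a task data-sharing record) whose field types,
   sizes and offsets are remapped through TCCTX, so that variably modified
   types refer to the copyfn's own temporaries rather than to the original
   function's variables.  */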
8454static tree
8455task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
9a771876 8456{
629b3d75 8457 tree name, new_fields = NULL, type, f;
9a771876 8458
629b3d75
MJ
8459 type = lang_hooks.types.make_type (RECORD_TYPE);
8460 name = DECL_NAME (TYPE_NAME (orig_type));
8461 name = build_decl (gimple_location (tcctx->ctx->stmt),
8462 TYPE_DECL, name, type);
8463 TYPE_NAME (type) = name;
9a771876 8464
629b3d75 8465 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
9a771876 8466 {
629b3d75
MJ
8467 tree new_f = copy_node (f);
8468 DECL_CONTEXT (new_f) = type;
8469 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
8470 TREE_CHAIN (new_f) = new_fields;
8471 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
8472 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
8473 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
8474 &tcctx->cb, NULL);
8475 new_fields = new_f;
8476 tcctx->cb.decl_map->put (f, new_f);
9a771876 8477 }
629b3d75
MJ
8478 TYPE_FIELDS (type) = nreverse (new_fields);
8479 layout_type (type);
8480 return type;
8481}
9a771876 8482
629b3d75 8483/* Create task copyfn. */
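/* The copyfn receives pointers to the destination and source records and
   copies the captured data from one to the other.  An illustrative sketch
   of what it amounts to for

     #pragma omp task firstprivate(x) shared(s)

   (struct and field names are placeholders):

     void copyfn (struct data_t *dst, struct sdata_t *src)
     {
       dst->x = src->x;   // firstprivate: copy (or copy-construct) the value
       dst->s = src->s;   // shared: copy the pointer
     }

   VLA firstprivates additionally get their storage pointer fixed up in the
   last pass below.  */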
9a771876 8484
629b3d75
MJ
8485static void
8486create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
8487{
8488 struct function *child_cfun;
8489 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
8490 tree record_type, srecord_type, bind, list;
8491 bool record_needs_remap = false, srecord_needs_remap = false;
8492 splay_tree_node n;
8493 struct omp_taskcopy_context tcctx;
8494 location_t loc = gimple_location (task_stmt);
a3bccfa1 8495 size_t looptempno = 0;
9a771876 8496
629b3d75
MJ
8497 child_fn = gimple_omp_task_copy_fn (task_stmt);
8498 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
8499 gcc_assert (child_cfun->cfg == NULL);
8500 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
9a771876 8501
629b3d75
MJ
8502 /* Reset DECL_CONTEXT on function arguments. */
8503 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
8504 DECL_CONTEXT (t) = child_fn;
9a771876 8505
629b3d75
MJ
8506 /* Populate the function. */
8507 push_gimplify_context ();
8508 push_cfun (child_cfun);
9a771876 8509
629b3d75
MJ
8510 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
8511 TREE_SIDE_EFFECTS (bind) = 1;
8512 list = NULL;
8513 DECL_SAVED_TREE (child_fn) = bind;
8514 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
9a771876 8515
629b3d75
MJ
8516 /* Remap src and dst argument types if needed. */
8517 record_type = ctx->record_type;
8518 srecord_type = ctx->srecord_type;
8519 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8520 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
8521 {
8522 record_needs_remap = true;
8523 break;
8524 }
8525 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
8526 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
8527 {
8528 srecord_needs_remap = true;
8529 break;
8530 }
9a771876 8531
629b3d75 8532 if (record_needs_remap || srecord_needs_remap)
9a771876 8533 {
629b3d75
MJ
8534 memset (&tcctx, '\0', sizeof (tcctx));
8535 tcctx.cb.src_fn = ctx->cb.src_fn;
8536 tcctx.cb.dst_fn = child_fn;
8537 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
8538 gcc_checking_assert (tcctx.cb.src_node);
8539 tcctx.cb.dst_node = tcctx.cb.src_node;
8540 tcctx.cb.src_cfun = ctx->cb.src_cfun;
8541 tcctx.cb.copy_decl = task_copyfn_copy_decl;
8542 tcctx.cb.eh_lp_nr = 0;
8543 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
8544 tcctx.cb.decl_map = new hash_map<tree, tree>;
8545 tcctx.ctx = ctx;
9a771876 8546
629b3d75
MJ
8547 if (record_needs_remap)
8548 record_type = task_copyfn_remap_type (&tcctx, record_type);
8549 if (srecord_needs_remap)
8550 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
9a771876
JJ
8551 }
8552 else
629b3d75 8553 tcctx.cb.decl_map = NULL;
9a771876 8554
629b3d75
MJ
8555 arg = DECL_ARGUMENTS (child_fn);
8556 TREE_TYPE (arg) = build_pointer_type (record_type);
8557 sarg = DECL_CHAIN (arg);
8558 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
9a771876 8559
629b3d75
MJ
8560 /* First pass: initialize temporaries used in record_type and srecord_type
8561 sizes and field offsets. */
8562 if (tcctx.cb.decl_map)
8563 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8564 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8565 {
8566 tree *p;
9a771876 8567
629b3d75
MJ
8568 decl = OMP_CLAUSE_DECL (c);
8569 p = tcctx.cb.decl_map->get (decl);
8570 if (p == NULL)
8571 continue;
8572 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
8573 sf = (tree) n->value;
8574 sf = *tcctx.cb.decl_map->get (sf);
8575 src = build_simple_mem_ref_loc (loc, sarg);
8576 src = omp_build_component_ref (src, sf);
8577 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
8578 append_to_statement_list (t, &list);
8579 }
9a771876 8580
629b3d75
MJ
 8581 /* Second pass: copy shared var pointers and copy-construct non-VLA
8582 firstprivate vars. */
8583 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8584 switch (OMP_CLAUSE_CODE (c))
8585 {
8586 splay_tree_key key;
8587 case OMP_CLAUSE_SHARED:
8588 decl = OMP_CLAUSE_DECL (c);
8589 key = (splay_tree_key) decl;
8590 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8591 key = (splay_tree_key) &DECL_UID (decl);
8592 n = splay_tree_lookup (ctx->field_map, key);
8593 if (n == NULL)
8594 break;
8595 f = (tree) n->value;
8596 if (tcctx.cb.decl_map)
8597 f = *tcctx.cb.decl_map->get (f);
8598 n = splay_tree_lookup (ctx->sfield_map, key);
8599 sf = (tree) n->value;
8600 if (tcctx.cb.decl_map)
8601 sf = *tcctx.cb.decl_map->get (sf);
8602 src = build_simple_mem_ref_loc (loc, sarg);
8603 src = omp_build_component_ref (src, sf);
8604 dst = build_simple_mem_ref_loc (loc, arg);
8605 dst = omp_build_component_ref (dst, f);
8606 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8607 append_to_statement_list (t, &list);
8608 break;
28567c40
JJ
8609 case OMP_CLAUSE_REDUCTION:
8610 case OMP_CLAUSE_IN_REDUCTION:
8611 decl = OMP_CLAUSE_DECL (c);
8612 if (TREE_CODE (decl) == MEM_REF)
8613 {
8614 decl = TREE_OPERAND (decl, 0);
8615 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8616 decl = TREE_OPERAND (decl, 0);
8617 if (TREE_CODE (decl) == INDIRECT_REF
8618 || TREE_CODE (decl) == ADDR_EXPR)
8619 decl = TREE_OPERAND (decl, 0);
8620 }
8621 key = (splay_tree_key) decl;
8622 n = splay_tree_lookup (ctx->field_map, key);
8623 if (n == NULL)
8624 break;
8625 f = (tree) n->value;
8626 if (tcctx.cb.decl_map)
8627 f = *tcctx.cb.decl_map->get (f);
8628 n = splay_tree_lookup (ctx->sfield_map, key);
8629 sf = (tree) n->value;
8630 if (tcctx.cb.decl_map)
8631 sf = *tcctx.cb.decl_map->get (sf);
8632 src = build_simple_mem_ref_loc (loc, sarg);
8633 src = omp_build_component_ref (src, sf);
8634 if (decl != OMP_CLAUSE_DECL (c)
8635 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8636 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8637 src = build_simple_mem_ref_loc (loc, src);
8638 dst = build_simple_mem_ref_loc (loc, arg);
8639 dst = omp_build_component_ref (dst, f);
8640 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8641 append_to_statement_list (t, &list);
8642 break;
a3bccfa1
JJ
8643 case OMP_CLAUSE__LOOPTEMP_:
 8644 /* Fields for the first two _looptemp_ clauses are initialized by
 8645 GOMP_taskloop*; the rest are handled like firstprivate. */
8646 if (looptempno < 2)
8647 {
8648 looptempno++;
8649 break;
8650 }
8651 /* FALLTHRU */
28567c40 8652 case OMP_CLAUSE__REDUCTEMP_:
629b3d75
MJ
8653 case OMP_CLAUSE_FIRSTPRIVATE:
8654 decl = OMP_CLAUSE_DECL (c);
8655 if (is_variable_sized (decl))
8656 break;
8657 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
8658 if (n == NULL)
8659 break;
8660 f = (tree) n->value;
8661 if (tcctx.cb.decl_map)
8662 f = *tcctx.cb.decl_map->get (f);
8663 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
8664 if (n != NULL)
8665 {
8666 sf = (tree) n->value;
8667 if (tcctx.cb.decl_map)
8668 sf = *tcctx.cb.decl_map->get (sf);
8669 src = build_simple_mem_ref_loc (loc, sarg);
8670 src = omp_build_component_ref (src, sf);
8671 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
8672 src = build_simple_mem_ref_loc (loc, src);
8673 }
8674 else
8675 src = decl;
8676 dst = build_simple_mem_ref_loc (loc, arg);
8677 dst = omp_build_component_ref (dst, f);
28567c40 8678 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
a3bccfa1
JJ
8679 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8680 else
8681 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
629b3d75
MJ
8682 append_to_statement_list (t, &list);
8683 break;
8684 case OMP_CLAUSE_PRIVATE:
8685 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
8686 break;
8687 decl = OMP_CLAUSE_DECL (c);
8688 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
8689 f = (tree) n->value;
8690 if (tcctx.cb.decl_map)
8691 f = *tcctx.cb.decl_map->get (f);
8692 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
8693 if (n != NULL)
8694 {
8695 sf = (tree) n->value;
8696 if (tcctx.cb.decl_map)
8697 sf = *tcctx.cb.decl_map->get (sf);
8698 src = build_simple_mem_ref_loc (loc, sarg);
8699 src = omp_build_component_ref (src, sf);
8700 if (use_pointer_for_field (decl, NULL))
8701 src = build_simple_mem_ref_loc (loc, src);
8702 }
8703 else
8704 src = decl;
8705 dst = build_simple_mem_ref_loc (loc, arg);
8706 dst = omp_build_component_ref (dst, f);
8707 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8708 append_to_statement_list (t, &list);
8709 break;
8710 default:
8711 break;
8712 }
74bf76ed 8713
629b3d75
MJ
8714 /* Last pass: handle VLA firstprivates. */
8715 if (tcctx.cb.decl_map)
8716 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8717 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8718 {
8719 tree ind, ptr, df;
74bf76ed 8720
629b3d75
MJ
8721 decl = OMP_CLAUSE_DECL (c);
8722 if (!is_variable_sized (decl))
8723 continue;
8724 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
8725 if (n == NULL)
8726 continue;
8727 f = (tree) n->value;
8728 f = *tcctx.cb.decl_map->get (f);
8729 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
8730 ind = DECL_VALUE_EXPR (decl);
8731 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
8732 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
8733 n = splay_tree_lookup (ctx->sfield_map,
8734 (splay_tree_key) TREE_OPERAND (ind, 0));
8735 sf = (tree) n->value;
8736 sf = *tcctx.cb.decl_map->get (sf);
8737 src = build_simple_mem_ref_loc (loc, sarg);
8738 src = omp_build_component_ref (src, sf);
8739 src = build_simple_mem_ref_loc (loc, src);
8740 dst = build_simple_mem_ref_loc (loc, arg);
8741 dst = omp_build_component_ref (dst, f);
8742 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
8743 append_to_statement_list (t, &list);
8744 n = splay_tree_lookup (ctx->field_map,
8745 (splay_tree_key) TREE_OPERAND (ind, 0));
8746 df = (tree) n->value;
8747 df = *tcctx.cb.decl_map->get (df);
8748 ptr = build_simple_mem_ref_loc (loc, arg);
8749 ptr = omp_build_component_ref (ptr, df);
8750 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
8751 build_fold_addr_expr_loc (loc, dst));
8752 append_to_statement_list (t, &list);
8753 }
74bf76ed 8754
629b3d75
MJ
8755 t = build1 (RETURN_EXPR, void_type_node, NULL);
8756 append_to_statement_list (t, &list);
74bf76ed 8757
629b3d75
MJ
8758 if (tcctx.cb.decl_map)
8759 delete tcctx.cb.decl_map;
8760 pop_gimplify_context (NULL);
8761 BIND_EXPR_BODY (bind) = list;
8762 pop_cfun ();
8763}
74bf76ed
JJ
8764
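/* Lower OMP_CLAUSE_DEPEND clauses into the flat dependence array handed to
   the runtime.  As built below, the array is either

     { n, n_out, addr_1, ..., addr_n }            (only in/out/inout present)

   or, when mutexinoutset or depobj dependences are present,

     { 0, n, n_out, n_mutexinoutset, n_in, addr_1, ..., addr_n }

   with the addresses grouped by kind (out/inout, mutexinoutset, in, depobj).
   A new OMP_CLAUSE_DEPEND_LAST clause pointing at the array is prepended to
   *PCLAUSES; the setup code goes to *ISEQ and the clobber of the array to
   *OSEQ.  */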
8765static void
629b3d75 8766lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
74bf76ed 8767{
629b3d75
MJ
8768 tree c, clauses;
8769 gimple *g;
28567c40 8770 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
629b3d75
MJ
8771
8772 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
8773 gcc_assert (clauses);
8774 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8775 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8776 switch (OMP_CLAUSE_DEPEND_KIND (c))
8777 {
28567c40
JJ
8778 case OMP_CLAUSE_DEPEND_LAST:
8779 /* Lowering already done at gimplification. */
8780 return;
629b3d75 8781 case OMP_CLAUSE_DEPEND_IN:
28567c40 8782 cnt[2]++;
629b3d75
MJ
8783 break;
8784 case OMP_CLAUSE_DEPEND_OUT:
8785 case OMP_CLAUSE_DEPEND_INOUT:
28567c40
JJ
8786 cnt[0]++;
8787 break;
8788 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8789 cnt[1]++;
8790 break;
8791 case OMP_CLAUSE_DEPEND_DEPOBJ:
8792 cnt[3]++;
629b3d75
MJ
8793 break;
8794 case OMP_CLAUSE_DEPEND_SOURCE:
8795 case OMP_CLAUSE_DEPEND_SINK:
8796 /* FALLTHRU */
8797 default:
8798 gcc_unreachable ();
8799 }
28567c40
JJ
8800 if (cnt[1] || cnt[3])
8801 idx = 5;
8802 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
8803 tree type = build_array_type_nelts (ptr_type_node, total + idx);
629b3d75
MJ
8804 tree array = create_tmp_var (type);
8805 TREE_ADDRESSABLE (array) = 1;
8806 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8807 NULL_TREE);
28567c40
JJ
8808 if (idx == 5)
8809 {
8810 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
8811 gimple_seq_add_stmt (iseq, g);
8812 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8813 NULL_TREE);
8814 }
8815 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
629b3d75 8816 gimple_seq_add_stmt (iseq, g);
28567c40
JJ
8817 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
8818 {
8819 r = build4 (ARRAY_REF, ptr_type_node, array,
8820 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
8821 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
8822 gimple_seq_add_stmt (iseq, g);
8823 }
8824 for (i = 0; i < 4; i++)
74bf76ed 8825 {
28567c40 8826 if (cnt[i] == 0)
629b3d75
MJ
8827 continue;
8828 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
28567c40
JJ
8829 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
8830 continue;
8831 else
629b3d75 8832 {
28567c40
JJ
8833 switch (OMP_CLAUSE_DEPEND_KIND (c))
8834 {
8835 case OMP_CLAUSE_DEPEND_IN:
8836 if (i != 2)
8837 continue;
8838 break;
8839 case OMP_CLAUSE_DEPEND_OUT:
8840 case OMP_CLAUSE_DEPEND_INOUT:
8841 if (i != 0)
8842 continue;
8843 break;
8844 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8845 if (i != 1)
8846 continue;
8847 break;
8848 case OMP_CLAUSE_DEPEND_DEPOBJ:
8849 if (i != 3)
8850 continue;
8851 break;
8852 default:
8853 gcc_unreachable ();
8854 }
629b3d75
MJ
8855 tree t = OMP_CLAUSE_DECL (c);
8856 t = fold_convert (ptr_type_node, t);
8857 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
8858 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
8859 NULL_TREE, NULL_TREE);
8860 g = gimple_build_assign (r, t);
8861 gimple_seq_add_stmt (iseq, g);
8862 }
74bf76ed 8863 }
629b3d75 8864 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
28567c40 8865 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
629b3d75
MJ
8866 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8867 OMP_CLAUSE_CHAIN (c) = *pclauses;
8868 *pclauses = c;
8869 tree clobber = build_constructor (type, NULL);
8870 TREE_THIS_VOLATILE (clobber) = 1;
8871 g = gimple_build_assign (array, clobber);
8872 gimple_seq_add_stmt (oseq, g);
8873}
8874
8875/* Lower the OpenMP parallel or task directive in the current statement
8876 in GSI_P. CTX holds context information for the directive. */
74bf76ed 8877
629b3d75
MJ
8878static void
8879lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8880{
8881 tree clauses;
8882 tree child_fn, t;
8883 gimple *stmt = gsi_stmt (*gsi_p);
8884 gbind *par_bind, *bind, *dep_bind = NULL;
28567c40 8885 gimple_seq par_body;
629b3d75 8886 location_t loc = gimple_location (stmt);
74bf76ed 8887
629b3d75 8888 clauses = gimple_omp_taskreg_clauses (stmt);
28567c40
JJ
8889 if (gimple_code (stmt) == GIMPLE_OMP_TASK
8890 && gimple_omp_task_taskwait_p (stmt))
8891 {
8892 par_bind = NULL;
8893 par_body = NULL;
8894 }
8895 else
8896 {
8897 par_bind
8898 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
8899 par_body = gimple_bind_body (par_bind);
8900 }
629b3d75
MJ
8901 child_fn = ctx->cb.dst_fn;
8902 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
8903 && !gimple_omp_parallel_combined_p (stmt))
74bf76ed 8904 {
629b3d75
MJ
8905 struct walk_stmt_info wi;
8906 int ws_num = 0;
74bf76ed 8907
629b3d75
MJ
8908 memset (&wi, 0, sizeof (wi));
8909 wi.info = &ws_num;
8910 wi.val_only = true;
8911 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
8912 if (ws_num == 1)
8913 gimple_omp_parallel_set_combined_p (stmt, true);
74bf76ed 8914 }
629b3d75
MJ
8915 gimple_seq dep_ilist = NULL;
8916 gimple_seq dep_olist = NULL;
8917 if (gimple_code (stmt) == GIMPLE_OMP_TASK
8918 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
acf0174b 8919 {
629b3d75
MJ
8920 push_gimplify_context ();
8921 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
8922 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
8923 &dep_ilist, &dep_olist);
9669b00b 8924 }
9669b00b 8925
28567c40
JJ
8926 if (gimple_code (stmt) == GIMPLE_OMP_TASK
8927 && gimple_omp_task_taskwait_p (stmt))
8928 {
8929 if (dep_bind)
8930 {
8931 gsi_replace (gsi_p, dep_bind, true);
8932 gimple_bind_add_seq (dep_bind, dep_ilist);
8933 gimple_bind_add_stmt (dep_bind, stmt);
8934 gimple_bind_add_seq (dep_bind, dep_olist);
8935 pop_gimplify_context (dep_bind);
8936 }
8937 return;
8938 }
8939
629b3d75
MJ
8940 if (ctx->srecord_type)
8941 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
9669b00b 8942
28567c40
JJ
8943 gimple_seq tskred_ilist = NULL;
8944 gimple_seq tskred_olist = NULL;
8945 if ((is_task_ctx (ctx)
8946 && gimple_omp_task_taskloop_p (ctx->stmt)
8947 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
8948 OMP_CLAUSE_REDUCTION))
8949 || (is_parallel_ctx (ctx)
8950 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
8951 OMP_CLAUSE__REDUCTEMP_)))
8952 {
8953 if (dep_bind == NULL)
8954 {
8955 push_gimplify_context ();
8956 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
8957 }
8958 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
8959 : OMP_PARALLEL,
8960 gimple_omp_taskreg_clauses (ctx->stmt),
8961 &tskred_ilist, &tskred_olist);
8962 }
8963
629b3d75 8964 push_gimplify_context ();
74bf76ed 8965
28567c40
JJ
8966 gimple_seq par_olist = NULL;
8967 gimple_seq par_ilist = NULL;
8968 gimple_seq par_rlist = NULL;
629b3d75
MJ
8969 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
8970 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
8971 if (phony_construct && ctx->record_type)
9669b00b 8972 {
629b3d75
MJ
8973 gcc_checking_assert (!ctx->receiver_decl);
8974 ctx->receiver_decl = create_tmp_var
8975 (build_reference_type (ctx->record_type), ".omp_rec");
9669b00b 8976 }
629b3d75
MJ
8977 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
8978 lower_omp (&par_body, ctx);
8979 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
8980 lower_reduction_clauses (clauses, &par_rlist, ctx);
9669b00b 8981
629b3d75
MJ
8982 /* Declare all the variables created by mapping and the variables
8983 declared in the scope of the parallel body. */
8984 record_vars_into (ctx->block_vars, child_fn);
6724f8a6 8985 maybe_remove_omp_member_access_dummy_vars (par_bind);
629b3d75 8986 record_vars_into (gimple_bind_vars (par_bind), child_fn);
74bf76ed 8987
629b3d75 8988 if (ctx->record_type)
74bf76ed 8989 {
629b3d75
MJ
8990 ctx->sender_decl
8991 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
8992 : ctx->record_type, ".omp_data_o");
8993 DECL_NAMELESS (ctx->sender_decl) = 1;
8994 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
8995 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
74bf76ed 8996 }
74bf76ed 8997
28567c40
JJ
8998 gimple_seq olist = NULL;
8999 gimple_seq ilist = NULL;
629b3d75
MJ
9000 lower_send_clauses (clauses, &ilist, &olist, ctx);
9001 lower_send_shared_vars (&ilist, &olist, ctx);
9669b00b 9002
629b3d75 9003 if (ctx->record_type)
74bf76ed 9004 {
629b3d75
MJ
9005 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
9006 TREE_THIS_VOLATILE (clobber) = 1;
9007 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9008 clobber));
d9a6bd32 9009 }
d9a6bd32 9010
629b3d75
MJ
9011 /* Once all the expansions are done, sequence all the different
9012 fragments inside gimple_omp_body. */
d9a6bd32 9013
28567c40 9014 gimple_seq new_body = NULL;
d9a6bd32 9015
629b3d75 9016 if (ctx->record_type)
d9a6bd32 9017 {
629b3d75
MJ
9018 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9019 /* fixup_child_record_type might have changed receiver_decl's type. */
9020 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9021 gimple_seq_add_stmt (&new_body,
9022 gimple_build_assign (ctx->receiver_decl, t));
d9a6bd32
JJ
9023 }
9024
629b3d75
MJ
9025 gimple_seq_add_seq (&new_body, par_ilist);
9026 gimple_seq_add_seq (&new_body, par_body);
9027 gimple_seq_add_seq (&new_body, par_rlist);
9028 if (ctx->cancellable)
9029 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
9030 gimple_seq_add_seq (&new_body, par_olist);
9031 new_body = maybe_catch_exception (new_body);
9032 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
9033 gimple_seq_add_stmt (&new_body,
9034 gimple_build_omp_continue (integer_zero_node,
9035 integer_zero_node));
9036 if (!phony_construct)
d9a6bd32 9037 {
629b3d75
MJ
9038 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9039 gimple_omp_set_body (stmt, new_body);
d9a6bd32
JJ
9040 }
9041
28567c40
JJ
9042 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
9043 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9044 else
9045 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
629b3d75
MJ
9046 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
9047 gimple_bind_add_seq (bind, ilist);
9048 if (!phony_construct)
9049 gimple_bind_add_stmt (bind, stmt);
d9a6bd32 9050 else
629b3d75
MJ
9051 gimple_bind_add_seq (bind, new_body);
9052 gimple_bind_add_seq (bind, olist);
d9a6bd32 9053
629b3d75
MJ
9054 pop_gimplify_context (NULL);
9055
9056 if (dep_bind)
d9a6bd32 9057 {
629b3d75 9058 gimple_bind_add_seq (dep_bind, dep_ilist);
28567c40 9059 gimple_bind_add_seq (dep_bind, tskred_ilist);
629b3d75 9060 gimple_bind_add_stmt (dep_bind, bind);
28567c40 9061 gimple_bind_add_seq (dep_bind, tskred_olist);
629b3d75
MJ
9062 gimple_bind_add_seq (dep_bind, dep_olist);
9063 pop_gimplify_context (dep_bind);
d9a6bd32 9064 }
d9a6bd32
JJ
9065}
9066
629b3d75
MJ
9067/* Lower the GIMPLE_OMP_TARGET in the current statement
9068 in GSI_P. CTX holds context information for the directive. */
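/* Summary sketch of the code that follows: for offloaded regions every
   mapped variable is packaged into three parallel arrays -- .omp_data_arr
   (addresses), .omp_data_sizes and .omp_data_kinds (map kind plus alignment
   in the high bits) -- which are recorded as the construct's data argument
   and ultimately handed to the libgomp/OpenACC runtime.  Data regions only
   need the lowered body plus the same mapping bookkeeping.  */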
d9a6bd32
JJ
9069
9070static void
629b3d75 9071lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
d9a6bd32 9072{
629b3d75
MJ
9073 tree clauses;
9074 tree child_fn, t, c;
9075 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
9076 gbind *tgt_bind, *bind, *dep_bind = NULL;
9077 gimple_seq tgt_body, olist, ilist, fplist, new_body;
9078 location_t loc = gimple_location (stmt);
9079 bool offloaded, data_region;
9080 unsigned int map_cnt = 0;
d9a6bd32 9081
629b3d75
MJ
9082 offloaded = is_gimple_omp_offloaded (stmt);
9083 switch (gimple_omp_target_kind (stmt))
d9a6bd32 9084 {
629b3d75
MJ
9085 case GF_OMP_TARGET_KIND_REGION:
9086 case GF_OMP_TARGET_KIND_UPDATE:
9087 case GF_OMP_TARGET_KIND_ENTER_DATA:
9088 case GF_OMP_TARGET_KIND_EXIT_DATA:
9089 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
9090 case GF_OMP_TARGET_KIND_OACC_KERNELS:
9091 case GF_OMP_TARGET_KIND_OACC_UPDATE:
9092 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
9093 case GF_OMP_TARGET_KIND_OACC_DECLARE:
9094 data_region = false;
9095 break;
9096 case GF_OMP_TARGET_KIND_DATA:
9097 case GF_OMP_TARGET_KIND_OACC_DATA:
9098 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
9099 data_region = true;
9100 break;
9101 default:
9102 gcc_unreachable ();
74bf76ed 9103 }
74bf76ed 9104
629b3d75 9105 clauses = gimple_omp_target_clauses (stmt);
d9a6bd32 9106
629b3d75
MJ
9107 gimple_seq dep_ilist = NULL;
9108 gimple_seq dep_olist = NULL;
9109 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
d9a6bd32 9110 {
629b3d75
MJ
9111 push_gimplify_context ();
9112 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9113 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
9114 &dep_ilist, &dep_olist);
d9a6bd32 9115 }
953ff289 9116
629b3d75
MJ
9117 tgt_bind = NULL;
9118 tgt_body = NULL;
9119 if (offloaded)
e4834818 9120 {
629b3d75
MJ
9121 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
9122 tgt_body = gimple_bind_body (tgt_bind);
e4834818 9123 }
629b3d75
MJ
9124 else if (data_region)
9125 tgt_body = gimple_omp_body (stmt);
9126 child_fn = ctx->cb.dst_fn;
e4834818 9127
629b3d75
MJ
9128 push_gimplify_context ();
9129 fplist = NULL;
e4834818 9130
629b3d75
MJ
9131 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9132 switch (OMP_CLAUSE_CODE (c))
9133 {
9134 tree var, x;
e4834818 9135
629b3d75
MJ
9136 default:
9137 break;
9138 case OMP_CLAUSE_MAP:
9139#if CHECKING_P
9140 /* First check what we're prepared to handle in the following. */
9141 switch (OMP_CLAUSE_MAP_KIND (c))
9142 {
9143 case GOMP_MAP_ALLOC:
9144 case GOMP_MAP_TO:
9145 case GOMP_MAP_FROM:
9146 case GOMP_MAP_TOFROM:
9147 case GOMP_MAP_POINTER:
9148 case GOMP_MAP_TO_PSET:
9149 case GOMP_MAP_DELETE:
9150 case GOMP_MAP_RELEASE:
9151 case GOMP_MAP_ALWAYS_TO:
9152 case GOMP_MAP_ALWAYS_FROM:
9153 case GOMP_MAP_ALWAYS_TOFROM:
9154 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9155 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9156 case GOMP_MAP_STRUCT:
9157 case GOMP_MAP_ALWAYS_POINTER:
9158 break;
9159 case GOMP_MAP_FORCE_ALLOC:
9160 case GOMP_MAP_FORCE_TO:
9161 case GOMP_MAP_FORCE_FROM:
9162 case GOMP_MAP_FORCE_TOFROM:
9163 case GOMP_MAP_FORCE_PRESENT:
9164 case GOMP_MAP_FORCE_DEVICEPTR:
9165 case GOMP_MAP_DEVICE_RESIDENT:
9166 case GOMP_MAP_LINK:
9167 gcc_assert (is_gimple_omp_oacc (stmt));
9168 break;
9169 default:
9170 gcc_unreachable ();
9171 }
9172#endif
9173 /* FALLTHRU */
9174 case OMP_CLAUSE_TO:
9175 case OMP_CLAUSE_FROM:
9176 oacc_firstprivate:
9177 var = OMP_CLAUSE_DECL (c);
9178 if (!DECL_P (var))
9179 {
9180 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
9181 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9182 && (OMP_CLAUSE_MAP_KIND (c)
9183 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
9184 map_cnt++;
9185 continue;
9186 }
e4834818 9187
629b3d75
MJ
9188 if (DECL_SIZE (var)
9189 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9190 {
9191 tree var2 = DECL_VALUE_EXPR (var);
9192 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9193 var2 = TREE_OPERAND (var2, 0);
9194 gcc_assert (DECL_P (var2));
9195 var = var2;
9196 }
e4834818 9197
629b3d75
MJ
9198 if (offloaded
9199 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9200 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9201 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9202 {
9203 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9204 {
9205 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
9206 && varpool_node::get_create (var)->offloadable)
9207 continue;
e4834818 9208
629b3d75
MJ
9209 tree type = build_pointer_type (TREE_TYPE (var));
9210 tree new_var = lookup_decl (var, ctx);
9211 x = create_tmp_var_raw (type, get_name (new_var));
9212 gimple_add_tmp_var (x);
9213 x = build_simple_mem_ref (x);
9214 SET_DECL_VALUE_EXPR (new_var, x);
9215 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9216 }
9217 continue;
9218 }
e4834818 9219
629b3d75
MJ
9220 if (!maybe_lookup_field (var, ctx))
9221 continue;
e4834818 9222
629b3d75
MJ
9223 /* Don't remap oacc parallel reduction variables, because the
9224 intermediate result must be local to each gang. */
9225 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9226 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
9227 {
9228 x = build_receiver_ref (var, true, ctx);
9229 tree new_var = lookup_decl (var, ctx);
e4834818 9230
629b3d75
MJ
9231 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9232 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9233 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9234 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9235 x = build_simple_mem_ref (x);
9236 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9237 {
9238 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
bd1cab35
CLT
9239 if (omp_is_reference (new_var)
9240 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
629b3d75
MJ
9241 {
9242 /* Create a local object to hold the instance
9243 value. */
9244 tree type = TREE_TYPE (TREE_TYPE (new_var));
9245 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
9246 tree inst = create_tmp_var (type, id);
9247 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
9248 x = build_fold_addr_expr (inst);
9249 }
9250 gimplify_assign (new_var, x, &fplist);
9251 }
9252 else if (DECL_P (new_var))
9253 {
9254 SET_DECL_VALUE_EXPR (new_var, x);
9255 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9256 }
9257 else
9258 gcc_unreachable ();
9259 }
9260 map_cnt++;
9261 break;
e4834818 9262
629b3d75
MJ
9263 case OMP_CLAUSE_FIRSTPRIVATE:
9264 if (is_oacc_parallel (ctx))
9265 goto oacc_firstprivate;
9266 map_cnt++;
9267 var = OMP_CLAUSE_DECL (c);
9268 if (!omp_is_reference (var)
9269 && !is_gimple_reg_type (TREE_TYPE (var)))
9270 {
9271 tree new_var = lookup_decl (var, ctx);
9272 if (is_variable_sized (var))
9273 {
9274 tree pvar = DECL_VALUE_EXPR (var);
9275 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9276 pvar = TREE_OPERAND (pvar, 0);
9277 gcc_assert (DECL_P (pvar));
9278 tree new_pvar = lookup_decl (pvar, ctx);
9279 x = build_fold_indirect_ref (new_pvar);
9280 TREE_THIS_NOTRAP (x) = 1;
9281 }
9282 else
9283 x = build_receiver_ref (var, true, ctx);
9284 SET_DECL_VALUE_EXPR (new_var, x);
9285 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9286 }
9287 break;
e4834818 9288
629b3d75
MJ
9289 case OMP_CLAUSE_PRIVATE:
9290 if (is_gimple_omp_oacc (ctx->stmt))
9291 break;
9292 var = OMP_CLAUSE_DECL (c);
9293 if (is_variable_sized (var))
9294 {
9295 tree new_var = lookup_decl (var, ctx);
9296 tree pvar = DECL_VALUE_EXPR (var);
9297 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9298 pvar = TREE_OPERAND (pvar, 0);
9299 gcc_assert (DECL_P (pvar));
9300 tree new_pvar = lookup_decl (pvar, ctx);
9301 x = build_fold_indirect_ref (new_pvar);
9302 TREE_THIS_NOTRAP (x) = 1;
9303 SET_DECL_VALUE_EXPR (new_var, x);
9304 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9305 }
9306 break;
e4834818 9307
629b3d75
MJ
9308 case OMP_CLAUSE_USE_DEVICE_PTR:
9309 case OMP_CLAUSE_IS_DEVICE_PTR:
9310 var = OMP_CLAUSE_DECL (c);
9311 map_cnt++;
9312 if (is_variable_sized (var))
9313 {
9314 tree new_var = lookup_decl (var, ctx);
9315 tree pvar = DECL_VALUE_EXPR (var);
9316 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9317 pvar = TREE_OPERAND (pvar, 0);
9318 gcc_assert (DECL_P (pvar));
9319 tree new_pvar = lookup_decl (pvar, ctx);
9320 x = build_fold_indirect_ref (new_pvar);
9321 TREE_THIS_NOTRAP (x) = 1;
9322 SET_DECL_VALUE_EXPR (new_var, x);
9323 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9324 }
9325 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9326 {
9327 tree new_var = lookup_decl (var, ctx);
9328 tree type = build_pointer_type (TREE_TYPE (var));
9329 x = create_tmp_var_raw (type, get_name (new_var));
9330 gimple_add_tmp_var (x);
9331 x = build_simple_mem_ref (x);
9332 SET_DECL_VALUE_EXPR (new_var, x);
9333 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9334 }
9335 else
9336 {
9337 tree new_var = lookup_decl (var, ctx);
9338 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
9339 gimple_add_tmp_var (x);
9340 SET_DECL_VALUE_EXPR (new_var, x);
9341 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9342 }
9343 break;
9344 }
e4834818 9345
629b3d75 9346 if (offloaded)
e4834818 9347 {
629b3d75
MJ
9348 target_nesting_level++;
9349 lower_omp (&tgt_body, ctx);
9350 target_nesting_level--;
e4834818 9351 }
629b3d75
MJ
9352 else if (data_region)
9353 lower_omp (&tgt_body, ctx);
e4834818 9354
629b3d75 9355 if (offloaded)
e4834818 9356 {
629b3d75
MJ
9357 /* Declare all the variables created by mapping and the variables
9358 declared in the scope of the target body. */
9359 record_vars_into (ctx->block_vars, child_fn);
6724f8a6 9360 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
629b3d75 9361 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
e4834818
NS
9362 }
9363
629b3d75
MJ
9364 olist = NULL;
9365 ilist = NULL;
9366 if (ctx->record_type)
e4834818 9367 {
629b3d75
MJ
9368 ctx->sender_decl
9369 = create_tmp_var (ctx->record_type, ".omp_data_arr");
9370 DECL_NAMELESS (ctx->sender_decl) = 1;
9371 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9372 t = make_tree_vec (3);
9373 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
9374 TREE_VEC_ELT (t, 1)
9375 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
9376 ".omp_data_sizes");
9377 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
9378 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
9379 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
9380 tree tkind_type = short_unsigned_type_node;
9381 int talign_shift = 8;
9382 TREE_VEC_ELT (t, 2)
9383 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
9384 ".omp_data_kinds");
9385 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
9386 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
9387 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
9388 gimple_omp_target_set_data_arg (stmt, t);
953ff289 9389
629b3d75
MJ
9390 vec<constructor_elt, va_gc> *vsize;
9391 vec<constructor_elt, va_gc> *vkind;
9392 vec_alloc (vsize, map_cnt);
9393 vec_alloc (vkind, map_cnt);
9394 unsigned int map_idx = 0;
953ff289 9395
629b3d75
MJ
9396 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9397 switch (OMP_CLAUSE_CODE (c))
953ff289 9398 {
629b3d75
MJ
9399 tree ovar, nc, s, purpose, var, x, type;
9400 unsigned int talign;
953ff289 9401
629b3d75
MJ
9402 default:
9403 break;
953ff289 9404
629b3d75
MJ
9405 case OMP_CLAUSE_MAP:
9406 case OMP_CLAUSE_TO:
9407 case OMP_CLAUSE_FROM:
9408 oacc_firstprivate_map:
9409 nc = c;
9410 ovar = OMP_CLAUSE_DECL (c);
9411 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9412 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9413 || (OMP_CLAUSE_MAP_KIND (c)
9414 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
9415 break;
9416 if (!DECL_P (ovar))
c34938a8 9417 {
629b3d75
MJ
9418 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9419 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9420 {
9421 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
9422 == get_base_address (ovar));
9423 nc = OMP_CLAUSE_CHAIN (c);
9424 ovar = OMP_CLAUSE_DECL (nc);
9425 }
9426 else
9427 {
9428 tree x = build_sender_ref (ovar, ctx);
9429 tree v
9430 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
9431 gimplify_assign (x, v, &ilist);
9432 nc = NULL_TREE;
9433 }
9434 }
9435 else
9436 {
9437 if (DECL_SIZE (ovar)
9438 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
9439 {
9440 tree ovar2 = DECL_VALUE_EXPR (ovar);
9441 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
9442 ovar2 = TREE_OPERAND (ovar2, 0);
9443 gcc_assert (DECL_P (ovar2));
9444 ovar = ovar2;
9445 }
9446 if (!maybe_lookup_field (ovar, ctx))
9447 continue;
c34938a8 9448 }
777f7f9a 9449
629b3d75
MJ
9450 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
9451 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
9452 talign = DECL_ALIGN_UNIT (ovar);
9453 if (nc)
9454 {
9455 var = lookup_decl_in_outer_ctx (ovar, ctx);
9456 x = build_sender_ref (ovar, ctx);
777f7f9a 9457
629b3d75
MJ
9458 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9459 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9460 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9461 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
9462 {
9463 gcc_assert (offloaded);
9464 tree avar
9465 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
9466 mark_addressable (avar);
9467 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
9468 talign = DECL_ALIGN_UNIT (avar);
9469 avar = build_fold_addr_expr (avar);
9470 gimplify_assign (x, avar, &ilist);
9471 }
9472 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9473 {
9474 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9475 if (!omp_is_reference (var))
9476 {
9477 if (is_gimple_reg (var)
9478 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9479 TREE_NO_WARNING (var) = 1;
9480 var = build_fold_addr_expr (var);
9481 }
9482 else
9483 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9484 gimplify_assign (x, var, &ilist);
9485 }
9486 else if (is_gimple_reg (var))
9487 {
9488 gcc_assert (offloaded);
9489 tree avar = create_tmp_var (TREE_TYPE (var));
9490 mark_addressable (avar);
9491 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
9492 if (GOMP_MAP_COPY_TO_P (map_kind)
9493 || map_kind == GOMP_MAP_POINTER
9494 || map_kind == GOMP_MAP_TO_PSET
9495 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9496 {
9497 /* If we need to initialize a temporary
9498 with VAR because it is not addressable, and
9499 the variable hasn't been initialized yet, then
9500 we'll get a warning for the store to avar.
 9501 Don't warn in that case; the mapping might
9502 be implicit. */
9503 TREE_NO_WARNING (var) = 1;
9504 gimplify_assign (avar, var, &ilist);
9505 }
9506 avar = build_fold_addr_expr (avar);
9507 gimplify_assign (x, avar, &ilist);
9508 if ((GOMP_MAP_COPY_FROM_P (map_kind)
9509 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9510 && !TYPE_READONLY (TREE_TYPE (var)))
9511 {
9512 x = unshare_expr (x);
9513 x = build_simple_mem_ref (x);
9514 gimplify_assign (var, x, &olist);
9515 }
9516 }
9517 else
9518 {
9519 var = build_fold_addr_expr (var);
9520 gimplify_assign (x, var, &ilist);
9521 }
9522 }
9523 s = NULL_TREE;
9524 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9525 {
9526 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9527 s = TREE_TYPE (ovar);
9528 if (TREE_CODE (s) == REFERENCE_TYPE)
9529 s = TREE_TYPE (s);
9530 s = TYPE_SIZE_UNIT (s);
9531 }
9532 else
9533 s = OMP_CLAUSE_SIZE (c);
9534 if (s == NULL_TREE)
9535 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9536 s = fold_convert (size_type_node, s);
9537 purpose = size_int (map_idx++);
9538 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9539 if (TREE_CODE (s) != INTEGER_CST)
9540 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
777f7f9a 9541
629b3d75
MJ
9542 unsigned HOST_WIDE_INT tkind, tkind_zero;
9543 switch (OMP_CLAUSE_CODE (c))
9544 {
9545 case OMP_CLAUSE_MAP:
9546 tkind = OMP_CLAUSE_MAP_KIND (c);
9547 tkind_zero = tkind;
9548 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
9549 switch (tkind)
9550 {
9551 case GOMP_MAP_ALLOC:
9552 case GOMP_MAP_TO:
9553 case GOMP_MAP_FROM:
9554 case GOMP_MAP_TOFROM:
9555 case GOMP_MAP_ALWAYS_TO:
9556 case GOMP_MAP_ALWAYS_FROM:
9557 case GOMP_MAP_ALWAYS_TOFROM:
9558 case GOMP_MAP_RELEASE:
9559 case GOMP_MAP_FORCE_TO:
9560 case GOMP_MAP_FORCE_FROM:
9561 case GOMP_MAP_FORCE_TOFROM:
9562 case GOMP_MAP_FORCE_PRESENT:
9563 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
9564 break;
9565 case GOMP_MAP_DELETE:
9566 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
9567 default:
9568 break;
9569 }
9570 if (tkind_zero != tkind)
9571 {
9572 if (integer_zerop (s))
9573 tkind = tkind_zero;
9574 else if (integer_nonzerop (s))
9575 tkind_zero = tkind;
9576 }
9577 break;
9578 case OMP_CLAUSE_FIRSTPRIVATE:
9579 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9580 tkind = GOMP_MAP_TO;
9581 tkind_zero = tkind;
9582 break;
9583 case OMP_CLAUSE_TO:
9584 tkind = GOMP_MAP_TO;
9585 tkind_zero = tkind;
9586 break;
9587 case OMP_CLAUSE_FROM:
9588 tkind = GOMP_MAP_FROM;
9589 tkind_zero = tkind;
9590 break;
9591 default:
9592 gcc_unreachable ();
9593 }
9594 gcc_checking_assert (tkind
9595 < (HOST_WIDE_INT_C (1U) << talign_shift));
9596 gcc_checking_assert (tkind_zero
9597 < (HOST_WIDE_INT_C (1U) << talign_shift));
9598 talign = ceil_log2 (talign);
9599 tkind |= talign << talign_shift;
9600 tkind_zero |= talign << talign_shift;
9601 gcc_checking_assert (tkind
9602 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9603 gcc_checking_assert (tkind_zero
9604 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9605 if (tkind == tkind_zero)
9606 x = build_int_cstu (tkind_type, tkind);
9607 else
9608 {
9609 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
9610 x = build3 (COND_EXPR, tkind_type,
9611 fold_build2 (EQ_EXPR, boolean_type_node,
9612 unshare_expr (s), size_zero_node),
9613 build_int_cstu (tkind_type, tkind_zero),
9614 build_int_cstu (tkind_type, tkind));
9615 }
9616 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
9617 if (nc && nc != c)
9618 c = nc;
9619 break;
05409788 9620
629b3d75
MJ
9621 case OMP_CLAUSE_FIRSTPRIVATE:
9622 if (is_oacc_parallel (ctx))
9623 goto oacc_firstprivate_map;
9624 ovar = OMP_CLAUSE_DECL (c);
9625 if (omp_is_reference (ovar))
9626 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9627 else
9628 talign = DECL_ALIGN_UNIT (ovar);
9629 var = lookup_decl_in_outer_ctx (ovar, ctx);
9630 x = build_sender_ref (ovar, ctx);
9631 tkind = GOMP_MAP_FIRSTPRIVATE;
9632 type = TREE_TYPE (ovar);
9633 if (omp_is_reference (ovar))
9634 type = TREE_TYPE (type);
9635 if ((INTEGRAL_TYPE_P (type)
9636 && TYPE_PRECISION (type) <= POINTER_SIZE)
9637 || TREE_CODE (type) == POINTER_TYPE)
9638 {
9639 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
9640 tree t = var;
9641 if (omp_is_reference (var))
9642 t = build_simple_mem_ref (var);
9643 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9644 TREE_NO_WARNING (var) = 1;
9645 if (TREE_CODE (type) != POINTER_TYPE)
9646 t = fold_convert (pointer_sized_int_node, t);
9647 t = fold_convert (TREE_TYPE (x), t);
9648 gimplify_assign (x, t, &ilist);
9649 }
9650 else if (omp_is_reference (var))
9651 gimplify_assign (x, var, &ilist);
9652 else if (is_gimple_reg (var))
9653 {
9654 tree avar = create_tmp_var (TREE_TYPE (var));
9655 mark_addressable (avar);
9656 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9657 TREE_NO_WARNING (var) = 1;
9658 gimplify_assign (avar, var, &ilist);
9659 avar = build_fold_addr_expr (avar);
9660 gimplify_assign (x, avar, &ilist);
9661 }
9662 else
9663 {
9664 var = build_fold_addr_expr (var);
9665 gimplify_assign (x, var, &ilist);
9666 }
9667 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
9668 s = size_int (0);
9669 else if (omp_is_reference (ovar))
9670 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9671 else
9672 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9673 s = fold_convert (size_type_node, s);
9674 purpose = size_int (map_idx++);
9675 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9676 if (TREE_CODE (s) != INTEGER_CST)
9677 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
05409788 9678
629b3d75
MJ
9679 gcc_checking_assert (tkind
9680 < (HOST_WIDE_INT_C (1U) << talign_shift));
9681 talign = ceil_log2 (talign);
9682 tkind |= talign << talign_shift;
9683 gcc_checking_assert (tkind
9684 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9685 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9686 build_int_cstu (tkind_type, tkind));
9687 break;
05409788 9688
629b3d75
MJ
9689 case OMP_CLAUSE_USE_DEVICE_PTR:
9690 case OMP_CLAUSE_IS_DEVICE_PTR:
9691 ovar = OMP_CLAUSE_DECL (c);
9692 var = lookup_decl_in_outer_ctx (ovar, ctx);
9693 x = build_sender_ref (ovar, ctx);
9694 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
9695 tkind = GOMP_MAP_USE_DEVICE_PTR;
9696 else
9697 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
9698 type = TREE_TYPE (ovar);
9699 if (TREE_CODE (type) == ARRAY_TYPE)
9700 var = build_fold_addr_expr (var);
9701 else
9702 {
9703 if (omp_is_reference (ovar))
9704 {
9705 type = TREE_TYPE (type);
9706 if (TREE_CODE (type) != ARRAY_TYPE)
9707 var = build_simple_mem_ref (var);
9708 var = fold_convert (TREE_TYPE (x), var);
9709 }
9710 }
9711 gimplify_assign (x, var, &ilist);
9712 s = size_int (0);
9713 purpose = size_int (map_idx++);
9714 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9715 gcc_checking_assert (tkind
9716 < (HOST_WIDE_INT_C (1U) << talign_shift));
9717 gcc_checking_assert (tkind
9718 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9719 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9720 build_int_cstu (tkind_type, tkind));
9721 break;
9722 }
05409788 9723
629b3d75 9724 gcc_assert (map_idx == map_cnt);
20906c66 9725
629b3d75
MJ
9726 DECL_INITIAL (TREE_VEC_ELT (t, 1))
9727 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
9728 DECL_INITIAL (TREE_VEC_ELT (t, 2))
9729 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
9730 for (int i = 1; i <= 2; i++)
9731 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
9732 {
9733 gimple_seq initlist = NULL;
9734 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
9735 TREE_VEC_ELT (t, i)),
9736 &initlist, true, NULL_TREE);
9737 gimple_seq_add_seq (&ilist, initlist);
20906c66 9738
629b3d75
MJ
9739 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
9740 NULL);
9741 TREE_THIS_VOLATILE (clobber) = 1;
9742 gimple_seq_add_stmt (&olist,
9743 gimple_build_assign (TREE_VEC_ELT (t, i),
9744 clobber));
9745 }
05409788 9746
629b3d75
MJ
9747 tree clobber = build_constructor (ctx->record_type, NULL);
9748 TREE_THIS_VOLATILE (clobber) = 1;
9749 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9750 clobber));
9751 }
05409788 9752
629b3d75
MJ
9753 /* Once all the expansions are done, sequence all the different
9754 fragments inside gimple_omp_body. */
05409788 9755
629b3d75 9756 new_body = NULL;
05409788 9757
629b3d75
MJ
9758 if (offloaded
9759 && ctx->record_type)
05409788 9760 {
629b3d75
MJ
9761 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9762 /* fixup_child_record_type might have changed receiver_decl's type. */
9763 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9764 gimple_seq_add_stmt (&new_body,
9765 gimple_build_assign (ctx->receiver_decl, t));
05409788 9766 }
629b3d75 9767 gimple_seq_add_seq (&new_body, fplist);
05409788 9768
629b3d75 9769 if (offloaded || data_region)
0645c1a2 9770 {
629b3d75
MJ
9771 tree prev = NULL_TREE;
9772 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9773 switch (OMP_CLAUSE_CODE (c))
0645c1a2 9774 {
629b3d75
MJ
9775 tree var, x;
9776 default:
9777 break;
9778 case OMP_CLAUSE_FIRSTPRIVATE:
9779 if (is_gimple_omp_oacc (ctx->stmt))
9780 break;
9781 var = OMP_CLAUSE_DECL (c);
9782 if (omp_is_reference (var)
9783 || is_gimple_reg_type (TREE_TYPE (var)))
0645c1a2 9784 {
629b3d75
MJ
9785 tree new_var = lookup_decl (var, ctx);
9786 tree type;
9787 type = TREE_TYPE (var);
9788 if (omp_is_reference (var))
9789 type = TREE_TYPE (type);
9790 if ((INTEGRAL_TYPE_P (type)
9791 && TYPE_PRECISION (type) <= POINTER_SIZE)
9792 || TREE_CODE (type) == POINTER_TYPE)
9793 {
9794 x = build_receiver_ref (var, false, ctx);
9795 if (TREE_CODE (type) != POINTER_TYPE)
9796 x = fold_convert (pointer_sized_int_node, x);
9797 x = fold_convert (type, x);
9798 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9799 fb_rvalue);
9800 if (omp_is_reference (var))
9801 {
9802 tree v = create_tmp_var_raw (type, get_name (var));
9803 gimple_add_tmp_var (v);
9804 TREE_ADDRESSABLE (v) = 1;
9805 gimple_seq_add_stmt (&new_body,
9806 gimple_build_assign (v, x));
9807 x = build_fold_addr_expr (v);
9808 }
9809 gimple_seq_add_stmt (&new_body,
9810 gimple_build_assign (new_var, x));
9811 }
9812 else
9813 {
9814 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
9815 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9816 fb_rvalue);
9817 gimple_seq_add_stmt (&new_body,
9818 gimple_build_assign (new_var, x));
9819 }
9820 }
9821 else if (is_variable_sized (var))
9822 {
9823 tree pvar = DECL_VALUE_EXPR (var);
9824 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9825 pvar = TREE_OPERAND (pvar, 0);
9826 gcc_assert (DECL_P (pvar));
9827 tree new_var = lookup_decl (pvar, ctx);
9828 x = build_receiver_ref (var, false, ctx);
9829 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9830 gimple_seq_add_stmt (&new_body,
9831 gimple_build_assign (new_var, x));
9832 }
9833 break;
9834 case OMP_CLAUSE_PRIVATE:
9835 if (is_gimple_omp_oacc (ctx->stmt))
9836 break;
9837 var = OMP_CLAUSE_DECL (c);
9838 if (omp_is_reference (var))
9839 {
9840 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9841 tree new_var = lookup_decl (var, ctx);
9842 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
9843 if (TREE_CONSTANT (x))
9844 {
9845 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
9846 get_name (var));
9847 gimple_add_tmp_var (x);
9848 TREE_ADDRESSABLE (x) = 1;
9849 x = build_fold_addr_expr_loc (clause_loc, x);
9850 }
9851 else
9852 break;
9bd46bc9 9853
629b3d75
MJ
9854 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
9855 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9856 gimple_seq_add_stmt (&new_body,
9857 gimple_build_assign (new_var, x));
9858 }
9859 break;
9860 case OMP_CLAUSE_USE_DEVICE_PTR:
9861 case OMP_CLAUSE_IS_DEVICE_PTR:
9862 var = OMP_CLAUSE_DECL (c);
9863 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
9864 x = build_sender_ref (var, ctx);
9865 else
9866 x = build_receiver_ref (var, false, ctx);
9867 if (is_variable_sized (var))
9868 {
9869 tree pvar = DECL_VALUE_EXPR (var);
9870 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9871 pvar = TREE_OPERAND (pvar, 0);
9872 gcc_assert (DECL_P (pvar));
9873 tree new_var = lookup_decl (pvar, ctx);
9874 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9875 gimple_seq_add_stmt (&new_body,
9876 gimple_build_assign (new_var, x));
9877 }
9878 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9879 {
9880 tree new_var = lookup_decl (var, ctx);
9881 new_var = DECL_VALUE_EXPR (new_var);
9882 gcc_assert (TREE_CODE (new_var) == MEM_REF);
9883 new_var = TREE_OPERAND (new_var, 0);
9884 gcc_assert (DECL_P (new_var));
9885 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9886 gimple_seq_add_stmt (&new_body,
9887 gimple_build_assign (new_var, x));
9888 }
9bd46bc9 9889 else
629b3d75
MJ
9890 {
9891 tree type = TREE_TYPE (var);
9892 tree new_var = lookup_decl (var, ctx);
9893 if (omp_is_reference (var))
9894 {
9895 type = TREE_TYPE (type);
9896 if (TREE_CODE (type) != ARRAY_TYPE)
9897 {
9898 tree v = create_tmp_var_raw (type, get_name (var));
9899 gimple_add_tmp_var (v);
9900 TREE_ADDRESSABLE (v) = 1;
9901 x = fold_convert (type, x);
9902 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9903 fb_rvalue);
9904 gimple_seq_add_stmt (&new_body,
9905 gimple_build_assign (v, x));
9906 x = build_fold_addr_expr (v);
9907 }
9908 }
9909 new_var = DECL_VALUE_EXPR (new_var);
9910 x = fold_convert (TREE_TYPE (new_var), x);
9911 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9912 gimple_seq_add_stmt (&new_body,
9913 gimple_build_assign (new_var, x));
9914 }
9915 break;
9bd46bc9 9916 }
629b3d75
MJ
 9917	      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
 9918	 so that any firstprivate vars holding OMP_CLAUSE_SIZE values, if needed,
 9919	 have already been handled.  Similarly for OMP_CLAUSE_PRIVATE on VLAs
 9920	 or references to VLAs. */
9921 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9922 switch (OMP_CLAUSE_CODE (c))
9923 {
9924 tree var;
9925 default:
9926 break;
9927 case OMP_CLAUSE_MAP:
9928 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9929 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9930 {
9931 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
a90c8804 9932 poly_int64 offset = 0;
629b3d75
MJ
9933 gcc_assert (prev);
9934 var = OMP_CLAUSE_DECL (c);
9935 if (DECL_P (var)
9936 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
9937 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
9938 ctx))
9939 && varpool_node::get_create (var)->offloadable)
9940 break;
9941 if (TREE_CODE (var) == INDIRECT_REF
9942 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
9943 var = TREE_OPERAND (var, 0);
9944 if (TREE_CODE (var) == COMPONENT_REF)
9945 {
9946 var = get_addr_base_and_unit_offset (var, &offset);
9947 gcc_assert (var != NULL_TREE && DECL_P (var));
9948 }
9949 else if (DECL_SIZE (var)
9950 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9951 {
9952 tree var2 = DECL_VALUE_EXPR (var);
9953 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9954 var2 = TREE_OPERAND (var2, 0);
9955 gcc_assert (DECL_P (var2));
9956 var = var2;
9957 }
9958 tree new_var = lookup_decl (var, ctx), x;
9959 tree type = TREE_TYPE (new_var);
9960 bool is_ref;
9961 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
9962 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9963 == COMPONENT_REF))
9964 {
9965 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
9966 is_ref = true;
9967 new_var = build2 (MEM_REF, type,
9968 build_fold_addr_expr (new_var),
9969 build_int_cst (build_pointer_type (type),
9970 offset));
9971 }
9972 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
9973 {
9974 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
9975 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
9976 new_var = build2 (MEM_REF, type,
9977 build_fold_addr_expr (new_var),
9978 build_int_cst (build_pointer_type (type),
9979 offset));
9980 }
9981 else
9982 is_ref = omp_is_reference (var);
9983 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9984 is_ref = false;
9985 bool ref_to_array = false;
9986 if (is_ref)
9987 {
9988 type = TREE_TYPE (type);
9989 if (TREE_CODE (type) == ARRAY_TYPE)
9990 {
9991 type = build_pointer_type (type);
9992 ref_to_array = true;
9993 }
9994 }
9995 else if (TREE_CODE (type) == ARRAY_TYPE)
9996 {
9997 tree decl2 = DECL_VALUE_EXPR (new_var);
9998 gcc_assert (TREE_CODE (decl2) == MEM_REF);
9999 decl2 = TREE_OPERAND (decl2, 0);
10000 gcc_assert (DECL_P (decl2));
10001 new_var = decl2;
10002 type = TREE_TYPE (new_var);
10003 }
10004 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
10005 x = fold_convert_loc (clause_loc, type, x);
10006 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
10007 {
10008 tree bias = OMP_CLAUSE_SIZE (c);
10009 if (DECL_P (bias))
10010 bias = lookup_decl (bias, ctx);
10011 bias = fold_convert_loc (clause_loc, sizetype, bias);
10012 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
10013 bias);
10014 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
10015 TREE_TYPE (x), x, bias);
10016 }
10017 if (ref_to_array)
10018 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10019 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10020 if (is_ref && !ref_to_array)
10021 {
10022 tree t = create_tmp_var_raw (type, get_name (var));
10023 gimple_add_tmp_var (t);
10024 TREE_ADDRESSABLE (t) = 1;
10025 gimple_seq_add_stmt (&new_body,
10026 gimple_build_assign (t, x));
10027 x = build_fold_addr_expr_loc (clause_loc, t);
10028 }
10029 gimple_seq_add_stmt (&new_body,
10030 gimple_build_assign (new_var, x));
10031 prev = NULL_TREE;
10032 }
10033 else if (OMP_CLAUSE_CHAIN (c)
10034 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
10035 == OMP_CLAUSE_MAP
10036 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10037 == GOMP_MAP_FIRSTPRIVATE_POINTER
10038 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10039 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
10040 prev = c;
10041 break;
10042 case OMP_CLAUSE_PRIVATE:
10043 var = OMP_CLAUSE_DECL (c);
10044 if (is_variable_sized (var))
10045 {
10046 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10047 tree new_var = lookup_decl (var, ctx);
10048 tree pvar = DECL_VALUE_EXPR (var);
10049 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10050 pvar = TREE_OPERAND (pvar, 0);
10051 gcc_assert (DECL_P (pvar));
10052 tree new_pvar = lookup_decl (pvar, ctx);
10053 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10054 tree al = size_int (DECL_ALIGN (var));
10055 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
10056 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10057 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
10058 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10059 gimple_seq_add_stmt (&new_body,
10060 gimple_build_assign (new_pvar, x));
10061 }
10062 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
10063 {
10064 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10065 tree new_var = lookup_decl (var, ctx);
10066 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
10067 if (TREE_CONSTANT (x))
10068 break;
10069 else
10070 {
10071 tree atmp
10072 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10073 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
10074 tree al = size_int (TYPE_ALIGN (rtype));
10075 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10076 }
9bd46bc9 10077
629b3d75
MJ
10078 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10079 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10080 gimple_seq_add_stmt (&new_body,
10081 gimple_build_assign (new_var, x));
10082 }
10083 break;
10084 }
9bd46bc9 10085
629b3d75
MJ
10086 gimple_seq fork_seq = NULL;
10087 gimple_seq join_seq = NULL;
9bd46bc9 10088
629b3d75 10089 if (is_oacc_parallel (ctx))
9bd46bc9 10090 {
629b3d75
MJ
10091 /* If there are reductions on the offloaded region itself, treat
10092 them as a dummy GANG loop. */
10093 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
9bd46bc9 10094
629b3d75
MJ
10095 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
10096 false, NULL, NULL, &fork_seq, &join_seq, ctx);
9bd46bc9 10097 }
9bd46bc9 10098
629b3d75
MJ
10099 gimple_seq_add_seq (&new_body, fork_seq);
10100 gimple_seq_add_seq (&new_body, tgt_body);
10101 gimple_seq_add_seq (&new_body, join_seq);
9bd46bc9 10102
629b3d75
MJ
10103 if (offloaded)
10104 new_body = maybe_catch_exception (new_body);
9bd46bc9 10105
629b3d75
MJ
10106 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
10107 gimple_omp_set_body (stmt, new_body);
9bd46bc9
NS
10108 }
10109
629b3d75
MJ
10110 bind = gimple_build_bind (NULL, NULL,
10111 tgt_bind ? gimple_bind_block (tgt_bind)
10112 : NULL_TREE);
10113 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
10114 gimple_bind_add_seq (bind, ilist);
10115 gimple_bind_add_stmt (bind, stmt);
10116 gimple_bind_add_seq (bind, olist);
9bd46bc9
NS
10117
10118 pop_gimplify_context (NULL);
10119
629b3d75 10120 if (dep_bind)
b6adbb9f 10121 {
629b3d75
MJ
10122 gimple_bind_add_seq (dep_bind, dep_ilist);
10123 gimple_bind_add_stmt (dep_bind, bind);
10124 gimple_bind_add_seq (dep_bind, dep_olist);
10125 pop_gimplify_context (dep_bind);
b6adbb9f 10126 }
b6adbb9f
NS
10127}
10128
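(Editorial illustration, not part of omp-low.c; function and variable names are invented.)  A hedged sketch of user code whose firstprivate scalar the sender-side code earlier in lower_omp_target classifies as GOMP_MAP_FIRSTPRIVATE_INT: because x is an integral type no wider than a pointer, its value is copied directly into the sender slot with size 0 instead of being passed by address.

/* Illustration only -- hypothetical user code, not part of this file.  */
void
offload_scalar (int x, int *out)
{
  /* X is firstprivate on the target region; with the lowering above its
     value travels in the map array itself (GOMP_MAP_FIRSTPRIVATE_INT)
     rather than through a pointer to it.  */
  #pragma omp target map(from: out[0:1]) firstprivate (x)
  {
    out[0] = x + 1;
  }
}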
629b3d75 10129/* Expand code for an OpenMP teams directive. */
94829f87 10130
f8393eb0 10131static void
629b3d75 10132lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
94829f87 10133{
629b3d75
MJ
10134 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
10135 push_gimplify_context ();
94829f87 10136
629b3d75
MJ
10137 tree block = make_node (BLOCK);
10138 gbind *bind = gimple_build_bind (NULL, NULL, block);
10139 gsi_replace (gsi_p, bind, true);
10140 gimple_seq bind_body = NULL;
10141 gimple_seq dlist = NULL;
10142 gimple_seq olist = NULL;
94829f87 10143
629b3d75
MJ
10144 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
10145 OMP_CLAUSE_NUM_TEAMS);
10146 if (num_teams == NULL_TREE)
10147 num_teams = build_int_cst (unsigned_type_node, 0);
10148 else
94829f87 10149 {
629b3d75
MJ
10150 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
10151 num_teams = fold_convert (unsigned_type_node, num_teams);
10152 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
94829f87 10153 }
629b3d75
MJ
10154 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
10155 OMP_CLAUSE_THREAD_LIMIT);
10156 if (thread_limit == NULL_TREE)
10157 thread_limit = build_int_cst (unsigned_type_node, 0);
10158 else
94829f87 10159 {
629b3d75
MJ
10160 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
10161 thread_limit = fold_convert (unsigned_type_node, thread_limit);
10162 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
10163 fb_rvalue);
94829f87 10164 }
9bd46bc9 10165
629b3d75
MJ
10166 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
10167 &bind_body, &dlist, ctx, NULL);
10168 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
10169 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
10170 if (!gimple_omp_teams_grid_phony (teams_stmt))
9bd46bc9 10171 {
629b3d75
MJ
10172 gimple_seq_add_stmt (&bind_body, teams_stmt);
10173 location_t loc = gimple_location (teams_stmt);
10174 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
10175 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
10176 gimple_set_location (call, loc);
10177 gimple_seq_add_stmt (&bind_body, call);
9bd46bc9
NS
10178 }
10179
629b3d75
MJ
10180 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
10181 gimple_omp_set_body (teams_stmt, NULL);
10182 gimple_seq_add_seq (&bind_body, olist);
10183 gimple_seq_add_seq (&bind_body, dlist);
10184 if (!gimple_omp_teams_grid_phony (teams_stmt))
10185 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
10186 gimple_bind_set_body (bind, bind_body);
9bd46bc9 10187
629b3d75 10188 pop_gimplify_context (bind);
9bd46bc9 10189
629b3d75
MJ
10190 gimple_bind_append_vars (bind, ctx->block_vars);
10191 BLOCK_VARS (block) = ctx->block_vars;
10192 if (BLOCK_VARS (block))
10193 TREE_USED (block) = 1;
9bd46bc9
NS
10194}
10195
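(Editorial illustration, invented example, not part of omp-low.c.)  The directive shape lower_omp_teams above handles: the num_teams and thread_limit clause expressions are gimplified into the two arguments of the GOMP_teams call built by the function, and an omitted clause defaults to 0.

/* Illustration only -- hypothetical user code, not part of this file.  */
void
teams_example (void)
{
  #pragma omp target
  #pragma omp teams num_teams (4) thread_limit (8)
  {
    /* Executed by each team; the lowering above emits the equivalent of
       GOMP_teams (4, 8) before this body.  */
  }
}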
629b3d75 10196/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
9bd46bc9 10197
629b3d75
MJ
10198static void
10199lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9bd46bc9 10200{
629b3d75
MJ
10201 gimple *stmt = gsi_stmt (*gsi_p);
10202 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10203 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
10204 gimple_build_omp_return (false));
9bd46bc9
NS
10205}
10206
9bd46bc9 10207
629b3d75
MJ
10208/* Callback for lower_omp_1. Return non-NULL if *tp needs to be
10209 regimplified. If DATA is non-NULL, lower_omp_1 is being called outside
10210 of an OMP context, but with task_shared_vars set. */
9bd46bc9 10211
629b3d75
MJ
10212static tree
10213lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
10214 void *data)
9bd46bc9 10215{
629b3d75 10216 tree t = *tp;
9bd46bc9 10217
629b3d75
MJ
10218 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
10219 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
10220 return t;
9bd46bc9 10221
629b3d75
MJ
10222 if (task_shared_vars
10223 && DECL_P (t)
10224 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
10225 return t;
9bd46bc9 10226
629b3d75
MJ
10227 /* If a global variable has been privatized, TREE_CONSTANT on
10228 ADDR_EXPR might be wrong. */
10229 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
10230 recompute_tree_invariant_for_addr_expr (t);
9bd46bc9 10231
629b3d75
MJ
10232 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
10233 return NULL_TREE;
9bd46bc9
NS
10234}
10235
629b3d75
MJ
10236/* Data to be communicated between lower_omp_regimplify_operands and
10237 lower_omp_regimplify_operands_p. */
9bd46bc9 10238
629b3d75 10239struct lower_omp_regimplify_operands_data
9bd46bc9 10240{
629b3d75
MJ
10241 omp_context *ctx;
10242 vec<tree> *decls;
10243};
9bd46bc9 10244
629b3d75
MJ
10245/* Helper function for lower_omp_regimplify_operands. Find
10246 omp_member_access_dummy_var vars and adjust temporarily their
10247 DECL_VALUE_EXPRs if needed. */
9bd46bc9 10248
629b3d75
MJ
10249static tree
10250lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
10251 void *data)
9bd46bc9 10252{
629b3d75
MJ
10253 tree t = omp_member_access_dummy_var (*tp);
10254 if (t)
9bd46bc9 10255 {
629b3d75
MJ
10256 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
10257 lower_omp_regimplify_operands_data *ldata
10258 = (lower_omp_regimplify_operands_data *) wi->info;
10259 tree o = maybe_lookup_decl (t, ldata->ctx);
10260 if (o != t)
9bd46bc9 10261 {
629b3d75
MJ
10262 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
10263 ldata->decls->safe_push (*tp);
10264 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
10265 SET_DECL_VALUE_EXPR (*tp, v);
9bd46bc9 10266 }
9bd46bc9 10267 }
629b3d75
MJ
10268 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
10269 return NULL_TREE;
9bd46bc9
NS
10270}
10271
629b3d75
MJ
10272/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
10273 of omp_member_access_dummy_var vars during regimplification. */
9bd46bc9
NS
10274
10275static void
629b3d75
MJ
10276lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
10277 gimple_stmt_iterator *gsi_p)
9bd46bc9 10278{
629b3d75
MJ
10279 auto_vec<tree, 10> decls;
10280 if (ctx)
10281 {
10282 struct walk_stmt_info wi;
10283 memset (&wi, '\0', sizeof (wi));
10284 struct lower_omp_regimplify_operands_data data;
10285 data.ctx = ctx;
10286 data.decls = &decls;
10287 wi.info = &data;
10288 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
10289 }
10290 gimple_regimplify_operands (stmt, gsi_p);
10291 while (!decls.is_empty ())
10292 {
10293 tree t = decls.pop ();
10294 tree v = decls.pop ();
10295 SET_DECL_VALUE_EXPR (t, v);
10296 }
9bd46bc9
NS
10297}
10298
9bd46bc9 10299static void
629b3d75 10300lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9bd46bc9 10301{
629b3d75
MJ
10302 gimple *stmt = gsi_stmt (*gsi_p);
10303 struct walk_stmt_info wi;
10304 gcall *call_stmt;
9bd46bc9 10305
629b3d75
MJ
10306 if (gimple_has_location (stmt))
10307 input_location = gimple_location (stmt);
9bd46bc9 10308
629b3d75
MJ
10309 if (task_shared_vars)
10310 memset (&wi, '\0', sizeof (wi));
9bd46bc9 10311
629b3d75
MJ
10312 /* If we have issued syntax errors, avoid doing any heavy lifting.
10313 Just replace the OMP directives with a NOP to avoid
10314 confusing RTL expansion. */
10315 if (seen_error () && is_gimple_omp (stmt))
9bd46bc9 10316 {
629b3d75
MJ
10317 gsi_replace (gsi_p, gimple_build_nop (), true);
10318 return;
10319 }
9bd46bc9 10320
629b3d75
MJ
10321 switch (gimple_code (stmt))
10322 {
10323 case GIMPLE_COND:
10324 {
10325 gcond *cond_stmt = as_a <gcond *> (stmt);
10326 if ((ctx || task_shared_vars)
10327 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
10328 lower_omp_regimplify_p,
10329 ctx ? NULL : &wi, NULL)
10330 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
10331 lower_omp_regimplify_p,
10332 ctx ? NULL : &wi, NULL)))
10333 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
10334 }
10335 break;
10336 case GIMPLE_CATCH:
10337 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
10338 break;
10339 case GIMPLE_EH_FILTER:
10340 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
10341 break;
10342 case GIMPLE_TRY:
10343 lower_omp (gimple_try_eval_ptr (stmt), ctx);
10344 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
10345 break;
10346 case GIMPLE_TRANSACTION:
01914336 10347 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
629b3d75
MJ
10348 ctx);
10349 break;
10350 case GIMPLE_BIND:
10351 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
6724f8a6 10352 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
629b3d75
MJ
10353 break;
10354 case GIMPLE_OMP_PARALLEL:
10355 case GIMPLE_OMP_TASK:
10356 ctx = maybe_lookup_ctx (stmt);
10357 gcc_assert (ctx);
10358 if (ctx->cancellable)
10359 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10360 lower_omp_taskreg (gsi_p, ctx);
10361 break;
10362 case GIMPLE_OMP_FOR:
10363 ctx = maybe_lookup_ctx (stmt);
10364 gcc_assert (ctx);
10365 if (ctx->cancellable)
10366 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10367 lower_omp_for (gsi_p, ctx);
10368 break;
10369 case GIMPLE_OMP_SECTIONS:
10370 ctx = maybe_lookup_ctx (stmt);
10371 gcc_assert (ctx);
10372 if (ctx->cancellable)
10373 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10374 lower_omp_sections (gsi_p, ctx);
10375 break;
10376 case GIMPLE_OMP_SINGLE:
10377 ctx = maybe_lookup_ctx (stmt);
10378 gcc_assert (ctx);
10379 lower_omp_single (gsi_p, ctx);
10380 break;
10381 case GIMPLE_OMP_MASTER:
10382 ctx = maybe_lookup_ctx (stmt);
10383 gcc_assert (ctx);
10384 lower_omp_master (gsi_p, ctx);
10385 break;
10386 case GIMPLE_OMP_TASKGROUP:
10387 ctx = maybe_lookup_ctx (stmt);
10388 gcc_assert (ctx);
10389 lower_omp_taskgroup (gsi_p, ctx);
10390 break;
10391 case GIMPLE_OMP_ORDERED:
10392 ctx = maybe_lookup_ctx (stmt);
10393 gcc_assert (ctx);
10394 lower_omp_ordered (gsi_p, ctx);
10395 break;
10396 case GIMPLE_OMP_CRITICAL:
10397 ctx = maybe_lookup_ctx (stmt);
10398 gcc_assert (ctx);
10399 lower_omp_critical (gsi_p, ctx);
10400 break;
10401 case GIMPLE_OMP_ATOMIC_LOAD:
10402 if ((ctx || task_shared_vars)
10403 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
10404 as_a <gomp_atomic_load *> (stmt)),
10405 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
10406 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
10407 break;
10408 case GIMPLE_OMP_TARGET:
10409 ctx = maybe_lookup_ctx (stmt);
10410 gcc_assert (ctx);
10411 lower_omp_target (gsi_p, ctx);
10412 break;
10413 case GIMPLE_OMP_TEAMS:
10414 ctx = maybe_lookup_ctx (stmt);
10415 gcc_assert (ctx);
28567c40
JJ
10416 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
10417 lower_omp_taskreg (gsi_p, ctx);
10418 else
10419 lower_omp_teams (gsi_p, ctx);
629b3d75
MJ
10420 break;
10421 case GIMPLE_OMP_GRID_BODY:
10422 ctx = maybe_lookup_ctx (stmt);
10423 gcc_assert (ctx);
10424 lower_omp_grid_body (gsi_p, ctx);
10425 break;
10426 case GIMPLE_CALL:
10427 tree fndecl;
10428 call_stmt = as_a <gcall *> (stmt);
10429 fndecl = gimple_call_fndecl (call_stmt);
10430 if (fndecl
3d78e008 10431 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
629b3d75 10432 switch (DECL_FUNCTION_CODE (fndecl))
9bd46bc9 10433 {
629b3d75
MJ
10434 case BUILT_IN_GOMP_BARRIER:
10435 if (ctx == NULL)
10436 break;
10437 /* FALLTHRU */
10438 case BUILT_IN_GOMP_CANCEL:
10439 case BUILT_IN_GOMP_CANCELLATION_POINT:
10440 omp_context *cctx;
10441 cctx = ctx;
10442 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
10443 cctx = cctx->outer;
10444 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
10445 if (!cctx->cancellable)
10446 {
10447 if (DECL_FUNCTION_CODE (fndecl)
10448 == BUILT_IN_GOMP_CANCELLATION_POINT)
10449 {
10450 stmt = gimple_build_nop ();
10451 gsi_replace (gsi_p, stmt, false);
10452 }
10453 break;
10454 }
10455 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
10456 {
10457 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
10458 gimple_call_set_fndecl (call_stmt, fndecl);
10459 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
10460 }
10461 tree lhs;
10462 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
10463 gimple_call_set_lhs (call_stmt, lhs);
10464 tree fallthru_label;
10465 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
10466 gimple *g;
10467 g = gimple_build_label (fallthru_label);
10468 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10469 g = gimple_build_cond (NE_EXPR, lhs,
10470 fold_convert (TREE_TYPE (lhs),
10471 boolean_false_node),
10472 cctx->cancel_label, fallthru_label);
10473 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10474 break;
10475 default:
10476 break;
9bd46bc9 10477 }
629b3d75
MJ
10478 /* FALLTHRU */
10479 default:
10480 if ((ctx || task_shared_vars)
10481 && walk_gimple_op (stmt, lower_omp_regimplify_p,
10482 ctx ? NULL : &wi))
9bd46bc9 10483 {
629b3d75
MJ
10484 /* Just remove clobbers; this should happen only if we have
10485 "privatized" local addressable variables in SIMD regions.
10486 The clobber isn't needed in that case, and gimplifying the address
10487 of the ARRAY_REF into a pointer and creating a MEM_REF based
10488 clobber would create worse code than we get with the clobber
10489 dropped. */
10490 if (gimple_clobber_p (stmt))
4ae13300 10491 {
629b3d75
MJ
10492 gsi_replace (gsi_p, gimple_build_nop (), true);
10493 break;
9bd46bc9 10494 }
629b3d75 10495 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
9bd46bc9 10496 }
629b3d75 10497 break;
9bd46bc9 10498 }
9bd46bc9
NS
10499}
10500
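(Editorial illustration, invented example.)  Code that exercises the cancellation handling in the GIMPLE_CALL case of lower_omp_1 above: a GOMP_barrier call in a cancellable region is redirected to GOMP_barrier_cancel, and the boolean result of GOMP_cancel / GOMP_cancellation_point feeds a conditional branch to the region's cancel label.

#include <omp.h>

/* Illustration only -- hypothetical user code, not part of this file.
   Assumes A has room for at least omp_get_max_threads () elements.  */
void
cancel_example (int n, int *a)
{
  #pragma omp parallel
  {
    if (n <= 0)
      {
        #pragma omp cancel parallel           /* lowered via GOMP_cancel */
      }
    #pragma omp cancellation point parallel   /* lowered via GOMP_cancellation_point */
    #pragma omp barrier                       /* rewritten to GOMP_barrier_cancel */
    a[omp_get_thread_num ()] = n;
  }
}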
9bd46bc9 10501static void
629b3d75 10502lower_omp (gimple_seq *body, omp_context *ctx)
9bd46bc9 10503{
629b3d75
MJ
10504 location_t saved_location = input_location;
10505 gimple_stmt_iterator gsi;
10506 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10507 lower_omp_1 (&gsi, ctx);
10508 /* During gimplification, we haven't folded statements inside offloading
10509 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
10510 if (target_nesting_level || taskreg_nesting_level)
10511 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10512 fold_stmt (&gsi);
10513 input_location = saved_location;
9bd46bc9
NS
10514}
10515
629b3d75 10516/* Main entry point. */
9bd46bc9 10517
629b3d75
MJ
10518static unsigned int
10519execute_lower_omp (void)
9bd46bc9 10520{
629b3d75
MJ
10521 gimple_seq body;
10522 int i;
10523 omp_context *ctx;
9bd46bc9 10524
629b3d75
MJ
10525 /* This pass always runs, to provide PROP_gimple_lomp.
10526 But often, there is nothing to do. */
5e9d6aa4 10527 if (flag_openacc == 0 && flag_openmp == 0
629b3d75
MJ
10528 && flag_openmp_simd == 0)
10529 return 0;
9bd46bc9 10530
629b3d75
MJ
10531 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
10532 delete_omp_context);
9bd46bc9 10533
629b3d75 10534 body = gimple_body (current_function_decl);
9bd46bc9 10535
629b3d75
MJ
10536 if (hsa_gen_requested_p ())
10537 omp_grid_gridify_all_targets (&body);
10538
10539 scan_omp (&body, NULL);
10540 gcc_assert (taskreg_nesting_level == 0);
10541 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
10542 finish_taskreg_scan (ctx);
10543 taskreg_contexts.release ();
9bd46bc9 10544
629b3d75
MJ
10545 if (all_contexts->root)
10546 {
10547 if (task_shared_vars)
10548 push_gimplify_context ();
10549 lower_omp (&body, NULL);
10550 if (task_shared_vars)
10551 pop_gimplify_context (NULL);
10552 }
10553
10554 if (all_contexts)
10555 {
10556 splay_tree_delete (all_contexts);
10557 all_contexts = NULL;
9bd46bc9 10558 }
629b3d75 10559 BITMAP_FREE (task_shared_vars);
6724f8a6
JJ
10560
10561 /* If the current function is a method, remove the artificial dummy VAR_DECLs
10562 created for non-static data member privatization; they aren't needed for
10563 debuginfo or anything else, have already been replaced everywhere in the
10564 IL, and cause problems with LTO. */
10565 if (DECL_ARGUMENTS (current_function_decl)
10566 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
10567 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
10568 == POINTER_TYPE))
10569 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
629b3d75 10570 return 0;
9bd46bc9
NS
10571}
10572
629b3d75 10573namespace {
9bd46bc9 10574
629b3d75 10575const pass_data pass_data_lower_omp =
9bd46bc9 10576{
629b3d75
MJ
10577 GIMPLE_PASS, /* type */
10578 "omplower", /* name */
fd2b8c8b 10579 OPTGROUP_OMP, /* optinfo_flags */
629b3d75
MJ
10580 TV_NONE, /* tv_id */
10581 PROP_gimple_any, /* properties_required */
10582 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
10583 0, /* properties_destroyed */
10584 0, /* todo_flags_start */
10585 0, /* todo_flags_finish */
10586};
9bd46bc9 10587
629b3d75
MJ
10588class pass_lower_omp : public gimple_opt_pass
10589{
10590public:
10591 pass_lower_omp (gcc::context *ctxt)
10592 : gimple_opt_pass (pass_data_lower_omp, ctxt)
10593 {}
9bd46bc9 10594
629b3d75
MJ
10595 /* opt_pass methods: */
10596 virtual unsigned int execute (function *) { return execute_lower_omp (); }
9bd46bc9 10597
629b3d75 10598}; // class pass_lower_omp
9bd46bc9 10599
629b3d75 10600} // anon namespace
9bd46bc9 10601
629b3d75
MJ
10602gimple_opt_pass *
10603make_pass_lower_omp (gcc::context *ctxt)
10604{
10605 return new pass_lower_omp (ctxt);
9bd46bc9 10606}
629b3d75
MJ
10607\f
10608/* The following is a utility to diagnose structured block violations.
10609 It is not part of the "omplower" pass, as that's invoked too late. It
10610 should be invoked by the respective front ends after gimplification. */
9bd46bc9 10611
629b3d75 10612static splay_tree all_labels;
9bd46bc9 10613
629b3d75
MJ
10614/* Check for mismatched contexts and generate an error if needed. Return
10615 true if an error is detected. */
9bd46bc9 10616
629b3d75
MJ
10617static bool
10618diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
10619 gimple *branch_ctx, gimple *label_ctx)
10620{
10621 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
10622 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9bd46bc9 10623
629b3d75
MJ
10624 if (label_ctx == branch_ctx)
10625 return false;
9bd46bc9 10626
629b3d75 10627 const char* kind = NULL;
9bd46bc9 10628
629b3d75 10629 if (flag_openacc)
9bd46bc9 10630 {
629b3d75
MJ
10631 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
10632 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9bd46bc9 10633 {
629b3d75
MJ
10634 gcc_checking_assert (kind == NULL);
10635 kind = "OpenACC";
9bd46bc9
NS
10636 }
10637 }
629b3d75 10638 if (kind == NULL)
5b37e866 10639 {
0a734553 10640 gcc_checking_assert (flag_openmp || flag_openmp_simd);
629b3d75 10641 kind = "OpenMP";
5b37e866 10642 }
9bd46bc9 10643
01914336 10644 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
629b3d75
MJ
10645 so we could traverse it and issue a correct "exit" or "enter" error
10646 message upon a structured block violation.
c5a64cfe 10647
629b3d75
MJ
10648 We built the context by building a list with tree_cons'ing, but there is
10649 no easy counterpart in gimple tuples. It seems like far too much work
10650 for issuing exit/enter error messages. If someone really misses the
01914336 10651 distinct error message... patches welcome. */
c5a64cfe 10652
629b3d75
MJ
10653#if 0
10654 /* Try to avoid confusing the user by producing an error message
10655 with correct "exit" or "enter" verbiage. We prefer "exit"
10656 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
10657 if (branch_ctx == NULL)
10658 exit_p = false;
10659 else
5b37e866 10660 {
629b3d75
MJ
10661 while (label_ctx)
10662 {
10663 if (TREE_VALUE (label_ctx) == branch_ctx)
10664 {
10665 exit_p = false;
10666 break;
10667 }
10668 label_ctx = TREE_CHAIN (label_ctx);
10669 }
5b37e866
NS
10670 }
10671
629b3d75
MJ
10672 if (exit_p)
10673 error ("invalid exit from %s structured block", kind);
10674 else
10675 error ("invalid entry to %s structured block", kind);
10676#endif
5b37e866 10677
629b3d75
MJ
10678 /* If it's obvious we have an invalid entry, be specific about the error. */
10679 if (branch_ctx == NULL)
10680 error ("invalid entry to %s structured block", kind);
10681 else
c5a64cfe 10682 {
629b3d75
MJ
10683 /* Otherwise, be vague and lazy, but efficient. */
10684 error ("invalid branch to/from %s structured block", kind);
c5a64cfe 10685 }
5b37e866 10686
629b3d75
MJ
10687 gsi_replace (gsi_p, gimple_build_nop (), false);
10688 return true;
c5a64cfe
NS
10689}
10690
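(Editorial illustration, invented example.)  The kind of jump diagnose_sb_0 reports: the goto below branches from inside the parallel construct's structured block to a label outside it, so pass 2 finds differing contexts and this function emits "invalid branch to/from OpenMP structured block"; a jump into a block from outside would instead produce the "invalid entry" form.

/* Illustration only -- hypothetical user code, not part of this file.  */
void
bad_branch (int n)
{
  #pragma omp parallel
  {
    if (n > 0)
      goto done;   /* branch out of the structured block: rejected here */
  }
 done:
  return;
}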
629b3d75
MJ
10691/* Pass 1: Create a minimal tree of structured blocks, and record
10692 where each label is found. */
9bd46bc9 10693
629b3d75
MJ
10694static tree
10695diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10696 struct walk_stmt_info *wi)
9bd46bc9 10697{
629b3d75
MJ
10698 gimple *context = (gimple *) wi->info;
10699 gimple *inner_context;
10700 gimple *stmt = gsi_stmt (*gsi_p);
9bd46bc9 10701
629b3d75 10702 *handled_ops_p = true;
6e91acf8 10703
629b3d75
MJ
10704 switch (gimple_code (stmt))
10705 {
10706 WALK_SUBSTMTS;
6e91acf8 10707
629b3d75
MJ
10708 case GIMPLE_OMP_PARALLEL:
10709 case GIMPLE_OMP_TASK:
10710 case GIMPLE_OMP_SECTIONS:
10711 case GIMPLE_OMP_SINGLE:
10712 case GIMPLE_OMP_SECTION:
10713 case GIMPLE_OMP_MASTER:
10714 case GIMPLE_OMP_ORDERED:
10715 case GIMPLE_OMP_CRITICAL:
10716 case GIMPLE_OMP_TARGET:
10717 case GIMPLE_OMP_TEAMS:
10718 case GIMPLE_OMP_TASKGROUP:
10719 /* The minimal context here is just the current OMP construct. */
10720 inner_context = stmt;
10721 wi->info = inner_context;
10722 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
10723 wi->info = context;
10724 break;
e5014671 10725
629b3d75
MJ
10726 case GIMPLE_OMP_FOR:
10727 inner_context = stmt;
10728 wi->info = inner_context;
10729 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10730 walk them. */
10731 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
10732 diagnose_sb_1, NULL, wi);
10733 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
10734 wi->info = context;
10735 break;
e5014671 10736
629b3d75
MJ
10737 case GIMPLE_LABEL:
10738 splay_tree_insert (all_labels,
10739 (splay_tree_key) gimple_label_label (
10740 as_a <glabel *> (stmt)),
10741 (splay_tree_value) context);
10742 break;
e5014671 10743
629b3d75
MJ
10744 default:
10745 break;
e5014671
NS
10746 }
10747
629b3d75 10748 return NULL_TREE;
e5014671
NS
10749}
10750
629b3d75
MJ
10751/* Pass 2: Check each branch and see if its context differs from that of
10752 the destination label's context. */
94829f87 10753
629b3d75
MJ
10754static tree
10755diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10756 struct walk_stmt_info *wi)
94829f87 10757{
629b3d75
MJ
10758 gimple *context = (gimple *) wi->info;
10759 splay_tree_node n;
10760 gimple *stmt = gsi_stmt (*gsi_p);
f8393eb0 10761
629b3d75 10762 *handled_ops_p = true;
f8393eb0 10763
629b3d75 10764 switch (gimple_code (stmt))
9bd46bc9 10765 {
629b3d75 10766 WALK_SUBSTMTS;
9bd46bc9 10767
629b3d75
MJ
10768 case GIMPLE_OMP_PARALLEL:
10769 case GIMPLE_OMP_TASK:
10770 case GIMPLE_OMP_SECTIONS:
10771 case GIMPLE_OMP_SINGLE:
10772 case GIMPLE_OMP_SECTION:
10773 case GIMPLE_OMP_MASTER:
10774 case GIMPLE_OMP_ORDERED:
10775 case GIMPLE_OMP_CRITICAL:
10776 case GIMPLE_OMP_TARGET:
10777 case GIMPLE_OMP_TEAMS:
10778 case GIMPLE_OMP_TASKGROUP:
10779 wi->info = stmt;
10780 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
10781 wi->info = context;
10782 break;
e5014671 10783
629b3d75
MJ
10784 case GIMPLE_OMP_FOR:
10785 wi->info = stmt;
10786 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10787 walk them. */
10788 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
10789 diagnose_sb_2, NULL, wi);
10790 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
10791 wi->info = context;
10792 break;
e5014671 10793
629b3d75
MJ
10794 case GIMPLE_COND:
10795 {
10796 gcond *cond_stmt = as_a <gcond *> (stmt);
10797 tree lab = gimple_cond_true_label (cond_stmt);
10798 if (lab)
9bd46bc9 10799 {
629b3d75
MJ
10800 n = splay_tree_lookup (all_labels,
10801 (splay_tree_key) lab);
10802 diagnose_sb_0 (gsi_p, context,
10803 n ? (gimple *) n->value : NULL);
9bd46bc9 10804 }
629b3d75
MJ
10805 lab = gimple_cond_false_label (cond_stmt);
10806 if (lab)
10807 {
10808 n = splay_tree_lookup (all_labels,
10809 (splay_tree_key) lab);
10810 diagnose_sb_0 (gsi_p, context,
10811 n ? (gimple *) n->value : NULL);
10812 }
10813 }
10814 break;
9bd46bc9 10815
629b3d75
MJ
10816 case GIMPLE_GOTO:
10817 {
10818 tree lab = gimple_goto_dest (stmt);
10819 if (TREE_CODE (lab) != LABEL_DECL)
10820 break;
10821
10822 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
10823 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
10824 }
10825 break;
9bd46bc9 10826
629b3d75
MJ
10827 case GIMPLE_SWITCH:
10828 {
10829 gswitch *switch_stmt = as_a <gswitch *> (stmt);
10830 unsigned int i;
10831 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9bd46bc9 10832 {
629b3d75
MJ
10833 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
10834 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
10835 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
10836 break;
9bd46bc9 10837 }
9bd46bc9 10838 }
629b3d75 10839 break;
9bd46bc9 10840
629b3d75
MJ
10841 case GIMPLE_RETURN:
10842 diagnose_sb_0 (gsi_p, context, NULL);
10843 break;
94829f87 10844
629b3d75
MJ
10845 default:
10846 break;
94829f87
NS
10847 }
10848
629b3d75 10849 return NULL_TREE;
bd751975
NS
10850}
10851
629b3d75
MJ
10852static unsigned int
10853diagnose_omp_structured_block_errors (void)
94829f87 10854{
629b3d75
MJ
10855 struct walk_stmt_info wi;
10856 gimple_seq body = gimple_body (current_function_decl);
346a966e 10857
629b3d75 10858 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
94829f87 10859
629b3d75
MJ
10860 memset (&wi, 0, sizeof (wi));
10861 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
94829f87 10862
629b3d75
MJ
10863 memset (&wi, 0, sizeof (wi));
10864 wi.want_locations = true;
10865 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
94829f87 10866
629b3d75 10867 gimple_set_body (current_function_decl, body);
9669b00b 10868
629b3d75
MJ
10869 splay_tree_delete (all_labels);
10870 all_labels = NULL;
9669b00b 10871
9669b00b
AM
10872 return 0;
10873}
10874
10875namespace {
10876
629b3d75 10877const pass_data pass_data_diagnose_omp_blocks =
9669b00b
AM
10878{
10879 GIMPLE_PASS, /* type */
629b3d75 10880 "*diagnose_omp_blocks", /* name */
fd2b8c8b 10881 OPTGROUP_OMP, /* optinfo_flags */
9669b00b 10882 TV_NONE, /* tv_id */
629b3d75
MJ
10883 PROP_gimple_any, /* properties_required */
10884 0, /* properties_provided */
9669b00b
AM
10885 0, /* properties_destroyed */
10886 0, /* todo_flags_start */
629b3d75 10887 0, /* todo_flags_finish */
9669b00b
AM
10888};
10889
629b3d75 10890class pass_diagnose_omp_blocks : public gimple_opt_pass
9669b00b
AM
10891{
10892public:
629b3d75
MJ
10893 pass_diagnose_omp_blocks (gcc::context *ctxt)
10894 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9669b00b
AM
10895 {}
10896
10897 /* opt_pass methods: */
629b3d75
MJ
10898 virtual bool gate (function *)
10899 {
5e9d6aa4 10900 return flag_openacc || flag_openmp || flag_openmp_simd;
629b3d75 10901 }
9669b00b
AM
10902 virtual unsigned int execute (function *)
10903 {
629b3d75 10904 return diagnose_omp_structured_block_errors ();
4a38b02b
IV
10905 }
10906
629b3d75 10907}; // class pass_diagnose_omp_blocks
4a38b02b
IV
10908
10909} // anon namespace
10910
10911gimple_opt_pass *
629b3d75 10912make_pass_diagnose_omp_blocks (gcc::context *ctxt)
4a38b02b 10913{
629b3d75 10914 return new pass_diagnose_omp_blocks (ctxt);
4a38b02b 10915}
629b3d75 10916\f
4a38b02b 10917
953ff289 10918#include "gt-omp-low.h"