gcc/omp-low.c
ca4c3545 1/* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
1e8e9920 5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
fbd26352 7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
1e8e9920 8
9This file is part of GCC.
10
11GCC is free software; you can redistribute it and/or modify it under
12the terms of the GNU General Public License as published by the Free
8c4c00c1 13Software Foundation; either version 3, or (at your option) any later
1e8e9920 14version.
15
16GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17WARRANTY; without even the implied warranty of MERCHANTABILITY or
18FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19for more details.
20
21You should have received a copy of the GNU General Public License
8c4c00c1 22along with GCC; see the file COPYING3. If not see
23<http://www.gnu.org/licenses/>. */
1e8e9920 24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
9ef16211 28#include "backend.h"
7c29e30e 29#include "target.h"
1e8e9920 30#include "tree.h"
9ef16211 31#include "gimple.h"
7c29e30e 32#include "tree-pass.h"
9ef16211 33#include "ssa.h"
7c29e30e 34#include "cgraph.h"
35#include "pretty-print.h"
36#include "diagnostic-core.h"
b20a8bb4 37#include "fold-const.h"
9ed99284 38#include "stor-layout.h"
bc61cadb 39#include "internal-fn.h"
40#include "gimple-fold.h"
a8783bee 41#include "gimplify.h"
dcf1a1ec 42#include "gimple-iterator.h"
e795d6e1 43#include "gimplify-me.h"
dcf1a1ec 44#include "gimple-walk.h"
75a70cf9 45#include "tree-iterator.h"
1e8e9920 46#include "tree-inline.h"
47#include "langhooks.h"
073c1fd5 48#include "tree-dfa.h"
69ee5dbb 49#include "tree-ssa.h"
e3022db7 50#include "splay-tree.h"
4954efd4 51#include "omp-general.h"
7740abd8 52#include "omp-low.h"
4954efd4 53#include "omp-grid.h"
424a4a92 54#include "gimple-low.h"
2cc80ac3 55#include "symbol-summary.h"
e797f49f 56#include "tree-nested.h"
b0c5e347 57#include "context.h"
ca4c3545 58#include "gomp-constants.h"
cbba99a0 59#include "gimple-pretty-print.h"
ef2beaf2 60#include "hsa-common.h"
30a86690 61#include "stringpool.h"
62#include "attribs.h"
1e8e9920 63
ca4c3545 64/* Lowering of OMP parallel and workshare constructs proceeds in two
1e8e9920 65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
 67 clauses. The second phase expands code for the constructs and
334ec2d8 68 re-gimplifies statements where variables have been replaced with complex
1e8e9920 69 expressions.
70
d134bccc 71 Final code generation is done by pass_expand_omp. The flowgraph is
ca4c3545 72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
1e8e9920 74
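/* As a minimal illustration (example code only; the function and variable
   names are placeholders, not taken from this file), a construct such as

       void
       example (int n, int *a)
       {
       #pragma omp parallel for shared (a) firstprivate (n)
         for (int i = 0; i < n; i++)
           a[i] = i;
       }

   is scanned in the first phase for its data-sharing clauses, and in the
   second phase the loop body is outlined into a child function that the
   thread library invokes, with A and N communicated through a generated
   record.  */
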
75/* Context structure. Used to store information about each parallel
76 directive in the code. */
77
6dc50383 78struct omp_context
1e8e9920 79{
 80 /* This field must be at the beginning, as we do "inheritance": Some
 81 callback functions for tree-inline.c (e.g., omp_copy_decl)
 82 receive a copy_body_data pointer that is cast to an
 83 omp_context pointer. */
84 copy_body_data cb;
85
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
42acab1c 88 gimple *stmt;
1e8e9920 89
48e1416a 90 /* Map variables to fields in a structure that allows communication
1e8e9920 91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
96
fd6481cf 97 /* These are used only by task contexts, if the task firstprivate fn is
 98 needed. srecord_type is used to communicate from the thread that
 99 encountered the task construct to the task firstprivate fn;
 100 record_type is allocated by GOMP_task, initialized by the task
 101 firstprivate fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
104
1e8e9920 105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
108
bc7bff74 109 /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
 110 barriers should jump during the omplower pass. */
111 tree cancel_label;
112
9cf7bec9 113 /* The sibling GIMPLE_OMP_FOR simd with a _simt_ clause, or NULL
 114 otherwise. */
115 gimple *simt_stmt;
116
7e5a76c8 117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
121
9a1d892b 122 /* A hash map from the reduction clauses to the registered array
7e5a76c8 123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
125
9a1d892b 126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
129
1e8e9920 130 /* Nesting depth of this context. Used to beautify error messages
 131 regarding invalid gotos. The outermost ctx is depth 1, with depth 0 being
132 reserved for the main body of the function. */
133 int depth;
134
1e8e9920 135 /* True if this parallel directive is nested within another. */
136 bool is_nested;
bc7bff74 137
138 /* True if this construct can be cancelled. */
139 bool cancellable;
384aea12 140
141 /* True if lower_omp_1 should look up lastprivate conditional in parent
142 context. */
a0110ad7 143 bool combined_into_simd_safelen1;
da008d72 144
 145 /* True if there is a nested scan context with an inclusive clause. */
146 bool scan_inclusive;
7d26f131 147
 148 /* True if there is a nested scan context with an exclusive clause. */
149 bool scan_exclusive;
3d2b49b2 150
151 /* True in the second simd loop of for simd with inscan reductions. */
152 bool for_simd_scan_phase;
3ab58307 153
 154 /* True if there is an order(concurrent) clause on the construct. */
155 bool order_concurrent;
31890b58 156
 157 /* True if there is a bind clause on the construct (i.e. a loop construct). */
158 bool loop_p;
6dc50383 159};
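
/* For instance (illustrative sketch only; EXAMPLE and WORK are placeholder
   names), nesting constructs such as

       extern void work (void);

       void
       example (void)
       {
       #pragma omp parallel
         {
         #pragma omp task
           work ();
         }
       }

   creates one omp_context for the parallel and another for the task; the
   task context's OUTER field points at the parallel context, giving DEPTH
   values of 1 and 2 respectively.  */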
1e8e9920 160
1e8e9920 161static splay_tree all_contexts;
fd6481cf 162static int taskreg_nesting_level;
bc7bff74 163static int target_nesting_level;
fd6481cf 164static bitmap task_shared_vars;
70ef2520 165static bitmap global_nonaddressable_vars;
37eaded9 166static vec<omp_context *> taskreg_contexts;
1e8e9920 167
ab129075 168static void scan_omp (gimple_seq *, omp_context *);
75a70cf9 169static tree scan_omp_1_op (tree *, int *, void *);
170
171#define WALK_SUBSTMTS \
172 case GIMPLE_BIND: \
173 case GIMPLE_TRY: \
174 case GIMPLE_CATCH: \
175 case GIMPLE_EH_FILTER: \
4c0315d0 176 case GIMPLE_TRANSACTION: \
75a70cf9 177 /* The sub-statements for these should be walked. */ \
178 *handled_ops_p = false; \
179 break;
180
a8e785ba 181/* Return true if CTX corresponds to an oacc parallel region. */
182
183static bool
184is_oacc_parallel (omp_context *ctx)
185{
186 enum gimple_code outer_type = gimple_code (ctx->stmt);
187 return ((outer_type == GIMPLE_OMP_TARGET)
188 && (gimple_omp_target_kind (ctx->stmt)
189 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
190}
191
192/* Return true if CTX corresponds to an oacc kernels region. */
193
194static bool
195is_oacc_kernels (omp_context *ctx)
196{
197 enum gimple_code outer_type = gimple_code (ctx->stmt);
198 return ((outer_type == GIMPLE_OMP_TARGET)
199 && (gimple_omp_target_kind (ctx->stmt)
200 == GF_OMP_TARGET_KIND_OACC_KERNELS));
201}
202
43895be5 203/* If DECL is the artificial dummy VAR_DECL created for non-static
204 data member privatization, return the underlying "this" parameter,
205 otherwise return NULL. */
206
207tree
208omp_member_access_dummy_var (tree decl)
209{
210 if (!VAR_P (decl)
211 || !DECL_ARTIFICIAL (decl)
212 || !DECL_IGNORED_P (decl)
213 || !DECL_HAS_VALUE_EXPR_P (decl)
214 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
215 return NULL_TREE;
216
217 tree v = DECL_VALUE_EXPR (decl);
218 if (TREE_CODE (v) != COMPONENT_REF)
219 return NULL_TREE;
220
221 while (1)
222 switch (TREE_CODE (v))
223 {
224 case COMPONENT_REF:
225 case MEM_REF:
226 case INDIRECT_REF:
227 CASE_CONVERT:
228 case POINTER_PLUS_EXPR:
229 v = TREE_OPERAND (v, 0);
230 continue;
231 case PARM_DECL:
232 if (DECL_CONTEXT (v) == current_function_decl
233 && DECL_ARTIFICIAL (v)
234 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
235 return v;
236 return NULL_TREE;
237 default:
238 return NULL_TREE;
239 }
240}
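
/* For example (C++ sketch; S, F and N are placeholder names), in

       struct S
       {
         int x;
         void f (int n)
         {
         #pragma omp parallel for firstprivate (x)
           for (int i = 0; i < n; i++)
             x += i;
         }
       };

   the privatized data member X is represented by such an artificial
   VAR_DECL whose DECL_VALUE_EXPR is this->x, and the function above
   recovers the underlying "this" parameter from it.  */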
241
242/* Helper for unshare_and_remap, called through walk_tree. */
243
244static tree
245unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
246{
247 tree *pair = (tree *) data;
248 if (*tp == pair[0])
249 {
250 *tp = unshare_expr (pair[1]);
251 *walk_subtrees = 0;
252 }
253 else if (IS_TYPE_OR_DECL_P (*tp))
254 *walk_subtrees = 0;
255 return NULL_TREE;
256}
257
258/* Return unshare_expr (X) with all occurrences of FROM
259 replaced with TO. */
260
261static tree
262unshare_and_remap (tree x, tree from, tree to)
263{
264 tree pair[2] = { from, to };
265 x = unshare_expr (x);
266 walk_tree (&x, unshare_and_remap_1, pair, NULL);
267 return x;
268}
269
75a70cf9 270/* Convenience function for calling scan_omp_1_op on tree operands. */
271
272static inline tree
273scan_omp_op (tree *tp, omp_context *ctx)
274{
275 struct walk_stmt_info wi;
276
277 memset (&wi, 0, sizeof (wi));
278 wi.info = ctx;
279 wi.want_locations = true;
280
281 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
282}
283
e3a19533 284static void lower_omp (gimple_seq *, omp_context *);
f49d7bb5 285static tree lookup_decl_in_outer_ctx (tree, omp_context *);
286static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
1e8e9920 287
1e8e9920 288/* Return true if CTX is for an omp parallel. */
289
290static inline bool
291is_parallel_ctx (omp_context *ctx)
292{
75a70cf9 293 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
1e8e9920 294}
295
773c5ba7 296
fd6481cf 297/* Return true if CTX is for an omp task. */
298
299static inline bool
300is_task_ctx (omp_context *ctx)
301{
75a70cf9 302 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
fd6481cf 303}
304
305
43895be5 306/* Return true if CTX is for an omp taskloop. */
307
308static inline bool
309is_taskloop_ctx (omp_context *ctx)
310{
311 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
312 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
313}
314
315
7e5a76c8 316/* Return true if CTX is for a host omp teams. */
317
318static inline bool
319is_host_teams_ctx (omp_context *ctx)
320{
321 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
322 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
323}
324
325/* Return true if CTX is for an omp parallel or omp task or host omp teams
326 (the last one is strictly not a task region in OpenMP speak, but we
327 need to treat it similarly). */
fd6481cf 328
329static inline bool
330is_taskreg_ctx (omp_context *ctx)
331{
7e5a76c8 332 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
fd6481cf 333}
334
1e8e9920 335/* Return true if EXPR is variable sized. */
336
337static inline bool
1f1872fd 338is_variable_sized (const_tree expr)
1e8e9920 339{
340 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
341}
342
ca4c3545 343/* Lookup variables. The "maybe" form
1e8e9920 344 allows for the variable form to not have been entered, otherwise we
345 assert that the variable must have been entered. */
346
347static inline tree
348lookup_decl (tree var, omp_context *ctx)
349{
06ecf488 350 tree *n = ctx->cb.decl_map->get (var);
e3022db7 351 return *n;
1e8e9920 352}
353
354static inline tree
e8a588af 355maybe_lookup_decl (const_tree var, omp_context *ctx)
1e8e9920 356{
06ecf488 357 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
e3022db7 358 return n ? *n : NULL_TREE;
1e8e9920 359}
360
361static inline tree
362lookup_field (tree var, omp_context *ctx)
363{
364 splay_tree_node n;
365 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
366 return (tree) n->value;
367}
368
fd6481cf 369static inline tree
43895be5 370lookup_sfield (splay_tree_key key, omp_context *ctx)
fd6481cf 371{
372 splay_tree_node n;
373 n = splay_tree_lookup (ctx->sfield_map
43895be5 374 ? ctx->sfield_map : ctx->field_map, key);
fd6481cf 375 return (tree) n->value;
376}
377
1e8e9920 378static inline tree
43895be5 379lookup_sfield (tree var, omp_context *ctx)
380{
381 return lookup_sfield ((splay_tree_key) var, ctx);
382}
383
384static inline tree
385maybe_lookup_field (splay_tree_key key, omp_context *ctx)
1e8e9920 386{
387 splay_tree_node n;
43895be5 388 n = splay_tree_lookup (ctx->field_map, key);
1e8e9920 389 return n ? (tree) n->value : NULL_TREE;
390}
391
43895be5 392static inline tree
393maybe_lookup_field (tree var, omp_context *ctx)
394{
395 return maybe_lookup_field ((splay_tree_key) var, ctx);
396}
397
e8a588af 398/* Return true if DECL should be copied by pointer. SHARED_CTX is
399 the parallel context if DECL is to be shared. */
1e8e9920 400
401static bool
fd6481cf 402use_pointer_for_field (tree decl, omp_context *shared_ctx)
1e8e9920 403{
0b80c4b2 404 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
405 || TYPE_ATOMIC (TREE_TYPE (decl)))
1e8e9920 406 return true;
407
554f2707 408 /* We can only use copy-in/copy-out semantics for shared variables
1e8e9920 409 when we know the value is not accessible from an outer scope. */
e8a588af 410 if (shared_ctx)
1e8e9920 411 {
ca4c3545 412 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
413
1e8e9920 414 /* ??? Trivially accessible from anywhere. But why would we even
415 be passing an address in this case? Should we simply assert
416 this to be false, or should we have a cleanup pass that removes
417 these from the list of mappings? */
7e5a76c8 418 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
1e8e9920 419 return true;
420
421 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
422 without analyzing the expression whether or not its location
423 is accessible to anyone else. In the case of nested parallel
424 regions it certainly may be. */
df2c34fc 425 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
1e8e9920 426 return true;
427
428 /* Do not use copy-in/copy-out for variables that have their
429 address taken. */
70ef2520 430 if (is_global_var (decl))
431 {
 432 /* For file scope vars, track whether we've seen them as
 433 non-addressable initially; in that case, keep the same
 434 answer for the duration of the pass, even when they are made
 435 addressable later on, e.g. through reduction expansion. Global
 436 variables which weren't addressable before the pass will not
 437 have the addresses of their privatized copies taken. See PR91216. */
438 if (!TREE_ADDRESSABLE (decl))
439 {
440 if (!global_nonaddressable_vars)
441 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
442 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
443 }
444 else if (!global_nonaddressable_vars
445 || !bitmap_bit_p (global_nonaddressable_vars,
446 DECL_UID (decl)))
447 return true;
448 }
449 else if (TREE_ADDRESSABLE (decl))
1e8e9920 450 return true;
e8a588af 451
b8214689 452 /* lower_send_shared_vars uses copy-in, but not copy-out,
 453 for these. */
454 if (TREE_READONLY (decl)
455 || ((TREE_CODE (decl) == RESULT_DECL
456 || TREE_CODE (decl) == PARM_DECL)
457 && DECL_BY_REFERENCE (decl)))
458 return false;
459
e8a588af 460 /* Disallow copy-in/out in a nested parallel if
 461 decl is shared in an outer parallel; otherwise
 462 each thread could store the shared variable
 463 in its own copy-in location, making the
 464 variable no longer really shared. */
b8214689 465 if (shared_ctx->is_nested)
e8a588af 466 {
467 omp_context *up;
468
469 for (up = shared_ctx->outer; up; up = up->outer)
0cb159ec 470 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
e8a588af 471 break;
472
0cb159ec 473 if (up)
e8a588af 474 {
475 tree c;
476
75a70cf9 477 for (c = gimple_omp_taskreg_clauses (up->stmt);
e8a588af 478 c; c = OMP_CLAUSE_CHAIN (c))
479 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
480 && OMP_CLAUSE_DECL (c) == decl)
481 break;
482
483 if (c)
784ad964 484 goto maybe_mark_addressable_and_ret;
e8a588af 485 }
486 }
fd6481cf 487
b8214689 488 /* For tasks, avoid using copy-in/out. As tasks can be
fd6481cf 489 deferred or executed in a different thread, the task hasn't
 490 necessarily terminated when GOMP_task returns. */
b8214689 491 if (is_task_ctx (shared_ctx))
fd6481cf 492 {
784ad964 493 tree outer;
494 maybe_mark_addressable_and_ret:
495 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
43895be5 496 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
fd6481cf 497 {
 498 /* Taking the address of OUTER in lower_send_shared_vars
 499 might require regimplification of everything that uses the
 500 variable. */
501 if (!task_shared_vars)
502 task_shared_vars = BITMAP_ALLOC (NULL);
503 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
504 TREE_ADDRESSABLE (outer) = 1;
505 }
506 return true;
507 }
1e8e9920 508 }
509
510 return false;
511}
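
/* To illustrate the distinction made above (sketch only; EXAMPLE and USE
   are placeholder names), given

       extern void use (int, int, int *);

       void
       example (void)
       {
         int a = 0;
         int b = 0;
         int *p = &b;
       #pragma omp parallel shared (a, b)
         use (a, b, p);
       }

   the scalar A, whose address is never taken, can be communicated with
   copy-in/copy-out semantics, whereas B is TREE_ADDRESSABLE and has to be
   passed by pointer so that every thread sees the one shared object.  */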
512
79acaae1 513/* Construct a new automatic decl similar to VAR. */
514
515static tree
516omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
517{
518 tree copy = copy_var_decl (var, name, type);
519
520 DECL_CONTEXT (copy) = current_function_decl;
1767a056 521 DECL_CHAIN (copy) = ctx->block_vars;
43895be5 522 /* If VAR is listed in task_shared_vars, it means it wasn't
 523 originally addressable and is only so because the task needs to
 524 take its address. But we don't need to take the address of
 525 privatized copies of that var. */
526 if (TREE_ADDRESSABLE (var)
70ef2520 527 && ((task_shared_vars
528 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
529 || (global_nonaddressable_vars
530 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
43895be5 531 TREE_ADDRESSABLE (copy) = 0;
1e8e9920 532 ctx->block_vars = copy;
533
534 return copy;
535}
536
537static tree
538omp_copy_decl_1 (tree var, omp_context *ctx)
539{
540 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
541}
542
445d06b6 543/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
544 as appropriate. */
545static tree
546omp_build_component_ref (tree obj, tree field)
547{
548 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
549 if (TREE_THIS_VOLATILE (field))
550 TREE_THIS_VOLATILE (ret) |= 1;
551 if (TREE_READONLY (field))
552 TREE_READONLY (ret) |= 1;
553 return ret;
554}
555
1e8e9920 556/* Build tree nodes to access the field for VAR on the receiver side. */
557
558static tree
559build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
560{
561 tree x, field = lookup_field (var, ctx);
562
563 /* If the receiver record type was remapped in the child function,
564 remap the field into the new record type. */
565 x = maybe_lookup_field (field, ctx);
566 if (x != NULL)
567 field = x;
568
182cf5a9 569 x = build_simple_mem_ref (ctx->receiver_decl);
75c20a2e 570 TREE_THIS_NOTRAP (x) = 1;
445d06b6 571 x = omp_build_component_ref (x, field);
1e8e9920 572 if (by_ref)
b16d27b9 573 {
574 x = build_simple_mem_ref (x);
575 TREE_THIS_NOTRAP (x) = 1;
576 }
1e8e9920 577
578 return x;
579}
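
/* As a rough sketch of what this accesses (the .omp_data_* record and field
   names below are illustrative only), for

     #pragma omp parallel shared (b) firstprivate (n)

   with B addressable, the sending side fills in something like

       struct .omp_data_s { int *b; int n; } .omp_data_o;
       .omp_data_o.b = &b;
       .omp_data_o.n = n;

   and in the child function build_receiver_ref turns references to B and N
   back into *(.omp_data_i->b) and .omp_data_i->n.  */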
580
581/* Build tree nodes to access VAR in the scope outer to CTX. In the case
582 of a parallel, this is a component reference; for workshare constructs
583 this is some variable. */
584
585static tree
1f355935 586build_outer_var_ref (tree var, omp_context *ctx,
587 enum omp_clause_code code = OMP_CLAUSE_ERROR)
1e8e9920 588{
589 tree x;
7e5a76c8 590 omp_context *outer = ctx->outer;
591 while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
592 outer = outer->outer;
1e8e9920 593
f49d7bb5 594 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
1e8e9920 595 x = var;
596 else if (is_variable_sized (var))
597 {
598 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
1f355935 599 x = build_outer_var_ref (x, ctx, code);
182cf5a9 600 x = build_simple_mem_ref (x);
1e8e9920 601 }
fd6481cf 602 else if (is_taskreg_ctx (ctx))
1e8e9920 603 {
e8a588af 604 bool by_ref = use_pointer_for_field (var, NULL);
1e8e9920 605 x = build_receiver_ref (var, by_ref, ctx);
606 }
1f355935 607 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0076df39 608 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
31890b58 609 || ctx->loop_p
1f355935 610 || (code == OMP_CLAUSE_PRIVATE
611 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
612 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
613 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
3d483a94 614 {
1f355935 615 /* #pragma omp simd isn't a worksharing construct, and can reference
616 even private vars in its linear etc. clauses.
617 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
618 to private vars in all worksharing constructs. */
3d483a94 619 x = NULL_TREE;
7e5a76c8 620 if (outer && is_taskreg_ctx (outer))
621 x = lookup_decl (var, outer);
622 else if (outer)
84cb1020 623 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
3d483a94 624 if (x == NULL_TREE)
625 x = var;
626 }
1f355935 627 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
43895be5 628 {
7e5a76c8 629 gcc_assert (outer);
43895be5 630 splay_tree_node n
7e5a76c8 631 = splay_tree_lookup (outer->field_map,
43895be5 632 (splay_tree_key) &DECL_UID (var));
633 if (n == NULL)
634 {
7e5a76c8 635 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
43895be5 636 x = var;
637 else
7e5a76c8 638 x = lookup_decl (var, outer);
43895be5 639 }
640 else
641 {
642 tree field = (tree) n->value;
643 /* If the receiver record type was remapped in the child function,
644 remap the field into the new record type. */
7e5a76c8 645 x = maybe_lookup_field (field, outer);
43895be5 646 if (x != NULL)
647 field = x;
648
7e5a76c8 649 x = build_simple_mem_ref (outer->receiver_decl);
43895be5 650 x = omp_build_component_ref (x, field);
7e5a76c8 651 if (use_pointer_for_field (var, outer))
43895be5 652 x = build_simple_mem_ref (x);
653 }
654 }
7e5a76c8 655 else if (outer)
56686608 656 {
56686608 657 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
658 {
659 outer = outer->outer;
660 gcc_assert (outer
661 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
662 }
1f355935 663 x = lookup_decl (var, outer);
56686608 664 }
4954efd4 665 else if (omp_is_reference (var))
9438af57 666 /* This can happen with orphaned constructs. If var is a reference,
 667 it is possible it is shared and as such valid. */
668 x = var;
43895be5 669 else if (omp_member_access_dummy_var (var))
670 x = var;
1e8e9920 671 else
672 gcc_unreachable ();
673
43895be5 674 if (x == var)
675 {
676 tree t = omp_member_access_dummy_var (var);
677 if (t)
678 {
679 x = DECL_VALUE_EXPR (var);
680 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
681 if (o != t)
682 x = unshare_and_remap (x, t, o);
683 else
684 x = unshare_expr (x);
685 }
686 }
687
4954efd4 688 if (omp_is_reference (var))
182cf5a9 689 x = build_simple_mem_ref (x);
1e8e9920 690
691 return x;
692}
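
/* For example (sketch only; EXAMPLE and LAST are placeholder names), in

       void
       example (void)
       {
         int last = 0;
       #pragma omp parallel
       #pragma omp for lastprivate (last)
         for (int i = 0; i < 64; i++)
           last = i;
       }

   lowering the worksharing loop uses build_outer_var_ref to locate the
   object for LAST in the enclosing parallel context, which is where the
   value from the sequentially last iteration ends up being stored.  */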
693
694/* Build tree nodes to access the field for VAR on the sender side. */
695
696static tree
43895be5 697build_sender_ref (splay_tree_key key, omp_context *ctx)
1e8e9920 698{
43895be5 699 tree field = lookup_sfield (key, ctx);
445d06b6 700 return omp_build_component_ref (ctx->sender_decl, field);
1e8e9920 701}
702
43895be5 703static tree
704build_sender_ref (tree var, omp_context *ctx)
705{
706 return build_sender_ref ((splay_tree_key) var, ctx);
707}
708
12dc9a16 709/* Add a new field for VAR inside the structure CTX->SENDER_DECL. */
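
/* As used below, the MASK bits select where the new field is recorded:
   bit 1 adds it to CTX->RECORD_TYPE and CTX->FIELD_MAP, bit 2 adds it to
   the task sender record (CTX->SRECORD_TYPE and CTX->SFIELD_MAP), bit 4
   marks an array communicated through a pointer to a pointer, and bit 8
   keys the splay tree entries by &DECL_UID (VAR) rather than by VAR
   itself.  */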
1e8e9920 711
712static void
737cc978 713install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
1e8e9920 714{
fd6481cf 715 tree field, type, sfield = NULL_TREE;
43895be5 716 splay_tree_key key = (splay_tree_key) var;
1e8e9920 717
43895be5 718 if ((mask & 8) != 0)
719 {
720 key = (splay_tree_key) &DECL_UID (var);
721 gcc_checking_assert (key != (splay_tree_key) var);
722 }
fd6481cf 723 gcc_assert ((mask & 1) == 0
43895be5 724 || !splay_tree_lookup (ctx->field_map, key));
fd6481cf 725 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
43895be5 726 || !splay_tree_lookup (ctx->sfield_map, key));
ca4c3545 727 gcc_assert ((mask & 3) == 3
728 || !is_gimple_omp_oacc (ctx->stmt));
1e8e9920 729
730 type = TREE_TYPE (var);
44c0112f 731 /* Prevent redeclaring the var in the split-off function with a restrict
732 pointer type. Note that we only clear type itself, restrict qualifiers in
733 the pointed-to type will be ignored by points-to analysis. */
734 if (POINTER_TYPE_P (type)
735 && TYPE_RESTRICT (type))
736 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
737
bc7bff74 738 if (mask & 4)
739 {
740 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
741 type = build_pointer_type (build_pointer_type (type));
742 }
743 else if (by_ref)
737cc978 744 type = build_pointer_type (type);
4954efd4 745 else if ((mask & 3) == 1 && omp_is_reference (var))
fd6481cf 746 type = TREE_TYPE (type);
1e8e9920 747
e60a6f7b 748 field = build_decl (DECL_SOURCE_LOCATION (var),
749 FIELD_DECL, DECL_NAME (var), type);
1e8e9920 750
751 /* Remember what variable this field was created for. This does have a
752 side effect of making dwarf2out ignore this member, so for helpful
753 debugging we clear it later in delete_omp_context. */
754 DECL_ABSTRACT_ORIGIN (field) = var;
fd6481cf 755 if (type == TREE_TYPE (var))
756 {
5d4b30ea 757 SET_DECL_ALIGN (field, DECL_ALIGN (var));
fd6481cf 758 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
759 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
760 }
761 else
5d4b30ea 762 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
1e8e9920 763
fd6481cf 764 if ((mask & 3) == 3)
765 {
766 insert_field_into_struct (ctx->record_type, field);
767 if (ctx->srecord_type)
768 {
e60a6f7b 769 sfield = build_decl (DECL_SOURCE_LOCATION (var),
770 FIELD_DECL, DECL_NAME (var), type);
fd6481cf 771 DECL_ABSTRACT_ORIGIN (sfield) = var;
5d4b30ea 772 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
fd6481cf 773 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
774 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
775 insert_field_into_struct (ctx->srecord_type, sfield);
776 }
777 }
778 else
779 {
780 if (ctx->srecord_type == NULL_TREE)
781 {
782 tree t;
783
784 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
785 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
786 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
787 {
43895be5 788 sfield = build_decl (DECL_SOURCE_LOCATION (t),
e60a6f7b 789 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
fd6481cf 790 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
791 insert_field_into_struct (ctx->srecord_type, sfield);
792 splay_tree_insert (ctx->sfield_map,
793 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
794 (splay_tree_value) sfield);
795 }
796 }
797 sfield = field;
798 insert_field_into_struct ((mask & 1) ? ctx->record_type
799 : ctx->srecord_type, field);
800 }
1e8e9920 801
fd6481cf 802 if (mask & 1)
43895be5 803 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
fd6481cf 804 if ((mask & 2) && ctx->sfield_map)
43895be5 805 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
1e8e9920 806}
807
808static tree
809install_var_local (tree var, omp_context *ctx)
810{
811 tree new_var = omp_copy_decl_1 (var, ctx);
812 insert_decl_map (&ctx->cb, var, new_var);
813 return new_var;
814}
815
816/* Adjust the replacement for DECL in CTX for the new context. This means
817 copying the DECL_VALUE_EXPR, and fixing up the type. */
818
819static void
820fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
821{
822 tree new_decl, size;
823
824 new_decl = lookup_decl (decl, ctx);
825
826 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
827
828 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
829 && DECL_HAS_VALUE_EXPR_P (decl))
830 {
831 tree ve = DECL_VALUE_EXPR (decl);
75a70cf9 832 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
1e8e9920 833 SET_DECL_VALUE_EXPR (new_decl, ve);
834 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
835 }
836
837 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
838 {
839 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
840 if (size == error_mark_node)
841 size = TYPE_SIZE (TREE_TYPE (new_decl));
842 DECL_SIZE (new_decl) = size;
843
844 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
845 if (size == error_mark_node)
846 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
847 DECL_SIZE_UNIT (new_decl) = size;
848 }
849}
850
851/* The callback for remap_decl. Search all containing contexts for a
852 mapping of the variable; this avoids having to duplicate the splay
853 tree ahead of time. We know a mapping doesn't already exist in the
854 given context. Create new mappings to implement default semantics. */
855
856static tree
857omp_copy_decl (tree var, copy_body_data *cb)
858{
859 omp_context *ctx = (omp_context *) cb;
860 tree new_var;
861
1e8e9920 862 if (TREE_CODE (var) == LABEL_DECL)
863 {
ed5078db 864 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
865 return var;
e60a6f7b 866 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
773c5ba7 867 DECL_CONTEXT (new_var) = current_function_decl;
1e8e9920 868 insert_decl_map (&ctx->cb, var, new_var);
869 return new_var;
870 }
871
fd6481cf 872 while (!is_taskreg_ctx (ctx))
1e8e9920 873 {
874 ctx = ctx->outer;
875 if (ctx == NULL)
876 return var;
877 new_var = maybe_lookup_decl (var, ctx);
878 if (new_var)
879 return new_var;
880 }
881
f49d7bb5 882 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
883 return var;
884
1e8e9920 885 return error_mark_node;
886}
887
4954efd4 888/* Create a new context, with OUTER_CTX being the surrounding context. */
773c5ba7 889
4954efd4 890static omp_context *
891new_omp_context (gimple *stmt, omp_context *outer_ctx)
773c5ba7 892{
4954efd4 893 omp_context *ctx = XCNEW (omp_context);
773c5ba7 894
4954efd4 895 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
896 (splay_tree_value) ctx);
897 ctx->stmt = stmt;
773c5ba7 898
4954efd4 899 if (outer_ctx)
61e47ac8 900 {
4954efd4 901 ctx->outer = outer_ctx;
902 ctx->cb = outer_ctx->cb;
903 ctx->cb.block = NULL;
904 ctx->depth = outer_ctx->depth + 1;
1e8e9920 905 }
906 else
907 {
908 ctx->cb.src_fn = current_function_decl;
909 ctx->cb.dst_fn = current_function_decl;
415d1b9a 910 ctx->cb.src_node = cgraph_node::get (current_function_decl);
53f79206 911 gcc_checking_assert (ctx->cb.src_node);
1e8e9920 912 ctx->cb.dst_node = ctx->cb.src_node;
913 ctx->cb.src_cfun = cfun;
914 ctx->cb.copy_decl = omp_copy_decl;
e38def9c 915 ctx->cb.eh_lp_nr = 0;
1e8e9920 916 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
744a33a1 917 ctx->cb.adjust_array_error_bounds = true;
918 ctx->cb.dont_remap_vla_if_no_change = true;
1e8e9920 919 ctx->depth = 1;
920 }
921
06ecf488 922 ctx->cb.decl_map = new hash_map<tree, tree>;
1e8e9920 923
924 return ctx;
925}
926
75a70cf9 927static gimple_seq maybe_catch_exception (gimple_seq);
f6430caa 928
929/* Finalize task copyfn. */
930
931static void
1a91d914 932finalize_task_copyfn (gomp_task *task_stmt)
f6430caa 933{
934 struct function *child_cfun;
9078126c 935 tree child_fn;
e3a19533 936 gimple_seq seq = NULL, new_seq;
1a91d914 937 gbind *bind;
f6430caa 938
75a70cf9 939 child_fn = gimple_omp_task_copy_fn (task_stmt);
f6430caa 940 if (child_fn == NULL_TREE)
941 return;
942
943 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
82b40354 944 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
f6430caa 945
f6430caa 946 push_cfun (child_cfun);
7e3aae05 947 bind = gimplify_body (child_fn, false);
75a70cf9 948 gimple_seq_add_stmt (&seq, bind);
949 new_seq = maybe_catch_exception (seq);
950 if (new_seq != seq)
951 {
952 bind = gimple_build_bind (NULL, new_seq, NULL);
e3a19533 953 seq = NULL;
75a70cf9 954 gimple_seq_add_stmt (&seq, bind);
955 }
956 gimple_set_body (child_fn, seq);
f6430caa 957 pop_cfun ();
f6430caa 958
82b40354 959 /* Inform the callgraph about the new function. */
47300487 960 cgraph_node *node = cgraph_node::get_create (child_fn);
961 node->parallelized_function = 1;
415d1b9a 962 cgraph_node::add_new_function (child_fn, false);
f6430caa 963}
964
1e8e9920 965/* Destroy an omp_context data structure. Called through the splay tree
 966 value delete callback. */
967
968static void
969delete_omp_context (splay_tree_value value)
970{
971 omp_context *ctx = (omp_context *) value;
972
06ecf488 973 delete ctx->cb.decl_map;
1e8e9920 974
975 if (ctx->field_map)
976 splay_tree_delete (ctx->field_map);
fd6481cf 977 if (ctx->sfield_map)
978 splay_tree_delete (ctx->sfield_map);
1e8e9920 979
980 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
981 it produces corrupt debug information. */
982 if (ctx->record_type)
983 {
984 tree t;
1767a056 985 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1e8e9920 986 DECL_ABSTRACT_ORIGIN (t) = NULL;
987 }
fd6481cf 988 if (ctx->srecord_type)
989 {
990 tree t;
1767a056 991 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
fd6481cf 992 DECL_ABSTRACT_ORIGIN (t) = NULL;
993 }
1e8e9920 994
f6430caa 995 if (is_task_ctx (ctx))
1a91d914 996 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
f6430caa 997
7e5a76c8 998 if (ctx->task_reduction_map)
999 {
1000 ctx->task_reductions.release ();
1001 delete ctx->task_reduction_map;
1002 }
1003
9a1d892b 1004 delete ctx->lastprivate_conditional_map;
1005
1e8e9920 1006 XDELETE (ctx);
1007}
1008
1009/* Fix up RECEIVER_DECL with a type that has been remapped to the child
1010 context. */
1011
1012static void
1013fixup_child_record_type (omp_context *ctx)
1014{
1015 tree f, type = ctx->record_type;
1016
56686608 1017 if (!ctx->receiver_decl)
1018 return;
1e8e9920 1019 /* ??? It isn't sufficient to just call remap_type here, because
1020 variably_modified_type_p doesn't work the way we expect for
1021 record types. Testing each field for whether it needs remapping
1022 and creating a new record by hand works, however. */
1767a056 1023 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1e8e9920 1024 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1025 break;
1026 if (f)
1027 {
1028 tree name, new_fields = NULL;
1029
1030 type = lang_hooks.types.make_type (RECORD_TYPE);
1031 name = DECL_NAME (TYPE_NAME (ctx->record_type));
e60a6f7b 1032 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1033 TYPE_DECL, name, type);
1e8e9920 1034 TYPE_NAME (type) = name;
1035
1767a056 1036 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1e8e9920 1037 {
1038 tree new_f = copy_node (f);
1039 DECL_CONTEXT (new_f) = type;
1040 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1767a056 1041 DECL_CHAIN (new_f) = new_fields;
75a70cf9 1042 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1043 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1044 &ctx->cb, NULL);
1045 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1046 &ctx->cb, NULL);
1e8e9920 1047 new_fields = new_f;
1048
1049 /* Arrange to be able to look up the receiver field
1050 given the sender field. */
1051 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1052 (splay_tree_value) new_f);
1053 }
1054 TYPE_FIELDS (type) = nreverse (new_fields);
1055 layout_type (type);
1056 }
1057
43895be5 1058 /* In a target region we never modify any of the pointers in *.omp_data_i,
1059 so attempt to help the optimizers. */
1060 if (is_gimple_omp_offloaded (ctx->stmt))
1061 type = build_qualified_type (type, TYPE_QUAL_CONST);
1062
5455b100 1063 TREE_TYPE (ctx->receiver_decl)
1064 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1e8e9920 1065}
1066
1067/* Instantiate decls as necessary in CTX to satisfy the data sharing
737cc978 1068 specified by CLAUSES. */
1e8e9920 1069
1070static void
737cc978 1071scan_sharing_clauses (tree clauses, omp_context *ctx)
1e8e9920 1072{
1073 tree c, decl;
1074 bool scan_array_reductions = false;
1075
1076 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1077 {
1078 bool by_ref;
1079
55d6e7cd 1080 switch (OMP_CLAUSE_CODE (c))
1e8e9920 1081 {
1082 case OMP_CLAUSE_PRIVATE:
1083 decl = OMP_CLAUSE_DECL (c);
fd6481cf 1084 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1085 goto do_private;
1086 else if (!is_variable_sized (decl))
1e8e9920 1087 install_var_local (decl, ctx);
1088 break;
1089
1090 case OMP_CLAUSE_SHARED:
5fddcf34 1091 decl = OMP_CLAUSE_DECL (c);
7e5a76c8 1092 /* Ignore shared clauses in a teams construct nested inside a
 1093 target construct. */
1094 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1095 && !is_host_teams_ctx (ctx))
5fddcf34 1096 {
 1097 /* Global variables don't need to be copied;
 1098 the receiver side will use them directly. */
1099 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1100 if (is_global_var (odecl))
1101 break;
1102 insert_decl_map (&ctx->cb, decl, odecl);
1103 break;
1104 }
fd6481cf 1105 gcc_assert (is_taskreg_ctx (ctx));
e7327393 1106 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1107 || !is_variable_sized (decl));
f49d7bb5 1108 /* Global variables don't need to be copied;
 1109 the receiver side will use them directly. */
1110 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1111 break;
43895be5 1112 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
98588013 1113 {
1114 use_pointer_for_field (decl, ctx);
1115 break;
1116 }
1117 by_ref = use_pointer_for_field (decl, NULL);
1118 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1e8e9920 1119 || TREE_ADDRESSABLE (decl)
1120 || by_ref
4954efd4 1121 || omp_is_reference (decl))
1e8e9920 1122 {
98588013 1123 by_ref = use_pointer_for_field (decl, ctx);
fd6481cf 1124 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1125 install_var_local (decl, ctx);
1126 break;
1127 }
1128 /* We don't need to copy const scalar vars back. */
55d6e7cd 1129 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1e8e9920 1130 goto do_private;
1131
43895be5 1132 case OMP_CLAUSE_REDUCTION:
7e5a76c8 1133 case OMP_CLAUSE_IN_REDUCTION:
43895be5 1134 decl = OMP_CLAUSE_DECL (c);
7e5a76c8 1135 if (TREE_CODE (decl) == MEM_REF)
43895be5 1136 {
1137 tree t = TREE_OPERAND (decl, 0);
9561765e 1138 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1139 t = TREE_OPERAND (t, 0);
43895be5 1140 if (TREE_CODE (t) == INDIRECT_REF
1141 || TREE_CODE (t) == ADDR_EXPR)
1142 t = TREE_OPERAND (t, 0);
1143 install_var_local (t, ctx);
1144 if (is_taskreg_ctx (ctx)
7e5a76c8 1145 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1146 || (is_task_ctx (ctx)
1147 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1148 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1149 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1150 == POINTER_TYPE)))))
1151 && !is_variable_sized (t)
1152 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1153 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1154 && !is_task_ctx (ctx))))
43895be5 1155 {
7e5a76c8 1156 by_ref = use_pointer_for_field (t, NULL);
1157 if (is_task_ctx (ctx)
1158 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1159 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1160 {
1161 install_var_field (t, false, 1, ctx);
1162 install_var_field (t, by_ref, 2, ctx);
1163 }
1164 else
1165 install_var_field (t, by_ref, 3, ctx);
43895be5 1166 }
1167 break;
1168 }
7e5a76c8 1169 if (is_task_ctx (ctx)
1170 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1171 && OMP_CLAUSE_REDUCTION_TASK (c)
1172 && is_parallel_ctx (ctx)))
1173 {
 1174 /* Global variables don't need to be copied;
 1175 the receiver side will use them directly. */
1176 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1177 {
1178 by_ref = use_pointer_for_field (decl, ctx);
1179 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1180 install_var_field (decl, by_ref, 3, ctx);
1181 }
1182 install_var_local (decl, ctx);
1183 break;
1184 }
1185 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1186 && OMP_CLAUSE_REDUCTION_TASK (c))
1187 {
1188 install_var_local (decl, ctx);
1189 break;
1190 }
43895be5 1191 goto do_private;
1192
1e8e9920 1193 case OMP_CLAUSE_LASTPRIVATE:
1194 /* Let the corresponding firstprivate clause create
1195 the variable. */
1196 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1197 break;
1198 /* FALLTHRU */
1199
1200 case OMP_CLAUSE_FIRSTPRIVATE:
3d483a94 1201 case OMP_CLAUSE_LINEAR:
1e8e9920 1202 decl = OMP_CLAUSE_DECL (c);
1203 do_private:
43895be5 1204 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1205 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1206 && is_gimple_omp_offloaded (ctx->stmt))
1207 {
1208 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
4954efd4 1209 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
43895be5 1210 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1211 install_var_field (decl, true, 3, ctx);
1212 else
1213 install_var_field (decl, false, 3, ctx);
1214 }
1e8e9920 1215 if (is_variable_sized (decl))
1e8e9920 1216 {
fd6481cf 1217 if (is_task_ctx (ctx))
1218 install_var_field (decl, false, 1, ctx);
1219 break;
1220 }
1221 else if (is_taskreg_ctx (ctx))
1222 {
1223 bool global
1224 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
e8a588af 1225 by_ref = use_pointer_for_field (decl, NULL);
fd6481cf 1226
1227 if (is_task_ctx (ctx)
4954efd4 1228 && (global || by_ref || omp_is_reference (decl)))
fd6481cf 1229 {
1230 install_var_field (decl, false, 1, ctx);
1231 if (!global)
1232 install_var_field (decl, by_ref, 2, ctx);
1233 }
1234 else if (!global)
1235 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1236 }
1237 install_var_local (decl, ctx);
1238 break;
1239
43895be5 1240 case OMP_CLAUSE_USE_DEVICE_PTR:
75145cfe 1241 case OMP_CLAUSE_USE_DEVICE_ADDR:
43895be5 1242 decl = OMP_CLAUSE_DECL (c);
75145cfe 1243 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1244 && !omp_is_reference (decl))
1245 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
7082a9c7 1246 install_var_field (decl, true, 11, ctx);
43895be5 1247 else
7082a9c7 1248 install_var_field (decl, false, 11, ctx);
43895be5 1249 if (DECL_SIZE (decl)
1250 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1251 {
1252 tree decl2 = DECL_VALUE_EXPR (decl);
1253 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1254 decl2 = TREE_OPERAND (decl2, 0);
1255 gcc_assert (DECL_P (decl2));
1256 install_var_local (decl2, ctx);
1257 }
1258 install_var_local (decl, ctx);
1259 break;
1260
1261 case OMP_CLAUSE_IS_DEVICE_PTR:
1262 decl = OMP_CLAUSE_DECL (c);
1263 goto do_private;
1264
bc7bff74 1265 case OMP_CLAUSE__LOOPTEMP_:
7e5a76c8 1266 case OMP_CLAUSE__REDUCTEMP_:
43895be5 1267 gcc_assert (is_taskreg_ctx (ctx));
bc7bff74 1268 decl = OMP_CLAUSE_DECL (c);
1269 install_var_field (decl, false, 3, ctx);
1270 install_var_local (decl, ctx);
1271 break;
1272
1e8e9920 1273 case OMP_CLAUSE_COPYPRIVATE:
1e8e9920 1274 case OMP_CLAUSE_COPYIN:
1275 decl = OMP_CLAUSE_DECL (c);
e8a588af 1276 by_ref = use_pointer_for_field (decl, NULL);
fd6481cf 1277 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1278 break;
1279
2169f33b 1280 case OMP_CLAUSE_FINAL:
1e8e9920 1281 case OMP_CLAUSE_IF:
1282 case OMP_CLAUSE_NUM_THREADS:
bc7bff74 1283 case OMP_CLAUSE_NUM_TEAMS:
1284 case OMP_CLAUSE_THREAD_LIMIT:
1285 case OMP_CLAUSE_DEVICE:
1e8e9920 1286 case OMP_CLAUSE_SCHEDULE:
bc7bff74 1287 case OMP_CLAUSE_DIST_SCHEDULE:
1288 case OMP_CLAUSE_DEPEND:
43895be5 1289 case OMP_CLAUSE_PRIORITY:
1290 case OMP_CLAUSE_GRAINSIZE:
1291 case OMP_CLAUSE_NUM_TASKS:
ca4c3545 1292 case OMP_CLAUSE_NUM_GANGS:
1293 case OMP_CLAUSE_NUM_WORKERS:
1294 case OMP_CLAUSE_VECTOR_LENGTH:
1e8e9920 1295 if (ctx->outer)
75a70cf9 1296 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1e8e9920 1297 break;
1298
bc7bff74 1299 case OMP_CLAUSE_TO:
1300 case OMP_CLAUSE_FROM:
1301 case OMP_CLAUSE_MAP:
1302 if (ctx->outer)
1303 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1304 decl = OMP_CLAUSE_DECL (c);
 1305 /* Global variables with the "omp declare target" attribute
 1306 don't need to be copied; the receiver side will use them
c0998828 1307 directly. However, global variables with the "omp declare target link"
c1eaba31 1308 attribute, or maps using the ALWAYS modifier, do need to be copied. */
bc7bff74 1309 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1310 && DECL_P (decl)
9561765e 1311 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1312 && (OMP_CLAUSE_MAP_KIND (c)
1313 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
43895be5 1314 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
c1eaba31 1315 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1316 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1317 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
bc7bff74 1318 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
c0998828 1319 && varpool_node::get_create (decl)->offloadable
1320 && !lookup_attribute ("omp declare target link",
1321 DECL_ATTRIBUTES (decl)))
bc7bff74 1322 break;
1323 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
ca4c3545 1324 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
bc7bff74 1325 {
ca4c3545 1326 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1327 not offloaded; there is nothing to map for those. */
1328 if (!is_gimple_omp_offloaded (ctx->stmt)
c1a114c2 1329 && !POINTER_TYPE_P (TREE_TYPE (decl))
1330 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
bc7bff74 1331 break;
1332 }
43895be5 1333 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9561765e 1334 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1335 || (OMP_CLAUSE_MAP_KIND (c)
1336 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
43895be5 1337 {
1338 if (TREE_CODE (decl) == COMPONENT_REF
1339 || (TREE_CODE (decl) == INDIRECT_REF
1340 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1341 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1342 == REFERENCE_TYPE)))
1343 break;
1344 if (DECL_SIZE (decl)
1345 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1346 {
1347 tree decl2 = DECL_VALUE_EXPR (decl);
1348 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1349 decl2 = TREE_OPERAND (decl2, 0);
1350 gcc_assert (DECL_P (decl2));
1351 install_var_local (decl2, ctx);
1352 }
1353 install_var_local (decl, ctx);
1354 break;
1355 }
bc7bff74 1356 if (DECL_P (decl))
1357 {
1358 if (DECL_SIZE (decl)
1359 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1360 {
1361 tree decl2 = DECL_VALUE_EXPR (decl);
1362 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1363 decl2 = TREE_OPERAND (decl2, 0);
1364 gcc_assert (DECL_P (decl2));
9561765e 1365 install_var_field (decl2, true, 3, ctx);
bc7bff74 1366 install_var_local (decl2, ctx);
1367 install_var_local (decl, ctx);
1368 }
1369 else
1370 {
1371 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
ca4c3545 1372 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
bc7bff74 1373 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1374 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1375 install_var_field (decl, true, 7, ctx);
1376 else
737cc978 1377 install_var_field (decl, true, 3, ctx);
0c302595 1378 if (is_gimple_omp_offloaded (ctx->stmt)
1379 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
bc7bff74 1380 install_var_local (decl, ctx);
1381 }
1382 }
1383 else
1384 {
1385 tree base = get_base_address (decl);
1386 tree nc = OMP_CLAUSE_CHAIN (c);
1387 if (DECL_P (base)
1388 && nc != NULL_TREE
1389 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1390 && OMP_CLAUSE_DECL (nc) == base
ca4c3545 1391 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
bc7bff74 1392 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1393 {
1394 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1395 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1396 }
1397 else
1398 {
691447ab 1399 if (ctx->outer)
1400 {
1401 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1402 decl = OMP_CLAUSE_DECL (c);
1403 }
bc7bff74 1404 gcc_assert (!splay_tree_lookup (ctx->field_map,
1405 (splay_tree_key) decl));
1406 tree field
1407 = build_decl (OMP_CLAUSE_LOCATION (c),
1408 FIELD_DECL, NULL_TREE, ptr_type_node);
5d4b30ea 1409 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
bc7bff74 1410 insert_field_into_struct (ctx->record_type, field);
1411 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1412 (splay_tree_value) field);
1413 }
1414 }
1415 break;
1416
56686608 1417 case OMP_CLAUSE__GRIDDIM_:
1418 if (ctx->outer)
1419 {
1420 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1421 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1422 }
1423 break;
1424
3ab58307 1425 case OMP_CLAUSE_ORDER:
1426 ctx->order_concurrent = true;
1427 break;
1428
31890b58 1429 case OMP_CLAUSE_BIND:
1430 ctx->loop_p = true;
1431 break;
1432
1e8e9920 1433 case OMP_CLAUSE_NOWAIT:
1434 case OMP_CLAUSE_ORDERED:
fd6481cf 1435 case OMP_CLAUSE_COLLAPSE:
1436 case OMP_CLAUSE_UNTIED:
2169f33b 1437 case OMP_CLAUSE_MERGEABLE:
bc7bff74 1438 case OMP_CLAUSE_PROC_BIND:
3d483a94 1439 case OMP_CLAUSE_SAFELEN:
43895be5 1440 case OMP_CLAUSE_SIMDLEN:
1441 case OMP_CLAUSE_THREADS:
1442 case OMP_CLAUSE_SIMD:
1443 case OMP_CLAUSE_NOGROUP:
1444 case OMP_CLAUSE_DEFAULTMAP:
ca4c3545 1445 case OMP_CLAUSE_ASYNC:
1446 case OMP_CLAUSE_WAIT:
1447 case OMP_CLAUSE_GANG:
1448 case OMP_CLAUSE_WORKER:
1449 case OMP_CLAUSE_VECTOR:
f4f5b4b4 1450 case OMP_CLAUSE_INDEPENDENT:
1451 case OMP_CLAUSE_AUTO:
1452 case OMP_CLAUSE_SEQ:
719a7570 1453 case OMP_CLAUSE_TILE:
57f872be 1454 case OMP_CLAUSE__SIMT_:
093c94dd 1455 case OMP_CLAUSE_DEFAULT:
7e5a76c8 1456 case OMP_CLAUSE_NONTEMPORAL:
737cc978 1457 case OMP_CLAUSE_IF_PRESENT:
1458 case OMP_CLAUSE_FINALIZE:
7e5a76c8 1459 case OMP_CLAUSE_TASK_REDUCTION:
1e8e9920 1460 break;
1461
bc7bff74 1462 case OMP_CLAUSE_ALIGNED:
1463 decl = OMP_CLAUSE_DECL (c);
1464 if (is_global_var (decl)
1465 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1466 install_var_local (decl, ctx);
1467 break;
1468
48152aa2 1469 case OMP_CLAUSE__CONDTEMP_:
4f4b92d8 1470 decl = OMP_CLAUSE_DECL (c);
48152aa2 1471 if (is_parallel_ctx (ctx))
1472 {
48152aa2 1473 install_var_field (decl, false, 3, ctx);
1474 install_var_local (decl, ctx);
1475 }
4f4b92d8 1476 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0076df39 1477 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4f4b92d8 1478 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1479 install_var_local (decl, ctx);
48152aa2 1480 break;
1481
ca4c3545 1482 case OMP_CLAUSE__CACHE_:
1e8e9920 1483 default:
1484 gcc_unreachable ();
1485 }
1486 }
1487
1488 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1489 {
55d6e7cd 1490 switch (OMP_CLAUSE_CODE (c))
1e8e9920 1491 {
1492 case OMP_CLAUSE_LASTPRIVATE:
1493 /* Let the corresponding firstprivate clause create
1494 the variable. */
75a70cf9 1495 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
fd6481cf 1496 scan_array_reductions = true;
1e8e9920 1497 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1498 break;
1499 /* FALLTHRU */
1500
1e8e9920 1501 case OMP_CLAUSE_FIRSTPRIVATE:
ca4c3545 1502 case OMP_CLAUSE_PRIVATE:
3d483a94 1503 case OMP_CLAUSE_LINEAR:
43895be5 1504 case OMP_CLAUSE_IS_DEVICE_PTR:
1e8e9920 1505 decl = OMP_CLAUSE_DECL (c);
1506 if (is_variable_sized (decl))
43895be5 1507 {
1508 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1509 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1510 && is_gimple_omp_offloaded (ctx->stmt))
1511 {
1512 tree decl2 = DECL_VALUE_EXPR (decl);
1513 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1514 decl2 = TREE_OPERAND (decl2, 0);
1515 gcc_assert (DECL_P (decl2));
1516 install_var_local (decl2, ctx);
1517 fixup_remapped_decl (decl2, ctx, false);
1518 }
1519 install_var_local (decl, ctx);
1520 }
1e8e9920 1521 fixup_remapped_decl (decl, ctx,
55d6e7cd 1522 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1e8e9920 1523 && OMP_CLAUSE_PRIVATE_DEBUG (c));
43895be5 1524 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1525 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1e8e9920 1526 scan_array_reductions = true;
43895be5 1527 break;
1528
1529 case OMP_CLAUSE_REDUCTION:
7e5a76c8 1530 case OMP_CLAUSE_IN_REDUCTION:
43895be5 1531 decl = OMP_CLAUSE_DECL (c);
1532 if (TREE_CODE (decl) != MEM_REF)
1533 {
1534 if (is_variable_sized (decl))
1535 install_var_local (decl, ctx);
1536 fixup_remapped_decl (decl, ctx, false);
1537 }
1538 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
2b536a17 1539 scan_array_reductions = true;
1e8e9920 1540 break;
1541
7e5a76c8 1542 case OMP_CLAUSE_TASK_REDUCTION:
1543 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1544 scan_array_reductions = true;
1545 break;
1546
1e8e9920 1547 case OMP_CLAUSE_SHARED:
7e5a76c8 1548 /* Ignore shared clauses in a teams construct nested inside a
 1549 target construct. */
1550 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1551 && !is_host_teams_ctx (ctx))
bc7bff74 1552 break;
1e8e9920 1553 decl = OMP_CLAUSE_DECL (c);
43895be5 1554 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1555 break;
1556 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1557 {
1558 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1559 ctx->outer)))
1560 break;
1561 bool by_ref = use_pointer_for_field (decl, ctx);
1562 install_var_field (decl, by_ref, 11, ctx);
1563 break;
1564 }
1565 fixup_remapped_decl (decl, ctx, false);
1e8e9920 1566 break;
1567
bc7bff74 1568 case OMP_CLAUSE_MAP:
ca4c3545 1569 if (!is_gimple_omp_offloaded (ctx->stmt))
bc7bff74 1570 break;
1571 decl = OMP_CLAUSE_DECL (c);
1572 if (DECL_P (decl)
9561765e 1573 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1574 && (OMP_CLAUSE_MAP_KIND (c)
1575 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
43895be5 1576 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
bc7bff74 1577 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
b0c5e347 1578 && varpool_node::get_create (decl)->offloadable)
bc7bff74 1579 break;
1580 if (DECL_P (decl))
1581 {
43895be5 1582 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1583 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
bc7bff74 1584 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1585 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1586 {
1587 tree new_decl = lookup_decl (decl, ctx);
1588 TREE_TYPE (new_decl)
1589 = remap_type (TREE_TYPE (decl), &ctx->cb);
1590 }
1591 else if (DECL_SIZE (decl)
1592 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1593 {
1594 tree decl2 = DECL_VALUE_EXPR (decl);
1595 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1596 decl2 = TREE_OPERAND (decl2, 0);
1597 gcc_assert (DECL_P (decl2));
1598 fixup_remapped_decl (decl2, ctx, false);
1599 fixup_remapped_decl (decl, ctx, true);
1600 }
1601 else
1602 fixup_remapped_decl (decl, ctx, false);
1603 }
1604 break;
1605
1e8e9920 1606 case OMP_CLAUSE_COPYPRIVATE:
1607 case OMP_CLAUSE_COPYIN:
1608 case OMP_CLAUSE_DEFAULT:
1609 case OMP_CLAUSE_IF:
1610 case OMP_CLAUSE_NUM_THREADS:
bc7bff74 1611 case OMP_CLAUSE_NUM_TEAMS:
1612 case OMP_CLAUSE_THREAD_LIMIT:
1613 case OMP_CLAUSE_DEVICE:
1e8e9920 1614 case OMP_CLAUSE_SCHEDULE:
bc7bff74 1615 case OMP_CLAUSE_DIST_SCHEDULE:
1e8e9920 1616 case OMP_CLAUSE_NOWAIT:
1617 case OMP_CLAUSE_ORDERED:
fd6481cf 1618 case OMP_CLAUSE_COLLAPSE:
1619 case OMP_CLAUSE_UNTIED:
2169f33b 1620 case OMP_CLAUSE_FINAL:
1621 case OMP_CLAUSE_MERGEABLE:
bc7bff74 1622 case OMP_CLAUSE_PROC_BIND:
3d483a94 1623 case OMP_CLAUSE_SAFELEN:
43895be5 1624 case OMP_CLAUSE_SIMDLEN:
bc7bff74 1625 case OMP_CLAUSE_ALIGNED:
1626 case OMP_CLAUSE_DEPEND:
1627 case OMP_CLAUSE__LOOPTEMP_:
7e5a76c8 1628 case OMP_CLAUSE__REDUCTEMP_:
bc7bff74 1629 case OMP_CLAUSE_TO:
1630 case OMP_CLAUSE_FROM:
43895be5 1631 case OMP_CLAUSE_PRIORITY:
1632 case OMP_CLAUSE_GRAINSIZE:
1633 case OMP_CLAUSE_NUM_TASKS:
1634 case OMP_CLAUSE_THREADS:
1635 case OMP_CLAUSE_SIMD:
1636 case OMP_CLAUSE_NOGROUP:
1637 case OMP_CLAUSE_DEFAULTMAP:
9b53afe9 1638 case OMP_CLAUSE_ORDER:
31890b58 1639 case OMP_CLAUSE_BIND:
43895be5 1640 case OMP_CLAUSE_USE_DEVICE_PTR:
75145cfe 1641 case OMP_CLAUSE_USE_DEVICE_ADDR:
7e5a76c8 1642 case OMP_CLAUSE_NONTEMPORAL:
ca4c3545 1643 case OMP_CLAUSE_ASYNC:
1644 case OMP_CLAUSE_WAIT:
1645 case OMP_CLAUSE_NUM_GANGS:
1646 case OMP_CLAUSE_NUM_WORKERS:
1647 case OMP_CLAUSE_VECTOR_LENGTH:
1648 case OMP_CLAUSE_GANG:
1649 case OMP_CLAUSE_WORKER:
1650 case OMP_CLAUSE_VECTOR:
f4f5b4b4 1651 case OMP_CLAUSE_INDEPENDENT:
1652 case OMP_CLAUSE_AUTO:
1653 case OMP_CLAUSE_SEQ:
719a7570 1654 case OMP_CLAUSE_TILE:
56686608 1655 case OMP_CLAUSE__GRIDDIM_:
57f872be 1656 case OMP_CLAUSE__SIMT_:
737cc978 1657 case OMP_CLAUSE_IF_PRESENT:
1658 case OMP_CLAUSE_FINALIZE:
48152aa2 1659 case OMP_CLAUSE__CONDTEMP_:
ca4c3545 1660 break;
1661
ca4c3545 1662 case OMP_CLAUSE__CACHE_:
1e8e9920 1663 default:
1664 gcc_unreachable ();
1665 }
1666 }
1667
ca4c3545 1668 gcc_checking_assert (!scan_array_reductions
1669 || !is_gimple_omp_oacc (ctx->stmt));
1e8e9920 1670 if (scan_array_reductions)
d3831f71 1671 {
1672 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7e5a76c8 1673 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1674 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1675 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
d3831f71 1676 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1677 {
1678 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1679 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1680 }
1681 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1682 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1683 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1684 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1685 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1686 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1687 }
1e8e9920 1688}
1689
efa02472 1690/* Create a new name for the omp child function.  Returns an identifier.  */
1e8e9920 1691
1e8e9920 1692static tree
efa02472 1693create_omp_child_function_name (bool task_copy)
1e8e9920 1694{
87943388 1695 return clone_function_name_numbered (current_function_decl,
1696 task_copy ? "_omp_cpyfn" : "_omp_fn");
40750995 1697}
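
/* For example (illustrative, not from the sources): outlining a region of
   a function `foo' typically yields names along the lines of
   foo._omp_fn.0 or foo._omp_cpyfn.1, the trailing number coming from
   clone_function_name_numbered.  */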
1698
bab6706a 1699/* Return true if CTX may belong to offloaded code: either the current function
         1700   is offloaded, or some enclosing context corresponds to a target region.  */
1701
1702static bool
1703omp_maybe_offloaded_ctx (omp_context *ctx)
1704{
1705 if (cgraph_node::get (current_function_decl)->offloadable)
1706 return true;
1707 for (; ctx; ctx = ctx->outer)
1708 if (is_gimple_omp_offloaded (ctx->stmt))
1709 return true;
1710 return false;
1711}
1712
1e8e9920 1713/* Build a decl for the omp child function.  It will not contain a body
         1714   yet, just the bare decl.  */
1715
1716static void
fd6481cf 1717create_omp_child_function (omp_context *ctx, bool task_copy)
1e8e9920 1718{
1719 tree decl, type, name, t;
1720
efa02472 1721 name = create_omp_child_function_name (task_copy);
fd6481cf 1722 if (task_copy)
1723 type = build_function_type_list (void_type_node, ptr_type_node,
1724 ptr_type_node, NULL_TREE);
1725 else
1726 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1e8e9920 1727
40750995 1728 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1e8e9920 1729
ca4c3545 1730 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1731 || !task_copy);
fd6481cf 1732 if (!task_copy)
1733 ctx->cb.dst_fn = decl;
1734 else
75a70cf9 1735 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1e8e9920 1736
1737 TREE_STATIC (decl) = 1;
1738 TREE_USED (decl) = 1;
1739 DECL_ARTIFICIAL (decl) = 1;
1740 DECL_IGNORED_P (decl) = 0;
1741 TREE_PUBLIC (decl) = 0;
1742 DECL_UNINLINABLE (decl) = 1;
1743 DECL_EXTERNAL (decl) = 0;
1744 DECL_CONTEXT (decl) = NULL_TREE;
773c5ba7 1745 DECL_INITIAL (decl) = make_node (BLOCK);
2a066179 1746 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1a12ad87 1747 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
02e955b2 1748 /* Remove omp declare simd attribute from the new attributes. */
1749 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1750 {
1751 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1752 a = a2;
1753 a = TREE_CHAIN (a);
1754 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1755 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1756 *p = TREE_CHAIN (*p);
1757 else
1758 {
1759 tree chain = TREE_CHAIN (*p);
1760 *p = copy_node (*p);
1761 p = &TREE_CHAIN (*p);
1762 *p = chain;
1763 }
1764 }
1a12ad87 1765 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1766 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1767 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1768 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1769 DECL_FUNCTION_VERSIONED (decl)
1770 = DECL_FUNCTION_VERSIONED (current_function_decl);
1771
bab6706a 1772 if (omp_maybe_offloaded_ctx (ctx))
bc7bff74 1773 {
bab6706a 1774 cgraph_node::get_create (decl)->offloadable = 1;
1775 if (ENABLE_OFFLOADING)
1776 g->have_offload = true;
bc7bff74 1777 }
1e8e9920 1778
ec12b31a 1779 if (cgraph_node::get_create (decl)->offloadable
1780 && !lookup_attribute ("omp declare target",
1781 DECL_ATTRIBUTES (current_function_decl)))
bab6706a 1782 {
1783 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1784 ? "omp target entrypoint"
1785 : "omp declare target");
1786 DECL_ATTRIBUTES (decl)
1787 = tree_cons (get_identifier (target_attr),
1788 NULL_TREE, DECL_ATTRIBUTES (decl));
1789 }
ec12b31a 1790
e60a6f7b 1791 t = build_decl (DECL_SOURCE_LOCATION (decl),
1792 RESULT_DECL, NULL_TREE, void_type_node);
1e8e9920 1793 DECL_ARTIFICIAL (t) = 1;
1794 DECL_IGNORED_P (t) = 1;
8e5b4ed6 1795 DECL_CONTEXT (t) = decl;
1e8e9920 1796 DECL_RESULT (decl) = t;
1797
40750995 1798 tree data_name = get_identifier (".omp_data_i");
1799 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1800 ptr_type_node);
1e8e9920 1801 DECL_ARTIFICIAL (t) = 1;
84bfaaeb 1802 DECL_NAMELESS (t) = 1;
1e8e9920 1803 DECL_ARG_TYPE (t) = ptr_type_node;
773c5ba7 1804 DECL_CONTEXT (t) = current_function_decl;
1e8e9920 1805 TREE_USED (t) = 1;
43895be5 1806 TREE_READONLY (t) = 1;
1e8e9920 1807 DECL_ARGUMENTS (decl) = t;
fd6481cf 1808 if (!task_copy)
1809 ctx->receiver_decl = t;
1810 else
1811 {
e60a6f7b 1812 t = build_decl (DECL_SOURCE_LOCATION (decl),
1813 PARM_DECL, get_identifier (".omp_data_o"),
fd6481cf 1814 ptr_type_node);
1815 DECL_ARTIFICIAL (t) = 1;
84bfaaeb 1816 DECL_NAMELESS (t) = 1;
fd6481cf 1817 DECL_ARG_TYPE (t) = ptr_type_node;
1818 DECL_CONTEXT (t) = current_function_decl;
1819 TREE_USED (t) = 1;
86f2ad37 1820 TREE_ADDRESSABLE (t) = 1;
1767a056 1821 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
fd6481cf 1822 DECL_ARGUMENTS (decl) = t;
1823 }
1e8e9920 1824
48e1416a 1825 /* Allocate memory for the function structure. The call to
773c5ba7 1826 allocate_struct_function clobbers CFUN, so we need to restore
1e8e9920 1827 it afterward. */
87d4aa85 1828 push_struct_function (decl);
75a70cf9 1829 cfun->function_end_locus = gimple_location (ctx->stmt);
9ae1b28a 1830 init_tree_ssa (cfun);
87d4aa85 1831 pop_cfun ();
1e8e9920 1832}
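
/* A minimal sketch of what the above builds (illustrative; exact names
   depend on the host function):

     static void foo._omp_fn.N (void *.omp_data_i);
     static void foo._omp_cpyfn.N (void *.omp_data_o, void *.omp_data_i);

   i.e. a static, artificial, uninlinable function that receives the
   data-sharing block by pointer; for offloaded regions it is further
   marked with the "omp target entrypoint" attribute.  */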
1833
bc7bff74 1834/* Callback for walk_gimple_seq.  Check if a combined parallel
         1835   contains a gimple_omp_for_combined_into_p OMP_FOR.  */
1836
4954efd4 1837tree
1838omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1839 bool *handled_ops_p,
1840 struct walk_stmt_info *wi)
bc7bff74 1841{
42acab1c 1842 gimple *stmt = gsi_stmt (*gsi_p);
bc7bff74 1843
1844 *handled_ops_p = true;
1845 switch (gimple_code (stmt))
1846 {
1847 WALK_SUBSTMTS;
1848
1849 case GIMPLE_OMP_FOR:
1850 if (gimple_omp_for_combined_into_p (stmt)
43895be5 1851 && gimple_omp_for_kind (stmt)
1852 == *(const enum gf_mask *) (wi->info))
bc7bff74 1853 {
1854 wi->info = stmt;
1855 return integer_zero_node;
1856 }
1857 break;
1858 default:
1859 break;
1860 }
1861 return NULL;
1862}
1863
7e5a76c8 1864/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
43895be5 1865
1866static void
1867add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1868 omp_context *outer_ctx)
1869{
1870 struct walk_stmt_info wi;
1871
1872 memset (&wi, 0, sizeof (wi));
1873 wi.val_only = true;
1874 wi.info = (void *) &msk;
4954efd4 1875 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
43895be5 1876 if (wi.info != (void *) &msk)
1877 {
1878 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1879 struct omp_for_data fd;
4954efd4 1880 omp_extract_for_data (for_stmt, &fd, NULL);
43895be5 1881 /* We need two temporaries with fd.loop.v type (istart/iend)
1882 and then (fd.collapse - 1) temporaries with the same
1883 type for count2 ... countN-1 vars if not constant. */
1884 size_t count = 2, i;
1885 tree type = fd.iter_type;
1886 if (fd.collapse > 1
1887 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1888 {
1889 count += fd.collapse - 1;
9561765e 1890 /* If there are lastprivate clauses on the inner
43895be5 1891		 GIMPLE_OMP_FOR, add one more temporary for the total number
1892 of iterations (product of count1 ... countN-1). */
4954efd4 1893 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
9561765e 1894 OMP_CLAUSE_LASTPRIVATE))
1895 count++;
1896 else if (msk == GF_OMP_FOR_KIND_FOR
4954efd4 1897 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
9561765e 1898 OMP_CLAUSE_LASTPRIVATE))
43895be5 1899 count++;
1900 }
1901 for (i = 0; i < count; i++)
1902 {
1903 tree temp = create_tmp_var (type);
1904 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1905 insert_decl_map (&outer_ctx->cb, temp, temp);
1906 OMP_CLAUSE_DECL (c) = temp;
1907 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1908 gimple_omp_taskreg_set_clauses (stmt, c);
1909 }
1910 }
7e5a76c8 1911 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1912 && omp_find_clause (gimple_omp_task_clauses (stmt),
1913 OMP_CLAUSE_REDUCTION))
1914 {
1915 tree type = build_pointer_type (pointer_sized_int_node);
1916 tree temp = create_tmp_var (type);
1917 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1918 insert_decl_map (&outer_ctx->cb, temp, temp);
1919 OMP_CLAUSE_DECL (c) = temp;
1920 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1921 gimple_omp_task_set_clauses (stmt, c);
1922 }
43895be5 1923}
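
/* Worked example (illustrative; assumes the lastprivate clause ends up on
   the inner GIMPLE_OMP_FOR, as is usual for combined constructs):

     #pragma omp parallel for collapse(2) lastprivate(x)

   where the total iteration count is not a compile-time constant needs
   count = 2 (istart/iend) + 1 (count2) + 1 (total-iterations temporary)
   = 4 _LOOPTEMP_ clauses added to the enclosing parallel.  */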
1924
1e8e9920 1925/* Scan an OpenMP parallel directive. */
1926
1927static void
75a70cf9 1928scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1e8e9920 1929{
1930 omp_context *ctx;
1931 tree name;
1a91d914 1932 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1e8e9920 1933
1934 /* Ignore parallel directives with empty bodies, unless there
1935 are copyin clauses. */
1936 if (optimize > 0
75a70cf9 1937 && empty_body_p (gimple_omp_body (stmt))
4954efd4 1938 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
75a70cf9 1939 OMP_CLAUSE_COPYIN) == NULL)
1e8e9920 1940 {
75a70cf9 1941 gsi_replace (gsi, gimple_build_nop (), false);
1e8e9920 1942 return;
1943 }
1944
bc7bff74 1945 if (gimple_omp_parallel_combined_p (stmt))
43895be5 1946 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
7e5a76c8 1947 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1948 OMP_CLAUSE_REDUCTION);
1949 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1950 if (OMP_CLAUSE_REDUCTION_TASK (c))
1951 {
1952 tree type = build_pointer_type (pointer_sized_int_node);
1953 tree temp = create_tmp_var (type);
1954 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1955 if (outer_ctx)
1956 insert_decl_map (&outer_ctx->cb, temp, temp);
1957 OMP_CLAUSE_DECL (c) = temp;
1958 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1959 gimple_omp_parallel_set_clauses (stmt, c);
1960 break;
1961 }
1962 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1963 break;
bc7bff74 1964
75a70cf9 1965 ctx = new_omp_context (stmt, outer_ctx);
37eaded9 1966 taskreg_contexts.safe_push (ctx);
fd6481cf 1967 if (taskreg_nesting_level > 1)
773c5ba7 1968 ctx->is_nested = true;
1e8e9920 1969 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1e8e9920 1970 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1e8e9920 1971 name = create_tmp_var_name (".omp_data_s");
e60a6f7b 1972 name = build_decl (gimple_location (stmt),
1973 TYPE_DECL, name, ctx->record_type);
84bfaaeb 1974 DECL_ARTIFICIAL (name) = 1;
1975 DECL_NAMELESS (name) = 1;
1e8e9920 1976 TYPE_NAME (ctx->record_type) = name;
240131b5 1977 TYPE_ARTIFICIAL (ctx->record_type) = 1;
56686608 1978 if (!gimple_omp_parallel_grid_phony (stmt))
1979 {
1980 create_omp_child_function (ctx, false);
1981 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1982 }
1e8e9920 1983
75a70cf9 1984 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
ab129075 1985 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 1986
1987 if (TYPE_FIELDS (ctx->record_type) == NULL)
1988 ctx->record_type = ctx->receiver_decl = NULL;
1e8e9920 1989}
1990
fd6481cf 1991/* Scan an OpenMP task directive. */
1992
1993static void
75a70cf9 1994scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
fd6481cf 1995{
1996 omp_context *ctx;
75a70cf9 1997 tree name, t;
1a91d914 1998 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
fd6481cf 1999
e51b4c73 2000  /* Ignore task directives with empty bodies, unless they have a depend
2001 clause. */
fd6481cf 2002 if (optimize > 0
7e5a76c8 2003 && gimple_omp_body (stmt)
e51b4c73 2004 && empty_body_p (gimple_omp_body (stmt))
2005 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
fd6481cf 2006 {
75a70cf9 2007 gsi_replace (gsi, gimple_build_nop (), false);
fd6481cf 2008 return;
2009 }
2010
43895be5 2011 if (gimple_omp_task_taskloop_p (stmt))
2012 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2013
75a70cf9 2014 ctx = new_omp_context (stmt, outer_ctx);
7e5a76c8 2015
2016 if (gimple_omp_task_taskwait_p (stmt))
2017 {
2018 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2019 return;
2020 }
2021
37eaded9 2022 taskreg_contexts.safe_push (ctx);
fd6481cf 2023 if (taskreg_nesting_level > 1)
2024 ctx->is_nested = true;
2025 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
fd6481cf 2026 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2027 name = create_tmp_var_name (".omp_data_s");
e60a6f7b 2028 name = build_decl (gimple_location (stmt),
2029 TYPE_DECL, name, ctx->record_type);
84bfaaeb 2030 DECL_ARTIFICIAL (name) = 1;
2031 DECL_NAMELESS (name) = 1;
fd6481cf 2032 TYPE_NAME (ctx->record_type) = name;
240131b5 2033 TYPE_ARTIFICIAL (ctx->record_type) = 1;
fd6481cf 2034 create_omp_child_function (ctx, false);
75a70cf9 2035 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
fd6481cf 2036
75a70cf9 2037 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
fd6481cf 2038
2039 if (ctx->srecord_type)
2040 {
2041 name = create_tmp_var_name (".omp_data_a");
e60a6f7b 2042 name = build_decl (gimple_location (stmt),
2043 TYPE_DECL, name, ctx->srecord_type);
84bfaaeb 2044 DECL_ARTIFICIAL (name) = 1;
2045 DECL_NAMELESS (name) = 1;
fd6481cf 2046 TYPE_NAME (ctx->srecord_type) = name;
240131b5 2047 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
fd6481cf 2048 create_omp_child_function (ctx, true);
2049 }
2050
ab129075 2051 scan_omp (gimple_omp_body_ptr (stmt), ctx);
fd6481cf 2052
2053 if (TYPE_FIELDS (ctx->record_type) == NULL)
2054 {
2055 ctx->record_type = ctx->receiver_decl = NULL;
75a70cf9 2056 t = build_int_cst (long_integer_type_node, 0);
2057 gimple_omp_task_set_arg_size (stmt, t);
2058 t = build_int_cst (long_integer_type_node, 1);
2059 gimple_omp_task_set_arg_align (stmt, t);
fd6481cf 2060 }
37eaded9 2061}
2062
b9238860 2063/* Helper function for finish_taskreg_scan, called through walk_tree.
2064 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2065 tree, replace it in the expression. */
2066
2067static tree
2068finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2069{
2070 if (VAR_P (*tp))
2071 {
2072 omp_context *ctx = (omp_context *) data;
2073 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2074 if (t != *tp)
2075 {
2076 if (DECL_HAS_VALUE_EXPR_P (t))
2077 t = unshare_expr (DECL_VALUE_EXPR (t));
2078 *tp = t;
2079 }
2080 *walk_subtrees = 0;
2081 }
2082 else if (IS_TYPE_OR_DECL_P (*tp))
2083 *walk_subtrees = 0;
2084 return NULL_TREE;
2085}
37eaded9 2086
2087/* If any decls have been made addressable during scan_omp,
         2088   adjust their fields if needed, and lay out the record types
2089 of parallel/task constructs. */
2090
2091static void
2092finish_taskreg_scan (omp_context *ctx)
2093{
2094 if (ctx->record_type == NULL_TREE)
2095 return;
2096
         2097   /* If any task_shared_vars were needed, check for all
7e5a76c8 2098      OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
37eaded9 2099      statements whether use_pointer_for_field has changed
         2100      because of that.  If it did, update the field types now.  */
2101 if (task_shared_vars)
2102 {
2103 tree c;
2104
2105 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2106 c; c = OMP_CLAUSE_CHAIN (c))
43895be5 2107 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2108 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
37eaded9 2109 {
2110 tree decl = OMP_CLAUSE_DECL (c);
2111
2112 /* Global variables don't need to be copied,
2113 the receiver side will use them directly. */
2114 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2115 continue;
2116 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2117 || !use_pointer_for_field (decl, ctx))
2118 continue;
2119 tree field = lookup_field (decl, ctx);
2120 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2121 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2122 continue;
2123 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2124 TREE_THIS_VOLATILE (field) = 0;
2125 DECL_USER_ALIGN (field) = 0;
5d4b30ea 2126 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
37eaded9 2127 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
5d4b30ea 2128 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
37eaded9 2129 if (ctx->srecord_type)
2130 {
2131 tree sfield = lookup_sfield (decl, ctx);
2132 TREE_TYPE (sfield) = TREE_TYPE (field);
2133 TREE_THIS_VOLATILE (sfield) = 0;
2134 DECL_USER_ALIGN (sfield) = 0;
5d4b30ea 2135 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
37eaded9 2136 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
5d4b30ea 2137 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
37eaded9 2138 }
2139 }
2140 }
2141
2142 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
7e5a76c8 2143 {
2144 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2145 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2146 if (c)
2147 {
2148 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2149 expects to find it at the start of data. */
2150 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2151 tree *p = &TYPE_FIELDS (ctx->record_type);
2152 while (*p)
2153 if (*p == f)
2154 {
2155 *p = DECL_CHAIN (*p);
2156 break;
2157 }
2158 else
2159 p = &DECL_CHAIN (*p);
2160 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2161 TYPE_FIELDS (ctx->record_type) = f;
2162 }
2163 layout_type (ctx->record_type);
2164 fixup_child_record_type (ctx);
2165 }
2166 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
37eaded9 2167 {
2168 layout_type (ctx->record_type);
2169 fixup_child_record_type (ctx);
2170 }
fd6481cf 2171 else
2172 {
37eaded9 2173 location_t loc = gimple_location (ctx->stmt);
fd6481cf 2174 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2175 /* Move VLA fields to the end. */
2176 p = &TYPE_FIELDS (ctx->record_type);
2177 while (*p)
2178 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2179 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2180 {
2181 *q = *p;
2182 *p = TREE_CHAIN (*p);
2183 TREE_CHAIN (*q) = NULL_TREE;
2184 q = &TREE_CHAIN (*q);
2185 }
2186 else
1767a056 2187 p = &DECL_CHAIN (*p);
fd6481cf 2188 *p = vla_fields;
43895be5 2189 if (gimple_omp_task_taskloop_p (ctx->stmt))
2190 {
         2191	  /* Move the fields corresponding to the first and second _looptemp_
         2192	     clauses first.  These are filled by GOMP_taskloop
         2193	     and thus need to be in specific positions.  */
7e5a76c8 2194 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2195 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
4954efd4 2196 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
43895be5 2197 OMP_CLAUSE__LOOPTEMP_);
7e5a76c8 2198 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
43895be5 2199 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2200 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
7e5a76c8 2201 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
43895be5 2202 p = &TYPE_FIELDS (ctx->record_type);
2203 while (*p)
7e5a76c8 2204 if (*p == f1 || *p == f2 || *p == f3)
43895be5 2205 *p = DECL_CHAIN (*p);
2206 else
2207 p = &DECL_CHAIN (*p);
2208 DECL_CHAIN (f1) = f2;
7e5a76c8 2209 if (c3)
2210 {
2211 DECL_CHAIN (f2) = f3;
2212 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2213 }
2214 else
2215 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
43895be5 2216 TYPE_FIELDS (ctx->record_type) = f1;
2217 if (ctx->srecord_type)
2218 {
2219 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2220 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
7e5a76c8 2221 if (c3)
2222 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
43895be5 2223 p = &TYPE_FIELDS (ctx->srecord_type);
2224 while (*p)
7e5a76c8 2225 if (*p == f1 || *p == f2 || *p == f3)
43895be5 2226 *p = DECL_CHAIN (*p);
2227 else
2228 p = &DECL_CHAIN (*p);
2229 DECL_CHAIN (f1) = f2;
2230 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
7e5a76c8 2231 if (c3)
2232 {
2233 DECL_CHAIN (f2) = f3;
2234 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2235 }
2236 else
2237 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
43895be5 2238 TYPE_FIELDS (ctx->srecord_type) = f1;
2239 }
2240 }
fd6481cf 2241 layout_type (ctx->record_type);
2242 fixup_child_record_type (ctx);
2243 if (ctx->srecord_type)
2244 layout_type (ctx->srecord_type);
37eaded9 2245 tree t = fold_convert_loc (loc, long_integer_type_node,
2246 TYPE_SIZE_UNIT (ctx->record_type));
b9238860 2247 if (TREE_CODE (t) != INTEGER_CST)
2248 {
2249 t = unshare_expr (t);
2250 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2251 }
37eaded9 2252 gimple_omp_task_set_arg_size (ctx->stmt, t);
75a70cf9 2253 t = build_int_cst (long_integer_type_node,
fd6481cf 2254 TYPE_ALIGN_UNIT (ctx->record_type));
37eaded9 2255 gimple_omp_task_set_arg_align (ctx->stmt, t);
fd6481cf 2256 }
2257}
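
/* Rough sketch of the taskloop record layout produced above
   (illustrative only):

     struct .omp_data_s
     {
       <first _looptemp_ field>    <- filled in by GOMP_taskloop
       <second _looptemp_ field>   <- filled in by GOMP_taskloop
       [<_reductemp_ field>]       <- only with task reductions
       ... remaining captured fields ...
       ... variable-sized fields last ...
     };  */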
2258
a8e785ba 2259/* Find the enclosing offload context. */
1e8e9920 2260
ca4c3545 2261static omp_context *
2262enclosing_target_ctx (omp_context *ctx)
2263{
a8e785ba 2264 for (; ctx; ctx = ctx->outer)
2265 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2266 break;
2267
ca4c3545 2268 return ctx;
2269}
2270
a8e785ba 2271/* Return true if ctx is part of an oacc kernels region. */
2272
ca4c3545 2273static bool
a8e785ba 2274ctx_in_oacc_kernels_region (omp_context *ctx)
ca4c3545 2275{
a8e785ba 2276  for (; ctx != NULL; ctx = ctx->outer)
2277 {
2278 gimple *stmt = ctx->stmt;
2279 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2280 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2281 return true;
2282 }
2283
2284 return false;
2285}
2286
         2287/* Check the parallelism clauses inside a kernels region.
2288 Until kernels handling moves to use the same loop indirection
2289 scheme as parallel, we need to do this checking early. */
2290
2291static unsigned
2292check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2293{
2294 bool checking = true;
2295 unsigned outer_mask = 0;
2296 unsigned this_mask = 0;
2297 bool has_seq = false, has_auto = false;
2298
2299 if (ctx->outer)
2300 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2301 if (!stmt)
2302 {
2303 checking = false;
2304 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2305 return outer_mask;
2306 stmt = as_a <gomp_for *> (ctx->stmt);
2307 }
2308
2309 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2310 {
2311 switch (OMP_CLAUSE_CODE (c))
2312 {
2313 case OMP_CLAUSE_GANG:
2314 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2315 break;
2316 case OMP_CLAUSE_WORKER:
2317 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2318 break;
2319 case OMP_CLAUSE_VECTOR:
2320 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2321 break;
2322 case OMP_CLAUSE_SEQ:
2323 has_seq = true;
2324 break;
2325 case OMP_CLAUSE_AUTO:
2326 has_auto = true;
2327 break;
2328 default:
2329 break;
2330 }
2331 }
2332
2333 if (checking)
2334 {
2335 if (has_seq && (this_mask || has_auto))
2336 error_at (gimple_location (stmt), "%<seq%> overrides other"
2337 " OpenACC loop specifiers");
2338 else if (has_auto && this_mask)
2339 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2340 " OpenACC loop specifiers");
2341
2342 if (this_mask & outer_mask)
2343 error_at (gimple_location (stmt), "inner loop uses same"
2344 " OpenACC parallelism as containing loop");
2345 }
2346
2347 return outer_mask | this_mask;
ca4c3545 2348}
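
/* Example of the effect of the checking above (illustrative): inside an
   OpenACC kernels region,

     #pragma acc loop gang
     for (...)
       {
	 #pragma acc loop gang vector
	 for (...)
	   ...
       }

   is rejected with "inner loop uses same OpenACC parallelism as
   containing loop", whereas gang on the outer loop and only vector on
   the inner loop is accepted.  */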
2349
2350/* Scan a GIMPLE_OMP_FOR. */
1e8e9920 2351
9cf7bec9 2352static omp_context *
1a91d914 2353scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
1e8e9920 2354{
773c5ba7 2355 omp_context *ctx;
75a70cf9 2356 size_t i;
ca4c3545 2357 tree clauses = gimple_omp_for_clauses (stmt);
2358
773c5ba7 2359 ctx = new_omp_context (stmt, outer_ctx);
1e8e9920 2360
ca4c3545 2361 if (is_gimple_omp_oacc (stmt))
2362 {
a8e785ba 2363 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2364
2365 if (!tgt || is_oacc_parallel (tgt))
2366 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2367 {
2368 char const *check = NULL;
2369
2370 switch (OMP_CLAUSE_CODE (c))
2371 {
2372 case OMP_CLAUSE_GANG:
2373 check = "gang";
2374 break;
2375
2376 case OMP_CLAUSE_WORKER:
2377 check = "worker";
2378 break;
2379
2380 case OMP_CLAUSE_VECTOR:
2381 check = "vector";
2382 break;
2383
2384 default:
2385 break;
2386 }
2387
2388 if (check && OMP_CLAUSE_OPERAND (c, 0))
2389 error_at (gimple_location (stmt),
2390 "argument not permitted on %qs clause in"
2391 " OpenACC %<parallel%>", check);
2392 }
2393
2394 if (tgt && is_oacc_kernels (tgt))
2395 {
7e5a76c8 2396 /* Strip out reductions, as they are not handled yet. */
a8e785ba 2397 tree *prev_ptr = &clauses;
2398
2399 while (tree probe = *prev_ptr)
ca4c3545 2400 {
a8e785ba 2401 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2402
2403 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2404 *prev_ptr = *next_ptr;
2405 else
2406 prev_ptr = next_ptr;
ca4c3545 2407 }
a8e785ba 2408
2409 gimple_omp_for_set_clauses (stmt, clauses);
2410 check_oacc_kernel_gwv (stmt, ctx);
ca4c3545 2411 }
2412 }
2413
2414 scan_sharing_clauses (clauses, ctx);
1e8e9920 2415
ab129075 2416 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
75a70cf9 2417 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
fd6481cf 2418 {
75a70cf9 2419 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2420 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2421 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2422 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
fd6481cf 2423 }
ab129075 2424 scan_omp (gimple_omp_body_ptr (stmt), ctx);
9cf7bec9 2425 return ctx;
1e8e9920 2426}
2427
57f872be 2428/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2429
2430static void
2431scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2432 omp_context *outer_ctx)
2433{
2434 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2435 gsi_replace (gsi, bind, false);
2436 gimple_seq seq = NULL;
2437 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2438 tree cond = create_tmp_var_raw (integer_type_node);
2439 DECL_CONTEXT (cond) = current_function_decl;
2440 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2441 gimple_bind_set_vars (bind, cond);
2442 gimple_call_set_lhs (g, cond);
2443 gimple_seq_add_stmt (&seq, g);
2444 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2445 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2446 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2447 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2448 gimple_seq_add_stmt (&seq, g);
2449 g = gimple_build_label (lab1);
2450 gimple_seq_add_stmt (&seq, g);
2451 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2452 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2453 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2454 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2455 gimple_omp_for_set_clauses (new_stmt, clause);
2456 gimple_seq_add_stmt (&seq, new_stmt);
2457 g = gimple_build_goto (lab3);
2458 gimple_seq_add_stmt (&seq, g);
2459 g = gimple_build_label (lab2);
2460 gimple_seq_add_stmt (&seq, g);
2461 gimple_seq_add_stmt (&seq, stmt);
2462 g = gimple_build_label (lab3);
2463 gimple_seq_add_stmt (&seq, g);
2464 gimple_bind_set_body (bind, seq);
2465 update_stmt (bind);
2466 scan_omp_for (new_stmt, outer_ctx);
9cf7bec9 2467 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
57f872be 2468}
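
/* The bind built above has roughly this shape (illustrative pseudo-GIMPLE):

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1:
       <copy of the simd loop, with an added _simt_ clause>
       goto lab3;
     lab2:
       <the original simd loop>
     lab3:  */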
2469
3d2b49b2 2470static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2471 struct walk_stmt_info *);
2472static omp_context *maybe_lookup_ctx (gimple *);
2473
2474/* Duplicate #pragma omp simd, one for the scan input phase loop and one
         2475   for the scan phase loop.  */
2476
2477static void
2478scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2479 omp_context *outer_ctx)
2480{
2481 /* The only change between inclusive and exclusive scan will be
2482 within the first simd loop, so just use inclusive in the
2483 worksharing loop. */
2484 outer_ctx->scan_inclusive = true;
2485 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2486 OMP_CLAUSE_DECL (c) = integer_zero_node;
2487
2488 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2489 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2490 gsi_replace (gsi, input_stmt, false);
2491 gimple_seq input_body = NULL;
2492 gimple_seq_add_stmt (&input_body, stmt);
2493 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2494
2495 gimple_stmt_iterator input1_gsi = gsi_none ();
2496 struct walk_stmt_info wi;
2497 memset (&wi, 0, sizeof (wi));
2498 wi.val_only = true;
2499 wi.info = (void *) &input1_gsi;
2500 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2501 gcc_assert (!gsi_end_p (input1_gsi));
2502
2503 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2504 gsi_next (&input1_gsi);
2505 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2506 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2507 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2508 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2509 std::swap (input_stmt1, scan_stmt1);
2510
2511 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2512 gimple_omp_set_body (input_stmt1, NULL);
2513
2514 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2515 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2516
2517 gimple_omp_set_body (input_stmt1, input_body1);
2518 gimple_omp_set_body (scan_stmt1, NULL);
2519
2520 gimple_stmt_iterator input2_gsi = gsi_none ();
2521 memset (&wi, 0, sizeof (wi));
2522 wi.val_only = true;
2523 wi.info = (void *) &input2_gsi;
2524 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2525 NULL, &wi);
2526 gcc_assert (!gsi_end_p (input2_gsi));
2527
2528 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2529 gsi_next (&input2_gsi);
2530 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2531 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2532 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2533 std::swap (input_stmt2, scan_stmt2);
2534
2535 gimple_omp_set_body (input_stmt2, NULL);
2536
2537 gimple_omp_set_body (input_stmt, input_body);
2538 gimple_omp_set_body (scan_stmt, scan_body);
2539
2540 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2541 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2542
2543 ctx = new_omp_context (scan_stmt, outer_ctx);
2544 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2545
2546 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2547}
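
/* Illustrative source-level example of the kind of construct split here
   (assuming the usual OpenMP 5.0 inscan syntax; not taken from the
   sources):

     #pragma omp for simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];
	 #pragma omp scan inclusive (r)
	 b[i] = r;
       }

   The simd loop is duplicated: in the first copy only the statements
   before the scan directive remain (input phase), in the second copy
   only the statements after it (scan phase), and each copy is wrapped
   in a GIMPLE_OMP_SCAN.  */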
2548
1e8e9920 2549/* Scan an OpenMP sections directive. */
2550
2551static void
1a91d914 2552scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
1e8e9920 2553{
1e8e9920 2554 omp_context *ctx;
2555
2556 ctx = new_omp_context (stmt, outer_ctx);
75a70cf9 2557 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
ab129075 2558 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 2559}
2560
2561/* Scan an OpenMP single directive. */
2562
2563static void
1a91d914 2564scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
1e8e9920 2565{
1e8e9920 2566 omp_context *ctx;
2567 tree name;
2568
2569 ctx = new_omp_context (stmt, outer_ctx);
2570 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2571 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2572 name = create_tmp_var_name (".omp_copy_s");
e60a6f7b 2573 name = build_decl (gimple_location (stmt),
2574 TYPE_DECL, name, ctx->record_type);
1e8e9920 2575 TYPE_NAME (ctx->record_type) = name;
2576
75a70cf9 2577 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
ab129075 2578 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 2579
2580 if (TYPE_FIELDS (ctx->record_type) == NULL)
2581 ctx->record_type = NULL;
2582 else
2583 layout_type (ctx->record_type);
2584}
2585
ca4c3545 2586/* Scan a GIMPLE_OMP_TARGET. */
bc7bff74 2587
2588static void
1a91d914 2589scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
bc7bff74 2590{
2591 omp_context *ctx;
2592 tree name;
ca4c3545 2593 bool offloaded = is_gimple_omp_offloaded (stmt);
2594 tree clauses = gimple_omp_target_clauses (stmt);
bc7bff74 2595
2596 ctx = new_omp_context (stmt, outer_ctx);
2597 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
bc7bff74 2598 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2599 name = create_tmp_var_name (".omp_data_t");
2600 name = build_decl (gimple_location (stmt),
2601 TYPE_DECL, name, ctx->record_type);
2602 DECL_ARTIFICIAL (name) = 1;
2603 DECL_NAMELESS (name) = 1;
2604 TYPE_NAME (ctx->record_type) = name;
240131b5 2605 TYPE_ARTIFICIAL (ctx->record_type) = 1;
12dc9a16 2606
ca4c3545 2607 if (offloaded)
bc7bff74 2608 {
2609 create_omp_child_function (ctx, false);
2610 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2611 }
2612
737cc978 2613 scan_sharing_clauses (clauses, ctx);
bc7bff74 2614 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2615
2616 if (TYPE_FIELDS (ctx->record_type) == NULL)
2617 ctx->record_type = ctx->receiver_decl = NULL;
2618 else
2619 {
2620 TYPE_FIELDS (ctx->record_type)
2621 = nreverse (TYPE_FIELDS (ctx->record_type));
382ecba7 2622 if (flag_checking)
2623 {
2624 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2625 for (tree field = TYPE_FIELDS (ctx->record_type);
2626 field;
2627 field = DECL_CHAIN (field))
2628 gcc_assert (DECL_ALIGN (field) == align);
2629 }
bc7bff74 2630 layout_type (ctx->record_type);
ca4c3545 2631 if (offloaded)
bc7bff74 2632 fixup_child_record_type (ctx);
2633 }
2634}
2635
2636/* Scan an OpenMP teams directive. */
2637
2638static void
1a91d914 2639scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
bc7bff74 2640{
2641 omp_context *ctx = new_omp_context (stmt, outer_ctx);
7e5a76c8 2642
2643 if (!gimple_omp_teams_host (stmt))
2644 {
2645 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2646 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2647 return;
2648 }
2649 taskreg_contexts.safe_push (ctx);
2650 gcc_assert (taskreg_nesting_level == 1);
2651 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2652 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2653 tree name = create_tmp_var_name (".omp_data_s");
2654 name = build_decl (gimple_location (stmt),
2655 TYPE_DECL, name, ctx->record_type);
2656 DECL_ARTIFICIAL (name) = 1;
2657 DECL_NAMELESS (name) = 1;
2658 TYPE_NAME (ctx->record_type) = name;
2659 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2660 create_omp_child_function (ctx, false);
2661 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2662
bc7bff74 2663 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2664 scan_omp (gimple_omp_body_ptr (stmt), ctx);
7e5a76c8 2665
2666 if (TYPE_FIELDS (ctx->record_type) == NULL)
2667 ctx->record_type = ctx->receiver_decl = NULL;
bc7bff74 2668}
1e8e9920 2669
ca4c3545 2670/* Check nesting restrictions. */
ab129075 2671static bool
42acab1c 2672check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
c1d127dd 2673{
43895be5 2674 tree c;
2675
56686608 2676 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
         2677    /* GRID_BODY is an artificial construct; nesting rules will be checked in
         2678       the original copy of its contents.  */
2679 return true;
2680
ca4c3545 2681 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2682 inside an OpenACC CTX. */
2683 if (!(is_gimple_omp (stmt)
32f692e2 2684 && is_gimple_omp_oacc (stmt))
2685 /* Except for atomic codes that we share with OpenMP. */
2686 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2687 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2688 {
4954efd4 2689 if (oacc_get_fn_attrib (cfun->decl) != NULL)
32f692e2 2690 {
2691 error_at (gimple_location (stmt),
2692 "non-OpenACC construct inside of OpenACC routine");
2693 return false;
2694 }
2695 else
2696 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2697 if (is_gimple_omp (octx->stmt)
2698 && is_gimple_omp_oacc (octx->stmt))
2699 {
2700 error_at (gimple_location (stmt),
2701 "non-OpenACC construct inside of OpenACC region");
2702 return false;
2703 }
ca4c3545 2704 }
2705
3d483a94 2706 if (ctx != NULL)
2707 {
70a6624c 2708 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2709 && ctx->outer
2710 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2711 ctx = ctx->outer;
3d483a94 2712 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
31890b58 2713 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2714 && !ctx->loop_p)
3d483a94 2715 {
43895be5 2716 c = NULL_TREE;
3ab58307 2717 if (ctx->order_concurrent
2718 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2719 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2720 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2721 {
2722 error_at (gimple_location (stmt),
31890b58 2723 "OpenMP constructs other than %<parallel%>, %<loop%>"
2724 " or %<simd%> may not be nested inside a region with"
3ab58307 2725 " the %<order(concurrent)%> clause");
2726 return false;
2727 }
43895be5 2728 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2729 {
2730 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
4954efd4 2731 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
7821c1b5 2732 {
4954efd4 2733 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
7821c1b5 2734 && (ctx->outer == NULL
2735 || !gimple_omp_for_combined_into_p (ctx->stmt)
2736 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2737 || (gimple_omp_for_kind (ctx->outer->stmt)
2738 != GF_OMP_FOR_KIND_FOR)
2739 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2740 {
2741 error_at (gimple_location (stmt),
2742 "%<ordered simd threads%> must be closely "
2743 "nested inside of %<for simd%> region");
2744 return false;
2745 }
2746 return true;
2747 }
43895be5 2748 }
7e5a76c8 2749 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
70a6624c 2750 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2751 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
7e5a76c8 2752 return true;
31890b58 2753 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2754 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2755 return true;
3d483a94 2756 error_at (gimple_location (stmt),
31890b58 2757 "OpenMP constructs other than "
2758 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2759 "not be nested inside %<simd%> region");
3d483a94 2760 return false;
2761 }
bc7bff74 2762 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2763 {
2764 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
31890b58 2765 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2766 && gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
2767 && omp_find_clause (gimple_omp_for_clauses (stmt),
2768 OMP_CLAUSE_BIND) == NULL_TREE))
bc7bff74 2769 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2770 {
2771 error_at (gimple_location (stmt),
31890b58 2772 "only %<distribute%>, %<parallel%> or %<loop%> "
2773 "regions are allowed to be strictly nested inside "
2774 "%<teams%> region");
bc7bff74 2775 return false;
2776 }
2777 }
3ab58307 2778 else if (ctx->order_concurrent
2779 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2780 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2781 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2782 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2783 {
31890b58 2784 if (ctx->loop_p)
2785 error_at (gimple_location (stmt),
2786 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2787 "%<simd%> may not be nested inside a %<loop%> region");
2788 else
2789 error_at (gimple_location (stmt),
2790 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2791 "%<simd%> may not be nested inside a region with "
2792 "the %<order(concurrent)%> clause");
3ab58307 2793 return false;
2794 }
3d483a94 2795 }
75a70cf9 2796 switch (gimple_code (stmt))
c1d127dd 2797 {
75a70cf9 2798 case GIMPLE_OMP_FOR:
0076df39 2799 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3d483a94 2800 return true;
bc7bff74 2801 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2802 {
2803 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2804 {
2805 error_at (gimple_location (stmt),
7821c1b5 2806 "%<distribute%> region must be strictly nested "
2807 "inside %<teams%> construct");
bc7bff74 2808 return false;
2809 }
2810 return true;
2811 }
43895be5 2812 /* We split taskloop into task and nested taskloop in it. */
2813 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2814 return true;
31890b58 2815 /* For now, hope this will change and loop bind(parallel) will not
2816 be allowed in lots of contexts. */
2817 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
2818 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
2819 return true;
7a1ed40d 2820 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2821 {
2822 bool ok = false;
7c6746c9 2823
7a1ed40d 2824 if (ctx)
2825 switch (gimple_code (ctx->stmt))
2826 {
2827 case GIMPLE_OMP_FOR:
2828 ok = (gimple_omp_for_kind (ctx->stmt)
2829 == GF_OMP_FOR_KIND_OACC_LOOP);
2830 break;
2831
2832 case GIMPLE_OMP_TARGET:
2833 switch (gimple_omp_target_kind (ctx->stmt))
2834 {
2835 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2836 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2837 ok = true;
2838 break;
2839
2840 default:
2841 break;
2842 }
2843
2844 default:
2845 break;
2846 }
4954efd4 2847 else if (oacc_get_fn_attrib (current_function_decl))
7a1ed40d 2848 ok = true;
2849 if (!ok)
2850 {
2851 error_at (gimple_location (stmt),
2852 "OpenACC loop directive must be associated with"
2853 " an OpenACC compute region");
2854 return false;
2855 }
2856 }
bc7bff74 2857 /* FALLTHRU */
2858 case GIMPLE_CALL:
2859 if (is_gimple_call (stmt)
2860 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2861 == BUILT_IN_GOMP_CANCEL
2862 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2863 == BUILT_IN_GOMP_CANCELLATION_POINT))
2864 {
2865 const char *bad = NULL;
2866 const char *kind = NULL;
7821c1b5 2867 const char *construct
2868 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2869 == BUILT_IN_GOMP_CANCEL)
31890b58 2870 ? "cancel"
2871 : "cancellation point";
bc7bff74 2872 if (ctx == NULL)
2873 {
2874 error_at (gimple_location (stmt), "orphaned %qs construct",
7821c1b5 2875 construct);
bc7bff74 2876 return false;
2877 }
6b409616 2878 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2879 ? tree_to_shwi (gimple_call_arg (stmt, 0))
bc7bff74 2880 : 0)
2881 {
2882 case 1:
2883 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
31890b58 2884 bad = "parallel";
bc7bff74 2885 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2886 == BUILT_IN_GOMP_CANCEL
2887 && !integer_zerop (gimple_call_arg (stmt, 1)))
2888 ctx->cancellable = true;
2889 kind = "parallel";
2890 break;
2891 case 2:
2892 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2893 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
31890b58 2894 bad = "for";
bc7bff74 2895 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2896 == BUILT_IN_GOMP_CANCEL
2897 && !integer_zerop (gimple_call_arg (stmt, 1)))
2898 {
2899 ctx->cancellable = true;
4954efd4 2900 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
bc7bff74 2901 OMP_CLAUSE_NOWAIT))
2902 warning_at (gimple_location (stmt), 0,
31890b58 2903 "%<cancel for%> inside "
bc7bff74 2904 "%<nowait%> for construct");
4954efd4 2905 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
bc7bff74 2906 OMP_CLAUSE_ORDERED))
2907 warning_at (gimple_location (stmt), 0,
31890b58 2908 "%<cancel for%> inside "
bc7bff74 2909 "%<ordered%> for construct");
2910 }
2911 kind = "for";
2912 break;
2913 case 4:
2914 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2915 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
31890b58 2916 bad = "sections";
bc7bff74 2917 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2918 == BUILT_IN_GOMP_CANCEL
2919 && !integer_zerop (gimple_call_arg (stmt, 1)))
2920 {
2921 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2922 {
2923 ctx->cancellable = true;
4954efd4 2924 if (omp_find_clause (gimple_omp_sections_clauses
bc7bff74 2925 (ctx->stmt),
2926 OMP_CLAUSE_NOWAIT))
2927 warning_at (gimple_location (stmt), 0,
31890b58 2928 "%<cancel sections%> inside "
bc7bff74 2929 "%<nowait%> sections construct");
2930 }
2931 else
2932 {
2933 gcc_assert (ctx->outer
2934 && gimple_code (ctx->outer->stmt)
2935 == GIMPLE_OMP_SECTIONS);
2936 ctx->outer->cancellable = true;
4954efd4 2937 if (omp_find_clause (gimple_omp_sections_clauses
bc7bff74 2938 (ctx->outer->stmt),
2939 OMP_CLAUSE_NOWAIT))
2940 warning_at (gimple_location (stmt), 0,
31890b58 2941 "%<cancel sections%> inside "
bc7bff74 2942 "%<nowait%> sections construct");
2943 }
2944 }
2945 kind = "sections";
2946 break;
2947 case 8:
69cc430f 2948 if (!is_task_ctx (ctx)
2949 && (!is_taskloop_ctx (ctx)
2950 || ctx->outer == NULL
2951 || !is_task_ctx (ctx->outer)))
31890b58 2952 bad = "task";
bc7bff74 2953 else
7821c1b5 2954 {
2955 for (omp_context *octx = ctx->outer;
2956 octx; octx = octx->outer)
2957 {
2958 switch (gimple_code (octx->stmt))
2959 {
2960 case GIMPLE_OMP_TASKGROUP:
2961 break;
2962 case GIMPLE_OMP_TARGET:
2963 if (gimple_omp_target_kind (octx->stmt)
2964 != GF_OMP_TARGET_KIND_REGION)
2965 continue;
2966 /* FALLTHRU */
2967 case GIMPLE_OMP_PARALLEL:
2968 case GIMPLE_OMP_TEAMS:
2969 error_at (gimple_location (stmt),
2970 "%<%s taskgroup%> construct not closely "
2971 "nested inside of %<taskgroup%> region",
2972 construct);
2973 return false;
69cc430f 2974 case GIMPLE_OMP_TASK:
2975 if (gimple_omp_task_taskloop_p (octx->stmt)
2976 && octx->outer
2977 && is_taskloop_ctx (octx->outer))
2978 {
2979 tree clauses
2980 = gimple_omp_for_clauses (octx->outer->stmt);
2981 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2982 break;
2983 }
2984 continue;
7821c1b5 2985 default:
2986 continue;
2987 }
2988 break;
2989 }
2990 ctx->cancellable = true;
2991 }
bc7bff74 2992 kind = "taskgroup";
2993 break;
2994 default:
2995 error_at (gimple_location (stmt), "invalid arguments");
2996 return false;
2997 }
2998 if (bad)
2999 {
3000 error_at (gimple_location (stmt),
3001 "%<%s %s%> construct not closely nested inside of %qs",
7821c1b5 3002 construct, kind, bad);
bc7bff74 3003 return false;
3004 }
3005 }
3d483a94 3006 /* FALLTHRU */
75a70cf9 3007 case GIMPLE_OMP_SECTIONS:
3008 case GIMPLE_OMP_SINGLE:
c1d127dd 3009 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 3010 switch (gimple_code (ctx->stmt))
c1d127dd 3011 {
75a70cf9 3012 case GIMPLE_OMP_FOR:
7821c1b5 3013 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3014 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3015 break;
3016 /* FALLTHRU */
75a70cf9 3017 case GIMPLE_OMP_SECTIONS:
3018 case GIMPLE_OMP_SINGLE:
3019 case GIMPLE_OMP_ORDERED:
3020 case GIMPLE_OMP_MASTER:
3021 case GIMPLE_OMP_TASK:
bc7bff74 3022 case GIMPLE_OMP_CRITICAL:
75a70cf9 3023 if (is_gimple_call (stmt))
fd6481cf 3024 {
bc7bff74 3025 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3026 != BUILT_IN_GOMP_BARRIER)
3027 return true;
ab129075 3028 error_at (gimple_location (stmt),
3029 "barrier region may not be closely nested inside "
31890b58 3030 "of work-sharing, %<loop%>, %<critical%>, "
3031 "%<ordered%>, %<master%>, explicit %<task%> or "
3032 "%<taskloop%> region");
ab129075 3033 return false;
fd6481cf 3034 }
ab129075 3035 error_at (gimple_location (stmt),
3036 "work-sharing region may not be closely nested inside "
31890b58 3037 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
7821c1b5 3038 "%<master%>, explicit %<task%> or %<taskloop%> region");
ab129075 3039 return false;
75a70cf9 3040 case GIMPLE_OMP_PARALLEL:
7821c1b5 3041 case GIMPLE_OMP_TEAMS:
ab129075 3042 return true;
7821c1b5 3043 case GIMPLE_OMP_TARGET:
3044 if (gimple_omp_target_kind (ctx->stmt)
3045 == GF_OMP_TARGET_KIND_REGION)
3046 return true;
3047 break;
c1d127dd 3048 default:
3049 break;
3050 }
3051 break;
75a70cf9 3052 case GIMPLE_OMP_MASTER:
c1d127dd 3053 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 3054 switch (gimple_code (ctx->stmt))
c1d127dd 3055 {
75a70cf9 3056 case GIMPLE_OMP_FOR:
7821c1b5 3057 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3058 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3059 break;
3060 /* FALLTHRU */
75a70cf9 3061 case GIMPLE_OMP_SECTIONS:
3062 case GIMPLE_OMP_SINGLE:
3063 case GIMPLE_OMP_TASK:
ab129075 3064 error_at (gimple_location (stmt),
7821c1b5 3065 "%<master%> region may not be closely nested inside "
31890b58 3066 "of work-sharing, %<loop%>, explicit %<task%> or "
3067 "%<taskloop%> region");
ab129075 3068 return false;
75a70cf9 3069 case GIMPLE_OMP_PARALLEL:
7821c1b5 3070 case GIMPLE_OMP_TEAMS:
ab129075 3071 return true;
7821c1b5 3072 case GIMPLE_OMP_TARGET:
3073 if (gimple_omp_target_kind (ctx->stmt)
3074 == GF_OMP_TARGET_KIND_REGION)
3075 return true;
3076 break;
c1d127dd 3077 default:
3078 break;
3079 }
3080 break;
43895be5 3081 case GIMPLE_OMP_TASK:
3082 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3083 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3084 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3085 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3086 {
3087 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3088 error_at (OMP_CLAUSE_LOCATION (c),
3089 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3090 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3091 return false;
3092 }
3093 break;
75a70cf9 3094 case GIMPLE_OMP_ORDERED:
43895be5 3095 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3096 c; c = OMP_CLAUSE_CHAIN (c))
3097 {
3098 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3099 {
3100 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
7821c1b5 3101 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
43895be5 3102 continue;
3103 }
3104 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3105 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3106 || kind == OMP_CLAUSE_DEPEND_SINK)
3107 {
3108 tree oclause;
3109 /* Look for containing ordered(N) loop. */
3110 if (ctx == NULL
3111 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3112 || (oclause
4954efd4 3113 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
43895be5 3114 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3115 {
3116 error_at (OMP_CLAUSE_LOCATION (c),
7821c1b5 3117 "%<ordered%> construct with %<depend%> clause "
3118 "must be closely nested inside an %<ordered%> "
3119 "loop");
43895be5 3120 return false;
3121 }
3122 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3123 {
3124 error_at (OMP_CLAUSE_LOCATION (c),
7821c1b5 3125 "%<ordered%> construct with %<depend%> clause "
3126 "must be closely nested inside a loop with "
3127 "%<ordered%> clause with a parameter");
43895be5 3128 return false;
3129 }
3130 }
3131 else
3132 {
3133 error_at (OMP_CLAUSE_LOCATION (c),
7821c1b5 3134 "invalid depend kind in omp %<ordered%> %<depend%>");
3135 return false;
3136 }
3137 }
3138 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
4954efd4 3139 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
7821c1b5 3140 {
3141 /* ordered simd must be closely nested inside of simd region,
3142 and simd region must not encounter constructs other than
3143 ordered simd, therefore ordered simd may be either orphaned,
         3144	     or ctx->stmt must be simd.  The latter case has already been
         3145	     handled above.  */
3146 if (ctx != NULL)
3147 {
3148 error_at (gimple_location (stmt),
3149 "%<ordered%> %<simd%> must be closely nested inside "
3150 "%<simd%> region");
43895be5 3151 return false;
3152 }
3153 }
c1d127dd 3154 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 3155 switch (gimple_code (ctx->stmt))
c1d127dd 3156 {
75a70cf9 3157 case GIMPLE_OMP_CRITICAL:
3158 case GIMPLE_OMP_TASK:
7821c1b5 3159 case GIMPLE_OMP_ORDERED:
3160 ordered_in_taskloop:
ab129075 3161 error_at (gimple_location (stmt),
7821c1b5 3162 "%<ordered%> region may not be closely nested inside "
3163 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3164 "%<taskloop%> region");
ab129075 3165 return false;
75a70cf9 3166 case GIMPLE_OMP_FOR:
7821c1b5 3167 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3168 goto ordered_in_taskloop;
a0226394 3169 tree o;
3170 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3171 OMP_CLAUSE_ORDERED);
3172 if (o == NULL)
ab129075 3173 {
3174 error_at (gimple_location (stmt),
7821c1b5 3175 "%<ordered%> region must be closely nested inside "
3176 "a loop region with an %<ordered%> clause");
ab129075 3177 return false;
3178 }
a0226394 3179 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3180 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3181 {
3182 error_at (gimple_location (stmt),
3183 "%<ordered%> region without %<depend%> clause may "
3184 "not be closely nested inside a loop region with "
3185 "an %<ordered%> clause with a parameter");
3186 return false;
3187 }
ab129075 3188 return true;
7821c1b5 3189 case GIMPLE_OMP_TARGET:
3190 if (gimple_omp_target_kind (ctx->stmt)
3191 != GF_OMP_TARGET_KIND_REGION)
3192 break;
3193 /* FALLTHRU */
75a70cf9 3194 case GIMPLE_OMP_PARALLEL:
7821c1b5 3195 case GIMPLE_OMP_TEAMS:
bc7bff74 3196 error_at (gimple_location (stmt),
7821c1b5 3197 "%<ordered%> region must be closely nested inside "
3198 "a loop region with an %<ordered%> clause");
bc7bff74 3199 return false;
c1d127dd 3200 default:
3201 break;
3202 }
3203 break;
75a70cf9 3204 case GIMPLE_OMP_CRITICAL:
1a91d914 3205 {
3206 tree this_stmt_name
3207 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3208 for (; ctx != NULL; ctx = ctx->outer)
3209 if (gomp_critical *other_crit
3210 = dyn_cast <gomp_critical *> (ctx->stmt))
3211 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3212 {
3213 error_at (gimple_location (stmt),
7821c1b5 3214 "%<critical%> region may not be nested inside "
3215 "a %<critical%> region with the same name");
1a91d914 3216 return false;
3217 }
3218 }
c1d127dd 3219 break;
bc7bff74 3220 case GIMPLE_OMP_TEAMS:
7e5a76c8 3221 if (ctx == NULL)
3222 break;
3223 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3224 || (gimple_omp_target_kind (ctx->stmt)
3225 != GF_OMP_TARGET_KIND_REGION))
bc7bff74 3226 {
7e5a76c8 3227 /* Teams construct can appear either strictly nested inside of
3228 target construct with no intervening stmts, or can be encountered
         3229	     only by an initial task (so it must not appear inside any OpenMP
         3230	     construct).  */
bc7bff74 3231 error_at (gimple_location (stmt),
7e5a76c8 3232 "%<teams%> construct must be closely nested inside of "
3233 "%<target%> construct or not nested in any OpenMP "
3234 "construct");
bc7bff74 3235 return false;
3236 }
3237 break;
691447ab 3238 case GIMPLE_OMP_TARGET:
43895be5 3239 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3240 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3241 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3242 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3243 {
3244 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3245 error_at (OMP_CLAUSE_LOCATION (c),
3246 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3247 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3248 return false;
3249 }
32f692e2 3250 if (is_gimple_omp_offloaded (stmt)
4954efd4 3251 && oacc_get_fn_attrib (cfun->decl) != NULL)
32f692e2 3252 {
3253 error_at (gimple_location (stmt),
3254 "OpenACC region inside of OpenACC routine, nested "
3255 "parallelism not supported yet");
3256 return false;
3257 }
691447ab 3258 for (; ctx != NULL; ctx = ctx->outer)
ca4c3545 3259 {
3260 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3261 {
3262 if (is_gimple_omp (stmt)
3263 && is_gimple_omp_oacc (stmt)
3264 && is_gimple_omp (ctx->stmt))
3265 {
3266 error_at (gimple_location (stmt),
3267 "OpenACC construct inside of non-OpenACC region");
3268 return false;
3269 }
3270 continue;
3271 }
3272
3273 const char *stmt_name, *ctx_stmt_name;
3274 switch (gimple_omp_target_kind (stmt))
3275 {
3276 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3277 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3278 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
43895be5 3279 case GF_OMP_TARGET_KIND_ENTER_DATA:
3280 stmt_name = "target enter data"; break;
3281 case GF_OMP_TARGET_KIND_EXIT_DATA:
3282 stmt_name = "target exit data"; break;
ca4c3545 3283 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3284 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3285 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3286 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
43895be5 3287 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3288 stmt_name = "enter/exit data"; break;
acb41570 3289 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
571b3486 3290 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3291 break;
ca4c3545 3292 default: gcc_unreachable ();
3293 }
3294 switch (gimple_omp_target_kind (ctx->stmt))
3295 {
3296 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3297 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
43895be5 3298 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3299 ctx_stmt_name = "parallel"; break;
3300 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3301 ctx_stmt_name = "kernels"; break;
ca4c3545 3302 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
571b3486 3303 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3304 ctx_stmt_name = "host_data"; break;
ca4c3545 3305 default: gcc_unreachable ();
3306 }
3307
3308 /* OpenACC/OpenMP mismatch? */
3309 if (is_gimple_omp_oacc (stmt)
3310 != is_gimple_omp_oacc (ctx->stmt))
3311 {
3312 error_at (gimple_location (stmt),
7821c1b5 3313 "%s %qs construct inside of %s %qs region",
ca4c3545 3314 (is_gimple_omp_oacc (stmt)
3315 ? "OpenACC" : "OpenMP"), stmt_name,
3316 (is_gimple_omp_oacc (ctx->stmt)
3317 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3318 return false;
3319 }
3320 if (is_gimple_omp_offloaded (ctx->stmt))
3321 {
3322 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3323 if (is_gimple_omp_oacc (ctx->stmt))
3324 {
3325 error_at (gimple_location (stmt),
7821c1b5 3326 "%qs construct inside of %qs region",
ca4c3545 3327 stmt_name, ctx_stmt_name);
3328 return false;
3329 }
3330 else
3331 {
ca4c3545 3332 warning_at (gimple_location (stmt), 0,
7821c1b5 3333 "%qs construct inside of %qs region",
ca4c3545 3334 stmt_name, ctx_stmt_name);
3335 }
3336 }
3337 }
691447ab 3338 break;
c1d127dd 3339 default:
3340 break;
3341 }
ab129075 3342 return true;
c1d127dd 3343}
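/* Illustrative sketch only (not part of the original file): the checks in
   check_omp_nesting_restrictions reject, among other things, user code like

     #pragma omp critical (lck)
     {
       #pragma omp critical (lck)	// same name nested inside itself
       x++;
     }

   or

     #pragma omp parallel
     {
       #pragma omp teams		// teams neither closely nested in
       ;				// target nor outside all constructs
     }

   while e.g. a teams region immediately inside a target region passes.  */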
3344
3345
75a70cf9 3346/* Helper function for scan_omp.
3347
3348 Callback for walk_tree or operators in walk_gimple_stmt used to
ca4c3545 3349 scan for OMP directives in TP. */
1e8e9920 3350
3351static tree
75a70cf9 3352scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
1e8e9920 3353{
4077bf7a 3354 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3355 omp_context *ctx = (omp_context *) wi->info;
1e8e9920 3356 tree t = *tp;
3357
75a70cf9 3358 switch (TREE_CODE (t))
3359 {
3360 case VAR_DECL:
3361 case PARM_DECL:
3362 case LABEL_DECL:
3363 case RESULT_DECL:
3364 if (ctx)
56686608 3365 {
3366 tree repl = remap_decl (t, &ctx->cb);
3367 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3368 *tp = repl;
3369 }
75a70cf9 3370 break;
3371
3372 default:
3373 if (ctx && TYPE_P (t))
3374 *tp = remap_type (t, &ctx->cb);
3375 else if (!DECL_P (t))
7cf869dd 3376 {
3377 *walk_subtrees = 1;
3378 if (ctx)
182cf5a9 3379 {
3380 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3381 if (tem != TREE_TYPE (t))
3382 {
3383 if (TREE_CODE (t) == INTEGER_CST)
e3d0f65c 3384 *tp = wide_int_to_tree (tem, wi::to_wide (t));
182cf5a9 3385 else
3386 TREE_TYPE (t) = tem;
3387 }
3388 }
7cf869dd 3389 }
75a70cf9 3390 break;
3391 }
3392
3393 return NULL_TREE;
3394}
3395
f2697631 3396/* Return true if FNDECL is a setjmp or a longjmp. */
3397
3398static bool
3399setjmp_or_longjmp_p (const_tree fndecl)
3400{
a0e9bfbb 3401 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3402 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
f2697631 3403 return true;
3404
3405 tree declname = DECL_NAME (fndecl);
3ab58307 3406 if (!declname
3407 || (DECL_CONTEXT (fndecl) != NULL_TREE
3408 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3409 || !TREE_PUBLIC (fndecl))
f2697631 3410 return false;
3ab58307 3411
f2697631 3412 const char *name = IDENTIFIER_POINTER (declname);
3413 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3414}
3415
3ab58307 3416/* Return true if FNDECL is an omp_* runtime API call. */
3417
3418static bool
3419omp_runtime_api_call (const_tree fndecl)
3420{
3421 tree declname = DECL_NAME (fndecl);
3422 if (!declname
3423 || (DECL_CONTEXT (fndecl) != NULL_TREE
3424 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3425 || !TREE_PUBLIC (fndecl))
3426 return false;
3427
3428 const char *name = IDENTIFIER_POINTER (declname);
3429 if (strncmp (name, "omp_", 4) != 0)
3430 return false;
3431
3432 static const char *omp_runtime_apis[] =
3433 {
3434 /* This array has 3 sections. First omp_* calls that don't
3435 have any suffixes. */
3436 "target_alloc",
3437 "target_associate_ptr",
3438 "target_disassociate_ptr",
3439 "target_free",
3440 "target_is_present",
3441 "target_memcpy",
3442 "target_memcpy_rect",
3443 NULL,
3444 /* Now omp_* calls that are available as omp_* and omp_*_. */
3445 "capture_affinity",
3446 "destroy_lock",
3447 "destroy_nest_lock",
3448 "display_affinity",
3449 "get_active_level",
3450 "get_affinity_format",
3451 "get_cancellation",
3452 "get_default_device",
3453 "get_dynamic",
3454 "get_initial_device",
3455 "get_level",
3456 "get_max_active_levels",
3457 "get_max_task_priority",
3458 "get_max_threads",
3459 "get_nested",
3460 "get_num_devices",
3461 "get_num_places",
3462 "get_num_procs",
3463 "get_num_teams",
3464 "get_num_threads",
3465 "get_partition_num_places",
3466 "get_place_num",
3467 "get_proc_bind",
3468 "get_team_num",
3469 "get_thread_limit",
3470 "get_thread_num",
3471 "get_wtick",
3472 "get_wtime",
3473 "in_final",
3474 "in_parallel",
3475 "init_lock",
3476 "init_nest_lock",
3477 "is_initial_device",
3478 "pause_resource",
3479 "pause_resource_all",
3480 "set_affinity_format",
3481 "set_lock",
3482 "set_nest_lock",
3483 "test_lock",
3484 "test_nest_lock",
3485 "unset_lock",
3486 "unset_nest_lock",
3487 NULL,
3488 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3489 "get_ancestor_thread_num",
3490 "get_partition_place_nums",
3491 "get_place_num_procs",
3492 "get_place_proc_ids",
3493 "get_schedule",
3494 "get_team_size",
3495 "set_default_device",
3496 "set_dynamic",
3497 "set_max_active_levels",
3498 "set_nested",
3499 "set_num_threads",
3500 "set_schedule"
3501 };
3502
3503 int mode = 0;
3504 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3505 {
3506 if (omp_runtime_apis[i] == NULL)
3507 {
3508 mode++;
3509 continue;
3510 }
3511 size_t len = strlen (omp_runtime_apis[i]);
3512 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3513 && (name[4 + len] == '\0'
3514 || (mode > 0
3515 && name[4 + len] == '_'
3516 && (name[4 + len + 1] == '\0'
3517 || (mode > 1
3518 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3519 return true;
3520 }
3521 return false;
3522}
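/* A minimal sketch of how the matcher above behaves, assuming FNDECL is a
   public declaration at translation-unit scope (illustrative only):

     omp_target_alloc	     -> true   (first section, exact name only)
     omp_target_alloc_	     -> false  (no "_" variant in the first section)
     omp_get_num_threads     -> true   (second section)
     omp_get_num_threads_    -> true   (trailing "_" entry point)
     omp_get_team_size_8_    -> true   (third section also lists "_8_")
     omp_get_team_size_4_    -> false  (only the "_8_" suffix is recognized)
     ompx_foo		     -> false  (name must start with "omp_")

   The "_" and "_8_" forms correspond to the Fortran-callable entry points
   provided by libgomp.  */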
75a70cf9 3523
3524/* Helper function for scan_omp.
3525
ca4c3545 3526 Callback for walk_gimple_stmt used to scan for OMP directives in
75a70cf9 3527 the current statement in GSI. */
3528
3529static tree
3530scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3531 struct walk_stmt_info *wi)
3532{
42acab1c 3533 gimple *stmt = gsi_stmt (*gsi);
75a70cf9 3534 omp_context *ctx = (omp_context *) wi->info;
3535
3536 if (gimple_has_location (stmt))
3537 input_location = gimple_location (stmt);
1e8e9920 3538
ca4c3545 3539 /* Check the nesting restrictions. */
bc7bff74 3540 bool remove = false;
3541 if (is_gimple_omp (stmt))
3542 remove = !check_omp_nesting_restrictions (stmt, ctx);
3543 else if (is_gimple_call (stmt))
3544 {
3545 tree fndecl = gimple_call_fndecl (stmt);
f2697631 3546 if (fndecl)
3547 {
3ab58307 3548 if (ctx
f2697631 3549 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0076df39 3550 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
31890b58 3551 && setjmp_or_longjmp_p (fndecl)
3552 && !ctx->loop_p)
f2697631 3553 {
3554 remove = true;
3555 error_at (gimple_location (stmt),
31890b58 3556 "setjmp/longjmp inside %<simd%> construct");
f2697631 3557 }
3558 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3559 switch (DECL_FUNCTION_CODE (fndecl))
3560 {
3561 case BUILT_IN_GOMP_BARRIER:
3562 case BUILT_IN_GOMP_CANCEL:
3563 case BUILT_IN_GOMP_CANCELLATION_POINT:
3564 case BUILT_IN_GOMP_TASKYIELD:
3565 case BUILT_IN_GOMP_TASKWAIT:
3566 case BUILT_IN_GOMP_TASKGROUP_START:
3567 case BUILT_IN_GOMP_TASKGROUP_END:
3568 remove = !check_omp_nesting_restrictions (stmt, ctx);
3569 break;
3570 default:
3571 break;
3572 }
3ab58307 3573 else if (ctx)
3574 {
3575 omp_context *octx = ctx;
3576 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3577 octx = ctx->outer;
3578 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3579 {
3580 remove = true;
3581 error_at (gimple_location (stmt),
3582 "OpenMP runtime API call %qD in a region with "
3583 "%<order(concurrent)%> clause", fndecl);
3584 }
3585 }
f2697631 3586 }
bc7bff74 3587 }
3588 if (remove)
3589 {
3590 stmt = gimple_build_nop ();
3591 gsi_replace (gsi, stmt, false);
fd6481cf 3592 }
c1d127dd 3593
75a70cf9 3594 *handled_ops_p = true;
3595
3596 switch (gimple_code (stmt))
1e8e9920 3597 {
75a70cf9 3598 case GIMPLE_OMP_PARALLEL:
fd6481cf 3599 taskreg_nesting_level++;
75a70cf9 3600 scan_omp_parallel (gsi, ctx);
fd6481cf 3601 taskreg_nesting_level--;
3602 break;
3603
75a70cf9 3604 case GIMPLE_OMP_TASK:
fd6481cf 3605 taskreg_nesting_level++;
75a70cf9 3606 scan_omp_task (gsi, ctx);
fd6481cf 3607 taskreg_nesting_level--;
1e8e9920 3608 break;
3609
75a70cf9 3610 case GIMPLE_OMP_FOR:
3d2b49b2 3611 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3612 == GF_OMP_FOR_KIND_SIMD)
3613 && gimple_omp_for_combined_into_p (stmt)
3614 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3615 {
3616 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3617 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3618 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3619 {
3620 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3621 break;
3622 }
3623 }
7d26f131 3624 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3625 == GF_OMP_FOR_KIND_SIMD)
57f872be 3626 && omp_maybe_offloaded_ctx (ctx)
3627 && omp_max_simt_vf ())
3628 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3629 else
3630 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
1e8e9920 3631 break;
3632
75a70cf9 3633 case GIMPLE_OMP_SECTIONS:
1a91d914 3634 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
1e8e9920 3635 break;
3636
75a70cf9 3637 case GIMPLE_OMP_SINGLE:
1a91d914 3638 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
1e8e9920 3639 break;
3640
da008d72 3641 case GIMPLE_OMP_SCAN:
3642 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
7d26f131 3643 {
3644 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3645 ctx->scan_inclusive = true;
3646 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3647 ctx->scan_exclusive = true;
3648 }
da008d72 3649 /* FALLTHRU */
75a70cf9 3650 case GIMPLE_OMP_SECTION:
3651 case GIMPLE_OMP_MASTER:
3652 case GIMPLE_OMP_ORDERED:
3653 case GIMPLE_OMP_CRITICAL:
56686608 3654 case GIMPLE_OMP_GRID_BODY:
75a70cf9 3655 ctx = new_omp_context (stmt, ctx);
ab129075 3656 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 3657 break;
3658
7e5a76c8 3659 case GIMPLE_OMP_TASKGROUP:
3660 ctx = new_omp_context (stmt, ctx);
3661 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3662 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3663 break;
3664
bc7bff74 3665 case GIMPLE_OMP_TARGET:
1a91d914 3666 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
bc7bff74 3667 break;
3668
3669 case GIMPLE_OMP_TEAMS:
7e5a76c8 3670 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3671 {
3672 taskreg_nesting_level++;
3673 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3674 taskreg_nesting_level--;
3675 }
3676 else
3677 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
bc7bff74 3678 break;
3679
75a70cf9 3680 case GIMPLE_BIND:
1e8e9920 3681 {
3682 tree var;
1e8e9920 3683
75a70cf9 3684 *handled_ops_p = false;
3685 if (ctx)
1a91d914 3686 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3687 var ;
3688 var = DECL_CHAIN (var))
75a70cf9 3689 insert_decl_map (&ctx->cb, var, var);
1e8e9920 3690 }
3691 break;
1e8e9920 3692 default:
75a70cf9 3693 *handled_ops_p = false;
1e8e9920 3694 break;
3695 }
3696
3697 return NULL_TREE;
3698}
3699
3700
75a70cf9 3701/* Scan all the statements starting at the current statement. CTX
ca4c3545 3702 contains context information about the OMP directives and
75a70cf9 3703 clauses found during the scan. */
1e8e9920 3704
3705static void
ab129075 3706scan_omp (gimple_seq *body_p, omp_context *ctx)
1e8e9920 3707{
3708 location_t saved_location;
3709 struct walk_stmt_info wi;
3710
3711 memset (&wi, 0, sizeof (wi));
1e8e9920 3712 wi.info = ctx;
1e8e9920 3713 wi.want_locations = true;
3714
3715 saved_location = input_location;
ab129075 3716 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
1e8e9920 3717 input_location = saved_location;
3718}
3719\f
3720/* Re-gimplification and code generation routines. */
3721
2918f4e9 3722/* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3723 of BIND if in a method. */
3724
3725static void
3726maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3727{
3728 if (DECL_ARGUMENTS (current_function_decl)
3729 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3730 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3731 == POINTER_TYPE))
3732 {
3733 tree vars = gimple_bind_vars (bind);
3734 for (tree *pvar = &vars; *pvar; )
3735 if (omp_member_access_dummy_var (*pvar))
3736 *pvar = DECL_CHAIN (*pvar);
3737 else
3738 pvar = &DECL_CHAIN (*pvar);
3739 gimple_bind_set_vars (bind, vars);
3740 }
3741}
3742
3743/* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3744 block and its subblocks. */
3745
3746static void
3747remove_member_access_dummy_vars (tree block)
3748{
3749 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3750 if (omp_member_access_dummy_var (*pvar))
3751 *pvar = DECL_CHAIN (*pvar);
3752 else
3753 pvar = &DECL_CHAIN (*pvar);
3754
3755 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3756 remove_member_access_dummy_vars (block);
3757}
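/* Both helpers above use the usual pointer-to-pointer idiom for unlinking
   entries from a DECL_CHAIN list in place; a stand-alone sketch of the same
   pattern, with a hypothetical drop_p predicate (illustrative only):

     for (tree *pvar = &list; *pvar; )
       if (drop_p (*pvar))
	 *pvar = DECL_CHAIN (*pvar);	// unlink: skip the current node
       else
	 pvar = &DECL_CHAIN (*pvar);	// keep: advance to the next link

   No previous-node bookkeeping is needed.  */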
3758
1e8e9920 3759/* If a context was created for STMT when it was scanned, return it. */
3760
3761static omp_context *
42acab1c 3762maybe_lookup_ctx (gimple *stmt)
1e8e9920 3763{
3764 splay_tree_node n;
3765 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3766 return n ? (omp_context *) n->value : NULL;
3767}
3768
773c5ba7 3769
3770/* Find the mapping for DECL in CTX or the immediately enclosing
3771 context that has a mapping for DECL.
3772
3773 If CTX is a nested parallel directive, we may have to use the decl
3774 mappings created in CTX's parent context. Suppose that we have the
3775 following parallel nesting (variable UIDs showed for clarity):
3776
3777 iD.1562 = 0;
3778 #omp parallel shared(iD.1562) -> outer parallel
3779 iD.1562 = iD.1562 + 1;
3780
3781 #omp parallel shared (iD.1562) -> inner parallel
3782 iD.1562 = iD.1562 - 1;
3783
3784 Each parallel structure will create a distinct .omp_data_s structure
3785 for copying iD.1562 in/out of the directive:
3786
3787 outer parallel .omp_data_s.1.i -> iD.1562
3788 inner parallel .omp_data_s.2.i -> iD.1562
3789
3790 A shared variable mapping will produce a copy-out operation before
3791 the parallel directive and a copy-in operation after it. So, in
3792 this case we would have:
3793
3794 iD.1562 = 0;
3795 .omp_data_o.1.i = iD.1562;
3796 #omp parallel shared(iD.1562) -> outer parallel
3797 .omp_data_i.1 = &.omp_data_o.1
3798 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3799
3800 .omp_data_o.2.i = iD.1562; -> **
3801 #omp parallel shared(iD.1562) -> inner parallel
3802 .omp_data_i.2 = &.omp_data_o.2
3803 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3804
3805
3806 ** This is a problem. The symbol iD.1562 cannot be referenced
3807 inside the body of the outer parallel region. But since we are
3808 emitting this copy operation while expanding the inner parallel
3809 directive, we need to access the CTX structure of the outer
3810 parallel directive to get the correct mapping:
3811
3812 .omp_data_o.2.i = .omp_data_i.1->i
3813
3814 Since there may be other workshare or parallel directives enclosing
3815 the parallel directive, it may be necessary to walk up the context
3816 parent chain. This is not a problem in general because nested
3817 parallelism happens only rarely. */
3818
3819static tree
3820lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3821{
3822 tree t;
3823 omp_context *up;
3824
773c5ba7 3825 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3826 t = maybe_lookup_decl (decl, up);
3827
87b31375 3828 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
773c5ba7 3829
c37594c7 3830 return t ? t : decl;
773c5ba7 3831}
3832
3833
f49d7bb5 3834/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3835 in outer contexts. */
3836
3837static tree
3838maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3839{
3840 tree t = NULL;
3841 omp_context *up;
3842
87b31375 3843 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3844 t = maybe_lookup_decl (decl, up);
f49d7bb5 3845
3846 return t ? t : decl;
3847}
3848
3849
df67b98c 3850/* Construct the initialization value for reduction operation OP. */
1e8e9920 3851
3852tree
df67b98c 3853omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
1e8e9920 3854{
df67b98c 3855 switch (op)
1e8e9920 3856 {
3857 case PLUS_EXPR:
3858 case MINUS_EXPR:
3859 case BIT_IOR_EXPR:
3860 case BIT_XOR_EXPR:
3861 case TRUTH_OR_EXPR:
3862 case TRUTH_ORIF_EXPR:
3863 case TRUTH_XOR_EXPR:
3864 case NE_EXPR:
385f3f36 3865 return build_zero_cst (type);
1e8e9920 3866
3867 case MULT_EXPR:
3868 case TRUTH_AND_EXPR:
3869 case TRUTH_ANDIF_EXPR:
3870 case EQ_EXPR:
389dd41b 3871 return fold_convert_loc (loc, type, integer_one_node);
1e8e9920 3872
3873 case BIT_AND_EXPR:
389dd41b 3874 return fold_convert_loc (loc, type, integer_minus_one_node);
1e8e9920 3875
3876 case MAX_EXPR:
3877 if (SCALAR_FLOAT_TYPE_P (type))
3878 {
3879 REAL_VALUE_TYPE max, min;
fe994837 3880 if (HONOR_INFINITIES (type))
1e8e9920 3881 {
3882 real_inf (&max);
3883 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3884 }
3885 else
3886 real_maxval (&min, 1, TYPE_MODE (type));
3887 return build_real (type, min);
3888 }
5902cce5 3889 else if (POINTER_TYPE_P (type))
3890 {
3891 wide_int min
3892 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3893 return wide_int_to_tree (type, min);
3894 }
1e8e9920 3895 else
3896 {
3897 gcc_assert (INTEGRAL_TYPE_P (type));
3898 return TYPE_MIN_VALUE (type);
3899 }
3900
3901 case MIN_EXPR:
3902 if (SCALAR_FLOAT_TYPE_P (type))
3903 {
3904 REAL_VALUE_TYPE max;
fe994837 3905 if (HONOR_INFINITIES (type))
1e8e9920 3906 real_inf (&max);
3907 else
3908 real_maxval (&max, 0, TYPE_MODE (type));
3909 return build_real (type, max);
3910 }
5902cce5 3911 else if (POINTER_TYPE_P (type))
3912 {
3913 wide_int max
3914 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3915 return wide_int_to_tree (type, max);
3916 }
1e8e9920 3917 else
3918 {
3919 gcc_assert (INTEGRAL_TYPE_P (type));
3920 return TYPE_MAX_VALUE (type);
3921 }
3922
3923 default:
3924 gcc_unreachable ();
3925 }
3926}
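/* For reference, a sketch of the identity values chosen above for an "int"
   reduction variable (illustrative only):

     reduction(+:x)    ->  0
     reduction(-:x)    ->  0
     reduction(|:x)    ->  0
     reduction(^:x)    ->  0
     reduction(*:x)    ->  1
     reduction(&:x)    ->  -1	     // all bits set
     reduction(max:x)  ->  INT_MIN   // TYPE_MIN_VALUE
     reduction(min:x)  ->  INT_MAX   // TYPE_MAX_VALUE

   For floating-point max/min the infinities are used when the type's mode
   honors them, otherwise the largest finite magnitude.  */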
3927
df67b98c 3928/* Construct the initialization value for reduction CLAUSE. */
3929
3930tree
3931omp_reduction_init (tree clause, tree type)
3932{
3933 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3934 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3935}
3936
bc7bff74 3937/* Return alignment to be assumed for var in CLAUSE, which should be
3938 OMP_CLAUSE_ALIGNED. */
3939
3940static tree
3941omp_clause_aligned_alignment (tree clause)
3942{
3943 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3944 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3945
 3946	   /* Otherwise, return the implementation-defined alignment.  */
3947 unsigned int al = 1;
2b8f5b8a 3948 opt_scalar_mode mode_iter;
3106770a 3949 auto_vector_sizes sizes;
e7419472 3950 targetm.vectorize.autovectorize_vector_sizes (&sizes, true);
3106770a 3951 poly_uint64 vs = 0;
3952 for (unsigned int i = 0; i < sizes.length (); ++i)
3953 vs = ordered_max (vs, sizes[i]);
bc7bff74 3954 static enum mode_class classes[]
3955 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3956 for (int i = 0; i < 4; i += 2)
2b8f5b8a 3957 /* The for loop above dictates that we only walk through scalar classes. */
3958 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
bc7bff74 3959 {
2b8f5b8a 3960 scalar_mode mode = mode_iter.require ();
3961 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
bc7bff74 3962 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3963 continue;
3106770a 3964 while (maybe_ne (vs, 0U)
3965 && known_lt (GET_MODE_SIZE (vmode), vs)
28ebc73c 3966 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3967 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
7c6746c9 3968
bc7bff74 3969 tree type = lang_hooks.types.type_for_mode (mode, 1);
3970 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3971 continue;
52acb7ae 3972 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3973 GET_MODE_SIZE (mode));
3106770a 3974 type = build_vector_type (type, nelts);
bc7bff74 3975 if (TYPE_MODE (type) != vmode)
3976 continue;
3977 if (TYPE_ALIGN_UNIT (type) > al)
3978 al = TYPE_ALIGN_UNIT (type);
3979 }
3980 return build_int_cst (integer_type_node, al);
3981}
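/* Illustrative sketch only, since the result is target dependent: on a
   target whose widest preferred SIMD vectors are 32 bytes,

     #pragma omp simd aligned (p)

   with no explicit alignment is treated as "aligned (p : 32)", whereas an
   explicit "aligned (p : 64)" is returned unchanged by the early exit at
   the top of the function.  */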
3982
8e818b28 3983
3984/* This structure is part of the interface between lower_rec_simd_input_clauses
3985 and lower_rec_input_clauses. */
3986
251317e4 3987class omplow_simd_context {
3988public:
9d805ed8 3989 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
8e818b28 3990 tree idx;
3991 tree lane;
da008d72 3992 tree lastlane;
1b576300 3993 vec<tree, va_heap> simt_eargs;
3994 gimple_seq simt_dlist;
9d805ed8 3995 poly_uint64_pod max_vf;
8e818b28 3996 bool is_simt;
3997};
3998
3d483a94 3999/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4000 privatization. */
4001
4002static bool
8e818b28 4003lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
da008d72 4004 omplow_simd_context *sctx, tree &ivar,
b05c7e43 4005 tree &lvar, tree *rvar = NULL,
4006 tree *rvar2 = NULL)
3d483a94 4007{
9d805ed8 4008 if (known_eq (sctx->max_vf, 0U))
3d483a94 4009 {
8e818b28 4010 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
9d805ed8 4011 if (maybe_gt (sctx->max_vf, 1U))
3d483a94 4012 {
4954efd4 4013 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3d483a94 4014 OMP_CLAUSE_SAFELEN);
9d805ed8 4015 if (c)
4016 {
4017 poly_uint64 safe_len;
4018 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4019 || maybe_lt (safe_len, 1U))
4020 sctx->max_vf = 1;
4021 else
4022 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4023 }
3d483a94 4024 }
9d805ed8 4025 if (maybe_gt (sctx->max_vf, 1U))
3d483a94 4026 {
8e818b28 4027 sctx->idx = create_tmp_var (unsigned_type_node);
4028 sctx->lane = create_tmp_var (unsigned_type_node);
3d483a94 4029 }
4030 }
9d805ed8 4031 if (known_eq (sctx->max_vf, 1U))
3d483a94 4032 return false;
4033
1b576300 4034 if (sctx->is_simt)
4035 {
4036 if (is_gimple_reg (new_var))
4037 {
4038 ivar = lvar = new_var;
4039 return true;
4040 }
4041 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4042 ivar = lvar = create_tmp_var (type);
4043 TREE_ADDRESSABLE (ivar) = 1;
4044 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4045 NULL, DECL_ATTRIBUTES (ivar));
4046 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4047 tree clobber = build_constructor (type, NULL);
4048 TREE_THIS_VOLATILE (clobber) = 1;
4049 gimple *g = gimple_build_assign (ivar, clobber);
4050 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4051 }
4052 else
4053 {
4054 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4055 tree avar = create_tmp_var_raw (atype);
4056 if (TREE_ADDRESSABLE (new_var))
4057 TREE_ADDRESSABLE (avar) = 1;
4058 DECL_ATTRIBUTES (avar)
4059 = tree_cons (get_identifier ("omp simd array"), NULL,
4060 DECL_ATTRIBUTES (avar));
4061 gimple_add_tmp_var (avar);
da008d72 4062 tree iavar = avar;
3d2b49b2 4063 if (rvar && !ctx->for_simd_scan_phase)
da008d72 4064 {
4065 /* For inscan reductions, create another array temporary,
4066 which will hold the reduced value. */
4067 iavar = create_tmp_var_raw (atype);
4068 if (TREE_ADDRESSABLE (new_var))
4069 TREE_ADDRESSABLE (iavar) = 1;
4070 DECL_ATTRIBUTES (iavar)
4071 = tree_cons (get_identifier ("omp simd array"), NULL,
4072 tree_cons (get_identifier ("omp simd inscan"), NULL,
4073 DECL_ATTRIBUTES (iavar)));
4074 gimple_add_tmp_var (iavar);
4075 ctx->cb.decl_map->put (avar, iavar);
4076 if (sctx->lastlane == NULL_TREE)
4077 sctx->lastlane = create_tmp_var (unsigned_type_node);
4078 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4079 sctx->lastlane, NULL_TREE, NULL_TREE);
4080 TREE_THIS_NOTRAP (*rvar) = 1;
b05c7e43 4081
7d26f131 4082 if (ctx->scan_exclusive)
b05c7e43 4083 {
4084 /* And for exclusive scan yet another one, which will
4085 hold the value during the scan phase. */
4086 tree savar = create_tmp_var_raw (atype);
4087 if (TREE_ADDRESSABLE (new_var))
4088 TREE_ADDRESSABLE (savar) = 1;
4089 DECL_ATTRIBUTES (savar)
4090 = tree_cons (get_identifier ("omp simd array"), NULL,
4091 tree_cons (get_identifier ("omp simd inscan "
4092 "exclusive"), NULL,
4093 DECL_ATTRIBUTES (savar)));
4094 gimple_add_tmp_var (savar);
4095 ctx->cb.decl_map->put (iavar, savar);
4096 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4097 sctx->idx, NULL_TREE, NULL_TREE);
4098 TREE_THIS_NOTRAP (*rvar2) = 1;
4099 }
da008d72 4100 }
4101 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
1b576300 4102 NULL_TREE, NULL_TREE);
4103 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4104 NULL_TREE, NULL_TREE);
443a28c5 4105 TREE_THIS_NOTRAP (ivar) = 1;
4106 TREE_THIS_NOTRAP (lvar) = 1;
1b576300 4107 }
bc7bff74 4108 if (DECL_P (new_var))
4109 {
4110 SET_DECL_VALUE_EXPR (new_var, lvar);
4111 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4112 }
3d483a94 4113 return true;
4114}
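/* A rough sketch of the non-SIMT path above (illustrative only): for

     #pragma omp simd private (t)
     for (i = 0; i < n; i++)
       { t = f (i); use (t); }

   the scalar T is backed by an "omp simd array"

     T t_array[max_vf];

   and references to T in the loop body are rewritten to t_array[sctx->idx]
   (IVAR) or t_array[sctx->lane] (LVAR), so the vectorizer can later map the
   array elements onto vector lanes.  */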
4115
2712b6de 4116/* Helper function of lower_rec_input_clauses.  For a reference in a
 4117	   simd reduction, create the underlying variable that it will reference.  */
4118
4119static void
4120handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4121{
4122 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4123 if (TREE_CONSTANT (z))
4124 {
43895be5 4125 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4126 get_name (new_vard));
2712b6de 4127 gimple_add_tmp_var (z);
4128 TREE_ADDRESSABLE (z) = 1;
4129 z = build_fold_addr_expr_loc (loc, z);
4130 gimplify_assign (new_vard, z, ilist);
4131 }
4132}
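/* In other words (sketch only): for a reference-typed NEW_VARD whose
   pointed-to type T has a constant size, the helper above emits roughly

     T z;		// anonymous backing object, made TREE_ADDRESSABLE
     new_vard = &z;

   so that later code can dereference NEW_VARD unconditionally.  */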
4133
7e5a76c8 4134/* Helper function for lower_rec_input_clauses.  Emit code into the ILIST
 4135	   sequence to compute (type) (tskred_temp[idx]), and return a tree holding
	   the result.  */
4136
4137static tree
4138task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4139 unsigned idx)
4140{
4141 unsigned HOST_WIDE_INT sz
4142 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4143 tree r = build2 (MEM_REF, pointer_sized_int_node,
4144 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4145 idx * sz));
4146 tree v = create_tmp_var (pointer_sized_int_node);
4147 gimple *g = gimple_build_assign (v, r);
4148 gimple_seq_add_stmt (ilist, g);
4149 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4150 {
4151 v = create_tmp_var (type);
4152 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4153 gimple_seq_add_stmt (ilist, g);
4154 }
4155 return v;
4156}
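/* The helper above emits, roughly (sketch only, assuming a 64-bit target
   where pointer_sized_int_node is a 64-bit unsigned integer type):

     __UINT64_TYPE__ v = ((__UINT64_TYPE__ *) tskred_temp)[idx];
     type result = (type) v;	// conversion only if TYPE differs

   i.e. it loads the IDX-th pointer-sized slot of the task reduction
   descriptor pointed to by TSKRED_TEMP and converts it to TYPE.  */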
4157
1e8e9920 4158/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4159 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4160 private variables. Initialization statements go in ILIST, while calls
4161 to destructors go in DLIST. */
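/* For instance (sketch only): for "#pragma omp parallel firstprivate (a)"
   the child-side ILIST built below ends up containing roughly

     a = .omp_data_i->a;	// copy-in of the firstprivate value

   (via the copy constructor for C++ class types), while the matching
   destructor call, if any, is appended to DLIST.  */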
4162
4163static void
75a70cf9 4164lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
bc7bff74 4165 omp_context *ctx, struct omp_for_data *fd)
1e8e9920 4166{
7d26f131 4167 tree c, copyin_seq, x, ptr;
1e8e9920 4168 bool copyin_by_ref = false;
f49d7bb5 4169 bool lastprivate_firstprivate = false;
bc7bff74 4170 bool reduction_omp_orig_ref = false;
1e8e9920 4171 int pass;
3d483a94 4172 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0076df39 4173 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
8e818b28 4174 omplow_simd_context sctx = omplow_simd_context ();
1b576300 4175 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4176 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
da008d72 4177 gimple_seq llist[4] = { };
1d86b8dc 4178 tree nonconst_simd_if = NULL_TREE;
1e8e9920 4179
1e8e9920 4180 copyin_seq = NULL;
8e818b28 4181 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
1e8e9920 4182
3d483a94 4183 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4184 with data sharing clauses referencing variable sized vars. That
4185 is unnecessarily hard to support and very unlikely to result in
4186 vectorized code anyway. */
4187 if (is_simd)
4188 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4189 switch (OMP_CLAUSE_CODE (c))
4190 {
9580cb79 4191 case OMP_CLAUSE_LINEAR:
4192 if (OMP_CLAUSE_LINEAR_ARRAY (c))
8e818b28 4193 sctx.max_vf = 1;
9580cb79 4194 /* FALLTHRU */
3d483a94 4195 case OMP_CLAUSE_PRIVATE:
4196 case OMP_CLAUSE_FIRSTPRIVATE:
4197 case OMP_CLAUSE_LASTPRIVATE:
3d483a94 4198 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
8e818b28 4199 sctx.max_vf = 1;
e32d171e 4200 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4201 {
4202 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4203 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4204 sctx.max_vf = 1;
4205 }
3d483a94 4206 break;
43895be5 4207 case OMP_CLAUSE_REDUCTION:
7e5a76c8 4208 case OMP_CLAUSE_IN_REDUCTION:
43895be5 4209 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4210 || is_variable_sized (OMP_CLAUSE_DECL (c)))
8e818b28 4211 sctx.max_vf = 1;
e32d171e 4212 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4213 {
4214 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4215 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4216 sctx.max_vf = 1;
4217 }
43895be5 4218 break;
9144258a 4219 case OMP_CLAUSE_IF:
4220 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4221 sctx.max_vf = 1;
1d86b8dc 4222 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4223 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
9144258a 4224 break;
4225 case OMP_CLAUSE_SIMDLEN:
4226 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4227 sctx.max_vf = 1;
4228 break;
4f4b92d8 4229 case OMP_CLAUSE__CONDTEMP_:
4230 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4231 if (sctx.is_simt)
4232 sctx.max_vf = 1;
4233 break;
3d483a94 4234 default:
4235 continue;
4236 }
4237
1b576300 4238 /* Add a placeholder for simduid. */
9d805ed8 4239 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
1b576300 4240 sctx.simt_eargs.safe_push (NULL_TREE);
4241
7e5a76c8 4242 unsigned task_reduction_cnt = 0;
4243 unsigned task_reduction_cntorig = 0;
4244 unsigned task_reduction_cnt_full = 0;
4245 unsigned task_reduction_cntorig_full = 0;
4246 unsigned task_reduction_other_cnt = 0;
4247 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4248 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
1e8e9920 4249 /* Do all the fixed sized types in the first pass, and the variable sized
4250 types in the second pass. This makes sure that the scalar arguments to
48e1416a 4251 the variable sized types are processed before we use them in the
7e5a76c8 4252 variable sized operations. For task reductions we use 4 passes, in the
4253 first two we ignore them, in the third one gather arguments for
4254 GOMP_task_reduction_remap call and in the last pass actually handle
4255 the task reductions. */
4256 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4257 ? 4 : 2); ++pass)
4258 {
4259 if (pass == 2 && task_reduction_cnt)
4260 {
4261 tskred_atype
4262 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4263 + task_reduction_cntorig);
4264 tskred_avar = create_tmp_var_raw (tskred_atype);
4265 gimple_add_tmp_var (tskred_avar);
4266 TREE_ADDRESSABLE (tskred_avar) = 1;
4267 task_reduction_cnt_full = task_reduction_cnt;
4268 task_reduction_cntorig_full = task_reduction_cntorig;
4269 }
4270 else if (pass == 3 && task_reduction_cnt)
4271 {
4272 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4273 gimple *g
4274 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4275 size_int (task_reduction_cntorig),
4276 build_fold_addr_expr (tskred_avar));
4277 gimple_seq_add_stmt (ilist, g);
4278 }
4279 if (pass == 3 && task_reduction_other_cnt)
4280 {
4281 /* For reduction clauses, build
4282 tskred_base = (void *) tskred_temp[2]
4283 + omp_get_thread_num () * tskred_temp[1]
4284 or if tskred_temp[1] is known to be constant, that constant
4285 directly. This is the start of the private reduction copy block
4286 for the current thread. */
4287 tree v = create_tmp_var (integer_type_node);
4288 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4289 gimple *g = gimple_build_call (x, 0);
4290 gimple_call_set_lhs (g, v);
4291 gimple_seq_add_stmt (ilist, g);
4292 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4293 tskred_temp = OMP_CLAUSE_DECL (c);
4294 if (is_taskreg_ctx (ctx))
4295 tskred_temp = lookup_decl (tskred_temp, ctx);
4296 tree v2 = create_tmp_var (sizetype);
4297 g = gimple_build_assign (v2, NOP_EXPR, v);
4298 gimple_seq_add_stmt (ilist, g);
4299 if (ctx->task_reductions[0])
4300 v = fold_convert (sizetype, ctx->task_reductions[0]);
4301 else
4302 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4303 tree v3 = create_tmp_var (sizetype);
4304 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4305 gimple_seq_add_stmt (ilist, g);
4306 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4307 tskred_base = create_tmp_var (ptr_type_node);
4308 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4309 gimple_seq_add_stmt (ilist, g);
4310 }
4311 task_reduction_cnt = 0;
4312 task_reduction_cntorig = 0;
4313 task_reduction_other_cnt = 0;
1e8e9920 4314 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4315 {
55d6e7cd 4316 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
1e8e9920 4317 tree var, new_var;
4318 bool by_ref;
389dd41b 4319 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7e5a76c8 4320 bool task_reduction_p = false;
4321 bool task_reduction_needs_orig_p = false;
4322 tree cond = NULL_TREE;
1e8e9920 4323
4324 switch (c_kind)
4325 {
4326 case OMP_CLAUSE_PRIVATE:
4327 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4328 continue;
4329 break;
4330 case OMP_CLAUSE_SHARED:
7e5a76c8 4331 /* Ignore shared directives in teams construct inside
4332 of target construct. */
4333 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4334 && !is_host_teams_ctx (ctx))
bc7bff74 4335 continue;
f49d7bb5 4336 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4337 {
43895be5 4338 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4339 || is_global_var (OMP_CLAUSE_DECL (c)));
f49d7bb5 4340 continue;
4341 }
1e8e9920 4342 case OMP_CLAUSE_FIRSTPRIVATE:
1e8e9920 4343 case OMP_CLAUSE_COPYIN:
43895be5 4344 break;
bc7bff74 4345 case OMP_CLAUSE_LINEAR:
43895be5 4346 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4347 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4348 lastprivate_firstprivate = true;
bc7bff74 4349 break;
1e8e9920 4350 case OMP_CLAUSE_REDUCTION:
7e5a76c8 4351 case OMP_CLAUSE_IN_REDUCTION:
4352 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4353 {
4354 task_reduction_p = true;
4355 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4356 {
4357 task_reduction_other_cnt++;
4358 if (pass == 2)
4359 continue;
4360 }
4361 else
4362 task_reduction_cnt++;
4363 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4364 {
4365 var = OMP_CLAUSE_DECL (c);
4366 /* If var is a global variable that isn't privatized
4367 in outer contexts, we don't need to look up the
4368 original address, it is always the address of the
4369 global variable itself. */
4370 if (!DECL_P (var)
4371 || omp_is_reference (var)
4372 || !is_global_var
4373 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4374 {
4375 task_reduction_needs_orig_p = true;
4376 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4377 task_reduction_cntorig++;
4378 }
4379 }
4380 }
4381 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
bc7bff74 4382 reduction_omp_orig_ref = true;
1e8e9920 4383 break;
7e5a76c8 4384 case OMP_CLAUSE__REDUCTEMP_:
4385 if (!is_taskreg_ctx (ctx))
4386 continue;
4387 /* FALLTHRU */
bc7bff74 4388 case OMP_CLAUSE__LOOPTEMP_:
7e5a76c8 4389 /* Handle _looptemp_/_reductemp_ clauses only on
4390 parallel/task. */
bc7bff74 4391 if (fd)
4392 continue;
3d483a94 4393 break;
df2c34fc 4394 case OMP_CLAUSE_LASTPRIVATE:
f49d7bb5 4395 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4396 {
4397 lastprivate_firstprivate = true;
43895be5 4398 if (pass != 0 || is_taskloop_ctx (ctx))
f49d7bb5 4399 continue;
4400 }
cf5f881f 4401	      /* Even without a corresponding firstprivate, if the
 4402		 decl is a Fortran allocatable, it needs an outer var
 4403		 reference.  */
4404 else if (pass == 0
4405 && lang_hooks.decls.omp_private_outer_ref
4406 (OMP_CLAUSE_DECL (c)))
4407 lastprivate_firstprivate = true;
df2c34fc 4408 break;
bc7bff74 4409 case OMP_CLAUSE_ALIGNED:
7e5a76c8 4410 if (pass != 1)
bc7bff74 4411 continue;
4412 var = OMP_CLAUSE_DECL (c);
4413 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4414 && !is_global_var (var))
4415 {
4416 new_var = maybe_lookup_decl (var, ctx);
4417 if (new_var == NULL_TREE)
4418 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4419 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
9cf2d600 4420 tree alarg = omp_clause_aligned_alignment (c);
4421 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4422 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
bc7bff74 4423 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4424 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4425 gimplify_and_add (x, ilist);
4426 }
4427 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4428 && is_global_var (var))
4429 {
4430 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4431 new_var = lookup_decl (var, ctx);
4432 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4433 t = build_fold_addr_expr_loc (clause_loc, t);
4434 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
9cf2d600 4435 tree alarg = omp_clause_aligned_alignment (c);
4436 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4437 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
bc7bff74 4438 t = fold_convert_loc (clause_loc, ptype, t);
f9e245b2 4439 x = create_tmp_var (ptype);
bc7bff74 4440 t = build2 (MODIFY_EXPR, ptype, x, t);
4441 gimplify_and_add (t, ilist);
4442 t = build_simple_mem_ref_loc (clause_loc, x);
4443 SET_DECL_VALUE_EXPR (new_var, t);
4444 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4445 }
4446 continue;
48152aa2 4447 case OMP_CLAUSE__CONDTEMP_:
4f4b92d8 4448 if (is_parallel_ctx (ctx)
4449 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
48152aa2 4450 break;
4451 continue;
1e8e9920 4452 default:
4453 continue;
4454 }
4455
7e5a76c8 4456 if (task_reduction_p != (pass >= 2))
4457 continue;
4458
1e8e9920 4459 new_var = var = OMP_CLAUSE_DECL (c);
7e5a76c8 4460 if ((c_kind == OMP_CLAUSE_REDUCTION
4461 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4462 && TREE_CODE (var) == MEM_REF)
43895be5 4463 {
4464 var = TREE_OPERAND (var, 0);
9561765e 4465 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4466 var = TREE_OPERAND (var, 0);
43895be5 4467 if (TREE_CODE (var) == INDIRECT_REF
4468 || TREE_CODE (var) == ADDR_EXPR)
4469 var = TREE_OPERAND (var, 0);
4470 if (is_variable_sized (var))
4471 {
4472 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4473 var = DECL_VALUE_EXPR (var);
4474 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4475 var = TREE_OPERAND (var, 0);
4476 gcc_assert (DECL_P (var));
4477 }
4478 new_var = var;
4479 }
1e8e9920 4480 if (c_kind != OMP_CLAUSE_COPYIN)
4481 new_var = lookup_decl (var, ctx);
4482
4483 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4484 {
4485 if (pass != 0)
4486 continue;
4487 }
43895be5 4488 /* C/C++ array section reductions. */
7e5a76c8 4489 else if ((c_kind == OMP_CLAUSE_REDUCTION
4490 || c_kind == OMP_CLAUSE_IN_REDUCTION)
43895be5 4491 && var != OMP_CLAUSE_DECL (c))
1e8e9920 4492 {
4493 if (pass == 0)
4494 continue;
4495
9561765e 4496 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
43895be5 4497 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
7e5a76c8 4498
9561765e 4499 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4500 {
4501 tree b = TREE_OPERAND (orig_var, 1);
4502 b = maybe_lookup_decl (b, ctx);
4503 if (b == NULL)
4504 {
4505 b = TREE_OPERAND (orig_var, 1);
4506 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4507 }
4508 if (integer_zerop (bias))
4509 bias = b;
4510 else
4511 {
4512 bias = fold_convert_loc (clause_loc,
4513 TREE_TYPE (b), bias);
4514 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4515 TREE_TYPE (b), b, bias);
4516 }
4517 orig_var = TREE_OPERAND (orig_var, 0);
4518 }
7e5a76c8 4519 if (pass == 2)
4520 {
4521 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4522 if (is_global_var (out)
4523 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4524 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4525 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4526 != POINTER_TYPE)))
4527 x = var;
4528 else
4529 {
4530 bool by_ref = use_pointer_for_field (var, NULL);
4531 x = build_receiver_ref (var, by_ref, ctx);
4532 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4533 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4534 == POINTER_TYPE))
4535 x = build_fold_addr_expr (x);
4536 }
4537 if (TREE_CODE (orig_var) == INDIRECT_REF)
4538 x = build_simple_mem_ref (x);
4539 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4540 {
4541 if (var == TREE_OPERAND (orig_var, 0))
4542 x = build_fold_addr_expr (x);
4543 }
4544 bias = fold_convert (sizetype, bias);
4545 x = fold_convert (ptr_type_node, x);
4546 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4547 TREE_TYPE (x), x, bias);
4548 unsigned cnt = task_reduction_cnt - 1;
4549 if (!task_reduction_needs_orig_p)
4550 cnt += (task_reduction_cntorig_full
4551 - task_reduction_cntorig);
4552 else
4553 cnt = task_reduction_cntorig - 1;
4554 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4555 size_int (cnt), NULL_TREE, NULL_TREE);
4556 gimplify_assign (r, x, ilist);
4557 continue;
4558 }
4559
43895be5 4560 if (TREE_CODE (orig_var) == INDIRECT_REF
4561 || TREE_CODE (orig_var) == ADDR_EXPR)
4562 orig_var = TREE_OPERAND (orig_var, 0);
4563 tree d = OMP_CLAUSE_DECL (c);
4564 tree type = TREE_TYPE (d);
4565 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4566 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4567 const char *name = get_name (orig_var);
7e5a76c8 4568 if (pass == 3)
4569 {
4570 tree xv = create_tmp_var (ptr_type_node);
4571 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4572 {
4573 unsigned cnt = task_reduction_cnt - 1;
4574 if (!task_reduction_needs_orig_p)
4575 cnt += (task_reduction_cntorig_full
4576 - task_reduction_cntorig);
4577 else
4578 cnt = task_reduction_cntorig - 1;
4579 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4580 size_int (cnt), NULL_TREE, NULL_TREE);
4581
4582 gimple *g = gimple_build_assign (xv, x);
4583 gimple_seq_add_stmt (ilist, g);
4584 }
4585 else
4586 {
4587 unsigned int idx = *ctx->task_reduction_map->get (c);
4588 tree off;
4589 if (ctx->task_reductions[1 + idx])
4590 off = fold_convert (sizetype,
4591 ctx->task_reductions[1 + idx]);
4592 else
4593 off = task_reduction_read (ilist, tskred_temp, sizetype,
4594 7 + 3 * idx + 1);
4595 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4596 tskred_base, off);
4597 gimple_seq_add_stmt (ilist, g);
4598 }
4599 x = fold_convert (build_pointer_type (boolean_type_node),
4600 xv);
4601 if (TREE_CONSTANT (v))
4602 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4603 TYPE_SIZE_UNIT (type));
4604 else
4605 {
4606 tree t = maybe_lookup_decl (v, ctx);
4607 if (t)
4608 v = t;
4609 else
4610 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4611 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4612 fb_rvalue);
4613 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4614 TREE_TYPE (v), v,
4615 build_int_cst (TREE_TYPE (v), 1));
4616 t = fold_build2_loc (clause_loc, MULT_EXPR,
4617 TREE_TYPE (v), t,
4618 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4619 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4620 }
4621 cond = create_tmp_var (TREE_TYPE (x));
4622 gimplify_assign (cond, x, ilist);
4623 x = xv;
4624 }
4625 else if (TREE_CONSTANT (v))
fd6481cf 4626 {
43895be5 4627 x = create_tmp_var_raw (type, name);
4628 gimple_add_tmp_var (x);
4629 TREE_ADDRESSABLE (x) = 1;
4630 x = build_fold_addr_expr_loc (clause_loc, x);
4631 }
4632 else
4633 {
4634 tree atmp
4635 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4636 tree t = maybe_lookup_decl (v, ctx);
4637 if (t)
4638 v = t;
4639 else
4640 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4641 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4642 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4643 TREE_TYPE (v), v,
4644 build_int_cst (TREE_TYPE (v), 1));
4645 t = fold_build2_loc (clause_loc, MULT_EXPR,
4646 TREE_TYPE (v), t,
4647 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4648 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4649 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4650 }
4651
4652 tree ptype = build_pointer_type (TREE_TYPE (type));
4653 x = fold_convert_loc (clause_loc, ptype, x);
4654 tree y = create_tmp_var (ptype, name);
4655 gimplify_assign (y, x, ilist);
4656 x = y;
9561765e 4657 tree yb = y;
4658
4659 if (!integer_zerop (bias))
4660 {
219e09fc 4661 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4662 bias);
4663 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4664 x);
4665 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4666 pointer_sized_int_node, yb, bias);
4667 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
9561765e 4668 yb = create_tmp_var (ptype, name);
4669 gimplify_assign (yb, x, ilist);
4670 x = yb;
4671 }
4672
4673 d = TREE_OPERAND (d, 0);
4674 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4675 d = TREE_OPERAND (d, 0);
4676 if (TREE_CODE (d) == ADDR_EXPR)
43895be5 4677 {
4678 if (orig_var != var)
4679 {
4680 gcc_assert (is_variable_sized (orig_var));
4681 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4682 x);
4683 gimplify_assign (new_var, x, ilist);
4684 tree new_orig_var = lookup_decl (orig_var, ctx);
4685 tree t = build_fold_indirect_ref (new_var);
4686 DECL_IGNORED_P (new_var) = 0;
7e5a76c8 4687 TREE_THIS_NOTRAP (t) = 1;
43895be5 4688 SET_DECL_VALUE_EXPR (new_orig_var, t);
4689 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4690 }
4691 else
4692 {
4693 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4694 build_int_cst (ptype, 0));
4695 SET_DECL_VALUE_EXPR (new_var, x);
4696 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4697 }
4698 }
4699 else
4700 {
4701 gcc_assert (orig_var == var);
9561765e 4702 if (TREE_CODE (d) == INDIRECT_REF)
43895be5 4703 {
4704 x = create_tmp_var (ptype, name);
4705 TREE_ADDRESSABLE (x) = 1;
9561765e 4706 gimplify_assign (x, yb, ilist);
43895be5 4707 x = build_fold_addr_expr_loc (clause_loc, x);
4708 }
4709 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4710 gimplify_assign (new_var, x, ilist);
4711 }
7e5a76c8 4712 /* GOMP_taskgroup_reduction_register memsets the whole
4713 array to zero. If the initializer is zero, we don't
4714 need to initialize it again, just mark it as ever
4715 used unconditionally, i.e. cond = true. */
4716 if (cond
4717 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4718 && initializer_zerop (omp_reduction_init (c,
4719 TREE_TYPE (type))))
4720 {
4721 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4722 boolean_true_node);
4723 gimple_seq_add_stmt (ilist, g);
4724 continue;
4725 }
4726 tree end = create_artificial_label (UNKNOWN_LOCATION);
4727 if (cond)
4728 {
4729 gimple *g;
4730 if (!is_parallel_ctx (ctx))
4731 {
4732 tree condv = create_tmp_var (boolean_type_node);
4733 g = gimple_build_assign (condv,
4734 build_simple_mem_ref (cond));
4735 gimple_seq_add_stmt (ilist, g);
4736 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4737 g = gimple_build_cond (NE_EXPR, condv,
4738 boolean_false_node, end, lab1);
4739 gimple_seq_add_stmt (ilist, g);
4740 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4741 }
4742 g = gimple_build_assign (build_simple_mem_ref (cond),
4743 boolean_true_node);
4744 gimple_seq_add_stmt (ilist, g);
4745 }
4746
4747 tree y1 = create_tmp_var (ptype);
43895be5 4748 gimplify_assign (y1, y, ilist);
4749 tree i2 = NULL_TREE, y2 = NULL_TREE;
4750 tree body2 = NULL_TREE, end2 = NULL_TREE;
4751 tree y3 = NULL_TREE, y4 = NULL_TREE;
7e5a76c8 4752 if (task_reduction_needs_orig_p)
43895be5 4753 {
7e5a76c8 4754 y3 = create_tmp_var (ptype);
4755 tree ref;
4756 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4757 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4758 size_int (task_reduction_cnt_full
4759 + task_reduction_cntorig - 1),
4760 NULL_TREE, NULL_TREE);
4761 else
43895be5 4762 {
7e5a76c8 4763 unsigned int idx = *ctx->task_reduction_map->get (c);
4764 ref = task_reduction_read (ilist, tskred_temp, ptype,
4765 7 + 3 * idx);
43895be5 4766 }
7e5a76c8 4767 gimplify_assign (y3, ref, ilist);
4768 }
4769 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4770 {
4771 if (pass != 3)
43895be5 4772 {
7e5a76c8 4773 y2 = create_tmp_var (ptype);
4774 gimplify_assign (y2, y, ilist);
4775 }
4776 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4777 {
4778 tree ref = build_outer_var_ref (var, ctx);
 4779			      /* For references, build_outer_var_ref already performs this.  */
4780 if (TREE_CODE (d) == INDIRECT_REF)
4781 gcc_assert (omp_is_reference (var));
4782 else if (TREE_CODE (d) == ADDR_EXPR)
4783 ref = build_fold_addr_expr (ref);
4784 else if (omp_is_reference (var))
4785 ref = build_fold_addr_expr (ref);
4786 ref = fold_convert_loc (clause_loc, ptype, ref);
4787 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4788 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4789 {
4790 y3 = create_tmp_var (ptype);
4791 gimplify_assign (y3, unshare_expr (ref), ilist);
4792 }
4793 if (is_simd)
4794 {
4795 y4 = create_tmp_var (ptype);
4796 gimplify_assign (y4, ref, dlist);
4797 }
43895be5 4798 }
4799 }
7e5a76c8 4800 tree i = create_tmp_var (TREE_TYPE (v));
43895be5 4801 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4802 tree body = create_artificial_label (UNKNOWN_LOCATION);
43895be5 4803 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4804 if (y2)
4805 {
7e5a76c8 4806 i2 = create_tmp_var (TREE_TYPE (v));
43895be5 4807 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4808 body2 = create_artificial_label (UNKNOWN_LOCATION);
4809 end2 = create_artificial_label (UNKNOWN_LOCATION);
4810 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4811 }
4812 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4813 {
4814 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4815 tree decl_placeholder
4816 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4817 SET_DECL_VALUE_EXPR (decl_placeholder,
4818 build_simple_mem_ref (y1));
4819 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4820 SET_DECL_VALUE_EXPR (placeholder,
4821 y3 ? build_simple_mem_ref (y3)
4822 : error_mark_node);
4823 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4824 x = lang_hooks.decls.omp_clause_default_ctor
4825 (c, build_simple_mem_ref (y1),
4826 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4827 if (x)
4828 gimplify_and_add (x, ilist);
4829 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4830 {
4831 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4832 lower_omp (&tseq, ctx);
4833 gimple_seq_add_seq (ilist, tseq);
4834 }
4835 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4836 if (is_simd)
4837 {
4838 SET_DECL_VALUE_EXPR (decl_placeholder,
4839 build_simple_mem_ref (y2));
4840 SET_DECL_VALUE_EXPR (placeholder,
4841 build_simple_mem_ref (y4));
4842 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4843 lower_omp (&tseq, ctx);
4844 gimple_seq_add_seq (dlist, tseq);
4845 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4846 }
4847 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4848 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
7e5a76c8 4849 if (y2)
43895be5 4850 {
7e5a76c8 4851 x = lang_hooks.decls.omp_clause_dtor
4852 (c, build_simple_mem_ref (y2));
4853 if (x)
7d26f131 4854 gimplify_and_add (x, dlist);
43895be5 4855 }
4856 }
4857 else
4858 {
4859 x = omp_reduction_init (c, TREE_TYPE (type));
4860 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4861
4862 /* reduction(-:var) sums up the partial results, so it
4863 acts identically to reduction(+:var). */
4864 if (code == MINUS_EXPR)
4865 code = PLUS_EXPR;
4866
4867 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4868 if (is_simd)
4869 {
4870 x = build2 (code, TREE_TYPE (type),
4871 build_simple_mem_ref (y4),
4872 build_simple_mem_ref (y2));
4873 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4874 }
4875 }
4876 gimple *g
4877 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4878 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4879 gimple_seq_add_stmt (ilist, g);
4880 if (y3)
4881 {
4882 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4883 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4884 gimple_seq_add_stmt (ilist, g);
4885 }
4886 g = gimple_build_assign (i, PLUS_EXPR, i,
4887 build_int_cst (TREE_TYPE (i), 1));
4888 gimple_seq_add_stmt (ilist, g);
4889 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4890 gimple_seq_add_stmt (ilist, g);
4891 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4892 if (y2)
4893 {
4894 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4895 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4896 gimple_seq_add_stmt (dlist, g);
4897 if (y4)
4898 {
4899 g = gimple_build_assign
4900 (y4, POINTER_PLUS_EXPR, y4,
4901 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4902 gimple_seq_add_stmt (dlist, g);
4903 }
4904 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4905 build_int_cst (TREE_TYPE (i2), 1));
4906 gimple_seq_add_stmt (dlist, g);
4907 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4908 gimple_seq_add_stmt (dlist, g);
4909 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4910 }
4911 continue;
4912 }
7e5a76c8 4913 else if (pass == 2)
4914 {
4915 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4916 x = var;
4917 else
4918 {
4919 bool by_ref = use_pointer_for_field (var, ctx);
4920 x = build_receiver_ref (var, by_ref, ctx);
4921 }
4922 if (!omp_is_reference (var))
4923 x = build_fold_addr_expr (x);
4924 x = fold_convert (ptr_type_node, x);
4925 unsigned cnt = task_reduction_cnt - 1;
4926 if (!task_reduction_needs_orig_p)
4927 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4928 else
4929 cnt = task_reduction_cntorig - 1;
4930 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4931 size_int (cnt), NULL_TREE, NULL_TREE);
4932 gimplify_assign (r, x, ilist);
4933 continue;
4934 }
4935 else if (pass == 3)
4936 {
4937 tree type = TREE_TYPE (new_var);
4938 if (!omp_is_reference (var))
4939 type = build_pointer_type (type);
4940 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4941 {
4942 unsigned cnt = task_reduction_cnt - 1;
4943 if (!task_reduction_needs_orig_p)
4944 cnt += (task_reduction_cntorig_full
4945 - task_reduction_cntorig);
4946 else
4947 cnt = task_reduction_cntorig - 1;
4948 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4949 size_int (cnt), NULL_TREE, NULL_TREE);
4950 }
4951 else
4952 {
4953 unsigned int idx = *ctx->task_reduction_map->get (c);
4954 tree off;
4955 if (ctx->task_reductions[1 + idx])
4956 off = fold_convert (sizetype,
4957 ctx->task_reductions[1 + idx]);
4958 else
4959 off = task_reduction_read (ilist, tskred_temp, sizetype,
4960 7 + 3 * idx + 1);
4961 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4962 tskred_base, off);
4963 }
4964 x = fold_convert (type, x);
4965 tree t;
4966 if (omp_is_reference (var))
4967 {
4968 gimplify_assign (new_var, x, ilist);
4969 t = new_var;
4970 new_var = build_simple_mem_ref (new_var);
4971 }
4972 else
4973 {
4974 t = create_tmp_var (type);
4975 gimplify_assign (t, x, ilist);
4976 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4977 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4978 }
4979 t = fold_convert (build_pointer_type (boolean_type_node), t);
4980 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4981 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4982 cond = create_tmp_var (TREE_TYPE (t));
4983 gimplify_assign (cond, t, ilist);
4984 }
43895be5 4985 else if (is_variable_sized (var))
4986 {
4987 /* For variable sized types, we need to allocate the
4988 actual storage here. Call alloca and store the
4989 result in the pointer decl that we created elsewhere. */
4990 if (pass == 0)
4991 continue;
4992
4993 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4994 {
4995 gcall *stmt;
4996 tree tmp, atmp;
4997
4998 ptr = DECL_VALUE_EXPR (new_var);
4999 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5000 ptr = TREE_OPERAND (ptr, 0);
fd6481cf 5001 gcc_assert (DECL_P (ptr));
5002 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
75a70cf9 5003
5004 /* void *tmp = __builtin_alloca */
43895be5 5005 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5006 stmt = gimple_build_call (atmp, 2, x,
5007 size_int (DECL_ALIGN (var)));
f9e245b2 5008 tmp = create_tmp_var_raw (ptr_type_node);
75a70cf9 5009 gimple_add_tmp_var (tmp);
5010 gimple_call_set_lhs (stmt, tmp);
5011
5012 gimple_seq_add_stmt (ilist, stmt);
5013
389dd41b 5014 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
75a70cf9 5015 gimplify_assign (ptr, x, ilist);
fd6481cf 5016 }
1e8e9920 5017 }
7e5a76c8 5018 else if (omp_is_reference (var)
5019 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5020 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
1e8e9920 5021 {
773c5ba7 5022 /* For references that are being privatized for Fortran,
5023 allocate new backing storage for the new pointer
5024 variable. This allows us to avoid changing all the
5025 code that expects a pointer to something that expects
bc7bff74 5026 a direct variable. */
1e8e9920 5027 if (pass == 0)
5028 continue;
5029
5030 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
fd6481cf 5031 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5032 {
5033 x = build_receiver_ref (var, false, ctx);
389dd41b 5034 x = build_fold_addr_expr_loc (clause_loc, x);
fd6481cf 5035 }
5036 else if (TREE_CONSTANT (x))
1e8e9920 5037 {
2712b6de 5038 /* For a reduction in a SIMD loop, defer adding the
 5039 initialization of the reference, because if we decide
 5040 to use a SIMD array for it, the initialization could cause
e32d171e 5041 an ICE during expansion. Ditto for other privatization clauses. */
5042 if (is_simd)
09d1c205 5043 x = NULL_TREE;
5044 else
5045 {
09d1c205 5046 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
43895be5 5047 get_name (var));
09d1c205 5048 gimple_add_tmp_var (x);
5049 TREE_ADDRESSABLE (x) = 1;
5050 x = build_fold_addr_expr_loc (clause_loc, x);
5051 }
1e8e9920 5052 }
5053 else
5054 {
43895be5 5055 tree atmp
5056 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5057 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5058 tree al = size_int (TYPE_ALIGN (rtype));
5059 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
1e8e9920 5060 }
5061
09d1c205 5062 if (x)
5063 {
5064 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5065 gimplify_assign (new_var, x, ilist);
5066 }
1e8e9920 5067
182cf5a9 5068 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
1e8e9920 5069 }
7e5a76c8 5070 else if ((c_kind == OMP_CLAUSE_REDUCTION
5071 || c_kind == OMP_CLAUSE_IN_REDUCTION)
1e8e9920 5072 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5073 {
5074 if (pass == 0)
5075 continue;
5076 }
5077 else if (pass != 0)
5078 continue;
5079
55d6e7cd 5080 switch (OMP_CLAUSE_CODE (c))
1e8e9920 5081 {
5082 case OMP_CLAUSE_SHARED:
7e5a76c8 5083 /* Ignore shared directives in teams construct inside
5084 target construct. */
5085 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5086 && !is_host_teams_ctx (ctx))
bc7bff74 5087 continue;
f49d7bb5 5088 /* Shared global vars are just accessed directly. */
5089 if (is_global_var (new_var))
5090 break;
43895be5 5091 /* For taskloop firstprivate/lastprivate, represented
5092 as firstprivate and shared clause on the task, new_var
5093 is the firstprivate var. */
5094 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5095 break;
1e8e9920 5096 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5097 needs to be delayed until after fixup_child_record_type so
5098 that we get the correct type during the dereference. */
e8a588af 5099 by_ref = use_pointer_for_field (var, ctx);
1e8e9920 5100 x = build_receiver_ref (var, by_ref, ctx);
5101 SET_DECL_VALUE_EXPR (new_var, x);
5102 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5103
5104 /* ??? If VAR is not passed by reference, and the variable
5105 hasn't been initialized yet, then we'll get a warning for
5106 the store into the omp_data_s structure. Ideally, we'd be
48e1416a 5107 able to notice this and not store anything at all, but
1e8e9920 5108 we're generating code too early. Suppress the warning. */
5109 if (!by_ref)
5110 TREE_NO_WARNING (var) = 1;
5111 break;
5112
48152aa2 5113 case OMP_CLAUSE__CONDTEMP_:
5114 if (is_parallel_ctx (ctx))
5115 {
5116 x = build_receiver_ref (var, false, ctx);
5117 SET_DECL_VALUE_EXPR (new_var, x);
5118 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5119 }
4f4b92d8 5120 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5121 {
5122 x = build_zero_cst (TREE_TYPE (var));
5123 goto do_private;
5124 }
48152aa2 5125 break;
5126
1e8e9920 5127 case OMP_CLAUSE_LASTPRIVATE:
5128 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5129 break;
5130 /* FALLTHRU */
5131
5132 case OMP_CLAUSE_PRIVATE:
fd6481cf 5133 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5134 x = build_outer_var_ref (var, ctx);
5135 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5136 {
5137 if (is_task_ctx (ctx))
5138 x = build_receiver_ref (var, false, ctx);
5139 else
1f355935 5140 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
fd6481cf 5141 }
5142 else
5143 x = NULL;
3d483a94 5144 do_private:
bc7bff74 5145 tree nx;
d266deeb 5146 bool copy_ctor;
5147 copy_ctor = false;
5148 nx = unshare_expr (new_var);
5149 if (is_simd
5150 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5151 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5152 copy_ctor = true;
5153 if (copy_ctor)
5154 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5155 else
5156 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
3d483a94 5157 if (is_simd)
5158 {
5159 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
bc7bff74 5160 if ((TREE_ADDRESSABLE (new_var) || nx || y
36845cc1 5161 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5162 && (gimple_omp_for_collapse (ctx->stmt) != 1
5163 || (gimple_omp_for_index (ctx->stmt, 0)
5164 != new_var)))
e32d171e 5165 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5166 || omp_is_reference (var))
8e818b28 5167 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5168 ivar, lvar))
3d483a94 5169 {
e32d171e 5170 if (omp_is_reference (var))
5171 {
5172 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5173 tree new_vard = TREE_OPERAND (new_var, 0);
5174 gcc_assert (DECL_P (new_vard));
5175 SET_DECL_VALUE_EXPR (new_vard,
5176 build_fold_addr_expr (lvar));
5177 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5178 }
5179
bc7bff74 5180 if (nx)
d266deeb 5181 {
5182 tree iv = unshare_expr (ivar);
5183 if (copy_ctor)
5184 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5185 x);
5186 else
5187 x = lang_hooks.decls.omp_clause_default_ctor (c,
5188 iv,
5189 x);
5190 }
4f4b92d8 5191 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5192 {
5193 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5194 unshare_expr (ivar), x);
5195 nx = x;
5196 }
bc7bff74 5197 if (nx && x)
3d483a94 5198 gimplify_and_add (x, &llist[0]);
4f4b92d8 5199 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5200 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5201 {
8259fae1 5202 tree v = new_var;
5203 if (!DECL_P (v))
5204 {
5205 gcc_assert (TREE_CODE (v) == MEM_REF);
5206 v = TREE_OPERAND (v, 0);
5207 gcc_assert (DECL_P (v));
5208 }
5209 v = *ctx->lastprivate_conditional_map->get (v);
4f4b92d8 5210 tree t = create_tmp_var (TREE_TYPE (v));
5211 tree z = build_zero_cst (TREE_TYPE (v));
5212 tree orig_v
5213 = build_outer_var_ref (var, ctx,
5214 OMP_CLAUSE_LASTPRIVATE);
5215 gimple_seq_add_stmt (dlist,
5216 gimple_build_assign (t, z));
5217 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5218 tree civar = DECL_VALUE_EXPR (v);
5219 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5220 civar = unshare_expr (civar);
5221 TREE_OPERAND (civar, 1) = sctx.idx;
5222 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5223 unshare_expr (civar));
5224 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5225 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5226 orig_v, unshare_expr (ivar)));
5227 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5228 civar);
5229 x = build3 (COND_EXPR, void_type_node, cond, x,
5230 void_node);
5231 gimple_seq tseq = NULL;
5232 gimplify_and_add (x, &tseq);
384aea12 5233 if (ctx->outer)
5234 lower_omp (&tseq, ctx->outer);
4f4b92d8 5235 gimple_seq_add_seq (&llist[1], tseq);
5236 }
8d54bfe2 5237 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5238 && ctx->for_simd_scan_phase)
5239 {
5240 x = unshare_expr (ivar);
5241 tree orig_v
5242 = build_outer_var_ref (var, ctx,
5243 OMP_CLAUSE_LASTPRIVATE);
5244 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5245 orig_v);
5246 gimplify_and_add (x, &llist[0]);
5247 }
3d483a94 5248 if (y)
5249 {
5250 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5251 if (y)
7d26f131 5252 gimplify_and_add (y, &llist[1]);
3d483a94 5253 }
5254 break;
5255 }
e32d171e 5256 if (omp_is_reference (var))
5257 {
5258 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5259 tree new_vard = TREE_OPERAND (new_var, 0);
5260 gcc_assert (DECL_P (new_vard));
5261 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5262 x = TYPE_SIZE_UNIT (type);
5263 if (TREE_CONSTANT (x))
5264 {
5265 x = create_tmp_var_raw (type, get_name (var));
5266 gimple_add_tmp_var (x);
5267 TREE_ADDRESSABLE (x) = 1;
5268 x = build_fold_addr_expr_loc (clause_loc, x);
5269 x = fold_convert_loc (clause_loc,
5270 TREE_TYPE (new_vard), x);
5271 gimplify_assign (new_vard, x, ilist);
5272 }
5273 }
3d483a94 5274 }
bc7bff74 5275 if (nx)
5276 gimplify_and_add (nx, ilist);
8d54bfe2 5277 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5278 && is_simd
5279 && ctx->for_simd_scan_phase)
5280 {
5281 tree orig_v = build_outer_var_ref (var, ctx,
5282 OMP_CLAUSE_LASTPRIVATE);
5283 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5284 orig_v);
5285 gimplify_and_add (x, ilist);
5286 }
1e8e9920 5287 /* FALLTHRU */
5288
5289 do_dtor:
5290 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5291 if (x)
7d26f131 5292 gimplify_and_add (x, dlist);
1e8e9920 5293 break;
5294
3d483a94 5295 case OMP_CLAUSE_LINEAR:
5296 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5297 goto do_firstprivate;
5298 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5299 x = NULL;
5300 else
5301 x = build_outer_var_ref (var, ctx);
5302 goto do_private;
5303
1e8e9920 5304 case OMP_CLAUSE_FIRSTPRIVATE:
fd6481cf 5305 if (is_task_ctx (ctx))
5306 {
7e5a76c8 5307 if ((omp_is_reference (var)
5308 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5309 || is_variable_sized (var))
fd6481cf 5310 goto do_dtor;
5311 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5312 ctx))
5313 || use_pointer_for_field (var, NULL))
5314 {
5315 x = build_receiver_ref (var, false, ctx);
5316 SET_DECL_VALUE_EXPR (new_var, x);
5317 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5318 goto do_dtor;
5319 }
5320 }
7e5a76c8 5321 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5322 && omp_is_reference (var))
5323 {
5324 x = build_outer_var_ref (var, ctx);
5325 gcc_assert (TREE_CODE (x) == MEM_REF
5326 && integer_zerop (TREE_OPERAND (x, 1)));
5327 x = TREE_OPERAND (x, 0);
5328 x = lang_hooks.decls.omp_clause_copy_ctor
5329 (c, unshare_expr (new_var), x);
5330 gimplify_and_add (x, ilist);
5331 goto do_dtor;
5332 }
3d483a94 5333 do_firstprivate:
1e8e9920 5334 x = build_outer_var_ref (var, ctx);
3d483a94 5335 if (is_simd)
5336 {
bc7bff74 5337 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5338 && gimple_omp_for_combined_into_p (ctx->stmt))
5339 {
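		  /* The loop has been combined into an outer construct, so
		     advance the incoming value by the iterations preceding
		     this chunk: compute l = (_looptemp_ - n1) / step and add
		     l * linear-step to the outer value.  */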
9580cb79 5340 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5341 tree stept = TREE_TYPE (t);
4954efd4 5342 tree ct = omp_find_clause (clauses,
9580cb79 5343 OMP_CLAUSE__LOOPTEMP_);
5344 gcc_assert (ct);
5345 tree l = OMP_CLAUSE_DECL (ct);
e471cc6f 5346 tree n1 = fd->loop.n1;
5347 tree step = fd->loop.step;
5348 tree itype = TREE_TYPE (l);
5349 if (POINTER_TYPE_P (itype))
5350 itype = signed_type_for (itype);
5351 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5352 if (TYPE_UNSIGNED (itype)
5353 && fd->loop.cond_code == GT_EXPR)
5354 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5355 fold_build1 (NEGATE_EXPR, itype, l),
5356 fold_build1 (NEGATE_EXPR,
5357 itype, step));
5358 else
5359 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
bc7bff74 5360 t = fold_build2 (MULT_EXPR, stept,
5361 fold_convert (stept, l), t);
9580cb79 5362
5363 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5364 {
e32d171e 5365 if (omp_is_reference (var))
5366 {
5367 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5368 tree new_vard = TREE_OPERAND (new_var, 0);
5369 gcc_assert (DECL_P (new_vard));
5370 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5371 nx = TYPE_SIZE_UNIT (type);
5372 if (TREE_CONSTANT (nx))
5373 {
5374 nx = create_tmp_var_raw (type,
5375 get_name (var));
5376 gimple_add_tmp_var (nx);
5377 TREE_ADDRESSABLE (nx) = 1;
5378 nx = build_fold_addr_expr_loc (clause_loc,
5379 nx);
5380 nx = fold_convert_loc (clause_loc,
5381 TREE_TYPE (new_vard),
5382 nx);
5383 gimplify_assign (new_vard, nx, ilist);
5384 }
5385 }
5386
9580cb79 5387 x = lang_hooks.decls.omp_clause_linear_ctor
5388 (c, new_var, x, t);
5389 gimplify_and_add (x, ilist);
5390 goto do_dtor;
5391 }
5392
bc7bff74 5393 if (POINTER_TYPE_P (TREE_TYPE (x)))
5394 x = fold_build2 (POINTER_PLUS_EXPR,
5395 TREE_TYPE (x), x, t);
5396 else
5397 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5398 }
5399
3d483a94 5400 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
e32d171e 5401 || TREE_ADDRESSABLE (new_var)
5402 || omp_is_reference (var))
8e818b28 5403 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5404 ivar, lvar))
3d483a94 5405 {
e32d171e 5406 if (omp_is_reference (var))
5407 {
5408 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5409 tree new_vard = TREE_OPERAND (new_var, 0);
5410 gcc_assert (DECL_P (new_vard));
5411 SET_DECL_VALUE_EXPR (new_vard,
5412 build_fold_addr_expr (lvar));
5413 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5414 }
3d483a94 5415 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5416 {
f9e245b2 5417 tree iv = create_tmp_var (TREE_TYPE (new_var));
3d483a94 5418 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5419 gimplify_and_add (x, ilist);
5420 gimple_stmt_iterator gsi
5421 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
1a91d914 5422 gassign *g
3d483a94 5423 = gimple_build_assign (unshare_expr (lvar), iv);
5424 gsi_insert_before_without_update (&gsi, g,
5425 GSI_SAME_STMT);
9580cb79 5426 tree t = OMP_CLAUSE_LINEAR_STEP (c);
3d483a94 5427 enum tree_code code = PLUS_EXPR;
5428 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5429 code = POINTER_PLUS_EXPR;
e9cf809e 5430 g = gimple_build_assign (iv, code, iv, t);
3d483a94 5431 gsi_insert_before_without_update (&gsi, g,
5432 GSI_SAME_STMT);
5433 break;
5434 }
5435 x = lang_hooks.decls.omp_clause_copy_ctor
5436 (c, unshare_expr (ivar), x);
5437 gimplify_and_add (x, &llist[0]);
5438 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5439 if (x)
7d26f131 5440 gimplify_and_add (x, &llist[1]);
3d483a94 5441 break;
5442 }
e32d171e 5443 if (omp_is_reference (var))
5444 {
5445 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5446 tree new_vard = TREE_OPERAND (new_var, 0);
5447 gcc_assert (DECL_P (new_vard));
5448 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5449 nx = TYPE_SIZE_UNIT (type);
5450 if (TREE_CONSTANT (nx))
5451 {
5452 nx = create_tmp_var_raw (type, get_name (var));
5453 gimple_add_tmp_var (nx);
5454 TREE_ADDRESSABLE (nx) = 1;
5455 nx = build_fold_addr_expr_loc (clause_loc, nx);
5456 nx = fold_convert_loc (clause_loc,
5457 TREE_TYPE (new_vard), nx);
5458 gimplify_assign (new_vard, nx, ilist);
5459 }
5460 }
3d483a94 5461 }
43895be5 5462 x = lang_hooks.decls.omp_clause_copy_ctor
5463 (c, unshare_expr (new_var), x);
1e8e9920 5464 gimplify_and_add (x, ilist);
5465 goto do_dtor;
1e8e9920 5466
bc7bff74 5467 case OMP_CLAUSE__LOOPTEMP_:
7e5a76c8 5468 case OMP_CLAUSE__REDUCTEMP_:
43895be5 5469 gcc_assert (is_taskreg_ctx (ctx));
bc7bff74 5470 x = build_outer_var_ref (var, ctx);
5471 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5472 gimplify_and_add (x, ilist);
5473 break;
5474
1e8e9920 5475 case OMP_CLAUSE_COPYIN:
e8a588af 5476 by_ref = use_pointer_for_field (var, NULL);
1e8e9920 5477 x = build_receiver_ref (var, by_ref, ctx);
5478 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5479 append_to_statement_list (x, &copyin_seq);
5480 copyin_by_ref |= by_ref;
5481 break;
5482
5483 case OMP_CLAUSE_REDUCTION:
7e5a76c8 5484 case OMP_CLAUSE_IN_REDUCTION:
641a0fa1 5485 /* OpenACC reductions are initialized using the
5486 GOACC_REDUCTION internal function. */
5487 if (is_gimple_omp_oacc (ctx->stmt))
5488 break;
1e8e9920 5489 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5490 {
fd6481cf 5491 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
42acab1c 5492 gimple *tseq;
7e5a76c8 5493 tree ptype = TREE_TYPE (placeholder);
5494 if (cond)
5495 {
5496 x = error_mark_node;
5497 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5498 && !task_reduction_needs_orig_p)
5499 x = var;
5500 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5501 {
5502 tree pptype = build_pointer_type (ptype);
5503 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5504 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5505 size_int (task_reduction_cnt_full
5506 + task_reduction_cntorig - 1),
5507 NULL_TREE, NULL_TREE);
5508 else
5509 {
5510 unsigned int idx
5511 = *ctx->task_reduction_map->get (c);
5512 x = task_reduction_read (ilist, tskred_temp,
5513 pptype, 7 + 3 * idx);
5514 }
5515 x = fold_convert (pptype, x);
5516 x = build_simple_mem_ref (x);
5517 }
5518 }
5519 else
5520 {
5521 x = build_outer_var_ref (var, ctx);
fd6481cf 5522
7e5a76c8 5523 if (omp_is_reference (var)
5524 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5525 x = build_fold_addr_expr_loc (clause_loc, x);
5526 }
fd6481cf 5527 SET_DECL_VALUE_EXPR (placeholder, x);
5528 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
bc7bff74 5529 tree new_vard = new_var;
4954efd4 5530 if (omp_is_reference (var))
bc7bff74 5531 {
5532 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5533 new_vard = TREE_OPERAND (new_var, 0);
5534 gcc_assert (DECL_P (new_vard));
5535 }
b05c7e43 5536 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
da008d72 5537 if (is_simd
5538 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5539 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5540 rvarp = &rvar;
3d483a94 5541 if (is_simd
8e818b28 5542 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
b05c7e43 5543 ivar, lvar, rvarp,
5544 &rvar2))
3d483a94 5545 {
bc7bff74 5546 if (new_vard == new_var)
5547 {
5548 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5549 SET_DECL_VALUE_EXPR (new_var, ivar);
5550 }
5551 else
5552 {
5553 SET_DECL_VALUE_EXPR (new_vard,
5554 build_fold_addr_expr (ivar));
5555 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5556 }
5557 x = lang_hooks.decls.omp_clause_default_ctor
5558 (c, unshare_expr (ivar),
5559 build_outer_var_ref (var, ctx));
3d2b49b2 5560 if (rvarp && ctx->for_simd_scan_phase)
5561 {
5562 if (x)
5563 gimplify_and_add (x, &llist[0]);
5564 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5565 if (x)
5566 gimplify_and_add (x, &llist[1]);
5567 break;
5568 }
5569 else if (rvarp)
da008d72 5570 {
5571 if (x)
5572 {
5573 gimplify_and_add (x, &llist[0]);
5574
5575 tree ivar2 = unshare_expr (lvar);
5576 TREE_OPERAND (ivar2, 1) = sctx.idx;
5577 x = lang_hooks.decls.omp_clause_default_ctor
5578 (c, ivar2, build_outer_var_ref (var, ctx));
5579 gimplify_and_add (x, &llist[0]);
5580
b05c7e43 5581 if (rvar2)
5582 {
5583 x = lang_hooks.decls.omp_clause_default_ctor
5584 (c, unshare_expr (rvar2),
5585 build_outer_var_ref (var, ctx));
5586 gimplify_and_add (x, &llist[0]);
5587 }
5588
da008d72 5589 /* For types that need construction, add another
5590 private var which will be default constructed
5591 and optionally initialized with
5592 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5593 loop we want to assign this value instead of
5594 constructing and destructing it in each
5595 iteration. */
5596 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5597 gimple_add_tmp_var (nv);
b05c7e43 5598 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5599 ? rvar2
5600 : ivar, 0),
da008d72 5601 nv);
5602 x = lang_hooks.decls.omp_clause_default_ctor
5603 (c, nv, build_outer_var_ref (var, ctx));
5604 gimplify_and_add (x, ilist);
5605
5606 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5607 {
5608 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
631dab46 5609 x = DECL_VALUE_EXPR (new_vard);
5610 tree vexpr = nv;
5611 if (new_vard != new_var)
5612 vexpr = build_fold_addr_expr (nv);
5613 SET_DECL_VALUE_EXPR (new_vard, vexpr);
da008d72 5614 lower_omp (&tseq, ctx);
631dab46 5615 SET_DECL_VALUE_EXPR (new_vard, x);
da008d72 5616 gimple_seq_add_seq (ilist, tseq);
5617 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5618 }
5619
5620 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5621 if (x)
7d26f131 5622 gimplify_and_add (x, dlist);
da008d72 5623 }
5624
5625 tree ref = build_outer_var_ref (var, ctx);
5626 x = unshare_expr (ivar);
5627 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5628 ref);
5629 gimplify_and_add (x, &llist[0]);
5630
5631 ref = build_outer_var_ref (var, ctx);
5632 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5633 rvar);
5634 gimplify_and_add (x, &llist[3]);
5635
5636 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5637 if (new_vard == new_var)
5638 SET_DECL_VALUE_EXPR (new_var, lvar);
5639 else
5640 SET_DECL_VALUE_EXPR (new_vard,
5641 build_fold_addr_expr (lvar));
5642
5643 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5644 if (x)
7d26f131 5645 gimplify_and_add (x, &llist[1]);
da008d72 5646
5647 tree ivar2 = unshare_expr (lvar);
5648 TREE_OPERAND (ivar2, 1) = sctx.idx;
5649 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5650 if (x)
7d26f131 5651 gimplify_and_add (x, &llist[1]);
b05c7e43 5652
5653 if (rvar2)
5654 {
5655 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5656 if (x)
7d26f131 5657 gimplify_and_add (x, &llist[1]);
b05c7e43 5658 }
da008d72 5659 break;
5660 }
bc7bff74 5661 if (x)
5662 gimplify_and_add (x, &llist[0]);
5663 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5664 {
5665 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5666 lower_omp (&tseq, ctx);
5667 gimple_seq_add_seq (&llist[0], tseq);
5668 }
5669 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5670 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5671 lower_omp (&tseq, ctx);
5672 gimple_seq_add_seq (&llist[1], tseq);
5673 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5674 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5675 if (new_vard == new_var)
5676 SET_DECL_VALUE_EXPR (new_var, lvar);
5677 else
5678 SET_DECL_VALUE_EXPR (new_vard,
5679 build_fold_addr_expr (lvar));
5680 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5681 if (x)
7d26f131 5682 gimplify_and_add (x, &llist[1]);
bc7bff74 5683 break;
5684 }
09d1c205 5685 /* If this is a reference to a constant-size reduction var
 5686 with a placeholder, we haven't emitted the initializer
5687 for it because it is undesirable if SIMD arrays are used.
5688 But if they aren't used, we need to emit the deferred
5689 initialization now. */
4954efd4 5690 else if (omp_is_reference (var) && is_simd)
2712b6de 5691 handle_simd_reference (clause_loc, new_vard, ilist);
7e5a76c8 5692
5693 tree lab2 = NULL_TREE;
5694 if (cond)
5695 {
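		  /* For task reductions, emit effectively
		       if (!*cond) { *cond = true; <construct/init new_var>; }
		     (the test is skipped in a parallel context), mirroring
		     the non-placeholder case further below.  */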
5696 gimple *g;
5697 if (!is_parallel_ctx (ctx))
5698 {
5699 tree condv = create_tmp_var (boolean_type_node);
5700 tree m = build_simple_mem_ref (cond);
5701 g = gimple_build_assign (condv, m);
5702 gimple_seq_add_stmt (ilist, g);
5703 tree lab1
5704 = create_artificial_label (UNKNOWN_LOCATION);
5705 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5706 g = gimple_build_cond (NE_EXPR, condv,
5707 boolean_false_node,
5708 lab2, lab1);
5709 gimple_seq_add_stmt (ilist, g);
5710 gimple_seq_add_stmt (ilist,
5711 gimple_build_label (lab1));
5712 }
5713 g = gimple_build_assign (build_simple_mem_ref (cond),
5714 boolean_true_node);
5715 gimple_seq_add_stmt (ilist, g);
5716 }
bc7bff74 5717 x = lang_hooks.decls.omp_clause_default_ctor
cf5f881f 5718 (c, unshare_expr (new_var),
7e5a76c8 5719 cond ? NULL_TREE
5720 : build_outer_var_ref (var, ctx));
bc7bff74 5721 if (x)
5722 gimplify_and_add (x, ilist);
da008d72 5723
7d26f131 5724 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5725 && OMP_CLAUSE_REDUCTION_INSCAN (c))
da008d72 5726 {
3d2b49b2 5727 if (ctx->for_simd_scan_phase)
5728 goto do_dtor;
7d26f131 5729 if (x || (!is_simd
5730 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
da008d72 5731 {
631dab46 5732 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
da008d72 5733 gimple_add_tmp_var (nv);
631dab46 5734 ctx->cb.decl_map->put (new_vard, nv);
da008d72 5735 x = lang_hooks.decls.omp_clause_default_ctor
5736 (c, nv, build_outer_var_ref (var, ctx));
7d26f131 5737 if (x)
5738 gimplify_and_add (x, ilist);
da008d72 5739 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5740 {
5741 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
631dab46 5742 tree vexpr = nv;
5743 if (new_vard != new_var)
5744 vexpr = build_fold_addr_expr (nv);
5745 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5746 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
da008d72 5747 lower_omp (&tseq, ctx);
631dab46 5748 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5749 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
da008d72 5750 gimple_seq_add_seq (ilist, tseq);
5751 }
5752 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
7d26f131 5753 if (is_simd && ctx->scan_exclusive)
b05c7e43 5754 {
5755 tree nv2
5756 = create_tmp_var_raw (TREE_TYPE (new_var));
5757 gimple_add_tmp_var (nv2);
5758 ctx->cb.decl_map->put (nv, nv2);
5759 x = lang_hooks.decls.omp_clause_default_ctor
5760 (c, nv2, build_outer_var_ref (var, ctx));
5761 gimplify_and_add (x, ilist);
5762 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5763 if (x)
7d26f131 5764 gimplify_and_add (x, dlist);
b05c7e43 5765 }
da008d72 5766 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5767 if (x)
7d26f131 5768 gimplify_and_add (x, dlist);
da008d72 5769 }
7d26f131 5770 else if (is_simd
5771 && ctx->scan_exclusive
b05c7e43 5772 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5773 {
5774 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5775 gimple_add_tmp_var (nv2);
5776 ctx->cb.decl_map->put (new_vard, nv2);
5777 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5778 if (x)
7d26f131 5779 gimplify_and_add (x, dlist);
b05c7e43 5780 }
da008d72 5781 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5782 goto do_dtor;
5783 }
5784
bc7bff74 5785 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5786 {
5787 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5788 lower_omp (&tseq, ctx);
5789 gimple_seq_add_seq (ilist, tseq);
5790 }
75a70cf9 5791 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
bc7bff74 5792 if (is_simd)
5793 {
5794 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5795 lower_omp (&tseq, ctx);
5796 gimple_seq_add_seq (dlist, tseq);
5797 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5798 }
fd6481cf 5799 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
7e5a76c8 5800 if (cond)
5801 {
5802 if (lab2)
5803 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5804 break;
5805 }
bc7bff74 5806 goto do_dtor;
1e8e9920 5807 }
5808 else
5809 {
5810 x = omp_reduction_init (c, TREE_TYPE (new_var));
5811 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
c22ad515 5812 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5813
7e5a76c8 5814 if (cond)
5815 {
5816 gimple *g;
5817 tree lab2 = NULL_TREE;
5818 /* GOMP_taskgroup_reduction_register memsets the whole
5819 array to zero. If the initializer is zero, we don't
5820 need to initialize it again, just mark it as ever
5821 used unconditionally, i.e. cond = true. */
5822 if (initializer_zerop (x))
5823 {
5824 g = gimple_build_assign (build_simple_mem_ref (cond),
5825 boolean_true_node);
5826 gimple_seq_add_stmt (ilist, g);
5827 break;
5828 }
5829
5830 /* Otherwise, emit
5831 if (!cond) { cond = true; new_var = x; } */
5832 if (!is_parallel_ctx (ctx))
5833 {
5834 tree condv = create_tmp_var (boolean_type_node);
5835 tree m = build_simple_mem_ref (cond);
5836 g = gimple_build_assign (condv, m);
5837 gimple_seq_add_stmt (ilist, g);
5838 tree lab1
5839 = create_artificial_label (UNKNOWN_LOCATION);
5840 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5841 g = gimple_build_cond (NE_EXPR, condv,
5842 boolean_false_node,
5843 lab2, lab1);
5844 gimple_seq_add_stmt (ilist, g);
5845 gimple_seq_add_stmt (ilist,
5846 gimple_build_label (lab1));
5847 }
5848 g = gimple_build_assign (build_simple_mem_ref (cond),
5849 boolean_true_node);
5850 gimple_seq_add_stmt (ilist, g);
5851 gimplify_assign (new_var, x, ilist);
5852 if (lab2)
5853 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5854 break;
5855 }
5856
c22ad515 5857 /* reduction(-:var) sums up the partial results, so it
5858 acts identically to reduction(+:var). */
5859 if (code == MINUS_EXPR)
5860 code = PLUS_EXPR;
5861
2712b6de 5862 tree new_vard = new_var;
4954efd4 5863 if (is_simd && omp_is_reference (var))
2712b6de 5864 {
5865 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5866 new_vard = TREE_OPERAND (new_var, 0);
5867 gcc_assert (DECL_P (new_vard));
5868 }
b05c7e43 5869 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
da008d72 5870 if (is_simd
5871 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5872 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5873 rvarp = &rvar;
3d483a94 5874 if (is_simd
8e818b28 5875 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
b05c7e43 5876 ivar, lvar, rvarp,
5877 &rvar2))
3d483a94 5878 {
da008d72 5879 if (new_vard != new_var)
5880 {
5881 SET_DECL_VALUE_EXPR (new_vard,
5882 build_fold_addr_expr (lvar));
5883 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5884 }
5885
3d483a94 5886 tree ref = build_outer_var_ref (var, ctx);
5887
da008d72 5888 if (rvarp)
5889 {
3d2b49b2 5890 if (ctx->for_simd_scan_phase)
5891 break;
da008d72 5892 gimplify_assign (ivar, ref, &llist[0]);
5893 ref = build_outer_var_ref (var, ctx);
5894 gimplify_assign (ref, rvar, &llist[3]);
5895 break;
5896 }
5897
3d483a94 5898 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5899
8e818b28 5900 if (sctx.is_simt)
bab6706a 5901 {
5902 if (!simt_lane)
5903 simt_lane = create_tmp_var (unsigned_type_node);
5904 x = build_call_expr_internal_loc
5905 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5906 TREE_TYPE (ivar), 2, ivar, simt_lane);
5907 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5908 gimplify_assign (ivar, x, &llist[2]);
5909 }
3d483a94 5910 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5911 ref = build_outer_var_ref (var, ctx);
5912 gimplify_assign (ref, x, &llist[1]);
2712b6de 5913
3d483a94 5914 }
631dab46 5915 else
3d483a94 5916 {
4954efd4 5917 if (omp_is_reference (var) && is_simd)
2712b6de 5918 handle_simd_reference (clause_loc, new_vard, ilist);
7d26f131 5919 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5920 && OMP_CLAUSE_REDUCTION_INSCAN (c))
631dab46 5921 break;
3d483a94 5922 gimplify_assign (new_var, x, ilist);
5923 if (is_simd)
c22ad515 5924 {
5925 tree ref = build_outer_var_ref (var, ctx);
5926
5927 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5928 ref = build_outer_var_ref (var, ctx);
5929 gimplify_assign (ref, x, dlist);
5930 }
3d483a94 5931 }
1e8e9920 5932 }
5933 break;
5934
5935 default:
5936 gcc_unreachable ();
5937 }
5938 }
5939 }
7e5a76c8 5940 if (tskred_avar)
5941 {
5942 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5943 TREE_THIS_VOLATILE (clobber) = 1;
5944 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5945 }
1e8e9920 5946
9d805ed8 5947 if (known_eq (sctx.max_vf, 1U))
4f4b92d8 5948 {
5949 sctx.is_simt = false;
5950 if (ctx->lastprivate_conditional_map)
5951 {
384aea12 5952 if (gimple_omp_for_combined_into_p (ctx->stmt))
5953 {
5954 /* Signal to lower_omp_1 that it should use parent context. */
a0110ad7 5955 ctx->combined_into_simd_safelen1 = true;
384aea12 5956 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5957 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5958 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5959 {
5960 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
8d54bfe2 5961 omp_context *outer = ctx->outer;
5962 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
5963 outer = outer->outer;
5964 tree *v = ctx->lastprivate_conditional_map->get (o);
5965 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
5966 tree *pv = outer->lastprivate_conditional_map->get (po);
384aea12 5967 *v = *pv;
5968 }
5969 }
5970 else
5971 {
5972 /* When not vectorized, treat lastprivate(conditional:) like
5973 normal lastprivate, as there will be just one simd lane
5974 writing the privatized variable. */
5975 delete ctx->lastprivate_conditional_map;
5976 ctx->lastprivate_conditional_map = NULL;
5977 }
4f4b92d8 5978 }
5979 }
1b576300 5980
1d86b8dc 5981 if (nonconst_simd_if)
5982 {
5983 if (sctx.lane == NULL_TREE)
5984 {
5985 sctx.idx = create_tmp_var (unsigned_type_node);
5986 sctx.lane = create_tmp_var (unsigned_type_node);
5987 }
5988 /* FIXME: For now. */
5989 sctx.is_simt = false;
5990 }
5991
1b576300 5992 if (sctx.lane || sctx.is_simt)
3d483a94 5993 {
1b576300 5994 uid = create_tmp_var (ptr_type_node, "simduid");
8e1a382d 5995 /* Don't warn about simduid being uninitialized; it is always
 5996 uninitialized, but we use it only for its DECL_UID, never for its value. */
5997 TREE_NO_WARNING (uid) = 1;
1b576300 5998 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5999 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6000 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6001 gimple_omp_for_set_clauses (ctx->stmt, c);
6002 }
 6003 /* Emit calls denoting privatized variables and initializing a pointer to
 6004 the structure that holds private variables as fields, after the ompdevlow pass. */
6005 if (sctx.is_simt)
6006 {
6007 sctx.simt_eargs[0] = uid;
6008 gimple *g
6009 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6010 gimple_call_set_lhs (g, uid);
6011 gimple_seq_add_stmt (ilist, g);
6012 sctx.simt_eargs.release ();
6013
6014 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6015 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6016 gimple_call_set_lhs (g, simtrec);
6017 gimple_seq_add_stmt (ilist, g);
6018 }
6019 if (sctx.lane)
6020 {
1d86b8dc 6021 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
da008d72 6022 2 + (nonconst_simd_if != NULL),
6023 uid, integer_zero_node,
6024 nonconst_simd_if);
8e818b28 6025 gimple_call_set_lhs (g, sctx.lane);
3d483a94 6026 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6027 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
8e818b28 6028 g = gimple_build_assign (sctx.lane, INTEGER_CST,
e9cf809e 6029 build_int_cst (unsigned_type_node, 0));
3d483a94 6030 gimple_seq_add_stmt (ilist, g);
da008d72 6031 if (sctx.lastlane)
6032 {
6033 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6034 2, uid, sctx.lane);
6035 gimple_call_set_lhs (g, sctx.lastlane);
6036 gimple_seq_add_stmt (dlist, g);
6037 gimple_seq_add_seq (dlist, llist[3]);
6038 }
bab6706a 6039 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6040 if (llist[2])
6041 {
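	  /* The emitted sequence is roughly
	       simt_vf = GOMP_SIMT_VF ();
	       simt_lane = 1; sctx.idx = 0;
	       while (simt_lane < simt_vf) { <llist[2]>; simt_lane <<= 1; }
	     where llist[2] combines each lane's partial value with the one
	     fetched from the lane simt_lane positions away via
	     GOMP_SIMT_XCHG_BFLY.  */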
6042 tree simt_vf = create_tmp_var (unsigned_type_node);
6043 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6044 gimple_call_set_lhs (g, simt_vf);
6045 gimple_seq_add_stmt (dlist, g);
6046
6047 tree t = build_int_cst (unsigned_type_node, 1);
6048 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6049 gimple_seq_add_stmt (dlist, g);
6050
6051 t = build_int_cst (unsigned_type_node, 0);
8e818b28 6052 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
bab6706a 6053 gimple_seq_add_stmt (dlist, g);
6054
6055 tree body = create_artificial_label (UNKNOWN_LOCATION);
6056 tree header = create_artificial_label (UNKNOWN_LOCATION);
6057 tree end = create_artificial_label (UNKNOWN_LOCATION);
6058 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6059 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6060
6061 gimple_seq_add_seq (dlist, llist[2]);
6062
6063 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6064 gimple_seq_add_stmt (dlist, g);
6065
6066 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6067 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6068 gimple_seq_add_stmt (dlist, g);
6069
6070 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6071 }
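      /* Wrap the per-lane initialization (llist[0]) and destruction
	 (llist[1]) sequences in loops running sctx.idx from 0 to
	 GOMP_SIMD_VF (simduid) - 1, so each element of the privatized
	 SIMD arrays is handled.  */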
3d483a94 6072 for (int i = 0; i < 2; i++)
6073 if (llist[i])
6074 {
f9e245b2 6075 tree vf = create_tmp_var (unsigned_type_node);
3d483a94 6076 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6077 gimple_call_set_lhs (g, vf);
6078 gimple_seq *seq = i == 0 ? ilist : dlist;
6079 gimple_seq_add_stmt (seq, g);
6080 tree t = build_int_cst (unsigned_type_node, 0);
8e818b28 6081 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
3d483a94 6082 gimple_seq_add_stmt (seq, g);
6083 tree body = create_artificial_label (UNKNOWN_LOCATION);
6084 tree header = create_artificial_label (UNKNOWN_LOCATION);
6085 tree end = create_artificial_label (UNKNOWN_LOCATION);
6086 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6087 gimple_seq_add_stmt (seq, gimple_build_label (body));
6088 gimple_seq_add_seq (seq, llist[i]);
6089 t = build_int_cst (unsigned_type_node, 1);
8e818b28 6090 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
3d483a94 6091 gimple_seq_add_stmt (seq, g);
6092 gimple_seq_add_stmt (seq, gimple_build_label (header));
8e818b28 6093 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
3d483a94 6094 gimple_seq_add_stmt (seq, g);
6095 gimple_seq_add_stmt (seq, gimple_build_label (end));
6096 }
6097 }
1b576300 6098 if (sctx.is_simt)
6099 {
6100 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6101 gimple *g
6102 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6103 gimple_seq_add_stmt (dlist, g);
6104 }
3d483a94 6105
1e8e9920 6106 /* The copyin sequence is not to be executed by the main thread, since
6107 that would result in self-copies. Perhaps not visible to scalars,
6108 but it certainly is to C++ operator=. */
6109 if (copyin_seq)
6110 {
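      /* Emit: if (omp_get_thread_num () != 0) <copyin_seq>;  */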
b9a16870 6111 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6112 0);
1e8e9920 6113 x = build2 (NE_EXPR, boolean_type_node, x,
6114 build_int_cst (TREE_TYPE (x), 0));
6115 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6116 gimplify_and_add (x, ilist);
6117 }
6118
6119 /* If any copyin variable is passed by reference, we must ensure the
6120 master thread doesn't modify it before it is copied over in all
f49d7bb5 6121 threads. Similarly, for variables in both firstprivate and
 6122 lastprivate clauses we need to ensure that the lastprivate copying
bc7bff74 6123 happens after the firstprivate copying in all threads. And similarly
 6124 for UDRs if the initializer expression refers to omp_orig. */
7d26f131 6125 if (copyin_by_ref || lastprivate_firstprivate
6126 || (reduction_omp_orig_ref
6127 && !ctx->scan_inclusive
6128 && !ctx->scan_exclusive))
3d483a94 6129 {
6130 /* Don't add any barrier for #pragma omp simd or
6131 #pragma omp distribute. */
7e5a76c8 6132 if (!is_task_ctx (ctx)
6133 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6134 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
4954efd4 6135 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
3d483a94 6136 }
6137
6138 /* If max_vf is non-zero, then we can use only a vectorization factor
6139 up to the max_vf we chose. So stick it into the safelen clause. */
9d805ed8 6140 if (maybe_ne (sctx.max_vf, 0U))
3d483a94 6141 {
4954efd4 6142 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3d483a94 6143 OMP_CLAUSE_SAFELEN);
9d805ed8 6144 poly_uint64 safe_len;
3d483a94 6145 if (c == NULL_TREE
9d805ed8 6146 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6147 && maybe_gt (safe_len, sctx.max_vf)))
3d483a94 6148 {
6149 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6150 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
8e818b28 6151 sctx.max_vf);
3d483a94 6152 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6153 gimple_omp_for_set_clauses (ctx->stmt, c);
6154 }
6155 }
1e8e9920 6156}
6157
9a1d892b 6158/* Create temporary variables for lastprivate(conditional:) implementation
6159 in context CTX with CLAUSES. */
6160
6161static void
6162lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6163{
9a1d892b 6164 tree iter_type = NULL_TREE;
6165 tree cond_ptr = NULL_TREE;
6166 tree iter_var = NULL_TREE;
4f4b92d8 6167 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0076df39 6168 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4f4b92d8 6169 tree next = *clauses;
9a1d892b 6170 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6171 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6172 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6173 {
4f4b92d8 6174 if (is_simd)
6175 {
6176 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6177 gcc_assert (cc);
6178 if (iter_type == NULL_TREE)
6179 {
6180 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6181 iter_var = create_tmp_var_raw (iter_type);
6182 DECL_CONTEXT (iter_var) = current_function_decl;
6183 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6184 DECL_CHAIN (iter_var) = ctx->block_vars;
6185 ctx->block_vars = iter_var;
6186 tree c3
6187 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6188 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6189 OMP_CLAUSE_DECL (c3) = iter_var;
6190 OMP_CLAUSE_CHAIN (c3) = *clauses;
6191 *clauses = c3;
6192 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6193 }
6194 next = OMP_CLAUSE_CHAIN (cc);
6195 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6196 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6197 ctx->lastprivate_conditional_map->put (o, v);
6198 continue;
6199 }
9a1d892b 6200 if (iter_type == NULL)
6201 {
eb7a699d 6202 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6203 {
6204 struct omp_for_data fd;
6205 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6206 NULL);
6207 iter_type = unsigned_type_for (fd.iter_type);
6208 }
6209 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6210 iter_type = unsigned_type_node;
48152aa2 6211 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6212 if (c2)
6213 {
6214 cond_ptr
6215 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6216 OMP_CLAUSE_DECL (c2) = cond_ptr;
6217 }
6218 else
6219 {
6220 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6221 DECL_CONTEXT (cond_ptr) = current_function_decl;
6222 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6223 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6224 ctx->block_vars = cond_ptr;
6225 c2 = build_omp_clause (UNKNOWN_LOCATION,
6226 OMP_CLAUSE__CONDTEMP_);
6227 OMP_CLAUSE_DECL (c2) = cond_ptr;
6228 OMP_CLAUSE_CHAIN (c2) = *clauses;
6229 *clauses = c2;
6230 }
9a1d892b 6231 iter_var = create_tmp_var_raw (iter_type);
6232 DECL_CONTEXT (iter_var) = current_function_decl;
6233 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6234 DECL_CHAIN (iter_var) = ctx->block_vars;
6235 ctx->block_vars = iter_var;
9a1d892b 6236 tree c3
6237 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
4f4b92d8 6238 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
9a1d892b 6239 OMP_CLAUSE_DECL (c3) = iter_var;
48152aa2 6240 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
9a1d892b 6241 OMP_CLAUSE_CHAIN (c2) = c3;
9a1d892b 6242 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6243 }
6244 tree v = create_tmp_var_raw (iter_type);
6245 DECL_CONTEXT (v) = current_function_decl;
6246 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6247 DECL_CHAIN (v) = ctx->block_vars;
6248 ctx->block_vars = v;
6249 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6250 ctx->lastprivate_conditional_map->put (o, v);
6251 }
6252}
6253
773c5ba7 6254
1e8e9920 6255/* Generate code to implement the LASTPRIVATE clauses. This is used for
6256 both parallel and workshare constructs. PREDICATE may be NULL if it's
9a1d892b 6257 always true. BODY_P is the sequence into which any early initialization
 6258 is inserted if needed, STMT_LIST is where the non-conditional lastprivate
 6259 handling goes, and CSTMT_LIST is a sequence that needs to be run in a
 6260 critical section. */
1e8e9920 6261
6262static void
9a1d892b 6263lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6264 gimple_seq *stmt_list, gimple_seq *cstmt_list,
bc7bff74 6265 omp_context *ctx)
1e8e9920 6266{
3d483a94 6267 tree x, c, label = NULL, orig_clauses = clauses;
fd6481cf 6268 bool par_clauses = false;
bab6706a 6269 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
9a1d892b 6270 unsigned HOST_WIDE_INT conditional_off = 0;
a0110ad7 6271 gimple_seq post_stmt_list = NULL;
1e8e9920 6272
3d483a94 6273 /* Early exit if there are no lastprivate or linear clauses. */
6274 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6275 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6276 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6277 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6278 break;
1e8e9920 6279 if (clauses == NULL)
6280 {
6281 /* If this was a workshare clause, see if it had been combined
6282 with its parallel. In that case, look for the clauses on the
6283 parallel statement itself. */
6284 if (is_parallel_ctx (ctx))
6285 return;
6286
6287 ctx = ctx->outer;
6288 if (ctx == NULL || !is_parallel_ctx (ctx))
6289 return;
6290
4954efd4 6291 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
1e8e9920 6292 OMP_CLAUSE_LASTPRIVATE);
6293 if (clauses == NULL)
6294 return;
fd6481cf 6295 par_clauses = true;
1e8e9920 6296 }
6297
bab6706a 6298 bool maybe_simt = false;
6299 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0076df39 6300 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
bab6706a 6301 {
4954efd4 6302 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6303 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
bab6706a 6304 if (simduid)
6305 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6306 }
6307
75a70cf9 6308 if (predicate)
6309 {
1a91d914 6310 gcond *stmt;
75a70cf9 6311 tree label_true, arm1, arm2;
da80a82f 6312 enum tree_code pred_code = TREE_CODE (predicate);
75a70cf9 6313
e60a6f7b 6314 label = create_artificial_label (UNKNOWN_LOCATION);
6315 label_true = create_artificial_label (UNKNOWN_LOCATION);
da80a82f 6316 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6317 {
6318 arm1 = TREE_OPERAND (predicate, 0);
6319 arm2 = TREE_OPERAND (predicate, 1);
6320 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6321 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6322 }
6323 else
6324 {
6325 arm1 = predicate;
6326 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6327 arm2 = boolean_false_node;
6328 pred_code = NE_EXPR;
6329 }
bab6706a 6330 if (maybe_simt)
6331 {
da80a82f 6332 c = build2 (pred_code, boolean_type_node, arm1, arm2);
bab6706a 6333 c = fold_convert (integer_type_node, c);
6334 simtcond = create_tmp_var (integer_type_node);
6335 gimplify_assign (simtcond, c, stmt_list);
6336 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6337 1, simtcond);
6338 c = create_tmp_var (integer_type_node);
6339 gimple_call_set_lhs (g, c);
6340 gimple_seq_add_stmt (stmt_list, g);
6341 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6342 label_true, label);
6343 }
6344 else
da80a82f 6345 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
75a70cf9 6346 gimple_seq_add_stmt (stmt_list, stmt);
6347 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6348 }
1e8e9920 6349
9a1d892b 6350 tree cond_ptr = NULL_TREE;
fd6481cf 6351 for (c = clauses; c ;)
1e8e9920 6352 {
6353 tree var, new_var;
389dd41b 6354 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9a1d892b 6355 gimple_seq *this_stmt_list = stmt_list;
6356 tree lab2 = NULL_TREE;
6357
6358 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4f4b92d8 6359 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
384aea12 6360 && ctx->lastprivate_conditional_map
a0110ad7 6361 && !ctx->combined_into_simd_safelen1)
9a1d892b 6362 {
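	  /* For lastprivate(conditional:), V holds the iteration stamp of
	     this thread's last store to the variable and MEM the highest
	     stamp recorded so far; in the critical section emit effectively
	       if (v > mem) { mem = v; <copy new_var back to the original>; }
	     so that only the logically last write is copied out.  */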
4f4b92d8 6363 gcc_assert (body_p);
6364 if (simduid)
6365 goto next;
9a1d892b 6366 if (cond_ptr == NULL_TREE)
6367 {
6368 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6369 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6370 }
6371 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6372 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6373 tree v = *ctx->lastprivate_conditional_map->get (o);
6374 gimplify_assign (v, build_zero_cst (type), body_p);
6375 this_stmt_list = cstmt_list;
48152aa2 6376 tree mem;
6377 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6378 {
6379 mem = build2 (MEM_REF, type, cond_ptr,
6380 build_int_cst (TREE_TYPE (cond_ptr),
6381 conditional_off));
6382 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6383 }
6384 else
6385 mem = build4 (ARRAY_REF, type, cond_ptr,
6386 size_int (conditional_off++), NULL_TREE, NULL_TREE);
9a1d892b 6387 tree mem2 = copy_node (mem);
9a1d892b 6388 gimple_seq seq = NULL;
6389 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6390 gimple_seq_add_seq (this_stmt_list, seq);
6391 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6392 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6393 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6394 gimple_seq_add_stmt (this_stmt_list, g);
6395 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6396 gimplify_assign (mem2, v, this_stmt_list);
6397 }
a0110ad7 6398 else if (predicate
6399 && ctx->combined_into_simd_safelen1
6400 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6401 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6402 && ctx->lastprivate_conditional_map)
6403 this_stmt_list = &post_stmt_list;
1e8e9920 6404
3d483a94 6405 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6406 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6407 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
fd6481cf 6408 {
6409 var = OMP_CLAUSE_DECL (c);
43895be5 6410 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6411 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6412 && is_taskloop_ctx (ctx))
6413 {
6414 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6415 new_var = lookup_decl (var, ctx->outer);
6416 }
6417 else
aa1ddb66 6418 {
6419 new_var = lookup_decl (var, ctx);
6420 /* Avoid uninitialized warnings for lastprivate and
6421 for linear iterators. */
6422 if (predicate
6423 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6424 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6425 TREE_NO_WARNING (new_var) = 1;
6426 }
1e8e9920 6427
d66340f7 6428 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
3d483a94 6429 {
6430 tree val = DECL_VALUE_EXPR (new_var);
d66340f7 6431 if (TREE_CODE (val) == ARRAY_REF
3d483a94 6432 && VAR_P (TREE_OPERAND (val, 0))
6433 && lookup_attribute ("omp simd array",
6434 DECL_ATTRIBUTES (TREE_OPERAND (val,
6435 0))))
6436 {
6437 if (lastlane == NULL)
6438 {
f9e245b2 6439 lastlane = create_tmp_var (unsigned_type_node);
1a91d914 6440 gcall *g
3d483a94 6441 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6442 2, simduid,
6443 TREE_OPERAND (val, 1));
6444 gimple_call_set_lhs (g, lastlane);
9a1d892b 6445 gimple_seq_add_stmt (this_stmt_list, g);
3d483a94 6446 }
6447 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6448 TREE_OPERAND (val, 0), lastlane,
6449 NULL_TREE, NULL_TREE);
da008d72 6450 TREE_THIS_NOTRAP (new_var) = 1;
1b576300 6451 }
d66340f7 6452 }
6453 else if (maybe_simt)
6454 {
6455 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6456 ? DECL_VALUE_EXPR (new_var)
6457 : new_var);
6458 if (simtlast == NULL)
1b576300 6459 {
d66340f7 6460 simtlast = create_tmp_var (unsigned_type_node);
6461 gcall *g = gimple_build_call_internal
6462 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6463 gimple_call_set_lhs (g, simtlast);
9a1d892b 6464 gimple_seq_add_stmt (this_stmt_list, g);
3d483a94 6465 }
d66340f7 6466 x = build_call_expr_internal_loc
6467 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6468 TREE_TYPE (val), 2, val, simtlast);
6469 new_var = unshare_expr (new_var);
9a1d892b 6470 gimplify_assign (new_var, x, this_stmt_list);
d66340f7 6471 new_var = unshare_expr (new_var);
3d483a94 6472 }
6473
6474 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6475 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
75a70cf9 6476 {
e3a19533 6477 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
9a1d892b 6478 gimple_seq_add_seq (this_stmt_list,
75a70cf9 6479 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
3d483a94 6480 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
75a70cf9 6481 }
2b536a17 6482 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6483 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6484 {
6485 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
9a1d892b 6486 gimple_seq_add_seq (this_stmt_list,
2b536a17 6487 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6488 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6489 }
1e8e9920 6490
43895be5 6491 x = NULL_TREE;
6492 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
d266deeb 6493 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6494 && is_taskloop_ctx (ctx))
43895be5 6495 {
43895be5 6496 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6497 ctx->outer->outer);
6498 if (is_global_var (ovar))
6499 x = ovar;
6500 }
6501 if (!x)
1f355935 6502 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4954efd4 6503 if (omp_is_reference (var))
182cf5a9 6504 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
fd6481cf 6505 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
9a1d892b 6506 gimplify_and_add (x, this_stmt_list);
6507
6508 if (lab2)
6509 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
fd6481cf 6510 }
9a1d892b 6511
4f4b92d8 6512 next:
fd6481cf 6513 c = OMP_CLAUSE_CHAIN (c);
6514 if (c == NULL && !par_clauses)
6515 {
6516 /* If this was a workshare clause, see if it had been combined
6517 with its parallel. In that case, continue looking for the
6518 clauses also on the parallel statement itself. */
6519 if (is_parallel_ctx (ctx))
6520 break;
6521
6522 ctx = ctx->outer;
6523 if (ctx == NULL || !is_parallel_ctx (ctx))
6524 break;
6525
4954efd4 6526 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
fd6481cf 6527 OMP_CLAUSE_LASTPRIVATE);
6528 par_clauses = true;
6529 }
1e8e9920 6530 }
6531
75a70cf9 6532 if (label)
6533 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
a0110ad7 6534 gimple_seq_add_seq (stmt_list, post_stmt_list);
1e8e9920 6535}
6536
641a0fa1 6537/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6538 (which might be a placeholder). INNER is true if this is an inner
6539 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6540 join markers. Generate the before-loop forking sequence in
 6541 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
6542 general form of these sequences is
6543
6544 GOACC_REDUCTION_SETUP
6545 GOACC_FORK
6546 GOACC_REDUCTION_INIT
6547 ...
6548 GOACC_REDUCTION_FINI
6549 GOACC_JOIN
6550 GOACC_REDUCTION_TEARDOWN. */
6551
ca4c3545 6552static void
641a0fa1 6553lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6554 gcall *fork, gcall *join, gimple_seq *fork_seq,
6555 gimple_seq *join_seq, omp_context *ctx)
ca4c3545 6556{
641a0fa1 6557 gimple_seq before_fork = NULL;
6558 gimple_seq after_fork = NULL;
6559 gimple_seq before_join = NULL;
6560 gimple_seq after_join = NULL;
6561 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6562 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6563 unsigned offset = 0;
6564
6565 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6566 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6567 {
6568 tree orig = OMP_CLAUSE_DECL (c);
6569 tree var = maybe_lookup_decl (orig, ctx);
6570 tree ref_to_res = NULL_TREE;
0c302595 6571 tree incoming, outgoing, v1, v2, v3;
6572 bool is_private = false;
641a0fa1 6573
6574 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6575 if (rcode == MINUS_EXPR)
6576 rcode = PLUS_EXPR;
6577 else if (rcode == TRUTH_ANDIF_EXPR)
6578 rcode = BIT_AND_EXPR;
6579 else if (rcode == TRUTH_ORIF_EXPR)
6580 rcode = BIT_IOR_EXPR;
6581 tree op = build_int_cst (unsigned_type_node, rcode);
6582
6583 if (!var)
6584 var = orig;
641a0fa1 6585
6586 incoming = outgoing = var;
7c6746c9 6587
641a0fa1 6588 if (!inner)
6589 {
6590 /* See if an outer construct also reduces this variable. */
6591 omp_context *outer = ctx;
ca4c3545 6592
641a0fa1 6593 while (omp_context *probe = outer->outer)
6594 {
6595 enum gimple_code type = gimple_code (probe->stmt);
6596 tree cls;
ca4c3545 6597
641a0fa1 6598 switch (type)
6599 {
6600 case GIMPLE_OMP_FOR:
6601 cls = gimple_omp_for_clauses (probe->stmt);
6602 break;
ca4c3545 6603
641a0fa1 6604 case GIMPLE_OMP_TARGET:
6605 if (gimple_omp_target_kind (probe->stmt)
6606 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6607 goto do_lookup;
ca4c3545 6608
641a0fa1 6609 cls = gimple_omp_target_clauses (probe->stmt);
6610 break;
ca4c3545 6611
641a0fa1 6612 default:
6613 goto do_lookup;
6614 }
7c6746c9 6615
641a0fa1 6616 outer = probe;
6617 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6618 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6619 && orig == OMP_CLAUSE_DECL (cls))
0c302595 6620 {
6621 incoming = outgoing = lookup_decl (orig, probe);
6622 goto has_outer_reduction;
6623 }
6624 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6625 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6626 && orig == OMP_CLAUSE_DECL (cls))
6627 {
6628 is_private = true;
6629 goto do_lookup;
6630 }
641a0fa1 6631 }
ca4c3545 6632
641a0fa1 6633 do_lookup:
6634 /* This is the outermost construct with this reduction,
6635 see if there's a mapping for it. */
6636 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
0c302595 6637 && maybe_lookup_field (orig, outer) && !is_private)
641a0fa1 6638 {
6639 ref_to_res = build_receiver_ref (orig, false, outer);
4954efd4 6640 if (omp_is_reference (orig))
641a0fa1 6641 ref_to_res = build_simple_mem_ref (ref_to_res);
ca4c3545 6642
0c302595 6643 tree type = TREE_TYPE (var);
6644 if (POINTER_TYPE_P (type))
6645 type = TREE_TYPE (type);
6646
641a0fa1 6647 outgoing = var;
0c302595 6648 incoming = omp_reduction_init_op (loc, rcode, type);
641a0fa1 6649 }
6650 else
f7896ff0 6651 {
6652 /* Try to look at enclosing contexts for reduction var,
6653 use original if no mapping found. */
6654 tree t = NULL_TREE;
6655 omp_context *c = ctx->outer;
6656 while (c && !t)
6657 {
6658 t = maybe_lookup_decl (orig, c);
6659 c = c->outer;
6660 }
6661 incoming = outgoing = (t ? t : orig);
6662 }
7c6746c9 6663
641a0fa1 6664 has_outer_reduction:;
6665 }
ca4c3545 6666
641a0fa1 6667 if (!ref_to_res)
6668 ref_to_res = integer_zero_node;
ca4c3545 6669
7c6746c9 6670 if (omp_is_reference (orig))
0c302595 6671 {
6672 tree type = TREE_TYPE (var);
6673 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6674
6675 if (!inner)
6676 {
6677 tree x = create_tmp_var (TREE_TYPE (type), id);
6678 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6679 }
6680
6681 v1 = create_tmp_var (type, id);
6682 v2 = create_tmp_var (type, id);
6683 v3 = create_tmp_var (type, id);
6684
6685 gimplify_assign (v1, var, fork_seq);
6686 gimplify_assign (v2, var, fork_seq);
6687 gimplify_assign (v3, var, fork_seq);
6688
6689 var = build_simple_mem_ref (var);
6690 v1 = build_simple_mem_ref (v1);
6691 v2 = build_simple_mem_ref (v2);
6692 v3 = build_simple_mem_ref (v3);
6693 outgoing = build_simple_mem_ref (outgoing);
6694
eae3d589 6695 if (!TREE_CONSTANT (incoming))
0c302595 6696 incoming = build_simple_mem_ref (incoming);
6697 }
6698 else
6699 v1 = v2 = v3 = var;
6700
641a0fa1 6701 /* Determine position in reduction buffer, which may be used
d2401312 6702	     by the target.  The parser has ensured that this is not a
6703 variable-sized type. */
6704 fixed_size_mode mode
6705 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
641a0fa1 6706 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6707 offset = (offset + align - 1) & ~(align - 1);
6708 tree off = build_int_cst (sizetype, offset);
6709 offset += GET_MODE_SIZE (mode);
ca4c3545 6710
641a0fa1 6711 if (!init_code)
6712 {
6713 init_code = build_int_cst (integer_type_node,
6714 IFN_GOACC_REDUCTION_INIT);
6715 fini_code = build_int_cst (integer_type_node,
6716 IFN_GOACC_REDUCTION_FINI);
6717 setup_code = build_int_cst (integer_type_node,
6718 IFN_GOACC_REDUCTION_SETUP);
6719 teardown_code = build_int_cst (integer_type_node,
6720 IFN_GOACC_REDUCTION_TEARDOWN);
6721 }
6722
6723 tree setup_call
6724 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6725 TREE_TYPE (var), 6, setup_code,
6726 unshare_expr (ref_to_res),
6727 incoming, level, op, off);
6728 tree init_call
6729 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6730 TREE_TYPE (var), 6, init_code,
6731 unshare_expr (ref_to_res),
0c302595 6732 v1, level, op, off);
641a0fa1 6733 tree fini_call
6734 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6735 TREE_TYPE (var), 6, fini_code,
6736 unshare_expr (ref_to_res),
0c302595 6737 v2, level, op, off);
641a0fa1 6738 tree teardown_call
6739 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6740 TREE_TYPE (var), 6, teardown_code,
0c302595 6741 ref_to_res, v3, level, op, off);
641a0fa1 6742
0c302595 6743 gimplify_assign (v1, setup_call, &before_fork);
6744 gimplify_assign (v2, init_call, &after_fork);
6745 gimplify_assign (v3, fini_call, &before_join);
641a0fa1 6746 gimplify_assign (outgoing, teardown_call, &after_join);
6747 }
6748
6749 /* Now stitch things together. */
6750 gimple_seq_add_seq (fork_seq, before_fork);
6751 if (fork)
6752 gimple_seq_add_stmt (fork_seq, fork);
6753 gimple_seq_add_seq (fork_seq, after_fork);
6754
6755 gimple_seq_add_seq (join_seq, before_join);
6756 if (join)
6757 gimple_seq_add_stmt (join_seq, join);
6758 gimple_seq_add_seq (join_seq, after_join);
ca4c3545 6759}
773c5ba7 6760
9a1d892b 6761/* Generate code to implement the REDUCTION clauses and append it
 6762   to STMT_SEQP.  If CLIST is non-NULL, it points to a sequence that
 6763   should also be emitted inside the critical section; in that case
 6764   clear *CLIST afterwards, otherwise leave it as is and let the
 6765   caller emit it itself.  */
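/* A minimal sketch of the two strategies chosen below (assumed example;
   D.priv stands for the thread's private copy):

     #pragma omp parallel for reduction (+: sum)

   with a single scalar reduction becomes an atomic update, roughly

     #pragma omp atomic
     sum = sum + D.priv;

   while multiple reductions, array sections or UDRs are merged inside a
   region bracketed by GOMP_atomic_start () / GOMP_atomic_end ().  */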
1e8e9920 6766
6767static void
9a1d892b 6768lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6769 gimple_seq *clist, omp_context *ctx)
1e8e9920 6770{
75a70cf9 6771 gimple_seq sub_seq = NULL;
42acab1c 6772 gimple *stmt;
f69b8a4c 6773 tree x, c;
1e8e9920 6774 int count = 0;
6775
641a0fa1 6776 /* OpenACC loop reductions are handled elsewhere. */
6777 if (is_gimple_omp_oacc (ctx->stmt))
6778 return;
6779
3d483a94 6780 /* SIMD reductions are handled in lower_rec_input_clauses. */
6781 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
0076df39 6782 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3d483a94 6783 return;
6784
7d26f131 6785 /* inscan reductions are handled elsewhere. */
6786 if (ctx->scan_inclusive || ctx->scan_exclusive)
6787 return;
6788
1e8e9920 6789 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6790 update in that case, otherwise use a lock. */
6791 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7e5a76c8 6792 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6793 && !OMP_CLAUSE_REDUCTION_TASK (c))
1e8e9920 6794 {
43895be5 6795 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6796 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
1e8e9920 6797 {
bc7bff74 6798 /* Never use OMP_ATOMIC for array reductions or UDRs. */
1e8e9920 6799 count = -1;
6800 break;
6801 }
6802 count++;
6803 }
6804
6805 if (count == 0)
6806 return;
6807
6808 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6809 {
43895be5 6810 tree var, ref, new_var, orig_var;
1e8e9920 6811 enum tree_code code;
389dd41b 6812 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 6813
7e5a76c8 6814 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6815 || OMP_CLAUSE_REDUCTION_TASK (c))
1e8e9920 6816 continue;
6817
d96999c6 6818 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
43895be5 6819 orig_var = var = OMP_CLAUSE_DECL (c);
6820 if (TREE_CODE (var) == MEM_REF)
6821 {
6822 var = TREE_OPERAND (var, 0);
9561765e 6823 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6824 var = TREE_OPERAND (var, 0);
d96999c6 6825 if (TREE_CODE (var) == ADDR_EXPR)
43895be5 6826 var = TREE_OPERAND (var, 0);
d96999c6 6827 else
6828 {
 6829		      /* If this is a pointer- or reference-based array
 6830			 section, the var could be private in the outer
 6831			 context, e.g. on an orphaned loop construct.  Pretend
 6832			 this is the private variable's outer reference.  */
6833 ccode = OMP_CLAUSE_PRIVATE;
6834 if (TREE_CODE (var) == INDIRECT_REF)
6835 var = TREE_OPERAND (var, 0);
6836 }
43895be5 6837 orig_var = var;
6838 if (is_variable_sized (var))
6839 {
6840 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6841 var = DECL_VALUE_EXPR (var);
6842 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6843 var = TREE_OPERAND (var, 0);
6844 gcc_assert (DECL_P (var));
6845 }
6846 }
1e8e9920 6847 new_var = lookup_decl (var, ctx);
4954efd4 6848 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
182cf5a9 6849 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
d96999c6 6850 ref = build_outer_var_ref (var, ctx, ccode);
1e8e9920 6851 code = OMP_CLAUSE_REDUCTION_CODE (c);
773c5ba7 6852
6853 /* reduction(-:var) sums up the partial results, so it acts
6854 identically to reduction(+:var). */
1e8e9920 6855 if (code == MINUS_EXPR)
6856 code = PLUS_EXPR;
6857
641a0fa1 6858 if (count == 1)
1e8e9920 6859 {
389dd41b 6860 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
1e8e9920 6861
6862 addr = save_expr (addr);
6863 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
389dd41b 6864 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
1e8e9920 6865 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7e5a76c8 6866 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
75a70cf9 6867 gimplify_and_add (x, stmt_seqp);
1e8e9920 6868 return;
6869 }
43895be5 6870 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6871 {
6872 tree d = OMP_CLAUSE_DECL (c);
6873 tree type = TREE_TYPE (d);
6874 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7e5a76c8 6875 tree i = create_tmp_var (TREE_TYPE (v));
43895be5 6876 tree ptype = build_pointer_type (TREE_TYPE (type));
9561765e 6877 tree bias = TREE_OPERAND (d, 1);
6878 d = TREE_OPERAND (d, 0);
6879 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6880 {
6881 tree b = TREE_OPERAND (d, 1);
6882 b = maybe_lookup_decl (b, ctx);
6883 if (b == NULL)
6884 {
6885 b = TREE_OPERAND (d, 1);
6886 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6887 }
6888 if (integer_zerop (bias))
6889 bias = b;
6890 else
6891 {
6892 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
6893 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
6894 TREE_TYPE (b), b, bias);
6895 }
6896 d = TREE_OPERAND (d, 0);
6897 }
43895be5 6898 /* For ref build_outer_var_ref already performs this, so
6899 only new_var needs a dereference. */
9561765e 6900 if (TREE_CODE (d) == INDIRECT_REF)
43895be5 6901 {
6902 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4954efd4 6903 gcc_assert (omp_is_reference (var) && var == orig_var);
43895be5 6904 }
9561765e 6905 else if (TREE_CODE (d) == ADDR_EXPR)
43895be5 6906 {
6907 if (orig_var == var)
6908 {
6909 new_var = build_fold_addr_expr (new_var);
6910 ref = build_fold_addr_expr (ref);
6911 }
6912 }
6913 else
6914 {
6915 gcc_assert (orig_var == var);
4954efd4 6916 if (omp_is_reference (var))
43895be5 6917 ref = build_fold_addr_expr (ref);
6918 }
6919 if (DECL_P (v))
6920 {
6921 tree t = maybe_lookup_decl (v, ctx);
6922 if (t)
6923 v = t;
6924 else
6925 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
6926 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
6927 }
9561765e 6928 if (!integer_zerop (bias))
6929 {
6930 bias = fold_convert_loc (clause_loc, sizetype, bias);
6931 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6932 TREE_TYPE (new_var), new_var,
6933 unshare_expr (bias));
6934 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6935 TREE_TYPE (ref), ref, bias);
6936 }
43895be5 6937 new_var = fold_convert_loc (clause_loc, ptype, new_var);
6938 ref = fold_convert_loc (clause_loc, ptype, ref);
7e5a76c8 6939 tree m = create_tmp_var (ptype);
43895be5 6940 gimplify_assign (m, new_var, stmt_seqp);
6941 new_var = m;
7e5a76c8 6942 m = create_tmp_var (ptype);
43895be5 6943 gimplify_assign (m, ref, stmt_seqp);
6944 ref = m;
6945 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
6946 tree body = create_artificial_label (UNKNOWN_LOCATION);
6947 tree end = create_artificial_label (UNKNOWN_LOCATION);
6948 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
6949 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
6950 tree out = build_simple_mem_ref_loc (clause_loc, ref);
6951 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6952 {
6953 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6954 tree decl_placeholder
6955 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
6956 SET_DECL_VALUE_EXPR (placeholder, out);
6957 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6958 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
6959 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
6960 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6961 gimple_seq_add_seq (&sub_seq,
6962 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6963 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6964 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6965 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
6966 }
6967 else
6968 {
6969 x = build2 (code, TREE_TYPE (out), out, priv);
6970 out = unshare_expr (out);
6971 gimplify_assign (out, x, &sub_seq);
6972 }
6973 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
6974 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6975 gimple_seq_add_stmt (&sub_seq, g);
6976 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
6977 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6978 gimple_seq_add_stmt (&sub_seq, g);
6979 g = gimple_build_assign (i, PLUS_EXPR, i,
6980 build_int_cst (TREE_TYPE (i), 1));
6981 gimple_seq_add_stmt (&sub_seq, g);
6982 g = gimple_build_cond (LE_EXPR, i, v, body, end);
6983 gimple_seq_add_stmt (&sub_seq, g);
6984 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
6985 }
ca4c3545 6986 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1e8e9920 6987 {
6988 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6989
4954efd4 6990 if (omp_is_reference (var)
bc7bff74 6991 && !useless_type_conversion_p (TREE_TYPE (placeholder),
6992 TREE_TYPE (ref)))
389dd41b 6993 ref = build_fold_addr_expr_loc (clause_loc, ref);
1e8e9920 6994 SET_DECL_VALUE_EXPR (placeholder, ref);
6995 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
e3a19533 6996 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
75a70cf9 6997 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6998 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
1e8e9920 6999 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7000 }
7001 else
7002 {
7003 x = build2 (code, TREE_TYPE (ref), ref, new_var);
7004 ref = build_outer_var_ref (var, ctx);
75a70cf9 7005 gimplify_assign (ref, x, &sub_seq);
1e8e9920 7006 }
7007 }
7008
b9a16870 7009 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7010 0);
75a70cf9 7011 gimple_seq_add_stmt (stmt_seqp, stmt);
1e8e9920 7012
75a70cf9 7013 gimple_seq_add_seq (stmt_seqp, sub_seq);
1e8e9920 7014
9a1d892b 7015 if (clist)
7016 {
7017 gimple_seq_add_seq (stmt_seqp, *clist);
7018 *clist = NULL;
7019 }
7020
b9a16870 7021 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7022 0);
75a70cf9 7023 gimple_seq_add_stmt (stmt_seqp, stmt);
1e8e9920 7024}
7025
773c5ba7 7026
1e8e9920 7027/* Generate code to implement the COPYPRIVATE clauses. */
7028
7029static void
75a70cf9 7030lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
1e8e9920 7031 omp_context *ctx)
7032{
7033 tree c;
7034
7035 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7036 {
cb561506 7037 tree var, new_var, ref, x;
1e8e9920 7038 bool by_ref;
389dd41b 7039 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 7040
55d6e7cd 7041 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
1e8e9920 7042 continue;
7043
7044 var = OMP_CLAUSE_DECL (c);
e8a588af 7045 by_ref = use_pointer_for_field (var, NULL);
1e8e9920 7046
7047 ref = build_sender_ref (var, ctx);
cb561506 7048 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7049 if (by_ref)
7050 {
7051 x = build_fold_addr_expr_loc (clause_loc, new_var);
7052 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7053 }
75a70cf9 7054 gimplify_assign (ref, x, slist);
1e8e9920 7055
cb561506 7056 ref = build_receiver_ref (var, false, ctx);
7057 if (by_ref)
7058 {
7059 ref = fold_convert_loc (clause_loc,
7060 build_pointer_type (TREE_TYPE (new_var)),
7061 ref);
7062 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7063 }
4954efd4 7064 if (omp_is_reference (var))
1e8e9920 7065 {
cb561506 7066 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
182cf5a9 7067 ref = build_simple_mem_ref_loc (clause_loc, ref);
7068 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
1e8e9920 7069 }
cb561506 7070 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
1e8e9920 7071 gimplify_and_add (x, rlist);
7072 }
7073}
7074
773c5ba7 7075
1e8e9920 7076/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7077 and REDUCTION from the sender (aka parent) side. */
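/* For illustration (assumed example): given

     int x = 42;
     #pragma omp task firstprivate (x)
       use (x);

   the sender side built here stores the value, or its address when
   use_pointer_for_field decides to pass it by reference, into the
   outgoing data record, conceptually

     .omp_data_o.x = x;

   and the matching receiver code loads it back inside the task.  */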
7078
7079static void
75a70cf9 7080lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7081 omp_context *ctx)
1e8e9920 7082{
43895be5 7083 tree c, t;
7084 int ignored_looptemp = 0;
7085 bool is_taskloop = false;
7086
7087 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7088 by GOMP_taskloop. */
7089 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7090 {
7091 ignored_looptemp = 2;
7092 is_taskloop = true;
7093 }
1e8e9920 7094
7095 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7096 {
773c5ba7 7097 tree val, ref, x, var;
1e8e9920 7098 bool by_ref, do_in = false, do_out = false;
389dd41b 7099 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 7100
55d6e7cd 7101 switch (OMP_CLAUSE_CODE (c))
1e8e9920 7102 {
fd6481cf 7103 case OMP_CLAUSE_PRIVATE:
7104 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7105 break;
7106 continue;
1e8e9920 7107 case OMP_CLAUSE_FIRSTPRIVATE:
7108 case OMP_CLAUSE_COPYIN:
7109 case OMP_CLAUSE_LASTPRIVATE:
7e5a76c8 7110 case OMP_CLAUSE_IN_REDUCTION:
7111 case OMP_CLAUSE__REDUCTEMP_:
7112 break;
1e8e9920 7113 case OMP_CLAUSE_REDUCTION:
7e5a76c8 7114 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7115 continue;
43895be5 7116 break;
7117 case OMP_CLAUSE_SHARED:
7118 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7119 break;
7120 continue;
bc7bff74 7121 case OMP_CLAUSE__LOOPTEMP_:
43895be5 7122 if (ignored_looptemp)
7123 {
7124 ignored_looptemp--;
7125 continue;
7126 }
1e8e9920 7127 break;
7128 default:
7129 continue;
7130 }
7131
87b31375 7132 val = OMP_CLAUSE_DECL (c);
7e5a76c8 7133 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7134 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
43895be5 7135 && TREE_CODE (val) == MEM_REF)
7136 {
7137 val = TREE_OPERAND (val, 0);
9561765e 7138 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7139 val = TREE_OPERAND (val, 0);
43895be5 7140 if (TREE_CODE (val) == INDIRECT_REF
7141 || TREE_CODE (val) == ADDR_EXPR)
7142 val = TREE_OPERAND (val, 0);
7143 if (is_variable_sized (val))
7144 continue;
7145 }
7146
7147 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7148 outer taskloop region. */
7149 omp_context *ctx_for_o = ctx;
7150 if (is_taskloop
7151 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7152 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7153 ctx_for_o = ctx->outer;
7154
7155 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
773c5ba7 7156
f49d7bb5 7157 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7e5a76c8 7158 && is_global_var (var)
7159 && (val == OMP_CLAUSE_DECL (c)
7160 || !is_task_ctx (ctx)
7161 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7162 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7163 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7164 != POINTER_TYPE)))))
f49d7bb5 7165 continue;
43895be5 7166
7167 t = omp_member_access_dummy_var (var);
7168 if (t)
7169 {
7170 var = DECL_VALUE_EXPR (var);
7171 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7172 if (o != t)
7173 var = unshare_and_remap (var, t, o);
7174 else
7175 var = unshare_expr (var);
7176 }
7177
7178 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7179 {
7180 /* Handle taskloop firstprivate/lastprivate, where the
7181 lastprivate on GIMPLE_OMP_TASK is represented as
7182 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7183 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7184 x = omp_build_component_ref (ctx->sender_decl, f);
7185 if (use_pointer_for_field (val, ctx))
7186 var = build_fold_addr_expr (var);
7187 gimplify_assign (x, var, ilist);
7188 DECL_ABSTRACT_ORIGIN (f) = NULL;
7189 continue;
7190 }
7191
7e5a76c8 7192 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7193 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
43895be5 7194 || val == OMP_CLAUSE_DECL (c))
7195 && is_variable_sized (val))
1e8e9920 7196 continue;
e8a588af 7197 by_ref = use_pointer_for_field (val, NULL);
1e8e9920 7198
55d6e7cd 7199 switch (OMP_CLAUSE_CODE (c))
1e8e9920 7200 {
7201 case OMP_CLAUSE_FIRSTPRIVATE:
585aefbb 7202 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7203 && !by_ref
7204 && is_task_ctx (ctx))
7205 TREE_NO_WARNING (var) = 1;
7206 do_in = true;
7207 break;
7208
7209 case OMP_CLAUSE_PRIVATE:
1e8e9920 7210 case OMP_CLAUSE_COPYIN:
bc7bff74 7211 case OMP_CLAUSE__LOOPTEMP_:
7e5a76c8 7212 case OMP_CLAUSE__REDUCTEMP_:
1e8e9920 7213 do_in = true;
7214 break;
7215
7216 case OMP_CLAUSE_LASTPRIVATE:
4954efd4 7217 if (by_ref || omp_is_reference (val))
1e8e9920 7218 {
7219 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7220 continue;
7221 do_in = true;
7222 }
7223 else
fd6481cf 7224 {
7225 do_out = true;
7226 if (lang_hooks.decls.omp_private_outer_ref (val))
7227 do_in = true;
7228 }
1e8e9920 7229 break;
7230
7231 case OMP_CLAUSE_REDUCTION:
7e5a76c8 7232 case OMP_CLAUSE_IN_REDUCTION:
1e8e9920 7233 do_in = true;
43895be5 7234 if (val == OMP_CLAUSE_DECL (c))
7e5a76c8 7235 {
7236 if (is_task_ctx (ctx))
7237 by_ref = use_pointer_for_field (val, ctx);
7238 else
7239 do_out = !(by_ref || omp_is_reference (val));
7240 }
43895be5 7241 else
7242 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
1e8e9920 7243 break;
7244
7245 default:
7246 gcc_unreachable ();
7247 }
7248
7249 if (do_in)
7250 {
7251 ref = build_sender_ref (val, ctx);
389dd41b 7252 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
75a70cf9 7253 gimplify_assign (ref, x, ilist);
fd6481cf 7254 if (is_task_ctx (ctx))
7255 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
1e8e9920 7256 }
773c5ba7 7257
1e8e9920 7258 if (do_out)
7259 {
7260 ref = build_sender_ref (val, ctx);
75a70cf9 7261 gimplify_assign (var, ref, olist);
1e8e9920 7262 }
7263 }
7264}
7265
75a70cf9 7266/* Generate code to implement SHARED from the sender (aka parent)
7267 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7268 list things that got automatically shared. */
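/* Sketch (assumed example): with

     int x = 0;
     #pragma omp parallel
       x++;

   no clause mentions X, so it is found by walking the record type's
   fields instead, and the sender conceptually emits

     .omp_data_o.x = &x;

   (or a plain copy-in plus copy-out when the field is not passed by
   reference).  */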
1e8e9920 7269
7270static void
75a70cf9 7271lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
1e8e9920 7272{
43895be5 7273 tree var, ovar, nvar, t, f, x, record_type;
1e8e9920 7274
7275 if (ctx->record_type == NULL)
7276 return;
773c5ba7 7277
fd6481cf 7278 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
1767a056 7279 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
1e8e9920 7280 {
7281 ovar = DECL_ABSTRACT_ORIGIN (f);
43895be5 7282 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7283 continue;
7284
1e8e9920 7285 nvar = maybe_lookup_decl (ovar, ctx);
7286 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7287 continue;
7288
773c5ba7 7289      /* If CTX is a nested parallel directive, find the immediately
 7290	  enclosing parallel or workshare construct that contains a
 7291	  mapping for OVAR.  */
87b31375 7292 var = lookup_decl_in_outer_ctx (ovar, ctx);
773c5ba7 7293
43895be5 7294 t = omp_member_access_dummy_var (var);
7295 if (t)
7296 {
7297 var = DECL_VALUE_EXPR (var);
7298 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7299 if (o != t)
7300 var = unshare_and_remap (var, t, o);
7301 else
7302 var = unshare_expr (var);
7303 }
7304
e8a588af 7305 if (use_pointer_for_field (ovar, ctx))
1e8e9920 7306 {
7307 x = build_sender_ref (ovar, ctx);
48152aa2 7308 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7309 && TREE_TYPE (f) == TREE_TYPE (ovar))
7310 {
7311 gcc_assert (is_parallel_ctx (ctx)
7312 && DECL_ARTIFICIAL (ovar));
7313 /* _condtemp_ clause. */
7314 var = build_constructor (TREE_TYPE (x), NULL);
7315 }
7316 else
7317 var = build_fold_addr_expr (var);
75a70cf9 7318 gimplify_assign (x, var, ilist);
1e8e9920 7319 }
7320 else
7321 {
7322 x = build_sender_ref (ovar, ctx);
75a70cf9 7323 gimplify_assign (x, var, ilist);
1e8e9920 7324
d2263ebb 7325 if (!TREE_READONLY (var)
7326 /* We don't need to receive a new reference to a result
7327 or parm decl. In fact we may not store to it as we will
7328 invalidate any pending RSO and generate wrong gimple
7329 during inlining. */
7330 && !((TREE_CODE (var) == RESULT_DECL
7331 || TREE_CODE (var) == PARM_DECL)
7332 && DECL_BY_REFERENCE (var)))
fd6481cf 7333 {
7334 x = build_sender_ref (ovar, ctx);
75a70cf9 7335 gimplify_assign (var, x, olist);
fd6481cf 7336 }
1e8e9920 7337 }
7338 }
7339}
7340
a8e785ba 7341/* Emit an OpenACC head marker call, encapsulating the partitioning and
7342 other information that must be processed by the target compiler.
7343 Return the maximum number of dimensions the associated loop might
7344 be partitioned over. */
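/* For instance (assumed example), "#pragma acc loop gang vector" leads
   to an IFN_UNIQUE (OACC_HEAD_MARK, ...) call whose tag has OLF_DIM_GANG
   and OLF_DIM_VECTOR set and whose level count is 2; the target compiler
   later uses this to decide the actual partitioning.  */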
7345
7346static unsigned
7347lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7348 gimple_seq *seq, omp_context *ctx)
7349{
7350 unsigned levels = 0;
7351 unsigned tag = 0;
7352 tree gang_static = NULL_TREE;
7353 auto_vec<tree, 5> args;
7354
7355 args.quick_push (build_int_cst
7356 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7357 args.quick_push (ddvar);
7358 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7359 {
7360 switch (OMP_CLAUSE_CODE (c))
7361 {
7362 case OMP_CLAUSE_GANG:
7363 tag |= OLF_DIM_GANG;
7364 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7365 /* static:* is represented by -1, and we can ignore it, as
7366 scheduling is always static. */
7367 if (gang_static && integer_minus_onep (gang_static))
7368 gang_static = NULL_TREE;
7369 levels++;
7370 break;
7371
7372 case OMP_CLAUSE_WORKER:
7373 tag |= OLF_DIM_WORKER;
7374 levels++;
7375 break;
7376
7377 case OMP_CLAUSE_VECTOR:
7378 tag |= OLF_DIM_VECTOR;
7379 levels++;
7380 break;
7381
7382 case OMP_CLAUSE_SEQ:
7383 tag |= OLF_SEQ;
7384 break;
7385
7386 case OMP_CLAUSE_AUTO:
7387 tag |= OLF_AUTO;
7388 break;
7389
7390 case OMP_CLAUSE_INDEPENDENT:
7391 tag |= OLF_INDEPENDENT;
7392 break;
7393
719a7570 7394 case OMP_CLAUSE_TILE:
7395 tag |= OLF_TILE;
7396 break;
7397
a8e785ba 7398 default:
7399 continue;
7400 }
7401 }
7402
7403 if (gang_static)
7404 {
7405 if (DECL_P (gang_static))
7406 gang_static = build_outer_var_ref (gang_static, ctx);
7407 tag |= OLF_GANG_STATIC;
7408 }
7409
7410 /* In a parallel region, loops are implicitly INDEPENDENT. */
7411 omp_context *tgt = enclosing_target_ctx (ctx);
7412 if (!tgt || is_oacc_parallel (tgt))
7413 tag |= OLF_INDEPENDENT;
7414
719a7570 7415 if (tag & OLF_TILE)
7416 /* Tiling could use all 3 levels. */
7417 levels = 3;
7418 else
7419 {
7420 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7421 Ensure at least one level, or 2 for possible auto
 7422	 partitioning.  */
7423 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7424 << OLF_DIM_BASE) | OLF_SEQ));
7425
7426 if (levels < 1u + maybe_auto)
7427 levels = 1u + maybe_auto;
7428 }
a8e785ba 7429
7430 args.quick_push (build_int_cst (integer_type_node, levels));
7431 args.quick_push (build_int_cst (integer_type_node, tag));
7432 if (gang_static)
7433 args.quick_push (gang_static);
7434
7435 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7436 gimple_set_location (call, loc);
7437 gimple_set_lhs (call, ddvar);
7438 gimple_seq_add_stmt (seq, call);
7439
7440 return levels;
7441}
7442
 7443/* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW, if
 7444   non-NULL, is the partitioning level of the enclosed region.  */
7445
7446static void
7447lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7448 tree tofollow, gimple_seq *seq)
7449{
7450 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7451 : IFN_UNIQUE_OACC_TAIL_MARK);
7452 tree marker = build_int_cst (integer_type_node, marker_kind);
7453 int nargs = 2 + (tofollow != NULL_TREE);
7454 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7455 marker, ddvar, tofollow);
7456 gimple_set_location (call, loc);
7457 gimple_set_lhs (call, ddvar);
7458 gimple_seq_add_stmt (seq, call);
7459}
7460
7461/* Generate the before and after OpenACC loop sequences. CLAUSES are
7462 the loop clauses, from which we extract reductions. Initialize
7463 HEAD and TAIL. */
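/* Rough shape of the result for a two-level gang/worker loop, as an
   assumed sketch with the markers simplified:

     HEAD: head-mark, fork (gang), fork (worker), ...
     TAIL: ..., join (worker), join (gang), tail-mark

   with each level's reduction setup/init emitted around its fork and
   the fini/teardown emitted around its join.  */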
7464
7465static void
7466lower_oacc_head_tail (location_t loc, tree clauses,
7467 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7468{
7469 bool inner = false;
7470 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7471 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7472
7473 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
a8e785ba 7474 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7475 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7476
cff84c1a 7477 gcc_assert (count);
a8e785ba 7478 for (unsigned done = 1; count; count--, done++)
7479 {
7480 gimple_seq fork_seq = NULL;
7481 gimple_seq join_seq = NULL;
7482
7483 tree place = build_int_cst (integer_type_node, -1);
7484 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7485 fork_kind, ddvar, place);
7486 gimple_set_location (fork, loc);
7487 gimple_set_lhs (fork, ddvar);
7488
7489 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7490 join_kind, ddvar, place);
7491 gimple_set_location (join, loc);
7492 gimple_set_lhs (join, ddvar);
7493
7494 /* Mark the beginning of this level sequence. */
7495 if (inner)
7496 lower_oacc_loop_marker (loc, ddvar, true,
7497 build_int_cst (integer_type_node, count),
7498 &fork_seq);
7499 lower_oacc_loop_marker (loc, ddvar, false,
7500 build_int_cst (integer_type_node, done),
7501 &join_seq);
7502
641a0fa1 7503 lower_oacc_reductions (loc, clauses, place, inner,
7504 fork, join, &fork_seq, &join_seq, ctx);
a8e785ba 7505
7506 /* Append this level to head. */
7507 gimple_seq_add_seq (head, fork_seq);
7508 /* Prepend it to tail. */
7509 gimple_seq_add_seq (&join_seq, *tail);
7510 *tail = join_seq;
7511
7512 inner = true;
7513 }
7514
7515 /* Mark the end of the sequence. */
7516 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7517 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7518}
75a70cf9 7519
4954efd4 7520/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7521 catch handler and return it. This prevents programs from violating the
7522 structured block semantics with throws. */
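/* The wrapping is conceptually (assumed sketch)

     try { BODY } catch { <eh_protect_cleanup_actions or __builtin_trap> }

   i.e. a GIMPLE_TRY_CATCH whose handler is a GIMPLE_EH_MUST_NOT_THROW.  */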
75a70cf9 7523
4954efd4 7524static gimple_seq
7525maybe_catch_exception (gimple_seq body)
75a70cf9 7526{
4954efd4 7527 gimple *g;
7528 tree decl;
56686608 7529
4954efd4 7530 if (!flag_exceptions)
7531 return body;
56686608 7532
4954efd4 7533 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7534 decl = lang_hooks.eh_protect_cleanup_actions ();
7535 else
7536 decl = builtin_decl_explicit (BUILT_IN_TRAP);
56686608 7537
4954efd4 7538 g = gimple_build_eh_must_not_throw (decl);
7539 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7540 GIMPLE_TRY_CATCH);
56686608 7541
4954efd4 7542 return gimple_seq_alloc_with_stmt (g);
56686608 7543}
7544
4954efd4 7545\f
7546/* Routines to lower OMP directives into OMP-GIMPLE. */
75a70cf9 7547
4954efd4 7548/* If CTX is a worksharing context inside a cancellable parallel
 7549   region and it isn't nowait, add a lhs to its GIMPLE_OMP_RETURN
 7550   and a conditional branch to the parallel's cancel_label to handle
 7551   cancellation in the implicit barrier.  */
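/* The generated check is conceptually (assumed sketch):

     flag = <return value of the cancellable implicit barrier>;
     if (flag != false) goto <parallel's cancel_label>;
     fallthru:;

   so cancelling the parallel region skips out of the worksharing
   construct through its implicit barrier.  */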
1e8e9920 7552
7553static void
7e5a76c8 7554maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7555 gimple_seq *body)
1e8e9920 7556{
4954efd4 7557 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7558 if (gimple_omp_return_nowait_p (omp_return))
7559 return;
7e5a76c8 7560 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7561 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7562 && outer->cancellable)
7563 {
7564 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7565 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7566 tree lhs = create_tmp_var (c_bool_type);
7567 gimple_omp_return_set_lhs (omp_return, lhs);
7568 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7569 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7570 fold_convert (c_bool_type,
7571 boolean_false_node),
7572 outer->cancel_label, fallthru_label);
7573 gimple_seq_add_stmt (body, g);
7574 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7575 }
7576 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7577 return;
7578}
7579
7580/* Find the first task_reduction or reduction clause or return NULL
7581 if there are none. */
7582
7583static inline tree
7584omp_task_reductions_find_first (tree clauses, enum tree_code code,
7585 enum omp_clause_code ccode)
7586{
7587 while (1)
7588 {
7589 clauses = omp_find_clause (clauses, ccode);
7590 if (clauses == NULL_TREE)
7591 return NULL_TREE;
7592 if (ccode != OMP_CLAUSE_REDUCTION
7593 || code == OMP_TASKLOOP
7594 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7595 return clauses;
7596 clauses = OMP_CLAUSE_CHAIN (clauses);
773c5ba7 7597 }
4954efd4 7598}
1e8e9920 7599
7e5a76c8 7600static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7601 gimple_seq *, gimple_seq *);
7602
4954efd4 7603/* Lower the OpenMP sections directive in the current statement in GSI_P.
7604 CTX is the enclosing OMP context for the current statement. */
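/* Illustrative input (assumed example):

     #pragma omp sections
     {
       #pragma omp section
	 a ();
       #pragma omp section
	 b ();
     }

   Each section body is lowered in place; a ".section" control variable
   and a GIMPLE_OMP_SECTIONS_SWITCH are emitted so that expansion can
   later turn the construct into a switch driven by GOMP_sections_start
   and GOMP_sections_next.  */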
1e8e9920 7605
4954efd4 7606static void
7607lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7608{
7609 tree block, control;
7610 gimple_stmt_iterator tgsi;
7611 gomp_sections *stmt;
7612 gimple *t;
7613 gbind *new_stmt, *bind;
eb7a699d 7614 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
1e8e9920 7615
4954efd4 7616 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
1e8e9920 7617
4954efd4 7618 push_gimplify_context ();
bc7bff74 7619
4954efd4 7620 dlist = NULL;
7621 ilist = NULL;
7e5a76c8 7622
7623 tree rclauses
7624 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7625 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7626 tree rtmp = NULL_TREE;
7627 if (rclauses)
7628 {
7629 tree type = build_pointer_type (pointer_sized_int_node);
7630 tree temp = create_tmp_var (type);
7631 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7632 OMP_CLAUSE_DECL (c) = temp;
7633 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7634 gimple_omp_sections_set_clauses (stmt, c);
7635 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7636 gimple_omp_sections_clauses (stmt),
7637 &ilist, &tred_dlist);
7638 rclauses = c;
7639 rtmp = make_ssa_name (type);
7640 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7641 }
7642
4f4b92d8 7643 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7644 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7645
4954efd4 7646 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7647 &ilist, &dlist, ctx, NULL);
1e8e9920 7648
eb7a699d 7649 control = create_tmp_var (unsigned_type_node, ".section");
7650 gimple_omp_sections_set_control (stmt, control);
7651
4954efd4 7652 new_body = gimple_omp_body (stmt);
7653 gimple_omp_set_body (stmt, NULL);
7654 tgsi = gsi_start (new_body);
7655 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
1e8e9920 7656 {
4954efd4 7657 omp_context *sctx;
7658 gimple *sec_start;
773c5ba7 7659
4954efd4 7660 sec_start = gsi_stmt (tgsi);
7661 sctx = maybe_lookup_ctx (sec_start);
7662 gcc_assert (sctx);
7663
7664 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7665 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7666 GSI_CONTINUE_LINKING);
7667 gimple_omp_set_body (sec_start, NULL);
7668
7669 if (gsi_one_before_end_p (tgsi))
773c5ba7 7670 {
4954efd4 7671 gimple_seq l = NULL;
7672 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
eb7a699d 7673 &ilist, &l, &clist, ctx);
4954efd4 7674 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7675 gimple_omp_section_set_last (sec_start);
7676 }
79acaae1 7677
4954efd4 7678 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7679 GSI_CONTINUE_LINKING);
7680 }
773c5ba7 7681
4954efd4 7682 block = make_node (BLOCK);
7683 bind = gimple_build_bind (NULL, new_body, block);
773c5ba7 7684
4954efd4 7685 olist = NULL;
9a1d892b 7686 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
eb7a699d 7687 &clist, ctx);
7688 if (clist)
7689 {
7690 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7691 gcall *g = gimple_build_call (fndecl, 0);
7692 gimple_seq_add_stmt (&olist, g);
7693 gimple_seq_add_seq (&olist, clist);
7694 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7695 g = gimple_build_call (fndecl, 0);
7696 gimple_seq_add_stmt (&olist, g);
7697 }
773c5ba7 7698
4954efd4 7699 block = make_node (BLOCK);
7700 new_stmt = gimple_build_bind (NULL, NULL, block);
7701 gsi_replace (gsi_p, new_stmt, true);
773c5ba7 7702
4954efd4 7703 pop_gimplify_context (new_stmt);
7704 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7705 BLOCK_VARS (block) = gimple_bind_vars (bind);
7706 if (BLOCK_VARS (block))
7707 TREE_USED (block) = 1;
773c5ba7 7708
4954efd4 7709 new_body = NULL;
7710 gimple_seq_add_seq (&new_body, ilist);
7711 gimple_seq_add_stmt (&new_body, stmt);
7712 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7713 gimple_seq_add_stmt (&new_body, bind);
773c5ba7 7714
4954efd4 7715 t = gimple_build_omp_continue (control, control);
4954efd4 7716 gimple_seq_add_stmt (&new_body, t);
773c5ba7 7717
4954efd4 7718 gimple_seq_add_seq (&new_body, olist);
7719 if (ctx->cancellable)
7720 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7721 gimple_seq_add_seq (&new_body, dlist);
79acaae1 7722
4954efd4 7723 new_body = maybe_catch_exception (new_body);
773c5ba7 7724
7c6746c9 7725 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7726 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7727 t = gimple_build_omp_return (nowait);
4954efd4 7728 gimple_seq_add_stmt (&new_body, t);
7e5a76c8 7729 gimple_seq_add_seq (&new_body, tred_dlist);
7730 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7731
7732 if (rclauses)
7733 OMP_CLAUSE_DECL (rclauses) = rtmp;
1e8e9920 7734
4954efd4 7735 gimple_bind_set_body (new_stmt, new_body);
1e8e9920 7736}
7737
40750995 7738
4954efd4 7739/* A subroutine of lower_omp_single. Expand the simple form of
7740 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
40750995 7741
4954efd4 7742 if (GOMP_single_start ())
7743 BODY;
7744 [ GOMP_barrier (); ] -> unless 'nowait' is present.
40750995 7745
4954efd4 7746 FIXME. It may be better to delay expanding the logic of this until
7747 pass_expand_omp. The expanded logic may make the job more difficult
 7748   for a synchronization analysis pass.  */
fd6481cf 7749
7750static void
4954efd4 7751lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
fd6481cf 7752{
4954efd4 7753 location_t loc = gimple_location (single_stmt);
7754 tree tlabel = create_artificial_label (loc);
7755 tree flabel = create_artificial_label (loc);
7756 gimple *call, *cond;
7757 tree lhs, decl;
2169f33b 7758
4954efd4 7759 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7760 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7761 call = gimple_build_call (decl, 0);
7762 gimple_call_set_lhs (call, lhs);
7763 gimple_seq_add_stmt (pre_p, call);
fd6481cf 7764
4954efd4 7765 cond = gimple_build_cond (EQ_EXPR, lhs,
7766 fold_convert_loc (loc, TREE_TYPE (lhs),
7767 boolean_true_node),
7768 tlabel, flabel);
7769 gimple_seq_add_stmt (pre_p, cond);
7770 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7771 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7772 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
fd6481cf 7773}
7774
7775
4954efd4 7776/* A subroutine of lower_omp_single. Expand the simple form of
7777 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
1e8e9920 7778
4954efd4 7779 #pragma omp single copyprivate (a, b, c)
1e8e9920 7780
4954efd4 7781 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
1e8e9920 7782
4954efd4 7783 {
7784 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7785 {
7786 BODY;
7787 copyout.a = a;
7788 copyout.b = b;
7789 copyout.c = c;
7790 GOMP_single_copy_end (&copyout);
7791 }
7792 else
7793 {
7794 a = copyout_p->a;
7795 b = copyout_p->b;
7796 c = copyout_p->c;
7797 }
7798 GOMP_barrier ();
7799 }
75a70cf9 7800
4954efd4 7801 FIXME. It may be better to delay expanding the logic of this until
7802 pass_expand_omp. The expanded logic may make the job more difficult
 7803   for a synchronization analysis pass.  */
1e8e9920 7804
4954efd4 7805static void
7806lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7807 omp_context *ctx)
7808{
7809 tree ptr_type, t, l0, l1, l2, bfn_decl;
7810 gimple_seq copyin_seq;
7811 location_t loc = gimple_location (single_stmt);
1e8e9920 7812
4954efd4 7813 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
1e8e9920 7814
4954efd4 7815 ptr_type = build_pointer_type (ctx->record_type);
7816 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
1e8e9920 7817
4954efd4 7818 l0 = create_artificial_label (loc);
7819 l1 = create_artificial_label (loc);
7820 l2 = create_artificial_label (loc);
1e8e9920 7821
4954efd4 7822 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7823 t = build_call_expr_loc (loc, bfn_decl, 0);
7824 t = fold_convert_loc (loc, ptr_type, t);
7825 gimplify_assign (ctx->receiver_decl, t, pre_p);
1e8e9920 7826
4954efd4 7827 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7828 build_int_cst (ptr_type, 0));
7829 t = build3 (COND_EXPR, void_type_node, t,
7830 build_and_jump (&l0), build_and_jump (&l1));
7831 gimplify_and_add (t, pre_p);
1e8e9920 7832
4954efd4 7833 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
1e8e9920 7834
4954efd4 7835 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
1e8e9920 7836
4954efd4 7837 copyin_seq = NULL;
7838 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7839 &copyin_seq, ctx);
1e8e9920 7840
4954efd4 7841 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7842 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7843 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7844 gimplify_and_add (t, pre_p);
5056ba1a 7845
4954efd4 7846 t = build_and_jump (&l2);
7847 gimplify_and_add (t, pre_p);
1e8e9920 7848
4954efd4 7849 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
1e8e9920 7850
4954efd4 7851 gimple_seq_add_seq (pre_p, copyin_seq);
61e47ac8 7852
4954efd4 7853 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
61e47ac8 7854}
773c5ba7 7855
4954efd4 7856
7857/* Expand code for an OpenMP single directive. */
658b4427 7858
7859static void
4954efd4 7860lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
658b4427 7861{
4954efd4 7862 tree block;
4954efd4 7863 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7864 gbind *bind;
7865 gimple_seq bind_body, bind_body_tail = NULL, dlist;
658b4427 7866
4954efd4 7867 push_gimplify_context ();
658b4427 7868
4954efd4 7869 block = make_node (BLOCK);
7870 bind = gimple_build_bind (NULL, NULL, block);
7871 gsi_replace (gsi_p, bind, true);
7872 bind_body = NULL;
7873 dlist = NULL;
7874 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7875 &bind_body, &dlist, ctx, NULL);
7876 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
658b4427 7877
4954efd4 7878 gimple_seq_add_stmt (&bind_body, single_stmt);
658b4427 7879
4954efd4 7880 if (ctx->record_type)
7881 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7882 else
7883 lower_omp_single_simple (single_stmt, &bind_body);
658b4427 7884
4954efd4 7885 gimple_omp_set_body (single_stmt, NULL);
658b4427 7886
4954efd4 7887 gimple_seq_add_seq (&bind_body, dlist);
8e6b4515 7888
4954efd4 7889 bind_body = maybe_catch_exception (bind_body);
8e6b4515 7890
7c6746c9 7891 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
7892 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7893 gimple *g = gimple_build_omp_return (nowait);
7894 gimple_seq_add_stmt (&bind_body_tail, g);
7e5a76c8 7895 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
4954efd4 7896 if (ctx->record_type)
7897 {
7898 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
7899 tree clobber = build_constructor (ctx->record_type, NULL);
7900 TREE_THIS_VOLATILE (clobber) = 1;
7901 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
7902 clobber), GSI_SAME_STMT);
7903 }
7904 gimple_seq_add_seq (&bind_body, bind_body_tail);
7905 gimple_bind_set_body (bind, bind_body);
8e6b4515 7906
4954efd4 7907 pop_gimplify_context (bind);
8e6b4515 7908
4954efd4 7909 gimple_bind_append_vars (bind, ctx->block_vars);
7910 BLOCK_VARS (block) = ctx->block_vars;
7911 if (BLOCK_VARS (block))
7912 TREE_USED (block) = 1;
8e6b4515 7913}
7914
3d483a94 7915
4954efd4 7916/* Expand code for an OpenMP master directive. */
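/* The master construct lowers to a simple thread-number test, roughly
   (assumed sketch):

     if (omp_get_thread_num () == 0)
       BODY;

   with BODY additionally wrapped by maybe_catch_exception.  */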
1e8e9920 7917
7918static void
4954efd4 7919lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 7920{
4954efd4 7921 tree block, lab = NULL, x, bfn_decl;
7922 gimple *stmt = gsi_stmt (*gsi_p);
7923 gbind *bind;
7924 location_t loc = gimple_location (stmt);
7925 gimple_seq tseq;
773c5ba7 7926
4954efd4 7927 push_gimplify_context ();
773c5ba7 7928
4954efd4 7929 block = make_node (BLOCK);
7930 bind = gimple_build_bind (NULL, NULL, block);
7931 gsi_replace (gsi_p, bind, true);
7932 gimple_bind_add_stmt (bind, stmt);
773c5ba7 7933
4954efd4 7934 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7935 x = build_call_expr_loc (loc, bfn_decl, 0);
7936 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
7937 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
7938 tseq = NULL;
7939 gimplify_and_add (x, &tseq);
7940 gimple_bind_add_seq (bind, tseq);
40750995 7941
4954efd4 7942 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7943 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7944 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7945 gimple_omp_set_body (stmt, NULL);
1d22f541 7946
4954efd4 7947 gimple_bind_add_stmt (bind, gimple_build_label (lab));
fbe86b1b 7948
4954efd4 7949 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9561765e 7950
4954efd4 7951 pop_gimplify_context (bind);
48e1416a 7952
4954efd4 7953 gimple_bind_append_vars (bind, ctx->block_vars);
7954 BLOCK_VARS (block) = ctx->block_vars;
1e8e9920 7955}
7956
7e5a76c8 7957/* Helper function for lower_omp_task_reductions. For a specific PASS
 7958   find the current clause that should be processed, or return false
7959 if all have been processed already. */
7960
7961static inline bool
7962omp_task_reduction_iterate (int pass, enum tree_code code,
7963 enum omp_clause_code ccode, tree *c, tree *decl,
7964 tree *type, tree *next)
7965{
7966 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7967 {
7968 if (ccode == OMP_CLAUSE_REDUCTION
7969 && code != OMP_TASKLOOP
7970 && !OMP_CLAUSE_REDUCTION_TASK (*c))
7971 continue;
7972 *decl = OMP_CLAUSE_DECL (*c);
7973 *type = TREE_TYPE (*decl);
7974 if (TREE_CODE (*decl) == MEM_REF)
7975 {
7976 if (pass != 1)
7977 continue;
7978 }
7979 else
7980 {
7981 if (omp_is_reference (*decl))
7982 *type = TREE_TYPE (*type);
7983 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7984 continue;
7985 }
7986 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7987 return true;
7988 }
7989 *decl = NULL_TREE;
7990 *type = NULL_TREE;
7991 *next = NULL_TREE;
7992 return false;
7993}
7994
7995/* Lower task_reduction and reduction clauses (the latter unless CODE is
 7996   OMP_TASKGROUP, only with the task modifier).  Register a mapping of those
 7997   in the START sequence, and reduce and unregister them in the END sequence.  */
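/* The kind of source this handles, as an assumed example:

     int sum = 0;
     #pragma omp taskgroup task_reduction (+: sum)
     {
       for (int i = 0; i < n; i++)
	 {
	   #pragma omp task in_reduction (+: sum)
	   sum += a[i];
	 }
     }

   The START sequence builds the descriptor array that the runtime
   registration operates on (for a taskgroup this ends up in a
   GOMP_taskgroup_reduction_register call), and the END sequence walks
   every thread's private copy, merges it into SUM and unregisters the
   descriptor.  */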
7998
7999static void
8000lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8001 gimple_seq *start, gimple_seq *end)
8002{
8003 enum omp_clause_code ccode
8004 = (code == OMP_TASKGROUP
8005 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8006 tree cancellable = NULL_TREE;
8007 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8008 if (clauses == NULL_TREE)
8009 return;
8010 if (code == OMP_FOR || code == OMP_SECTIONS)
8011 {
8012 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8013 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8014 && outer->cancellable)
8015 {
8016 cancellable = error_mark_node;
8017 break;
8018 }
8019 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8020 break;
8021 }
8022 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8023 tree *last = &TYPE_FIELDS (record_type);
8024 unsigned cnt = 0;
8025 if (cancellable)
8026 {
8027 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8028 ptr_type_node);
8029 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8030 integer_type_node);
8031 *last = field;
8032 DECL_CHAIN (field) = ifield;
8033 last = &DECL_CHAIN (ifield);
c75a1c19 8034 DECL_CONTEXT (field) = record_type;
8035 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8036 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8037 DECL_CONTEXT (ifield) = record_type;
8038 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8039 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
7e5a76c8 8040 }
8041 for (int pass = 0; pass < 2; pass++)
8042 {
8043 tree decl, type, next;
8044 for (tree c = clauses;
8045 omp_task_reduction_iterate (pass, code, ccode,
8046 &c, &decl, &type, &next); c = next)
8047 {
8048 ++cnt;
8049 tree new_type = type;
8050 if (ctx->outer)
8051 new_type = remap_type (type, &ctx->outer->cb);
8052 tree field
8053 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8054 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8055 new_type);
8056 if (DECL_P (decl) && type == TREE_TYPE (decl))
8057 {
8058 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8059 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8060 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8061 }
8062 else
8063 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8064 DECL_CONTEXT (field) = record_type;
c75a1c19 8065 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8066 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7e5a76c8 8067 *last = field;
8068 last = &DECL_CHAIN (field);
8069 tree bfield
8070 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8071 boolean_type_node);
8072 DECL_CONTEXT (bfield) = record_type;
c75a1c19 8073 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8074 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
7e5a76c8 8075 *last = bfield;
8076 last = &DECL_CHAIN (bfield);
8077 }
8078 }
8079 *last = NULL_TREE;
8080 layout_type (record_type);
8081
8082 /* Build up an array which registers with the runtime all the reductions
8083 and deregisters them at the end. Format documented in libgomp/task.c. */
8084 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8085 tree avar = create_tmp_var_raw (atype);
8086 gimple_add_tmp_var (avar);
8087 TREE_ADDRESSABLE (avar) = 1;
8088 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8089 NULL_TREE, NULL_TREE);
8090 tree t = build_int_cst (pointer_sized_int_node, cnt);
8091 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8092 gimple_seq seq = NULL;
8093 tree sz = fold_convert (pointer_sized_int_node,
8094 TYPE_SIZE_UNIT (record_type));
8095 int cachesz = 64;
8096 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8097 build_int_cst (pointer_sized_int_node, cachesz - 1));
8098 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8099 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8100 ctx->task_reductions.create (1 + cnt);
8101 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8102 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8103 ? sz : NULL_TREE);
8104 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8105 gimple_seq_add_seq (start, seq);
8106 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8107 NULL_TREE, NULL_TREE);
8108 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8109 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8110 NULL_TREE, NULL_TREE);
8111 t = build_int_cst (pointer_sized_int_node,
8112 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8113 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8114 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8115 NULL_TREE, NULL_TREE);
8116 t = build_int_cst (pointer_sized_int_node, -1);
8117 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8118 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8119 NULL_TREE, NULL_TREE);
8120 t = build_int_cst (pointer_sized_int_node, 0);
8121 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8122
 8123  /* In END, build a loop that iterates from 0 to < omp_get_num_threads ()
8124 and for each task reduction checks a bool right after the private variable
8125 within that thread's chunk; if the bool is clear, it hasn't been
8126 initialized and thus isn't going to be reduced nor destructed, otherwise
8127 reduce and destruct it. */
8128 tree idx = create_tmp_var (size_type_node);
8129 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8130 tree num_thr_sz = create_tmp_var (size_type_node);
8131 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8132 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8133 tree lab3 = NULL_TREE;
8134 gimple *g;
8135 if (code == OMP_FOR || code == OMP_SECTIONS)
8136 {
8137 /* For worksharing constructs, only perform it in the master thread,
 8138	 with the exception of cancelled implicit barriers; in that case only
 8139	 handle the current thread.  */
8140 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8141 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8142 tree thr_num = create_tmp_var (integer_type_node);
8143 g = gimple_build_call (t, 0);
8144 gimple_call_set_lhs (g, thr_num);
8145 gimple_seq_add_stmt (end, g);
8146 if (cancellable)
8147 {
8148 tree c;
8149 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8150 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8151 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8152 if (code == OMP_FOR)
8153 c = gimple_omp_for_clauses (ctx->stmt);
0bd73648 8154 else /* if (code == OMP_SECTIONS) */
7e5a76c8 8155 c = gimple_omp_sections_clauses (ctx->stmt);
8156 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8157 cancellable = c;
8158 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8159 lab5, lab6);
8160 gimple_seq_add_stmt (end, g);
8161 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8162 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8163 gimple_seq_add_stmt (end, g);
8164 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8165 build_one_cst (TREE_TYPE (idx)));
8166 gimple_seq_add_stmt (end, g);
8167 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8168 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8169 }
8170 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8171 gimple_seq_add_stmt (end, g);
8172 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8173 }
8174 if (code != OMP_PARALLEL)
8175 {
8176 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8177 tree num_thr = create_tmp_var (integer_type_node);
8178 g = gimple_build_call (t, 0);
8179 gimple_call_set_lhs (g, num_thr);
8180 gimple_seq_add_stmt (end, g);
8181 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8182 gimple_seq_add_stmt (end, g);
8183 if (cancellable)
8184 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8185 }
8186 else
8187 {
8188 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8189 OMP_CLAUSE__REDUCTEMP_);
8190 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8191 t = fold_convert (size_type_node, t);
8192 gimplify_assign (num_thr_sz, t, end);
8193 }
8194 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8195 NULL_TREE, NULL_TREE);
8196 tree data = create_tmp_var (pointer_sized_int_node);
8197 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8198 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8199 tree ptr;
8200 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8201 ptr = create_tmp_var (build_pointer_type (record_type));
8202 else
8203 ptr = create_tmp_var (ptr_type_node);
8204 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8205
8206 tree field = TYPE_FIELDS (record_type);
8207 cnt = 0;
8208 if (cancellable)
8209 field = DECL_CHAIN (DECL_CHAIN (field));
8210 for (int pass = 0; pass < 2; pass++)
8211 {
8212 tree decl, type, next;
8213 for (tree c = clauses;
8214 omp_task_reduction_iterate (pass, code, ccode,
8215 &c, &decl, &type, &next); c = next)
8216 {
8217 tree var = decl, ref;
8218 if (TREE_CODE (decl) == MEM_REF)
8219 {
8220 var = TREE_OPERAND (var, 0);
8221 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8222 var = TREE_OPERAND (var, 0);
8223 tree v = var;
8224 if (TREE_CODE (var) == ADDR_EXPR)
8225 var = TREE_OPERAND (var, 0);
8226 else if (TREE_CODE (var) == INDIRECT_REF)
8227 var = TREE_OPERAND (var, 0);
8228 tree orig_var = var;
8229 if (is_variable_sized (var))
8230 {
8231 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8232 var = DECL_VALUE_EXPR (var);
8233 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8234 var = TREE_OPERAND (var, 0);
8235 gcc_assert (DECL_P (var));
8236 }
8237 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8238 if (orig_var != var)
8239 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8240 else if (TREE_CODE (v) == ADDR_EXPR)
8241 t = build_fold_addr_expr (t);
8242 else if (TREE_CODE (v) == INDIRECT_REF)
8243 t = build_fold_indirect_ref (t);
8244 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8245 {
8246 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8247 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8248 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8249 }
8250 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8251 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8252 fold_convert (size_type_node,
8253 TREE_OPERAND (decl, 1)));
8254 }
8255 else
8256 {
8257 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8258 if (!omp_is_reference (decl))
8259 t = build_fold_addr_expr (t);
8260 }
8261 t = fold_convert (pointer_sized_int_node, t);
8262 seq = NULL;
8263 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8264 gimple_seq_add_seq (start, seq);
8265 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8266 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8267 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8268 t = unshare_expr (byte_position (field));
8269 t = fold_convert (pointer_sized_int_node, t);
8270 ctx->task_reduction_map->put (c, cnt);
8271 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8272 ? t : NULL_TREE);
8273 seq = NULL;
8274 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8275 gimple_seq_add_seq (start, seq);
8276 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8277 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8278 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8279
8280 tree bfield = DECL_CHAIN (field);
8281 tree cond;
8282 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8283 /* In parallel or worksharing all threads unconditionally
8284 initialize all their task reduction private variables. */
8285 cond = boolean_true_node;
8286 else if (TREE_TYPE (ptr) == ptr_type_node)
8287 {
8288 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8289 unshare_expr (byte_position (bfield)));
8290 seq = NULL;
8291 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8292 gimple_seq_add_seq (end, seq);
8293 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8294 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8295 build_int_cst (pbool, 0));
8296 }
8297 else
8298 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8299 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8300 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8301 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8302 tree condv = create_tmp_var (boolean_type_node);
8303 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8304 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8305 lab3, lab4);
8306 gimple_seq_add_stmt (end, g);
8307 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8308 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8309 {
8310 /* If this reduction doesn't need destruction and parallel
8311 has been cancelled, there is nothing to do for this
8312 reduction, so jump around the merge operation. */
8313 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8314 g = gimple_build_cond (NE_EXPR, cancellable,
8315 build_zero_cst (TREE_TYPE (cancellable)),
8316 lab4, lab5);
8317 gimple_seq_add_stmt (end, g);
8318 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8319 }
8320
8321 tree new_var;
8322 if (TREE_TYPE (ptr) == ptr_type_node)
8323 {
8324 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8325 unshare_expr (byte_position (field)));
8326 seq = NULL;
8327 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8328 gimple_seq_add_seq (end, seq);
8329 tree pbool = build_pointer_type (TREE_TYPE (field));
8330 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8331 build_int_cst (pbool, 0));
8332 }
8333 else
8334 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8335 build_simple_mem_ref (ptr), field, NULL_TREE);
8336
8337 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8338 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8339 ref = build_simple_mem_ref (ref);
8340 /* reduction(-:var) sums up the partial results, so it acts
8341 identically to reduction(+:var). */
8342 if (rcode == MINUS_EXPR)
8343 rcode = PLUS_EXPR;
8344 if (TREE_CODE (decl) == MEM_REF)
8345 {
8346 tree type = TREE_TYPE (new_var);
8347 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8348 tree i = create_tmp_var (TREE_TYPE (v));
8349 tree ptype = build_pointer_type (TREE_TYPE (type));
8350 if (DECL_P (v))
8351 {
8352 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8353 tree vv = create_tmp_var (TREE_TYPE (v));
8354 gimplify_assign (vv, v, start);
8355 v = vv;
8356 }
8357 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8358 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8359 new_var = build_fold_addr_expr (new_var);
8360 new_var = fold_convert (ptype, new_var);
8361 ref = fold_convert (ptype, ref);
8362 tree m = create_tmp_var (ptype);
8363 gimplify_assign (m, new_var, end);
8364 new_var = m;
8365 m = create_tmp_var (ptype);
8366 gimplify_assign (m, ref, end);
8367 ref = m;
8368 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8369 tree body = create_artificial_label (UNKNOWN_LOCATION);
8370 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8371 gimple_seq_add_stmt (end, gimple_build_label (body));
8372 tree priv = build_simple_mem_ref (new_var);
8373 tree out = build_simple_mem_ref (ref);
8374 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8375 {
8376 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8377 tree decl_placeholder
8378 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8379 tree lab6 = NULL_TREE;
8380 if (cancellable)
8381 {
8382 /* If this reduction needs destruction and parallel
8383 has been cancelled, jump around the merge operation
8384 to the destruction. */
8385 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8386 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8387 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8388 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8389 lab6, lab5);
8390 gimple_seq_add_stmt (end, g);
8391 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8392 }
8393 SET_DECL_VALUE_EXPR (placeholder, out);
8394 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8395 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8396 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8397 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8398 gimple_seq_add_seq (end,
8399 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8400 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8401 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8402 {
8403 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8404 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8405 }
8406 if (cancellable)
8407 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8408 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8409 if (x)
8410 {
8411 gimple_seq tseq = NULL;
8412 gimplify_stmt (&x, &tseq);
8413 gimple_seq_add_seq (end, tseq);
8414 }
8415 }
8416 else
8417 {
8418 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8419 out = unshare_expr (out);
8420 gimplify_assign (out, x, end);
8421 }
8422 gimple *g
8423 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8424 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8425 gimple_seq_add_stmt (end, g);
8426 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8427 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8428 gimple_seq_add_stmt (end, g);
8429 g = gimple_build_assign (i, PLUS_EXPR, i,
8430 build_int_cst (TREE_TYPE (i), 1));
8431 gimple_seq_add_stmt (end, g);
8432 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8433 gimple_seq_add_stmt (end, g);
8434 gimple_seq_add_stmt (end, gimple_build_label (endl));
8435 }
8436 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8437 {
8438 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8439 tree oldv = NULL_TREE;
8440 tree lab6 = NULL_TREE;
8441 if (cancellable)
8442 {
8443 /* If this reduction needs destruction and parallel
8444 has been cancelled, jump around the merge operation
8445 to the destruction. */
8446 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8447 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8448 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8449 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8450 lab6, lab5);
8451 gimple_seq_add_stmt (end, g);
8452 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8453 }
8454 if (omp_is_reference (decl)
8455 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8456 TREE_TYPE (ref)))
8457 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8458 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8459 tree refv = create_tmp_var (TREE_TYPE (ref));
8460 gimplify_assign (refv, ref, end);
8461 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8462 SET_DECL_VALUE_EXPR (placeholder, ref);
8463 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8464 tree d = maybe_lookup_decl (decl, ctx);
8465 gcc_assert (d);
8466 if (DECL_HAS_VALUE_EXPR_P (d))
8467 oldv = DECL_VALUE_EXPR (d);
8468 if (omp_is_reference (var))
8469 {
8470 tree v = fold_convert (TREE_TYPE (d),
8471 build_fold_addr_expr (new_var));
8472 SET_DECL_VALUE_EXPR (d, v);
8473 }
8474 else
8475 SET_DECL_VALUE_EXPR (d, new_var);
8476 DECL_HAS_VALUE_EXPR_P (d) = 1;
8477 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8478 if (oldv)
8479 SET_DECL_VALUE_EXPR (d, oldv);
8480 else
8481 {
8482 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8483 DECL_HAS_VALUE_EXPR_P (d) = 0;
8484 }
8485 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8486 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8487 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8488 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8489 if (cancellable)
8490 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8491 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8492 if (x)
8493 {
8494 gimple_seq tseq = NULL;
8495 gimplify_stmt (&x, &tseq);
8496 gimple_seq_add_seq (end, tseq);
8497 }
8498 }
8499 else
8500 {
8501 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8502 ref = unshare_expr (ref);
8503 gimplify_assign (ref, x, end);
8504 }
8505 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8506 ++cnt;
8507 field = DECL_CHAIN (bfield);
8508 }
8509 }
8510
8511 if (code == OMP_TASKGROUP)
8512 {
8513 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8514 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8515 gimple_seq_add_stmt (start, g);
8516 }
8517 else
8518 {
8519 tree c;
8520 if (code == OMP_FOR)
8521 c = gimple_omp_for_clauses (ctx->stmt);
8522 else if (code == OMP_SECTIONS)
8523 c = gimple_omp_sections_clauses (ctx->stmt);
8524 else
8525 c = gimple_omp_taskreg_clauses (ctx->stmt);
8526 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8527 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8528 build_fold_addr_expr (avar));
8529 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8530 }
8531
8532 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8533 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8534 size_one_node));
8535 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8536 gimple_seq_add_stmt (end, g);
8537 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8538 if (code == OMP_FOR || code == OMP_SECTIONS)
8539 {
8540 enum built_in_function bfn
8541 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8542 t = builtin_decl_explicit (bfn);
8543 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8544 tree arg;
8545 if (cancellable)
8546 {
8547 arg = create_tmp_var (c_bool_type);
8548 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8549 cancellable));
8550 }
8551 else
8552 arg = build_int_cst (c_bool_type, 0);
8553 g = gimple_build_call (t, 1, arg);
8554 }
8555 else
8556 {
8557 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8558 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8559 }
8560 gimple_seq_add_stmt (end, g);
8561 t = build_constructor (atype, NULL);
8562 TREE_THIS_VOLATILE (t) = 1;
8563 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8564}
a8e785ba 8565
4954efd4 8566/* Expand code for an OpenMP taskgroup directive. */
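/* Illustrative sketch only (not the verbatim GIMPLE produced here): a user
   construct such as

     #pragma omp taskgroup task_reduction (+:r)
     { ... }

   is lowered by the function below into roughly

     GOMP_taskgroup_start ();
     <task reduction setup emitted by lower_omp_task_reductions,
      ending in GOMP_taskgroup_reduction_register (&avar)>
     ... lowered body ...
     GIMPLE_OMP_RETURN
     <task reduction teardown sequence (dseq)>

   all wrapped in a GIMPLE_BIND.  */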
a8e785ba 8567
4954efd4 8568static void
8569lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
a8e785ba 8570{
4954efd4 8571 gimple *stmt = gsi_stmt (*gsi_p);
8572 gcall *x;
8573 gbind *bind;
7e5a76c8 8574 gimple_seq dseq = NULL;
4954efd4 8575 tree block = make_node (BLOCK);
a8e785ba 8576
4954efd4 8577 bind = gimple_build_bind (NULL, NULL, block);
8578 gsi_replace (gsi_p, bind, true);
8579 gimple_bind_add_stmt (bind, stmt);
a8e785ba 8580
7e5a76c8 8581 push_gimplify_context ();
8582
4954efd4 8583 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8584 0);
8585 gimple_bind_add_stmt (bind, x);
a8e785ba 8586
7e5a76c8 8587 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8588 gimple_omp_taskgroup_clauses (stmt),
8589 gimple_bind_body_ptr (bind), &dseq);
8590
4954efd4 8591 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8592 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8593 gimple_omp_set_body (stmt, NULL);
a8e785ba 8594
4954efd4 8595 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7e5a76c8 8596 gimple_bind_add_seq (bind, dseq);
8597
8598 pop_gimplify_context (bind);
a8e785ba 8599
4954efd4 8600 gimple_bind_append_vars (bind, ctx->block_vars);
8601 BLOCK_VARS (block) = ctx->block_vars;
a8e785ba 8602}
8603
773c5ba7 8604
4954efd4 8605/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
3d483a94 8606
8607static void
4954efd4 8608lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8609 omp_context *ctx)
3d483a94 8610{
4954efd4 8611 struct omp_for_data fd;
8612 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8613 return;
3d483a94 8614
4954efd4 8615 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8616 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8617 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8618 if (!fd.ordered)
8619 return;
bc7bff74 8620
4954efd4 8621 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8622 tree c = gimple_omp_ordered_clauses (ord_stmt);
8623 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8624 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
3d483a94 8625 {
4954efd4 8626 /* Merge depend clauses from multiple adjacent
8627 #pragma omp ordered depend(sink:...) constructs
8628 into one #pragma omp ordered depend(sink:...), so that
8629 we can optimize them together. */
8630 gimple_stmt_iterator gsi = *gsi_p;
8631 gsi_next (&gsi);
8632 while (!gsi_end_p (gsi))
3d483a94 8633 {
4954efd4 8634 gimple *stmt = gsi_stmt (gsi);
8635 if (is_gimple_debug (stmt)
8636 || gimple_code (stmt) == GIMPLE_NOP)
3d483a94 8637 {
4954efd4 8638 gsi_next (&gsi);
8639 continue;
3d483a94 8640 }
4954efd4 8641 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8642 break;
8643 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8644 c = gimple_omp_ordered_clauses (ord_stmt2);
8645 if (c == NULL_TREE
8646 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8647 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8648 break;
8649 while (*list_p)
8650 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8651 *list_p = c;
8652 gsi_remove (&gsi, true);
3d483a94 8653 }
8654 }
3d483a94 8655
4954efd4 8656 /* Canonicalize sink dependence clauses into one folded clause if
8657 possible.
3d483a94 8658
4954efd4 8659 The basic algorithm is to create a sink vector whose first
8660 element is the GCD of all the first elements, and whose remaining
8661 elements are the minimum of the subsequent columns.
3d483a94 8662
4954efd4 8663 We ignore dependence vectors whose first element is zero because
8664 such dependencies are known to be executed by the same thread.
bc7bff74 8665
4954efd4 8666 We take into account the direction of the loop, so a minimum
8667 becomes a maximum if the loop is iterating forwards. We also
8668 ignore sink clauses where the loop direction is unknown, or where
8669 the offsets are clearly invalid because they are not a multiple
8670 of the loop increment.
8671
8672 For example:
8673
8674 #pragma omp for ordered(2)
8675 for (i=0; i < N; ++i)
8676 for (j=0; j < M; ++j)
bc7bff74 8677 {
4954efd4 8678 #pragma omp ordered \
8679 depend(sink:i-8,j-2) \
8680 depend(sink:i,j-1) \ // Completely ignored because i+0.
8681 depend(sink:i-4,j-3) \
8682 depend(sink:i-6,j-4)
8683 #pragma omp ordered depend(source)
bc7bff74 8684 }
bc7bff74 8685
4954efd4 8686 Folded clause is:
3d483a94 8687
4954efd4 8688 depend(sink:-gcd(8,4,6),-min(2,3,4))
8689 -or-
8690 depend(sink:-2,-2)
8691 */
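  /* Worked arithmetic for the example above: the first elements 8, 4 and 6
     give gcd (8, 4, 6) = gcd (gcd (8, 4), 6) = gcd (4, 6) = 2, and the
     second elements give min (2, 3, 4) = 2, hence the folded
     depend(sink:-2,-2).  */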
3d483a94 8692
4954efd4 8693 /* FIXME: Computing GCD's where the first element is zero is
8694 non-trivial in the presence of collapsed loops. Do this later. */
8695 if (fd.collapse > 1)
8696 return;
3d483a94 8697
4954efd4 8698 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
a324786b 8699
8700 /* wide_int is not a POD so it must be default-constructed. */
8701 for (unsigned i = 0; i != 2 * len - 1; ++i)
8702 new (static_cast<void*>(folded_deps + i)) wide_int ();
8703
4954efd4 8704 tree folded_dep = NULL_TREE;
8705 /* TRUE if the first dimension's offset is negative. */
8706 bool neg_offset_p = false;
3d483a94 8707
4954efd4 8708 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8709 unsigned int i;
8710 while ((c = *list_p) != NULL)
3d483a94 8711 {
4954efd4 8712 bool remove = false;
3d483a94 8713
4954efd4 8714 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8715 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8716 goto next_ordered_clause;
3d483a94 8717
4954efd4 8718 tree vec;
8719 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8720 vec && TREE_CODE (vec) == TREE_LIST;
8721 vec = TREE_CHAIN (vec), ++i)
3d483a94 8722 {
4954efd4 8723 gcc_assert (i < len);
3d483a94 8724
4954efd4 8725 /* omp_extract_for_data has canonicalized the condition. */
8726 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8727 || fd.loops[i].cond_code == GT_EXPR);
8728 bool forward = fd.loops[i].cond_code == LT_EXPR;
8729 bool maybe_lexically_later = true;
1e8e9920 8730
4954efd4 8731 /* While the committee makes up its mind, bail if we have any
8732 non-constant steps. */
8733 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8734 goto lower_omp_ordered_ret;
1e8e9920 8735
4954efd4 8736 tree itype = TREE_TYPE (TREE_VALUE (vec));
8737 if (POINTER_TYPE_P (itype))
8738 itype = sizetype;
e3d0f65c 8739 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
4954efd4 8740 TYPE_PRECISION (itype),
8741 TYPE_SIGN (itype));
fd6481cf 8742
4954efd4 8743 /* Ignore invalid offsets that are not multiples of the step. */
e3d0f65c 8744 if (!wi::multiple_of_p (wi::abs (offset),
8745 wi::abs (wi::to_wide (fd.loops[i].step)),
8746 UNSIGNED))
44b49e6b 8747 {
4954efd4 8748 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8749 "ignoring sink clause with offset that is not "
8750 "a multiple of the loop step");
8751 remove = true;
8752 goto next_ordered_clause;
44b49e6b 8753 }
43895be5 8754
4954efd4 8755 /* Calculate the first dimension. The first dimension of
8756 the folded dependency vector is the GCD of the first
8757 elements, while ignoring any first elements whose offset
8758 is 0. */
8759 if (i == 0)
44b49e6b 8760 {
4954efd4 8761 /* Ignore dependence vectors whose first dimension is 0. */
8762 if (offset == 0)
44b49e6b 8763 {
4954efd4 8764 remove = true;
8765 goto next_ordered_clause;
44b49e6b 8766 }
43895be5 8767 else
4954efd4 8768 {
8769 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8770 {
8771 error_at (OMP_CLAUSE_LOCATION (c),
8772 "first offset must be in opposite direction "
8773 "of loop iterations");
8774 goto lower_omp_ordered_ret;
8775 }
8776 if (forward)
8777 offset = -offset;
8778 neg_offset_p = forward;
8779 /* Initialize the first time around. */
8780 if (folded_dep == NULL_TREE)
8781 {
8782 folded_dep = c;
8783 folded_deps[0] = offset;
8784 }
8785 else
8786 folded_deps[0] = wi::gcd (folded_deps[0],
8787 offset, UNSIGNED);
8788 }
43895be5 8789 }
4954efd4 8790 /* Calculate minimum for the remaining dimensions. */
43895be5 8791 else
43895be5 8792 {
4954efd4 8793 folded_deps[len + i - 1] = offset;
8794 if (folded_dep == c)
8795 folded_deps[i] = offset;
8796 else if (maybe_lexically_later
8797 && !wi::eq_p (folded_deps[i], offset))
8798 {
8799 if (forward ^ wi::gts_p (folded_deps[i], offset))
8800 {
8801 unsigned int j;
8802 folded_dep = c;
8803 for (j = 1; j <= i; j++)
8804 folded_deps[j] = folded_deps[len + j - 1];
8805 }
8806 else
8807 maybe_lexically_later = false;
8808 }
43895be5 8809 }
43895be5 8810 }
4954efd4 8811 gcc_assert (i == len);
43895be5 8812
4954efd4 8813 remove = true;
8814
8815 next_ordered_clause:
8816 if (remove)
8817 *list_p = OMP_CLAUSE_CHAIN (c);
43895be5 8818 else
4954efd4 8819 list_p = &OMP_CLAUSE_CHAIN (c);
43895be5 8820 }
43895be5 8821
4954efd4 8822 if (folded_dep)
43895be5 8823 {
4954efd4 8824 if (neg_offset_p)
8825 folded_deps[0] = -folded_deps[0];
43895be5 8826
4954efd4 8827 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8828 if (POINTER_TYPE_P (itype))
8829 itype = sizetype;
8830
8831 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8832 = wide_int_to_tree (itype, folded_deps[0]);
8833 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8834 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
43895be5 8835 }
8836
4954efd4 8837 lower_omp_ordered_ret:
43895be5 8838
4954efd4 8839   /* Ordered without clauses is equivalent to #pragma omp ordered threads,
8840	 while we want a nop instead if we remove all clauses.  */
8841 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8842 gsi_replace (gsi_p, gimple_build_nop (), true);
43895be5 8843}
8844
8845
4954efd4 8846/* Expand code for an OpenMP ordered directive. */
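/* Illustrative sketch only (not the verbatim GIMPLE produced here): an
   ordered region without depend clauses is lowered by the function below
   into roughly

     GOMP_ordered_start ();          (or .GOMP_SIMD_ORDERED_START if simd)
     ... lowered body ...
     GOMP_ordered_end ();            (or .GOMP_SIMD_ORDERED_END if simd)
     GIMPLE_OMP_RETURN

   wrapped in a GIMPLE_BIND; when the region might be offloaded to a SIMT
   device (maybe_simt), an extra per-lane loop driven by .GOMP_SIMT_LANE,
   .GOMP_SIMT_ORDERED_PRED and .GOMP_SIMT_VOTE_ANY is emitted around the
   body.  */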
1e8e9920 8847
61e47ac8 8848static void
4954efd4 8849lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 8850{
4954efd4 8851 tree block;
8852 gimple *stmt = gsi_stmt (*gsi_p), *g;
8853 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8854 gcall *x;
8855 gbind *bind;
8856 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8857 OMP_CLAUSE_SIMD);
8858 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8859 loop. */
8860 bool maybe_simt
8861 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8862 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8863 OMP_CLAUSE_THREADS);
43895be5 8864
4954efd4 8865 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8866 OMP_CLAUSE_DEPEND))
43895be5 8867 {
4954efd4 8868      /* FIXME: This needs to be moved to the expansion to verify various
8869 conditions only testable on cfg with dominators computed, and also
8870 all the depend clauses to be merged still might need to be available
8871 for the runtime checks. */
8872 if (0)
8873 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8874 return;
fd6481cf 8875 }
43895be5 8876
4954efd4 8877 push_gimplify_context ();
8878
8879 block = make_node (BLOCK);
8880 bind = gimple_build_bind (NULL, NULL, block);
8881 gsi_replace (gsi_p, bind, true);
8882 gimple_bind_add_stmt (bind, stmt);
43895be5 8883
4954efd4 8884 if (simd)
79acaae1 8885 {
4954efd4 8886 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8887 build_int_cst (NULL_TREE, threads));
8888 cfun->has_simduid_loops = true;
79acaae1 8889 }
8890 else
4954efd4 8891 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8892 0);
8893 gimple_bind_add_stmt (bind, x);
8894
8895 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
8896 if (maybe_simt)
1e8e9920 8897 {
4954efd4 8898 counter = create_tmp_var (integer_type_node);
8899 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
8900 gimple_call_set_lhs (g, counter);
8901 gimple_bind_add_stmt (bind, g);
43895be5 8902
4954efd4 8903 body = create_artificial_label (UNKNOWN_LOCATION);
8904 test = create_artificial_label (UNKNOWN_LOCATION);
8905 gimple_bind_add_stmt (bind, gimple_build_label (body));
1e8e9920 8906
4954efd4 8907 tree simt_pred = create_tmp_var (integer_type_node);
8908 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
8909 gimple_call_set_lhs (g, simt_pred);
8910 gimple_bind_add_stmt (bind, g);
43895be5 8911
4954efd4 8912 tree t = create_artificial_label (UNKNOWN_LOCATION);
8913 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
8914 gimple_bind_add_stmt (bind, g);
3d483a94 8915
4954efd4 8916 gimple_bind_add_stmt (bind, gimple_build_label (t));
bc7bff74 8917 }
4954efd4 8918 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8919 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8920 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8921 gimple_omp_set_body (stmt, NULL);
bc7bff74 8922
4954efd4 8923 if (maybe_simt)
43895be5 8924 {
4954efd4 8925 gimple_bind_add_stmt (bind, gimple_build_label (test));
8926 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
8927 gimple_bind_add_stmt (bind, g);
773c5ba7 8928
4954efd4 8929 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
8930 tree nonneg = create_tmp_var (integer_type_node);
8931 gimple_seq tseq = NULL;
8932 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
8933 gimple_bind_add_seq (bind, tseq);
43895be5 8934
4954efd4 8935 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
8936 gimple_call_set_lhs (g, nonneg);
8937 gimple_bind_add_stmt (bind, g);
43895be5 8938
4954efd4 8939 tree end = create_artificial_label (UNKNOWN_LOCATION);
8940 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
8941 gimple_bind_add_stmt (bind, g);
773c5ba7 8942
4954efd4 8943 gimple_bind_add_stmt (bind, gimple_build_label (end));
ac6e3339 8944 }
4954efd4 8945 if (simd)
8946 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
8947 build_int_cst (NULL_TREE, threads));
61e47ac8 8948 else
4954efd4 8949 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
8950 0);
8951 gimple_bind_add_stmt (bind, x);
79acaae1 8952
4954efd4 8953 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
79acaae1 8954
4954efd4 8955 pop_gimplify_context (bind);
79acaae1 8956
4954efd4 8957 gimple_bind_append_vars (bind, ctx->block_vars);
8958 BLOCK_VARS (block) = gimple_bind_vars (bind);
8959}
2131a1a9 8960
2131a1a9 8961
da008d72 8962/* Expand code for an OpenMP scan directive and the structured block
8963 before the scan directive. */
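/* Illustrative sketch of the input handled here (hypothetical user code,
   for clarity only):

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];                  // statements before the scan directive
         #pragma omp scan inclusive (r)
         b[i] = r;                   // statements after the scan directive
       }

   For inclusive scan, the statements before the #pragma omp scan form the
   input phase (where r is updated) and the statements after it form the
   scan phase (where r is read); for exclusive scan the lowering below
   first swaps the two GIMPLE_OMP_SCAN statements so that the input phase
   still comes first.  */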
8964
8965static void
8966lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8967{
8968 gimple *stmt = gsi_stmt (*gsi_p);
8969 bool has_clauses
8970 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
8971 tree lane = NULL_TREE;
8972 gimple_seq before = NULL;
8973 omp_context *octx = ctx->outer;
8974 gcc_assert (octx);
7d26f131 8975 if (octx->scan_exclusive && !has_clauses)
b05c7e43 8976 {
8977 gimple_stmt_iterator gsi2 = *gsi_p;
8978 gsi_next (&gsi2);
8979 gimple *stmt2 = gsi_stmt (gsi2);
8980 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
8981 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
8982 the one with exclusive clause(s), comes first. */
8983 if (stmt2
8984 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
8985 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
8986 {
8987 gsi_remove (gsi_p, false);
8988 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
8989 ctx = maybe_lookup_ctx (stmt2);
8990 gcc_assert (ctx);
8991 lower_omp_scan (gsi_p, ctx);
8992 return;
8993 }
8994 }
8995
da008d72 8996 bool input_phase = has_clauses ^ octx->scan_inclusive;
7d26f131 8997 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
0076df39 8998 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
7d26f131 8999 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9000 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9001 && !gimple_omp_for_combined_p (octx->stmt));
3d2b49b2 9002 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9003 if (is_for_simd && octx->for_simd_scan_phase)
9004 is_simd = false;
7d26f131 9005 if (is_simd)
9006 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9007 OMP_CLAUSE__SIMDUID_))
9008 {
9009 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9010 lane = create_tmp_var (unsigned_type_node);
9011 tree t = build_int_cst (integer_type_node,
9012 input_phase ? 1
9013 : octx->scan_inclusive ? 2 : 3);
9014 gimple *g
9015 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9016 gimple_call_set_lhs (g, lane);
9017 gimple_seq_add_stmt (&before, g);
9018 }
9019
9020 if (is_simd || is_for)
da008d72 9021 {
da008d72 9022 for (tree c = gimple_omp_for_clauses (octx->stmt);
9023 c; c = OMP_CLAUSE_CHAIN (c))
9024 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9025 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9026 {
631dab46 9027 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
da008d72 9028 tree var = OMP_CLAUSE_DECL (c);
9029 tree new_var = lookup_decl (var, octx);
9030 tree val = new_var;
9031 tree var2 = NULL_TREE;
9032 tree var3 = NULL_TREE;
b05c7e43 9033 tree var4 = NULL_TREE;
9034 tree lane0 = NULL_TREE;
631dab46 9035 tree new_vard = new_var;
9036 if (omp_is_reference (var))
9037 {
9038 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9039 val = new_var;
9040 }
9041 if (DECL_HAS_VALUE_EXPR_P (new_vard))
da008d72 9042 {
631dab46 9043 val = DECL_VALUE_EXPR (new_vard);
7d26f131 9044 if (new_vard != new_var)
631dab46 9045 {
9046 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9047 val = TREE_OPERAND (val, 0);
9048 }
da008d72 9049 if (TREE_CODE (val) == ARRAY_REF
9050 && VAR_P (TREE_OPERAND (val, 0)))
9051 {
9052 tree v = TREE_OPERAND (val, 0);
9053 if (lookup_attribute ("omp simd array",
9054 DECL_ATTRIBUTES (v)))
9055 {
9056 val = unshare_expr (val);
b05c7e43 9057 lane0 = TREE_OPERAND (val, 1);
da008d72 9058 TREE_OPERAND (val, 1) = lane;
9059 var2 = lookup_decl (v, octx);
7d26f131 9060 if (octx->scan_exclusive)
b05c7e43 9061 var4 = lookup_decl (var2, octx);
da008d72 9062 if (input_phase
9063 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
b05c7e43 9064 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
da008d72 9065 if (!input_phase)
9066 {
9067 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9068 var2, lane, NULL_TREE, NULL_TREE);
9069 TREE_THIS_NOTRAP (var2) = 1;
7d26f131 9070 if (octx->scan_exclusive)
b05c7e43 9071 {
9072 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9073 var4, lane, NULL_TREE,
9074 NULL_TREE);
9075 TREE_THIS_NOTRAP (var4) = 1;
9076 }
da008d72 9077 }
9078 else
9079 var2 = val;
9080 }
9081 }
631dab46 9082 gcc_assert (var2);
da008d72 9083 }
9084 else
9085 {
9086 var2 = build_outer_var_ref (var, octx);
b05c7e43 9087 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
da008d72 9088 {
631dab46 9089 var3 = maybe_lookup_decl (new_vard, octx);
b05c7e43 9090 if (var3 == new_vard || var3 == NULL_TREE)
da008d72 9091 var3 = NULL_TREE;
7d26f131 9092 else if (is_simd && octx->scan_exclusive && !input_phase)
b05c7e43 9093 {
9094 var4 = maybe_lookup_decl (var3, octx);
9095 if (var4 == var3 || var4 == NULL_TREE)
9096 {
9097 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9098 {
9099 var4 = var3;
9100 var3 = NULL_TREE;
9101 }
9102 else
9103 var4 = NULL_TREE;
9104 }
9105 }
da008d72 9106 }
7d26f131 9107 if (is_simd
9108 && octx->scan_exclusive
9109 && !input_phase
9110 && var4 == NULL_TREE)
b05c7e43 9111 var4 = create_tmp_var (TREE_TYPE (val));
da008d72 9112 }
9113 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9114 {
9115 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9116 if (input_phase)
9117 {
9118 if (var3)
9119 {
9120 /* If we've added a separate identity element
9121 variable, copy it over into val. */
9122 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9123 var3);
9124 gimplify_and_add (x, &before);
9125 }
9126 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9127 {
9128 /* Otherwise, assign to it the identity element. */
9129 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
7d26f131 9130 if (is_for)
9131 tseq = copy_gimple_seq_and_replace_locals (tseq);
da008d72 9132 tree ref = build_outer_var_ref (var, octx);
631dab46 9133 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9134 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9135 if (x)
9136 {
7d26f131 9137 if (new_vard != new_var)
631dab46 9138 val = build_fold_addr_expr_loc (clause_loc, val);
9139 SET_DECL_VALUE_EXPR (new_vard, val);
9140 }
da008d72 9141 SET_DECL_VALUE_EXPR (placeholder, ref);
9142 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9143 lower_omp (&tseq, octx);
631dab46 9144 if (x)
9145 SET_DECL_VALUE_EXPR (new_vard, x);
da008d72 9146 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9147 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
da008d72 9148 gimple_seq_add_seq (&before, tseq);
7d26f131 9149 if (is_simd)
9150 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
da008d72 9151 }
9152 }
7d26f131 9153 else if (is_simd)
da008d72 9154 {
b05c7e43 9155 tree x;
7d26f131 9156 if (octx->scan_exclusive)
b05c7e43 9157 {
9158 tree v4 = unshare_expr (var4);
9159 tree v2 = unshare_expr (var2);
9160 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9161 gimplify_and_add (x, &before);
9162 }
da008d72 9163 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
b05c7e43 9164 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9165 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
631dab46 9166 tree vexpr = val;
7d26f131 9167 if (x && new_vard != new_var)
631dab46 9168 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9169 if (x)
9170 SET_DECL_VALUE_EXPR (new_vard, vexpr);
da008d72 9171 SET_DECL_VALUE_EXPR (placeholder, var2);
9172 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9173 lower_omp (&tseq, octx);
9174 gimple_seq_add_seq (&before, tseq);
9175 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
631dab46 9176 if (x)
9177 SET_DECL_VALUE_EXPR (new_vard, x);
da008d72 9178 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9179 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
b05c7e43 9180 if (octx->scan_inclusive)
9181 {
9182 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9183 var2);
9184 gimplify_and_add (x, &before);
9185 }
9186 else if (lane0 == NULL_TREE)
9187 {
9188 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9189 var4);
9190 gimplify_and_add (x, &before);
9191 }
da008d72 9192 }
9193 }
9194 else
9195 {
9196 if (input_phase)
9197 {
9198 /* input phase. Set val to initializer before
9199 the body. */
9200 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9201 gimplify_assign (val, x, &before);
9202 }
7d26f131 9203 else if (is_simd)
da008d72 9204 {
9205 /* scan phase. */
9206 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9207 if (code == MINUS_EXPR)
9208 code = PLUS_EXPR;
9209
9210 tree x = build2 (code, TREE_TYPE (var2),
9211 unshare_expr (var2), unshare_expr (val));
b05c7e43 9212 if (octx->scan_inclusive)
9213 {
9214 gimplify_assign (unshare_expr (var2), x, &before);
9215 gimplify_assign (val, var2, &before);
9216 }
9217 else
9218 {
9219 gimplify_assign (unshare_expr (var4),
9220 unshare_expr (var2), &before);
9221 gimplify_assign (var2, x, &before);
9222 if (lane0 == NULL_TREE)
9223 gimplify_assign (val, var4, &before);
9224 }
da008d72 9225 }
9226 }
7d26f131 9227 if (octx->scan_exclusive && !input_phase && lane0)
b05c7e43 9228 {
9229 tree vexpr = unshare_expr (var4);
9230 TREE_OPERAND (vexpr, 1) = lane0;
7d26f131 9231 if (new_vard != new_var)
b05c7e43 9232 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9233 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9234 }
da008d72 9235 }
9236 }
3d2b49b2 9237 if (is_simd && !is_for_simd)
7d26f131 9238 {
9239 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9240 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9241 gsi_replace (gsi_p, gimple_build_nop (), true);
739cf959 9242 return;
7d26f131 9243 }
739cf959 9244 lower_omp (gimple_omp_body_ptr (stmt), octx);
9245 if (before)
7d26f131 9246 {
9247 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9248 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9249 }
da008d72 9250}
9251
9252
4954efd4 9253/* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9254 substitution of a couple of function calls. But in the NAMED case,
9255 requires that languages coordinate a symbol name. It is therefore
9256 best put here in common code. */
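/* Illustrative sketch only (not the verbatim GIMPLE produced here): a named
   region such as

     #pragma omp critical (foo)
     { ... }

   is lowered by lower_omp_critical below into roughly

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     ... lowered body ...
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   while the unnamed form uses GOMP_critical_start ()/GOMP_critical_end ().
   The name "foo" is only an example; .gomp_critical_user_foo stands for the
   public static mutex variable created and cached in critical_name_mutexes
   below.  */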
2131a1a9 9257
4954efd4 9258static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
2131a1a9 9259
4954efd4 9260static void
9261lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9262{
9263 tree block;
9264 tree name, lock, unlock;
9265 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9266 gbind *bind;
9267 location_t loc = gimple_location (stmt);
9268 gimple_seq tbody;
2131a1a9 9269
4954efd4 9270 name = gimple_omp_critical_name (stmt);
9271 if (name)
9272 {
9273 tree decl;
2131a1a9 9274
4954efd4 9275 if (!critical_name_mutexes)
9276 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
2131a1a9 9277
4954efd4 9278 tree *n = critical_name_mutexes->get (name);
9279 if (n == NULL)
3d483a94 9280 {
4954efd4 9281 char *new_str;
1e8e9920 9282
4954efd4 9283 decl = create_tmp_var_raw (ptr_type_node);
1e8e9920 9284
4954efd4 9285 new_str = ACONCAT ((".gomp_critical_user_",
9286 IDENTIFIER_POINTER (name), NULL));
9287 DECL_NAME (decl) = get_identifier (new_str);
9288 TREE_PUBLIC (decl) = 1;
9289 TREE_STATIC (decl) = 1;
9290 DECL_COMMON (decl) = 1;
9291 DECL_ARTIFICIAL (decl) = 1;
9292 DECL_IGNORED_P (decl) = 1;
1e8e9920 9293
4954efd4 9294 varpool_node::finalize_decl (decl);
1e8e9920 9295
4954efd4 9296 critical_name_mutexes->put (name, decl);
9297 }
9298 else
9299 decl = *n;
1e8e9920 9300
4954efd4 9301      /* If '#pragma omp critical' is inside an offloaded region or
9302	 inside a function marked as offloadable, the symbol must be
9303 marked as offloadable too. */
9304 omp_context *octx;
9305 if (cgraph_node::get (current_function_decl)->offloadable)
9306 varpool_node::get_create (decl)->offloadable = 1;
9307 else
9308 for (octx = ctx->outer; octx; octx = octx->outer)
9309 if (is_gimple_omp_offloaded (octx->stmt))
9310 {
9311 varpool_node::get_create (decl)->offloadable = 1;
9312 break;
9313 }
61e47ac8 9314
4954efd4 9315 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
7c6746c9 9316 lock = build_call_expr_loc (loc, lock, 1,
9317 build_fold_addr_expr_loc (loc, decl));
61e47ac8 9318
4954efd4 9319 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9320 unlock = build_call_expr_loc (loc, unlock, 1,
9321 build_fold_addr_expr_loc (loc, decl));
bc7bff74 9322 }
bc7bff74 9323 else
8e6b4515 9324 {
4954efd4 9325 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9326 lock = build_call_expr_loc (loc, lock, 0);
8e6b4515 9327
4954efd4 9328 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9329 unlock = build_call_expr_loc (loc, unlock, 0);
bc7bff74 9330 }
1e8e9920 9331
4954efd4 9332 push_gimplify_context ();
31712e83 9333
4954efd4 9334 block = make_node (BLOCK);
9335 bind = gimple_build_bind (NULL, NULL, block);
9336 gsi_replace (gsi_p, bind, true);
9337 gimple_bind_add_stmt (bind, stmt);
31712e83 9338
4954efd4 9339 tbody = gimple_bind_body (bind);
9340 gimplify_and_add (lock, &tbody);
9341 gimple_bind_set_body (bind, tbody);
31712e83 9342
4954efd4 9343 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9344 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9345 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9346 gimple_omp_set_body (stmt, NULL);
1e8e9920 9347
4954efd4 9348 tbody = gimple_bind_body (bind);
9349 gimplify_and_add (unlock, &tbody);
9350 gimple_bind_set_body (bind, tbody);
1e8e9920 9351
4954efd4 9352 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
79acaae1 9353
4954efd4 9354 pop_gimplify_context (bind);
9355 gimple_bind_append_vars (bind, ctx->block_vars);
9356 BLOCK_VARS (block) = gimple_bind_vars (bind);
9357}
773c5ba7 9358
4954efd4 9359/* A subroutine of lower_omp_for. Generate code to emit the predicate
9360 for a lastprivate clause. Given a loop control predicate of (V
9361 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9362 is appended to *DLIST, iterator initialization is appended to
9a1d892b 9363 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9364 to be emitted in a critical section. */
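/* Illustrative sketch only (not the verbatim GIMPLE produced here): for a
   loop with the canonical condition V < N2, the lastprivate copy-out code
   appended to *DLIST is guarded by the negated condition, roughly

     if (V >= N2)
       <copy the private copies back to the original list items>

   and when the step is known to be 1 or -1 the guard is strengthened to
   V == N2, which the code below prefers because VRP can then deduce the
   value of V and remove a copy.  Threads that execute no iterations get V
   initialized up front (the gimplify_assign of VINIT below) so that they
   cannot satisfy the guard by accident.  */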
773c5ba7 9365
4954efd4 9366static void
9367lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9a1d892b 9368 gimple_seq *dlist, gimple_seq *clist,
9369 struct omp_context *ctx)
4954efd4 9370{
9371 tree clauses, cond, vinit;
9372 enum tree_code cond_code;
9373 gimple_seq stmts;
1e8e9920 9374
4954efd4 9375 cond_code = fd->loop.cond_code;
9376 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
bc7bff74 9377
4954efd4 9378 /* When possible, use a strict equality expression. This can let VRP
9379 type optimizations deduce the value and remove a copy. */
9380 if (tree_fits_shwi_p (fd->loop.step))
bc7bff74 9381 {
4954efd4 9382 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9383 if (step == 1 || step == -1)
9384 cond_code = EQ_EXPR;
bc7bff74 9385 }
4954efd4 9386
9387 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
9388 || gimple_omp_for_grid_phony (fd->for_stmt))
9389 cond = omp_grid_lastprivate_predicate (fd);
fd6481cf 9390 else
bc7bff74 9391 {
4954efd4 9392 tree n2 = fd->loop.n2;
9393 if (fd->collapse > 1
9394 && TREE_CODE (n2) != INTEGER_CST
9395 && gimple_omp_for_combined_into_p (fd->for_stmt))
43895be5 9396 {
4954efd4 9397 struct omp_context *taskreg_ctx = NULL;
9398 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
43895be5 9399 {
4954efd4 9400 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9401 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9402 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
43895be5 9403 {
4954efd4 9404 if (gimple_omp_for_combined_into_p (gfor))
9405 {
9406 gcc_assert (ctx->outer->outer
9407 && is_parallel_ctx (ctx->outer->outer));
9408 taskreg_ctx = ctx->outer->outer;
9409 }
9410 else
9411 {
9412 struct omp_for_data outer_fd;
9413 omp_extract_for_data (gfor, &outer_fd, NULL);
9414 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9415 }
43895be5 9416 }
4954efd4 9417 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9418 taskreg_ctx = ctx->outer->outer;
9419 }
9420 else if (is_taskreg_ctx (ctx->outer))
9421 taskreg_ctx = ctx->outer;
9422 if (taskreg_ctx)
9423 {
9424 int i;
9425 tree taskreg_clauses
9426 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9427 tree innerc = omp_find_clause (taskreg_clauses,
9428 OMP_CLAUSE__LOOPTEMP_);
9429 gcc_assert (innerc);
9430 for (i = 0; i < fd->collapse; i++)
9431 {
9432 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9433 OMP_CLAUSE__LOOPTEMP_);
9434 gcc_assert (innerc);
9435 }
9436 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9437 OMP_CLAUSE__LOOPTEMP_);
9438 if (innerc)
9439 n2 = fold_convert (TREE_TYPE (n2),
9440 lookup_decl (OMP_CLAUSE_DECL (innerc),
9441 taskreg_ctx));
43895be5 9442 }
bc7bff74 9443 }
4954efd4 9444 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
bc7bff74 9445 }
773c5ba7 9446
4954efd4 9447 clauses = gimple_omp_for_clauses (fd->for_stmt);
9448 stmts = NULL;
9a1d892b 9449 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
4954efd4 9450 if (!gimple_seq_empty_p (stmts))
bc7bff74 9451 {
4954efd4 9452 gimple_seq_add_seq (&stmts, *dlist);
9453 *dlist = stmts;
04c2922b 9454
4954efd4 9455 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9456 vinit = fd->loop.n1;
9457 if (cond_code == EQ_EXPR
9458 && tree_fits_shwi_p (fd->loop.n2)
9459 && ! integer_zerop (fd->loop.n2))
9460 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9461 else
9462 vinit = unshare_expr (vinit);
86a932e0 9463
4954efd4 9464 /* Initialize the iterator variable, so that threads that don't execute
9465 any iterations don't execute the lastprivate clauses by accident. */
9466 gimplify_assign (fd->loop.v, vinit, body_p);
bc7bff74 9467 }
1e8e9920 9468}
9469
7d26f131 9470/* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9471
28a60351 9472static tree
7d26f131 9473omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9474 struct walk_stmt_info *wi)
9475{
9476 gimple *stmt = gsi_stmt (*gsi_p);
9477
9478 *handled_ops_p = true;
9479 switch (gimple_code (stmt))
9480 {
9481 WALK_SUBSTMTS;
9482
3d2b49b2 9483 case GIMPLE_OMP_FOR:
0076df39 9484 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
3d2b49b2 9485 && gimple_omp_for_combined_into_p (stmt))
9486 *handled_ops_p = false;
9487 break;
9488
7d26f131 9489 case GIMPLE_OMP_SCAN:
9490 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9491 return integer_zero_node;
9492 default:
9493 break;
9494 }
9495 return NULL;
9496}
9497
9498/* Helper function for lower_omp_for, add transformations for a worksharing
9499 loop with scan directives inside of it.
9500 For worksharing loop not combined with simd, transform:
9501 #pragma omp for reduction(inscan,+:r) private(i)
9502 for (i = 0; i < n; i = i + 1)
9503 {
9504 {
9505 update (r);
9506 }
9507 #pragma omp scan inclusive(r)
9508 {
9509 use (r);
9510 }
9511 }
9512
9513 into two worksharing loops + code to merge results:
9514
9515 num_threads = omp_get_num_threads ();
9516 thread_num = omp_get_thread_num ();
9517 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9518 <D.2099>:
9519 var2 = r;
9520 goto <D.2101>;
9521 <D.2100>:
9522 // For UDRs this is UDR init, or if ctors are needed, copy from
9523 // var3 that has been constructed to contain the neutral element.
9524 var2 = 0;
9525 <D.2101>:
9526 ivar = 0;
9527 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9528 // a shared array with num_threads elements and rprivb to a local array
9529 // number of elements equal to the number of (contiguous) iterations the
9530 // current thread will perform. controlb and controlp variables are
9531 // temporaries to handle deallocation of rprivb at the end of second
9532 // GOMP_FOR.
9533 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9534 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9535 for (i = 0; i < n; i = i + 1)
9536 {
9537 {
9538 // For UDRs this is UDR init or copy from var3.
9539 r = 0;
9540 // This is the input phase from user code.
9541 update (r);
9542 }
9543 {
9544 // For UDRs this is UDR merge.
9545 var2 = var2 + r;
9546      // Rather than handing it over to the user, save it to the local
9547      // thread's array.
9548 rprivb[ivar] = var2;
9549 // For exclusive scan, the above two statements are swapped.
9550 ivar = ivar + 1;
9551 }
9552 }
9553      // And remember the final value from this thread into the shared
9554 // rpriva array.
9555 rpriva[(sizetype) thread_num] = var2;
9556      // If more than one thread, compute the inclusive parallel scan of the
9557      // rpriva array using a work-efficient prefix sum.
9558 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9559 <D.2102>:
9560 GOMP_barrier ();
9561 down = 0;
9562 k = 1;
9563 num_threadsu = (unsigned int) num_threads;
9564 thread_numup1 = (unsigned int) thread_num + 1;
9565 <D.2108>:
9566 twok = k << 1;
9567 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9568 <D.2110>:
9569 down = 4294967295;
9570 k = k >> 1;
9571 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9572 <D.2112>:
9573 k = k >> 1;
9574 <D.2111>:
9575 twok = k << 1;
9576      cplx = .MUL_OVERFLOW (thread_numup1, twok);
9577 mul = REALPART_EXPR <cplx>;
9578 ovf = IMAGPART_EXPR <cplx>;
9579 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9580 <D.2116>:
9581 andv = k & down;
9582 andvm1 = andv + 4294967295;
9583 l = mul + andvm1;
9584 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9585 <D.2120>:
9586 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9587 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9588 rpriva[l] = rpriva[l - k] + rpriva[l];
9589 <D.2117>:
9590 if (down == 0) goto <D.2121>; else goto <D.2122>;
9591 <D.2121>:
9592 k = k << 1;
9593 goto <D.2123>;
9594 <D.2122>:
9595 k = k >> 1;
9596 <D.2123>:
9597 GOMP_barrier ();
9598 if (k != 0) goto <D.2108>; else goto <D.2103>;
9599 <D.2103>:
9600 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9601 <D.2124>:
9602 // For UDRs this is UDR init or copy from var3.
9603 var2 = 0;
9604 goto <D.2126>;
9605 <D.2125>:
9606 var2 = rpriva[thread_num - 1];
9607 <D.2126>:
9608 ivar = 0;
9609 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9610 reduction(inscan,+:r) private(i)
9611 for (i = 0; i < n; i = i + 1)
9612 {
9613 {
28a60351 9614 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9615 r = var2 + rprivb[ivar];
7d26f131 9616 }
9617 {
9618 // This is the scan phase from user code.
9619 use (r);
9620 // Plus a bump of the iterator.
9621 ivar = ivar + 1;
9622 }
9623 } */
9624
9625static void
9626lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9627 struct omp_for_data *fd, omp_context *ctx)
9628{
3d2b49b2 9629 bool is_for_simd = gimple_omp_for_combined_p (stmt);
7d26f131 9630 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9631
9632 gimple_seq body = gimple_omp_body (stmt);
9633 gimple_stmt_iterator input1_gsi = gsi_none ();
9634 struct walk_stmt_info wi;
9635 memset (&wi, 0, sizeof (wi));
9636 wi.val_only = true;
9637 wi.info = (void *) &input1_gsi;
9638 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9639 gcc_assert (!gsi_end_p (input1_gsi));
9640
9641 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9642 gimple_stmt_iterator gsi = input1_gsi;
9643 gsi_next (&gsi);
9644 gimple_stmt_iterator scan1_gsi = gsi;
9645 gimple *scan_stmt1 = gsi_stmt (gsi);
9646 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9647
9648 gimple_seq input_body = gimple_omp_body (input_stmt1);
9649 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9650 gimple_omp_set_body (input_stmt1, NULL);
9651 gimple_omp_set_body (scan_stmt1, NULL);
9652 gimple_omp_set_body (stmt, NULL);
9653
9654 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9655 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9656 gimple_omp_set_body (stmt, body);
9657 gimple_omp_set_body (input_stmt1, input_body);
9658
9659 gimple_stmt_iterator input2_gsi = gsi_none ();
9660 memset (&wi, 0, sizeof (wi));
9661 wi.val_only = true;
9662 wi.info = (void *) &input2_gsi;
9663 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9664 gcc_assert (!gsi_end_p (input2_gsi));
9665
9666 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9667 gsi = input2_gsi;
9668 gsi_next (&gsi);
9669 gimple_stmt_iterator scan2_gsi = gsi;
9670 gimple *scan_stmt2 = gsi_stmt (gsi);
9671 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9672 gimple_omp_set_body (scan_stmt2, scan_body);
9673
3d2b49b2 9674 gimple_stmt_iterator input3_gsi = gsi_none ();
9675 gimple_stmt_iterator scan3_gsi = gsi_none ();
9676 gimple_stmt_iterator input4_gsi = gsi_none ();
9677 gimple_stmt_iterator scan4_gsi = gsi_none ();
9678 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9679 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9680 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9681 if (is_for_simd)
9682 {
9683 memset (&wi, 0, sizeof (wi));
9684 wi.val_only = true;
9685 wi.info = (void *) &input3_gsi;
9686 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9687 gcc_assert (!gsi_end_p (input3_gsi));
9688
9689 input_stmt3 = gsi_stmt (input3_gsi);
9690 gsi = input3_gsi;
9691 gsi_next (&gsi);
9692 scan3_gsi = gsi;
9693 scan_stmt3 = gsi_stmt (gsi);
9694 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9695
9696 memset (&wi, 0, sizeof (wi));
9697 wi.val_only = true;
9698 wi.info = (void *) &input4_gsi;
9699 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9700 gcc_assert (!gsi_end_p (input4_gsi));
9701
9702 input_stmt4 = gsi_stmt (input4_gsi);
9703 gsi = input4_gsi;
9704 gsi_next (&gsi);
9705 scan4_gsi = gsi;
9706 scan_stmt4 = gsi_stmt (gsi);
9707 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9708
9709 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9710 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9711 }
9712
7d26f131 9713 tree num_threads = create_tmp_var (integer_type_node);
9714 tree thread_num = create_tmp_var (integer_type_node);
9715 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9716 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9717 gimple *g = gimple_build_call (nthreads_decl, 0);
9718 gimple_call_set_lhs (g, num_threads);
9719 gimple_seq_add_stmt (body_p, g);
9720 g = gimple_build_call (threadnum_decl, 0);
9721 gimple_call_set_lhs (g, thread_num);
9722 gimple_seq_add_stmt (body_p, g);
9723
9724 tree ivar = create_tmp_var (sizetype);
9725 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9726 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9727 tree k = create_tmp_var (unsigned_type_node);
9728 tree l = create_tmp_var (unsigned_type_node);
9729
9730 gimple_seq clist = NULL, mdlist = NULL;
9731 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9732 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9733 gimple_seq scan1_list = NULL, input2_list = NULL;
9734 gimple_seq last_list = NULL, reduc_list = NULL;
9735 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9736 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9737 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9738 {
9739 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9740 tree var = OMP_CLAUSE_DECL (c);
9741 tree new_var = lookup_decl (var, ctx);
9742 tree var3 = NULL_TREE;
9743 tree new_vard = new_var;
9744 if (omp_is_reference (var))
9745 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9746 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9747 {
9748 var3 = maybe_lookup_decl (new_vard, ctx);
9749 if (var3 == new_vard)
9750 var3 = NULL_TREE;
9751 }
9752
9753 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9754 tree rpriva = create_tmp_var (ptype);
9755 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9756 OMP_CLAUSE_DECL (nc) = rpriva;
9757 *cp1 = nc;
9758 cp1 = &OMP_CLAUSE_CHAIN (nc);
9759
9760 tree rprivb = create_tmp_var (ptype);
9761 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9762 OMP_CLAUSE_DECL (nc) = rprivb;
9763 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9764 *cp1 = nc;
9765 cp1 = &OMP_CLAUSE_CHAIN (nc);
9766
9767 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9768 if (new_vard != new_var)
9769 TREE_ADDRESSABLE (var2) = 1;
9770 gimple_add_tmp_var (var2);
9771
9772 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9773 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9774 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9775 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9776 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9777
9778 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9779 thread_num, integer_minus_one_node);
9780 x = fold_convert_loc (clause_loc, sizetype, x);
9781 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9782 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9783 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9784 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9785
9786 x = fold_convert_loc (clause_loc, sizetype, l);
9787 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9788 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9789 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9790 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9791
9792 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9793 x = fold_convert_loc (clause_loc, sizetype, x);
9794 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9795 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9796 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9797 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9798
9799 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9800 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9801 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9802 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9803
3d2b49b2 9804 tree var4 = is_for_simd ? new_var : var2;
9805 tree var5 = NULL_TREE, var6 = NULL_TREE;
9806 if (is_for_simd)
9807 {
9808 var5 = lookup_decl (var, input_simd_ctx);
9809 var6 = lookup_decl (var, scan_simd_ctx);
9810 if (new_vard != new_var)
9811 {
9812 var5 = build_simple_mem_ref_loc (clause_loc, var5);
9813 var6 = build_simple_mem_ref_loc (clause_loc, var6);
9814 }
9815 }
7d26f131 9816 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9817 {
9818 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9819 tree val = var2;
7d26f131 9820
9821 x = lang_hooks.decls.omp_clause_default_ctor
9822 (c, var2, build_outer_var_ref (var, ctx));
9823 if (x)
9824 gimplify_and_add (x, &clist);
9825
9826 x = build_outer_var_ref (var, ctx);
3d2b49b2 9827 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
9828 x);
7d26f131 9829 gimplify_and_add (x, &thr01_list);
9830
9831 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9832 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9833 if (var3)
9834 {
3d2b49b2 9835 x = unshare_expr (var4);
9836 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
7d26f131 9837 gimplify_and_add (x, &thrn1_list);
3d2b49b2 9838 x = unshare_expr (var4);
9839 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
7d26f131 9840 gimplify_and_add (x, &thr02_list);
9841 }
9842 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9843 {
9844 /* Otherwise, assign to it the identity element. */
9845 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9846 tseq = copy_gimple_seq_and_replace_locals (tseq);
3d2b49b2 9847 if (!is_for_simd)
9848 {
9849 if (new_vard != new_var)
9850 val = build_fold_addr_expr_loc (clause_loc, val);
9851 SET_DECL_VALUE_EXPR (new_vard, val);
9852 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9853 }
7d26f131 9854 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9855 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9856 lower_omp (&tseq, ctx);
9857 gimple_seq_add_seq (&thrn1_list, tseq);
9858 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9859 lower_omp (&tseq, ctx);
9860 gimple_seq_add_seq (&thr02_list, tseq);
9861 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9862 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9863 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9864 if (y)
9865 SET_DECL_VALUE_EXPR (new_vard, y);
9866 else
9867 {
9868 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9869 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9870 }
9871 }
9872
3d2b49b2 9873 x = unshare_expr (var4);
9874 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
7d26f131 9875 gimplify_and_add (x, &thrn2_list);
9876
3d2b49b2 9877 if (is_for_simd)
7d26f131 9878 {
9879 x = unshare_expr (rprivb_ref);
3d2b49b2 9880 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
7d26f131 9881 gimplify_and_add (x, &scan1_list);
9882 }
3d2b49b2 9883 else
9884 {
9885 if (ctx->scan_exclusive)
9886 {
9887 x = unshare_expr (rprivb_ref);
9888 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9889 gimplify_and_add (x, &scan1_list);
9890 }
7d26f131 9891
3d2b49b2 9892 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9893 tseq = copy_gimple_seq_and_replace_locals (tseq);
9894 SET_DECL_VALUE_EXPR (placeholder, var2);
9895 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9896 lower_omp (&tseq, ctx);
9897 gimple_seq_add_seq (&scan1_list, tseq);
7d26f131 9898
3d2b49b2 9899 if (ctx->scan_inclusive)
9900 {
9901 x = unshare_expr (rprivb_ref);
9902 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9903 gimplify_and_add (x, &scan1_list);
9904 }
7d26f131 9905 }
9906
9907 x = unshare_expr (rpriva_ref);
3d2b49b2 9908 x = lang_hooks.decls.omp_clause_assign_op (c, x,
9909 unshare_expr (var4));
7d26f131 9910 gimplify_and_add (x, &mdlist);
9911
3d2b49b2 9912 x = unshare_expr (is_for_simd ? var6 : new_var);
9913 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
28a60351 9914 gimplify_and_add (x, &input2_list);
9915
9916 val = rprivb_ref;
9917 if (new_vard != new_var)
9918 val = build_fold_addr_expr_loc (clause_loc, val);
9919
3d2b49b2 9920 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
7d26f131 9921 tseq = copy_gimple_seq_and_replace_locals (tseq);
9922 SET_DECL_VALUE_EXPR (new_vard, val);
9923 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
3d2b49b2 9924 if (is_for_simd)
9925 {
9926 SET_DECL_VALUE_EXPR (placeholder, var6);
9927 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9928 }
9929 else
9930 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
7d26f131 9931 lower_omp (&tseq, ctx);
9932 if (y)
9933 SET_DECL_VALUE_EXPR (new_vard, y);
9934 else
9935 {
9936 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9937 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9938 }
3d2b49b2 9939 if (!is_for_simd)
9940 {
9941 SET_DECL_VALUE_EXPR (placeholder, new_var);
9942 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9943 lower_omp (&tseq, ctx);
9944 }
7d26f131 9945 gimple_seq_add_seq (&input2_list, tseq);
9946
7d26f131 9947 x = build_outer_var_ref (var, ctx);
9948 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
9949 gimplify_and_add (x, &last_list);
9950
9951 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
9952 gimplify_and_add (x, &reduc_list);
9953 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9954 tseq = copy_gimple_seq_and_replace_locals (tseq);
9955 val = rprival_ref;
9956 if (new_vard != new_var)
9957 val = build_fold_addr_expr_loc (clause_loc, val);
9958 SET_DECL_VALUE_EXPR (new_vard, val);
9959 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9960 SET_DECL_VALUE_EXPR (placeholder, var2);
9961 lower_omp (&tseq, ctx);
9962 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9963 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9964 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9965 if (y)
9966 SET_DECL_VALUE_EXPR (new_vard, y);
9967 else
9968 {
9969 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9970 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9971 }
9972 gimple_seq_add_seq (&reduc_list, tseq);
9973 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
9974 gimplify_and_add (x, &reduc_list);
9975
9976 x = lang_hooks.decls.omp_clause_dtor (c, var2);
9977 if (x)
9978 gimplify_and_add (x, dlist);
9979 }
9980 else
9981 {
9982 x = build_outer_var_ref (var, ctx);
3d2b49b2 9983 gimplify_assign (unshare_expr (var4), x, &thr01_list);
7d26f131 9984
9985 x = omp_reduction_init (c, TREE_TYPE (new_var));
3d2b49b2 9986 gimplify_assign (unshare_expr (var4), unshare_expr (x),
9987 &thrn1_list);
9988 gimplify_assign (unshare_expr (var4), x, &thr02_list);
7d26f131 9989
3d2b49b2 9990 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
7d26f131 9991
9992 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9993 if (code == MINUS_EXPR)
9994 code = PLUS_EXPR;
9995
3d2b49b2 9996 if (is_for_simd)
9997 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
9998 else
9999 {
10000 if (ctx->scan_exclusive)
10001 gimplify_assign (unshare_expr (rprivb_ref), var2,
10002 &scan1_list);
10003 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10004 gimplify_assign (var2, x, &scan1_list);
10005 if (ctx->scan_inclusive)
10006 gimplify_assign (unshare_expr (rprivb_ref), var2,
10007 &scan1_list);
10008 }
7d26f131 10009
3d2b49b2 10010 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10011 &mdlist);
7d26f131 10012
3d2b49b2 10013 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10014 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
7d26f131 10015
10016 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10017 &last_list);
10018
10019 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10020 unshare_expr (rprival_ref));
10021 gimplify_assign (rprival_ref, x, &reduc_list);
10022 }
10023 }
10024
10025 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10026 gimple_seq_add_stmt (&scan1_list, g);
10027 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
3d2b49b2 10028 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10029 ? scan_stmt4 : scan_stmt2), g);
7d26f131 10030
10031 tree controlb = create_tmp_var (boolean_type_node);
10032 tree controlp = create_tmp_var (ptr_type_node);
10033 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10034 OMP_CLAUSE_DECL (nc) = controlb;
10035 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10036 *cp1 = nc;
10037 cp1 = &OMP_CLAUSE_CHAIN (nc);
10038 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10039 OMP_CLAUSE_DECL (nc) = controlp;
10040 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10041 *cp1 = nc;
10042 cp1 = &OMP_CLAUSE_CHAIN (nc);
10043 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10044 OMP_CLAUSE_DECL (nc) = controlb;
10045 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10046 *cp2 = nc;
10047 cp2 = &OMP_CLAUSE_CHAIN (nc);
10048 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10049 OMP_CLAUSE_DECL (nc) = controlp;
10050 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10051 *cp2 = nc;
10052 cp2 = &OMP_CLAUSE_CHAIN (nc);
10053
10054 *cp1 = gimple_omp_for_clauses (stmt);
10055 gimple_omp_for_set_clauses (stmt, new_clauses1);
10056 *cp2 = gimple_omp_for_clauses (new_stmt);
10057 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10058
3d2b49b2 10059 if (is_for_simd)
10060 {
10061 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10062 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10063
10064 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10065 GSI_SAME_STMT);
10066 gsi_remove (&input3_gsi, true);
10067 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10068 GSI_SAME_STMT);
10069 gsi_remove (&scan3_gsi, true);
10070 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10071 GSI_SAME_STMT);
10072 gsi_remove (&input4_gsi, true);
10073 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10074 GSI_SAME_STMT);
10075 gsi_remove (&scan4_gsi, true);
10076 }
10077 else
10078 {
10079 gimple_omp_set_body (scan_stmt1, scan1_list);
10080 gimple_omp_set_body (input_stmt2, input2_list);
10081 }
7d26f131 10082
10083 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10084 GSI_SAME_STMT);
10085 gsi_remove (&input1_gsi, true);
10086 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10087 GSI_SAME_STMT);
10088 gsi_remove (&scan1_gsi, true);
10089 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10090 GSI_SAME_STMT);
10091 gsi_remove (&input2_gsi, true);
10092 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10093 GSI_SAME_STMT);
10094 gsi_remove (&scan2_gsi, true);
10095
10096 gimple_seq_add_seq (body_p, clist);
10097
10098 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10099 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10100 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10101 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10102 gimple_seq_add_stmt (body_p, g);
10103 g = gimple_build_label (lab1);
10104 gimple_seq_add_stmt (body_p, g);
10105 gimple_seq_add_seq (body_p, thr01_list);
10106 g = gimple_build_goto (lab3);
10107 gimple_seq_add_stmt (body_p, g);
10108 g = gimple_build_label (lab2);
10109 gimple_seq_add_stmt (body_p, g);
10110 gimple_seq_add_seq (body_p, thrn1_list);
10111 g = gimple_build_label (lab3);
10112 gimple_seq_add_stmt (body_p, g);
10113
10114 g = gimple_build_assign (ivar, size_zero_node);
10115 gimple_seq_add_stmt (body_p, g);
10116
10117 gimple_seq_add_stmt (body_p, stmt);
10118 gimple_seq_add_seq (body_p, body);
10119 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10120 fd->loop.v));
10121
10122 g = gimple_build_omp_return (true);
10123 gimple_seq_add_stmt (body_p, g);
10124 gimple_seq_add_seq (body_p, mdlist);
10125
10126 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10127 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10128 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10129 gimple_seq_add_stmt (body_p, g);
10130 g = gimple_build_label (lab1);
10131 gimple_seq_add_stmt (body_p, g);
10132
10133 g = omp_build_barrier (NULL);
10134 gimple_seq_add_stmt (body_p, g);
10135
10136 tree down = create_tmp_var (unsigned_type_node);
10137 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10138 gimple_seq_add_stmt (body_p, g);
10139
10140 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10141 gimple_seq_add_stmt (body_p, g);
10142
10143 tree num_threadsu = create_tmp_var (unsigned_type_node);
10144 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10145 gimple_seq_add_stmt (body_p, g);
10146
10147 tree thread_numu = create_tmp_var (unsigned_type_node);
10148 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10149 gimple_seq_add_stmt (body_p, g);
10150
10151 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10152 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10153 build_int_cst (unsigned_type_node, 1));
10154 gimple_seq_add_stmt (body_p, g);
10155
10156 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10157 g = gimple_build_label (lab3);
10158 gimple_seq_add_stmt (body_p, g);
10159
10160 tree twok = create_tmp_var (unsigned_type_node);
10161 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10162 gimple_seq_add_stmt (body_p, g);
10163
10164 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10165 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10166 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10167 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10168 gimple_seq_add_stmt (body_p, g);
10169 g = gimple_build_label (lab4);
10170 gimple_seq_add_stmt (body_p, g);
10171 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10172 gimple_seq_add_stmt (body_p, g);
10173 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10174 gimple_seq_add_stmt (body_p, g);
10175
10176 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10177 gimple_seq_add_stmt (body_p, g);
10178 g = gimple_build_label (lab6);
10179 gimple_seq_add_stmt (body_p, g);
10180
10181 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10182 gimple_seq_add_stmt (body_p, g);
10183
10184 g = gimple_build_label (lab5);
10185 gimple_seq_add_stmt (body_p, g);
10186
10187 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10188 gimple_seq_add_stmt (body_p, g);
10189
10190 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
8bdd1311 10191 DECL_GIMPLE_REG_P (cplx) = 1;
7d26f131 10192 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10193 gimple_call_set_lhs (g, cplx);
10194 gimple_seq_add_stmt (body_p, g);
10195 tree mul = create_tmp_var (unsigned_type_node);
10196 g = gimple_build_assign (mul, REALPART_EXPR,
10197 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10198 gimple_seq_add_stmt (body_p, g);
10199 tree ovf = create_tmp_var (unsigned_type_node);
10200 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10201 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10202 gimple_seq_add_stmt (body_p, g);
10203
10204 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10205 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10206 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10207 lab7, lab8);
10208 gimple_seq_add_stmt (body_p, g);
10209 g = gimple_build_label (lab7);
10210 gimple_seq_add_stmt (body_p, g);
10211
10212 tree andv = create_tmp_var (unsigned_type_node);
10213 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10214 gimple_seq_add_stmt (body_p, g);
10215 tree andvm1 = create_tmp_var (unsigned_type_node);
10216 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10217 build_minus_one_cst (unsigned_type_node));
10218 gimple_seq_add_stmt (body_p, g);
10219
10220 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10221 gimple_seq_add_stmt (body_p, g);
10222
10223 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10224 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10225 gimple_seq_add_stmt (body_p, g);
10226 g = gimple_build_label (lab9);
10227 gimple_seq_add_stmt (body_p, g);
10228 gimple_seq_add_seq (body_p, reduc_list);
10229 g = gimple_build_label (lab8);
10230 gimple_seq_add_stmt (body_p, g);
10231
10232 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10233 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10234 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10235 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10236 lab10, lab11);
10237 gimple_seq_add_stmt (body_p, g);
10238 g = gimple_build_label (lab10);
10239 gimple_seq_add_stmt (body_p, g);
10240 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10241 gimple_seq_add_stmt (body_p, g);
10242 g = gimple_build_goto (lab12);
10243 gimple_seq_add_stmt (body_p, g);
10244 g = gimple_build_label (lab11);
10245 gimple_seq_add_stmt (body_p, g);
10246 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10247 gimple_seq_add_stmt (body_p, g);
10248 g = gimple_build_label (lab12);
10249 gimple_seq_add_stmt (body_p, g);
10250
10251 g = omp_build_barrier (NULL);
10252 gimple_seq_add_stmt (body_p, g);
10253
10254 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10255 lab3, lab2);
10256 gimple_seq_add_stmt (body_p, g);
10257
10258 g = gimple_build_label (lab2);
10259 gimple_seq_add_stmt (body_p, g);
10260
10261 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10262 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10263 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10264 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10265 gimple_seq_add_stmt (body_p, g);
10266 g = gimple_build_label (lab1);
10267 gimple_seq_add_stmt (body_p, g);
10268 gimple_seq_add_seq (body_p, thr02_list);
10269 g = gimple_build_goto (lab3);
10270 gimple_seq_add_stmt (body_p, g);
10271 g = gimple_build_label (lab2);
10272 gimple_seq_add_stmt (body_p, g);
10273 gimple_seq_add_seq (body_p, thrn2_list);
10274 g = gimple_build_label (lab3);
10275 gimple_seq_add_stmt (body_p, g);
10276
10277 g = gimple_build_assign (ivar, size_zero_node);
10278 gimple_seq_add_stmt (body_p, g);
10279 gimple_seq_add_stmt (body_p, new_stmt);
10280 gimple_seq_add_seq (body_p, new_body);
10281
10282 gimple_seq new_dlist = NULL;
10283 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10284 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10285 tree num_threadsm1 = create_tmp_var (integer_type_node);
10286 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10287 integer_minus_one_node);
10288 gimple_seq_add_stmt (&new_dlist, g);
10289 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10290 gimple_seq_add_stmt (&new_dlist, g);
10291 g = gimple_build_label (lab1);
10292 gimple_seq_add_stmt (&new_dlist, g);
10293 gimple_seq_add_seq (&new_dlist, last_list);
10294 g = gimple_build_label (lab2);
10295 gimple_seq_add_stmt (&new_dlist, g);
10296 gimple_seq_add_seq (&new_dlist, *dlist);
10297 *dlist = new_dlist;
10298}
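/* A minimal sketch, under simplifying assumptions, of the kind of loop the
   scan lowering above handles (all identifiers here are hypothetical):

	void
	prefix_sum (int *a, int *b, int n)
	{
	  int sum = 0;
	  #pragma omp parallel
	  #pragma omp for reduction (inscan, +: sum)
	  for (int i = 0; i < n; i++)
	    {
	      sum += a[i];		// input phase
	      #pragma omp scan inclusive (sum)
	      b[i] = sum;		// scan phase
	    }
	}

   Each thread first accumulates its chunk into per-thread storage
   (scan1_list), the per-thread totals are written into the rpriva array
   (mdlist) and combined across threads by the barrier-separated doubling
   loop built above (reduc_list), and the body is then replayed via
   new_stmt/new_body so every iteration observes the right prefix value.  */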
281001a9 10299
4954efd4 10300/* Lower code for an OMP loop directive. */
773c5ba7 10301
4954efd4 10302static void
10303lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10304{
10305 tree *rhs_p, block;
10306 struct omp_for_data fd, *fdp = NULL;
10307 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10308 gbind *new_stmt;
7e5a76c8 10309 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
9a1d892b 10310 gimple_seq cnt_list = NULL, clist = NULL;
4954efd4 10311 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10312 size_t i;
1e8e9920 10313
4954efd4 10314 push_gimplify_context ();
1e8e9920 10315
4954efd4 10316 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
1e8e9920 10317
4954efd4 10318 block = make_node (BLOCK);
10319 new_stmt = gimple_build_bind (NULL, NULL, block);
10320  /* Replace at gsi right away, so that 'stmt' is no longer a member
10321     of a sequence, as we're going to add it to a different
10322     one below.  */
10323 gsi_replace (gsi_p, new_stmt, true);
1e8e9920 10324
4954efd4 10325  /* Move the declarations of temporaries in the loop body out before we
 10326     make it go away.  */
10327 omp_for_body = gimple_omp_body (stmt);
10328 if (!gimple_seq_empty_p (omp_for_body)
10329 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
bc7bff74 10330 {
4954efd4 10331 gbind *inner_bind
10332 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10333 tree vars = gimple_bind_vars (inner_bind);
10334 gimple_bind_append_vars (new_stmt, vars);
10335      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block; don't
10336	 keep them on the inner_bind and its block.  */
10337 gimple_bind_set_vars (inner_bind, NULL_TREE);
10338 if (gimple_bind_block (inner_bind))
10339 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
bc7bff74 10340 }
773c5ba7 10341
4954efd4 10342 if (gimple_omp_for_combined_into_p (stmt))
8e6b4515 10343 {
4954efd4 10344 omp_extract_for_data (stmt, &fd, NULL);
10345 fdp = &fd;
10346
10347 /* We need two temporaries with fd.loop.v type (istart/iend)
10348 and then (fd.collapse - 1) temporaries with the same
10349 type for count2 ... countN-1 vars if not constant. */
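      /* For instance, a combined construct with collapse(3) whose total
	 iteration count is not a compile-time constant needs
	 2 + (3 - 1) = 4 _looptemp_ temporaries here.  */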
10350 size_t count = 2;
10351 tree type = fd.iter_type;
10352 if (fd.collapse > 1
10353 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10354 count += fd.collapse - 1;
10355 bool taskreg_for
10356 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10357 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10358 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
9cf7bec9 10359 tree simtc = NULL;
4954efd4 10360 tree clauses = *pc;
10361 if (taskreg_for)
10362 outerc
10363 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10364 OMP_CLAUSE__LOOPTEMP_);
9cf7bec9 10365 if (ctx->simt_stmt)
10366 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10367 OMP_CLAUSE__LOOPTEMP_);
4954efd4 10368 for (i = 0; i < count; i++)
8e6b4515 10369 {
4954efd4 10370 tree temp;
10371 if (taskreg_for)
10372 {
10373 gcc_assert (outerc);
10374 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10375 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10376 OMP_CLAUSE__LOOPTEMP_);
10377 }
10378 else
8e6b4515 10379 {
9cf7bec9 10380	      /* If there are two adjacent SIMD stmts, one with a _simt_
 10381		 clause and another without, make sure they have the same
 10382		 decls in their _looptemp_ clauses, because the outer stmt
 10383		 they are combined into will look up just one inner_stmt.  */
10384 if (ctx->simt_stmt)
10385 temp = OMP_CLAUSE_DECL (simtc);
10386 else
10387 temp = create_tmp_var (type);
4954efd4 10388 insert_decl_map (&ctx->outer->cb, temp, temp);
8e6b4515 10389 }
4954efd4 10390 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10391 OMP_CLAUSE_DECL (*pc) = temp;
10392 pc = &OMP_CLAUSE_CHAIN (*pc);
9cf7bec9 10393 if (ctx->simt_stmt)
10394 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10395 OMP_CLAUSE__LOOPTEMP_);
8e6b4515 10396 }
4954efd4 10397 *pc = clauses;
8e6b4515 10398 }
10399
4954efd4 10400 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10401 dlist = NULL;
10402 body = NULL;
7e5a76c8 10403 tree rclauses
10404 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10405 OMP_CLAUSE_REDUCTION);
10406 tree rtmp = NULL_TREE;
10407 if (rclauses)
10408 {
10409 tree type = build_pointer_type (pointer_sized_int_node);
10410 tree temp = create_tmp_var (type);
10411 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10412 OMP_CLAUSE_DECL (c) = temp;
10413 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10414 gimple_omp_for_set_clauses (stmt, c);
10415 lower_omp_task_reductions (ctx, OMP_FOR,
10416 gimple_omp_for_clauses (stmt),
10417 &tred_ilist, &tred_dlist);
10418 rclauses = c;
10419 rtmp = make_ssa_name (type);
10420 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10421 }
10422
4f4b92d8 10423 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10424 ctx);
10425
4954efd4 10426 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10427 fdp);
7e5a76c8 10428 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10429 gimple_omp_for_pre_body (stmt));
79acaae1 10430
4954efd4 10431 lower_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 10432
4954efd4 10433 /* Lower the header expressions. At this point, we can assume that
10434 the header is of the form:
773c5ba7 10435
4954efd4 10436 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
79acaae1 10437
4954efd4 10438 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10439 using the .omp_data_s mapping, if needed. */
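  /* For instance, given a hypothetical header like

	 #pragma omp for
	 for (i = f (); i < n * m; i++)

     neither f () nor n * m is a GIMPLE invariant, so each is replaced below
     by a formal temporary whose computation is emitted into CNT_LIST and
     thus ends up before the lowered GIMPLE_OMP_FOR.  */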
10440 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10441 {
10442 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10443 if (!is_gimple_min_invariant (*rhs_p))
7e5a76c8 10444 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
36ac6a87 10445 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10446 recompute_tree_invariant_for_addr_expr (*rhs_p);
773c5ba7 10447
4954efd4 10448 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10449 if (!is_gimple_min_invariant (*rhs_p))
7e5a76c8 10450 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
36ac6a87 10451 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10452 recompute_tree_invariant_for_addr_expr (*rhs_p);
43895be5 10453
4954efd4 10454 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10455 if (!is_gimple_min_invariant (*rhs_p))
7e5a76c8 10456 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
4954efd4 10457 }
7e5a76c8 10458 if (rclauses)
10459 gimple_seq_add_seq (&tred_ilist, cnt_list);
10460 else
10461 gimple_seq_add_seq (&body, cnt_list);
1e8e9920 10462
4954efd4 10463 /* Once lowered, extract the bounds and clauses. */
10464 omp_extract_for_data (stmt, &fd, NULL);
1e8e9920 10465
4954efd4 10466 if (is_gimple_omp_oacc (ctx->stmt)
10467 && !ctx_in_oacc_kernels_region (ctx))
10468 lower_oacc_head_tail (gimple_location (stmt),
10469 gimple_omp_for_clauses (stmt),
10470 &oacc_head, &oacc_tail, ctx);
1e8e9920 10471
7c6746c9 10472 /* Add OpenACC partitioning and reduction markers just before the loop. */
4954efd4 10473 if (oacc_head)
10474 gimple_seq_add_seq (&body, oacc_head);
7c6746c9 10475
9a1d892b 10476 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
bc7bff74 10477
4954efd4 10478 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10479 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
43895be5 10480 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10481 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10482 {
4954efd4 10483 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10484 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10485 OMP_CLAUSE_LINEAR_STEP (c)
10486 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10487 ctx);
43895be5 10488 }
bc7bff74 10489
4954efd4 10490 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
10491 && gimple_omp_for_grid_phony (stmt));
7d26f131 10492 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10493 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10494 {
10495 gcc_assert (!phony_loop);
10496 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10497 }
10498 else
10499 {
10500 if (!phony_loop)
10501 gimple_seq_add_stmt (&body, stmt);
10502 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10503 }
4954efd4 10504
10505 if (!phony_loop)
10506 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10507 fd.loop.v));
79acaae1 10508
4954efd4 10509 /* After the loop, add exit clauses. */
9a1d892b 10510 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10511
10512 if (clist)
10513 {
10514 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10515 gcall *g = gimple_build_call (fndecl, 0);
10516 gimple_seq_add_stmt (&body, g);
10517 gimple_seq_add_seq (&body, clist);
10518 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10519 g = gimple_build_call (fndecl, 0);
10520 gimple_seq_add_stmt (&body, g);
10521 }
48e1416a 10522
4954efd4 10523 if (ctx->cancellable)
10524 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
773c5ba7 10525
4954efd4 10526 gimple_seq_add_seq (&body, dlist);
1e8e9920 10527
7e5a76c8 10528 if (rclauses)
10529 {
10530 gimple_seq_add_seq (&tred_ilist, body);
10531 body = tred_ilist;
10532 }
10533
4954efd4 10534 body = maybe_catch_exception (body);
1e8e9920 10535
4954efd4 10536 if (!phony_loop)
bc7bff74 10537 {
4954efd4 10538 /* Region exit marker goes at the end of the loop body. */
7e5a76c8 10539 gimple *g = gimple_build_omp_return (fd.have_nowait);
10540 gimple_seq_add_stmt (&body, g);
10541
10542 gimple_seq_add_seq (&body, tred_dlist);
10543
10544 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10545
10546 if (rclauses)
10547 OMP_CLAUSE_DECL (rclauses) = rtmp;
bc7bff74 10548 }
1e8e9920 10549
4954efd4 10550 /* Add OpenACC joining and reduction markers just after the loop. */
10551 if (oacc_tail)
10552 gimple_seq_add_seq (&body, oacc_tail);
79acaae1 10553
4954efd4 10554 pop_gimplify_context (new_stmt);
79acaae1 10555
4954efd4 10556 gimple_bind_append_vars (new_stmt, ctx->block_vars);
2918f4e9 10557 maybe_remove_omp_member_access_dummy_vars (new_stmt);
4954efd4 10558 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10559 if (BLOCK_VARS (block))
10560 TREE_USED (block) = 1;
79acaae1 10561
4954efd4 10562 gimple_bind_set_body (new_stmt, body);
10563 gimple_omp_set_body (stmt, NULL);
10564 gimple_omp_for_set_pre_body (stmt, NULL);
10565}
264aa959 10566
4954efd4 10567/* Callback for walk_stmts.  Check whether the walked parallel body contains
 10568   exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS statement.  */
79acaae1 10569
4954efd4 10570static tree
10571check_combined_parallel (gimple_stmt_iterator *gsi_p,
10572 bool *handled_ops_p,
10573 struct walk_stmt_info *wi)
10574{
10575 int *info = (int *) wi->info;
10576 gimple *stmt = gsi_stmt (*gsi_p);
79acaae1 10577
4954efd4 10578 *handled_ops_p = true;
10579 switch (gimple_code (stmt))
bc7bff74 10580 {
4954efd4 10581 WALK_SUBSTMTS;
fc1d58e3 10582
bce107d7 10583 case GIMPLE_DEBUG:
10584 break;
4954efd4 10585 case GIMPLE_OMP_FOR:
10586 case GIMPLE_OMP_SECTIONS:
10587 *info = *info == 0 ? 1 : -1;
10588 break;
10589 default:
10590 *info = -1;
10591 break;
bc7bff74 10592 }
4954efd4 10593 return NULL;
1e8e9920 10594}
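/* A minimal sketch of the shape this detects (identifiers are hypothetical):

	#pragma omp parallel
	{
	  #pragma omp for
	  for (i = 0; i < n; i++)
	    body (i);
	}

   Walking the parallel body finds exactly one workshare and nothing else,
   so lower_omp_taskreg below marks the parallel as combined.  */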
10595
4954efd4 10596struct omp_taskcopy_context
10597{
10598 /* This field must be at the beginning, as we do "inheritance": Some
10599 callback functions for tree-inline.c (e.g., omp_copy_decl)
10600 receive a copy_body_data pointer that is up-casted to an
10601 omp_context pointer. */
10602 copy_body_data cb;
10603 omp_context *ctx;
10604};
40750995 10605
4954efd4 10606static tree
10607task_copyfn_copy_decl (tree var, copy_body_data *cb)
10608{
10609 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
40750995 10610
4954efd4 10611 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10612 return create_tmp_var (TREE_TYPE (var));
40750995 10613
4954efd4 10614 return var;
10615}
40750995 10616
4954efd4 10617static tree
10618task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
40750995 10619{
4954efd4 10620 tree name, new_fields = NULL, type, f;
40750995 10621
4954efd4 10622 type = lang_hooks.types.make_type (RECORD_TYPE);
10623 name = DECL_NAME (TYPE_NAME (orig_type));
10624 name = build_decl (gimple_location (tcctx->ctx->stmt),
10625 TYPE_DECL, name, type);
10626 TYPE_NAME (type) = name;
40750995 10627
4954efd4 10628 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
40750995 10629 {
4954efd4 10630 tree new_f = copy_node (f);
10631 DECL_CONTEXT (new_f) = type;
10632 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10633 TREE_CHAIN (new_f) = new_fields;
10634 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10635 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10636 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10637 &tcctx->cb, NULL);
10638 new_fields = new_f;
10639 tcctx->cb.decl_map->put (f, new_f);
40750995 10640 }
4954efd4 10641 TYPE_FIELDS (type) = nreverse (new_fields);
10642 layout_type (type);
10643 return type;
10644}
40750995 10645
4954efd4 10646/* Create task copyfn: the function that copies shared variable pointers and copy-constructs firstprivate variables from the parent's data block into the task's own data block.  */
40750995 10647
4954efd4 10648static void
10649create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10650{
10651 struct function *child_cfun;
10652 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10653 tree record_type, srecord_type, bind, list;
10654 bool record_needs_remap = false, srecord_needs_remap = false;
10655 splay_tree_node n;
10656 struct omp_taskcopy_context tcctx;
10657 location_t loc = gimple_location (task_stmt);
99d30117 10658 size_t looptempno = 0;
40750995 10659
4954efd4 10660 child_fn = gimple_omp_task_copy_fn (task_stmt);
10661 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10662 gcc_assert (child_cfun->cfg == NULL);
10663 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
40750995 10664
4954efd4 10665 /* Reset DECL_CONTEXT on function arguments. */
10666 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10667 DECL_CONTEXT (t) = child_fn;
40750995 10668
4954efd4 10669 /* Populate the function. */
10670 push_gimplify_context ();
10671 push_cfun (child_cfun);
40750995 10672
4954efd4 10673 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10674 TREE_SIDE_EFFECTS (bind) = 1;
10675 list = NULL;
10676 DECL_SAVED_TREE (child_fn) = bind;
10677 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
40750995 10678
4954efd4 10679 /* Remap src and dst argument types if needed. */
10680 record_type = ctx->record_type;
10681 srecord_type = ctx->srecord_type;
10682 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10683 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10684 {
10685 record_needs_remap = true;
10686 break;
10687 }
10688 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10689 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10690 {
10691 srecord_needs_remap = true;
10692 break;
10693 }
40750995 10694
4954efd4 10695 if (record_needs_remap || srecord_needs_remap)
40750995 10696 {
4954efd4 10697 memset (&tcctx, '\0', sizeof (tcctx));
10698 tcctx.cb.src_fn = ctx->cb.src_fn;
10699 tcctx.cb.dst_fn = child_fn;
10700 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10701 gcc_checking_assert (tcctx.cb.src_node);
10702 tcctx.cb.dst_node = tcctx.cb.src_node;
10703 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10704 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10705 tcctx.cb.eh_lp_nr = 0;
10706 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10707 tcctx.cb.decl_map = new hash_map<tree, tree>;
10708 tcctx.ctx = ctx;
40750995 10709
4954efd4 10710 if (record_needs_remap)
10711 record_type = task_copyfn_remap_type (&tcctx, record_type);
10712 if (srecord_needs_remap)
10713 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
40750995 10714 }
10715 else
4954efd4 10716 tcctx.cb.decl_map = NULL;
40750995 10717
4954efd4 10718 arg = DECL_ARGUMENTS (child_fn);
10719 TREE_TYPE (arg) = build_pointer_type (record_type);
10720 sarg = DECL_CHAIN (arg);
10721 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
40750995 10722
4954efd4 10723 /* First pass: initialize temporaries used in record_type and srecord_type
10724 sizes and field offsets. */
10725 if (tcctx.cb.decl_map)
10726 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10727 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10728 {
10729 tree *p;
40750995 10730
4954efd4 10731 decl = OMP_CLAUSE_DECL (c);
10732 p = tcctx.cb.decl_map->get (decl);
10733 if (p == NULL)
10734 continue;
10735 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10736 sf = (tree) n->value;
10737 sf = *tcctx.cb.decl_map->get (sf);
10738 src = build_simple_mem_ref_loc (loc, sarg);
10739 src = omp_build_component_ref (src, sf);
10740 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10741 append_to_statement_list (t, &list);
10742 }
40750995 10743
4954efd4 10744 /* Second pass: copy shared var pointers and copy construct non-VLA
10745 firstprivate vars. */
10746 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10747 switch (OMP_CLAUSE_CODE (c))
10748 {
10749 splay_tree_key key;
10750 case OMP_CLAUSE_SHARED:
10751 decl = OMP_CLAUSE_DECL (c);
10752 key = (splay_tree_key) decl;
10753 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10754 key = (splay_tree_key) &DECL_UID (decl);
10755 n = splay_tree_lookup (ctx->field_map, key);
10756 if (n == NULL)
10757 break;
10758 f = (tree) n->value;
10759 if (tcctx.cb.decl_map)
10760 f = *tcctx.cb.decl_map->get (f);
10761 n = splay_tree_lookup (ctx->sfield_map, key);
10762 sf = (tree) n->value;
10763 if (tcctx.cb.decl_map)
10764 sf = *tcctx.cb.decl_map->get (sf);
10765 src = build_simple_mem_ref_loc (loc, sarg);
10766 src = omp_build_component_ref (src, sf);
10767 dst = build_simple_mem_ref_loc (loc, arg);
10768 dst = omp_build_component_ref (dst, f);
10769 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10770 append_to_statement_list (t, &list);
10771 break;
7e5a76c8 10772 case OMP_CLAUSE_REDUCTION:
10773 case OMP_CLAUSE_IN_REDUCTION:
10774 decl = OMP_CLAUSE_DECL (c);
10775 if (TREE_CODE (decl) == MEM_REF)
10776 {
10777 decl = TREE_OPERAND (decl, 0);
10778 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10779 decl = TREE_OPERAND (decl, 0);
10780 if (TREE_CODE (decl) == INDIRECT_REF
10781 || TREE_CODE (decl) == ADDR_EXPR)
10782 decl = TREE_OPERAND (decl, 0);
10783 }
10784 key = (splay_tree_key) decl;
10785 n = splay_tree_lookup (ctx->field_map, key);
10786 if (n == NULL)
10787 break;
10788 f = (tree) n->value;
10789 if (tcctx.cb.decl_map)
10790 f = *tcctx.cb.decl_map->get (f);
10791 n = splay_tree_lookup (ctx->sfield_map, key);
10792 sf = (tree) n->value;
10793 if (tcctx.cb.decl_map)
10794 sf = *tcctx.cb.decl_map->get (sf);
10795 src = build_simple_mem_ref_loc (loc, sarg);
10796 src = omp_build_component_ref (src, sf);
10797 if (decl != OMP_CLAUSE_DECL (c)
10798 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10799 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10800 src = build_simple_mem_ref_loc (loc, src);
10801 dst = build_simple_mem_ref_loc (loc, arg);
10802 dst = omp_build_component_ref (dst, f);
10803 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10804 append_to_statement_list (t, &list);
10805 break;
99d30117 10806 case OMP_CLAUSE__LOOPTEMP_:
10807 /* Fields for first two _looptemp_ clauses are initialized by
10808 GOMP_taskloop*, the rest are handled like firstprivate. */
10809 if (looptempno < 2)
10810 {
10811 looptempno++;
10812 break;
10813 }
10814 /* FALLTHRU */
7e5a76c8 10815 case OMP_CLAUSE__REDUCTEMP_:
4954efd4 10816 case OMP_CLAUSE_FIRSTPRIVATE:
10817 decl = OMP_CLAUSE_DECL (c);
10818 if (is_variable_sized (decl))
10819 break;
10820 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10821 if (n == NULL)
10822 break;
10823 f = (tree) n->value;
10824 if (tcctx.cb.decl_map)
10825 f = *tcctx.cb.decl_map->get (f);
10826 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10827 if (n != NULL)
10828 {
10829 sf = (tree) n->value;
10830 if (tcctx.cb.decl_map)
10831 sf = *tcctx.cb.decl_map->get (sf);
10832 src = build_simple_mem_ref_loc (loc, sarg);
10833 src = omp_build_component_ref (src, sf);
10834 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10835 src = build_simple_mem_ref_loc (loc, src);
10836 }
10837 else
10838 src = decl;
10839 dst = build_simple_mem_ref_loc (loc, arg);
10840 dst = omp_build_component_ref (dst, f);
7e5a76c8 10841 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
99d30117 10842 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10843 else
10844 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
4954efd4 10845 append_to_statement_list (t, &list);
10846 break;
10847 case OMP_CLAUSE_PRIVATE:
10848 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10849 break;
10850 decl = OMP_CLAUSE_DECL (c);
10851 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10852 f = (tree) n->value;
10853 if (tcctx.cb.decl_map)
10854 f = *tcctx.cb.decl_map->get (f);
10855 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10856 if (n != NULL)
10857 {
10858 sf = (tree) n->value;
10859 if (tcctx.cb.decl_map)
10860 sf = *tcctx.cb.decl_map->get (sf);
10861 src = build_simple_mem_ref_loc (loc, sarg);
10862 src = omp_build_component_ref (src, sf);
10863 if (use_pointer_for_field (decl, NULL))
10864 src = build_simple_mem_ref_loc (loc, src);
10865 }
10866 else
10867 src = decl;
10868 dst = build_simple_mem_ref_loc (loc, arg);
10869 dst = omp_build_component_ref (dst, f);
10870 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10871 append_to_statement_list (t, &list);
10872 break;
10873 default:
10874 break;
10875 }
3d483a94 10876
4954efd4 10877 /* Last pass: handle VLA firstprivates. */
10878 if (tcctx.cb.decl_map)
10879 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10880 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10881 {
10882 tree ind, ptr, df;
3d483a94 10883
4954efd4 10884 decl = OMP_CLAUSE_DECL (c);
10885 if (!is_variable_sized (decl))
10886 continue;
10887 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10888 if (n == NULL)
10889 continue;
10890 f = (tree) n->value;
10891 f = *tcctx.cb.decl_map->get (f);
10892 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
10893 ind = DECL_VALUE_EXPR (decl);
10894 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
10895 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
10896 n = splay_tree_lookup (ctx->sfield_map,
10897 (splay_tree_key) TREE_OPERAND (ind, 0));
10898 sf = (tree) n->value;
10899 sf = *tcctx.cb.decl_map->get (sf);
10900 src = build_simple_mem_ref_loc (loc, sarg);
10901 src = omp_build_component_ref (src, sf);
10902 src = build_simple_mem_ref_loc (loc, src);
10903 dst = build_simple_mem_ref_loc (loc, arg);
10904 dst = omp_build_component_ref (dst, f);
10905 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10906 append_to_statement_list (t, &list);
10907 n = splay_tree_lookup (ctx->field_map,
10908 (splay_tree_key) TREE_OPERAND (ind, 0));
10909 df = (tree) n->value;
10910 df = *tcctx.cb.decl_map->get (df);
10911 ptr = build_simple_mem_ref_loc (loc, arg);
10912 ptr = omp_build_component_ref (ptr, df);
10913 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
10914 build_fold_addr_expr_loc (loc, dst));
10915 append_to_statement_list (t, &list);
10916 }
3d483a94 10917
4954efd4 10918 t = build1 (RETURN_EXPR, void_type_node, NULL);
10919 append_to_statement_list (t, &list);
3d483a94 10920
4954efd4 10921 if (tcctx.cb.decl_map)
10922 delete tcctx.cb.decl_map;
10923 pop_gimplify_context (NULL);
10924 BIND_EXPR_BODY (bind) = list;
10925 pop_cfun ();
10926}
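/* A rough sketch, under simplifying assumptions, of the copy function built
   above for a task with one non-VLA firstprivate int X and one shared Y
   (all names here are hypothetical):

	void
	task_copyfn (struct omp_data_t *dst, struct omp_data_s *src)
	{
	  dst->y = src->y;	// shared: copy the pointer field
	  dst->x = src->x;	// firstprivate: copy-construct the value
	}

   VLA firstprivates are additionally given a pointer field in *DST that is
   redirected to the freshly copied storage.  */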
3d483a94 10927
10928static void
4954efd4 10929lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
3d483a94 10930{
4954efd4 10931 tree c, clauses;
10932 gimple *g;
7e5a76c8 10933 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
4954efd4 10934
10935 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
10936 gcc_assert (clauses);
10937 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10938 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
10939 switch (OMP_CLAUSE_DEPEND_KIND (c))
10940 {
7e5a76c8 10941 case OMP_CLAUSE_DEPEND_LAST:
10942 /* Lowering already done at gimplification. */
10943 return;
4954efd4 10944 case OMP_CLAUSE_DEPEND_IN:
7e5a76c8 10945 cnt[2]++;
4954efd4 10946 break;
10947 case OMP_CLAUSE_DEPEND_OUT:
10948 case OMP_CLAUSE_DEPEND_INOUT:
7e5a76c8 10949 cnt[0]++;
10950 break;
10951 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
10952 cnt[1]++;
10953 break;
10954 case OMP_CLAUSE_DEPEND_DEPOBJ:
10955 cnt[3]++;
4954efd4 10956 break;
10957 case OMP_CLAUSE_DEPEND_SOURCE:
10958 case OMP_CLAUSE_DEPEND_SINK:
10959 /* FALLTHRU */
10960 default:
10961 gcc_unreachable ();
10962 }
7e5a76c8 10963 if (cnt[1] || cnt[3])
10964 idx = 5;
10965 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
10966 tree type = build_array_type_nelts (ptr_type_node, total + idx);
4954efd4 10967 tree array = create_tmp_var (type);
10968 TREE_ADDRESSABLE (array) = 1;
10969 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
10970 NULL_TREE);
7e5a76c8 10971 if (idx == 5)
10972 {
10973 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
10974 gimple_seq_add_stmt (iseq, g);
10975 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
10976 NULL_TREE);
10977 }
10978 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
4954efd4 10979 gimple_seq_add_stmt (iseq, g);
7e5a76c8 10980 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
10981 {
10982 r = build4 (ARRAY_REF, ptr_type_node, array,
10983 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
10984 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
10985 gimple_seq_add_stmt (iseq, g);
10986 }
10987 for (i = 0; i < 4; i++)
3d483a94 10988 {
7e5a76c8 10989 if (cnt[i] == 0)
4954efd4 10990 continue;
10991 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7e5a76c8 10992 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
10993 continue;
10994 else
4954efd4 10995 {
7e5a76c8 10996 switch (OMP_CLAUSE_DEPEND_KIND (c))
10997 {
10998 case OMP_CLAUSE_DEPEND_IN:
10999 if (i != 2)
11000 continue;
11001 break;
11002 case OMP_CLAUSE_DEPEND_OUT:
11003 case OMP_CLAUSE_DEPEND_INOUT:
11004 if (i != 0)
11005 continue;
11006 break;
11007 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11008 if (i != 1)
11009 continue;
11010 break;
11011 case OMP_CLAUSE_DEPEND_DEPOBJ:
11012 if (i != 3)
11013 continue;
11014 break;
11015 default:
11016 gcc_unreachable ();
11017 }
4954efd4 11018 tree t = OMP_CLAUSE_DECL (c);
11019 t = fold_convert (ptr_type_node, t);
11020 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11021 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11022 NULL_TREE, NULL_TREE);
11023 g = gimple_build_assign (r, t);
11024 gimple_seq_add_stmt (iseq, g);
11025 }
3d483a94 11026 }
4954efd4 11027 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7e5a76c8 11028 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
4954efd4 11029 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11030 OMP_CLAUSE_CHAIN (c) = *pclauses;
11031 *pclauses = c;
11032 tree clobber = build_constructor (type, NULL);
11033 TREE_THIS_VOLATILE (clobber) = 1;
11034 g = gimple_build_assign (array, clobber);
11035 gimple_seq_add_stmt (oseq, g);
11036}
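/* For illustration, in the simple case (no mutexinoutset or depobj clauses)
   the array built above for depend(out: a) depend(in: b, c) would be laid
   out as

	void *deps[5] = { (void *) 3,	// total number of addresses
			  (void *) 1,	// number of out/inout addresses
			  &a,		// out/inout addresses come first
			  &b, &c };	// followed by the in addresses

   and its address is passed on through the artificial OMP_CLAUSE_DEPEND
   clause with OMP_CLAUSE_DEPEND_LAST kind appended above; when mutexinoutset
   or depobj clauses are present, a five-element header is used instead.  */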
11037
11038/* Lower the OpenMP parallel or task directive in the current statement
11039 in GSI_P. CTX holds context information for the directive. */
3d483a94 11040
4954efd4 11041static void
11042lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11043{
11044 tree clauses;
11045 tree child_fn, t;
11046 gimple *stmt = gsi_stmt (*gsi_p);
11047 gbind *par_bind, *bind, *dep_bind = NULL;
7e5a76c8 11048 gimple_seq par_body;
4954efd4 11049 location_t loc = gimple_location (stmt);
3d483a94 11050
4954efd4 11051 clauses = gimple_omp_taskreg_clauses (stmt);
7e5a76c8 11052 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11053 && gimple_omp_task_taskwait_p (stmt))
11054 {
11055 par_bind = NULL;
11056 par_body = NULL;
11057 }
11058 else
11059 {
11060 par_bind
11061 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11062 par_body = gimple_bind_body (par_bind);
11063 }
4954efd4 11064 child_fn = ctx->cb.dst_fn;
11065 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11066 && !gimple_omp_parallel_combined_p (stmt))
3d483a94 11067 {
4954efd4 11068 struct walk_stmt_info wi;
11069 int ws_num = 0;
3d483a94 11070
4954efd4 11071 memset (&wi, 0, sizeof (wi));
11072 wi.info = &ws_num;
11073 wi.val_only = true;
11074 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11075 if (ws_num == 1)
11076 gimple_omp_parallel_set_combined_p (stmt, true);
3d483a94 11077 }
4954efd4 11078 gimple_seq dep_ilist = NULL;
11079 gimple_seq dep_olist = NULL;
11080 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11081 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
bc7bff74 11082 {
4954efd4 11083 push_gimplify_context ();
11084 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11085 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11086 &dep_ilist, &dep_olist);
bab6706a 11087 }
bab6706a 11088
7e5a76c8 11089 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11090 && gimple_omp_task_taskwait_p (stmt))
11091 {
11092 if (dep_bind)
11093 {
11094 gsi_replace (gsi_p, dep_bind, true);
11095 gimple_bind_add_seq (dep_bind, dep_ilist);
11096 gimple_bind_add_stmt (dep_bind, stmt);
11097 gimple_bind_add_seq (dep_bind, dep_olist);
11098 pop_gimplify_context (dep_bind);
11099 }
11100 return;
11101 }
11102
4954efd4 11103 if (ctx->srecord_type)
11104 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
bab6706a 11105
7e5a76c8 11106 gimple_seq tskred_ilist = NULL;
11107 gimple_seq tskred_olist = NULL;
11108 if ((is_task_ctx (ctx)
11109 && gimple_omp_task_taskloop_p (ctx->stmt)
11110 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11111 OMP_CLAUSE_REDUCTION))
11112 || (is_parallel_ctx (ctx)
11113 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11114 OMP_CLAUSE__REDUCTEMP_)))
11115 {
11116 if (dep_bind == NULL)
11117 {
11118 push_gimplify_context ();
11119 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11120 }
11121 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11122 : OMP_PARALLEL,
11123 gimple_omp_taskreg_clauses (ctx->stmt),
11124 &tskred_ilist, &tskred_olist);
11125 }
11126
4954efd4 11127 push_gimplify_context ();
3d483a94 11128
7e5a76c8 11129 gimple_seq par_olist = NULL;
11130 gimple_seq par_ilist = NULL;
11131 gimple_seq par_rlist = NULL;
4954efd4 11132 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11133 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
11134 if (phony_construct && ctx->record_type)
bab6706a 11135 {
4954efd4 11136 gcc_checking_assert (!ctx->receiver_decl);
11137 ctx->receiver_decl = create_tmp_var
11138 (build_reference_type (ctx->record_type), ".omp_rec");
bab6706a 11139 }
4954efd4 11140 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11141 lower_omp (&par_body, ctx);
11142 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
9a1d892b 11143 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
bab6706a 11144
4954efd4 11145 /* Declare all the variables created by mapping and the variables
11146 declared in the scope of the parallel body. */
11147 record_vars_into (ctx->block_vars, child_fn);
2918f4e9 11148 maybe_remove_omp_member_access_dummy_vars (par_bind);
4954efd4 11149 record_vars_into (gimple_bind_vars (par_bind), child_fn);
3d483a94 11150
4954efd4 11151 if (ctx->record_type)
3d483a94 11152 {
4954efd4 11153 ctx->sender_decl
11154 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11155 : ctx->record_type, ".omp_data_o");
11156 DECL_NAMELESS (ctx->sender_decl) = 1;
11157 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11158 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
3d483a94 11159 }
3d483a94 11160
7e5a76c8 11161 gimple_seq olist = NULL;
11162 gimple_seq ilist = NULL;
4954efd4 11163 lower_send_clauses (clauses, &ilist, &olist, ctx);
11164 lower_send_shared_vars (&ilist, &olist, ctx);
bab6706a 11165
4954efd4 11166 if (ctx->record_type)
3d483a94 11167 {
4954efd4 11168 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
11169 TREE_THIS_VOLATILE (clobber) = 1;
11170 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11171 clobber));
43895be5 11172 }
43895be5 11173
4954efd4 11174 /* Once all the expansions are done, sequence all the different
11175 fragments inside gimple_omp_body. */
43895be5 11176
7e5a76c8 11177 gimple_seq new_body = NULL;
43895be5 11178
4954efd4 11179 if (ctx->record_type)
43895be5 11180 {
4954efd4 11181 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11182 /* fixup_child_record_type might have changed receiver_decl's type. */
11183 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11184 gimple_seq_add_stmt (&new_body,
11185 gimple_build_assign (ctx->receiver_decl, t));
43895be5 11186 }
11187
4954efd4 11188 gimple_seq_add_seq (&new_body, par_ilist);
11189 gimple_seq_add_seq (&new_body, par_body);
11190 gimple_seq_add_seq (&new_body, par_rlist);
11191 if (ctx->cancellable)
11192 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11193 gimple_seq_add_seq (&new_body, par_olist);
11194 new_body = maybe_catch_exception (new_body);
11195 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11196 gimple_seq_add_stmt (&new_body,
11197 gimple_build_omp_continue (integer_zero_node,
11198 integer_zero_node));
11199 if (!phony_construct)
43895be5 11200 {
4954efd4 11201 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11202 gimple_omp_set_body (stmt, new_body);
43895be5 11203 }
11204
7e5a76c8 11205 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11206 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11207 else
11208 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
4954efd4 11209 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11210 gimple_bind_add_seq (bind, ilist);
11211 if (!phony_construct)
11212 gimple_bind_add_stmt (bind, stmt);
43895be5 11213 else
4954efd4 11214 gimple_bind_add_seq (bind, new_body);
11215 gimple_bind_add_seq (bind, olist);
43895be5 11216
4954efd4 11217 pop_gimplify_context (NULL);
11218
11219 if (dep_bind)
43895be5 11220 {
4954efd4 11221 gimple_bind_add_seq (dep_bind, dep_ilist);
7e5a76c8 11222 gimple_bind_add_seq (dep_bind, tskred_ilist);
4954efd4 11223 gimple_bind_add_stmt (dep_bind, bind);
7e5a76c8 11224 gimple_bind_add_seq (dep_bind, tskred_olist);
4954efd4 11225 gimple_bind_add_seq (dep_bind, dep_olist);
11226 pop_gimplify_context (dep_bind);
43895be5 11227 }
43895be5 11228}
11229
4954efd4 11230/* Lower the GIMPLE_OMP_TARGET in the current statement
11231 in GSI_P. CTX holds context information for the directive. */
43895be5 11232
11233static void
4954efd4 11234lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
43895be5 11235{
4954efd4 11236 tree clauses;
11237 tree child_fn, t, c;
11238 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11239 gbind *tgt_bind, *bind, *dep_bind = NULL;
11240 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11241 location_t loc = gimple_location (stmt);
11242 bool offloaded, data_region;
11243 unsigned int map_cnt = 0;
43895be5 11244
4954efd4 11245 offloaded = is_gimple_omp_offloaded (stmt);
11246 switch (gimple_omp_target_kind (stmt))
43895be5 11247 {
4954efd4 11248 case GF_OMP_TARGET_KIND_REGION:
11249 case GF_OMP_TARGET_KIND_UPDATE:
11250 case GF_OMP_TARGET_KIND_ENTER_DATA:
11251 case GF_OMP_TARGET_KIND_EXIT_DATA:
11252 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11253 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11254 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11255 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11256 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11257 data_region = false;
11258 break;
11259 case GF_OMP_TARGET_KIND_DATA:
11260 case GF_OMP_TARGET_KIND_OACC_DATA:
11261 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11262 data_region = true;
11263 break;
11264 default:
11265 gcc_unreachable ();
3d483a94 11266 }
3d483a94 11267
4954efd4 11268 clauses = gimple_omp_target_clauses (stmt);
43895be5 11269
4954efd4 11270 gimple_seq dep_ilist = NULL;
11271 gimple_seq dep_olist = NULL;
11272 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
43895be5 11273 {
4954efd4 11274 push_gimplify_context ();
11275 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11276 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11277 &dep_ilist, &dep_olist);
43895be5 11278 }
1e8e9920 11279
4954efd4 11280 tgt_bind = NULL;
11281 tgt_body = NULL;
11282 if (offloaded)
a8e785ba 11283 {
4954efd4 11284 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11285 tgt_body = gimple_bind_body (tgt_bind);
a8e785ba 11286 }
4954efd4 11287 else if (data_region)
11288 tgt_body = gimple_omp_body (stmt);
11289 child_fn = ctx->cb.dst_fn;
a8e785ba 11290
4954efd4 11291 push_gimplify_context ();
11292 fplist = NULL;
a8e785ba 11293
4954efd4 11294 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11295 switch (OMP_CLAUSE_CODE (c))
11296 {
11297 tree var, x;
a8e785ba 11298
4954efd4 11299 default:
11300 break;
11301 case OMP_CLAUSE_MAP:
11302#if CHECKING_P
11303 /* First check what we're prepared to handle in the following. */
11304 switch (OMP_CLAUSE_MAP_KIND (c))
11305 {
11306 case GOMP_MAP_ALLOC:
11307 case GOMP_MAP_TO:
11308 case GOMP_MAP_FROM:
11309 case GOMP_MAP_TOFROM:
11310 case GOMP_MAP_POINTER:
11311 case GOMP_MAP_TO_PSET:
11312 case GOMP_MAP_DELETE:
11313 case GOMP_MAP_RELEASE:
11314 case GOMP_MAP_ALWAYS_TO:
11315 case GOMP_MAP_ALWAYS_FROM:
11316 case GOMP_MAP_ALWAYS_TOFROM:
11317 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11318 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11319 case GOMP_MAP_STRUCT:
11320 case GOMP_MAP_ALWAYS_POINTER:
11321 break;
11322 case GOMP_MAP_FORCE_ALLOC:
11323 case GOMP_MAP_FORCE_TO:
11324 case GOMP_MAP_FORCE_FROM:
11325 case GOMP_MAP_FORCE_TOFROM:
11326 case GOMP_MAP_FORCE_PRESENT:
11327 case GOMP_MAP_FORCE_DEVICEPTR:
11328 case GOMP_MAP_DEVICE_RESIDENT:
11329 case GOMP_MAP_LINK:
11330 gcc_assert (is_gimple_omp_oacc (stmt));
11331 break;
11332 default:
11333 gcc_unreachable ();
11334 }
11335#endif
11336 /* FALLTHRU */
11337 case OMP_CLAUSE_TO:
11338 case OMP_CLAUSE_FROM:
11339 oacc_firstprivate:
11340 var = OMP_CLAUSE_DECL (c);
11341 if (!DECL_P (var))
11342 {
11343 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11344 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11345 && (OMP_CLAUSE_MAP_KIND (c)
11346 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11347 map_cnt++;
11348 continue;
11349 }
a8e785ba 11350
4954efd4 11351 if (DECL_SIZE (var)
11352 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11353 {
11354 tree var2 = DECL_VALUE_EXPR (var);
11355 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11356 var2 = TREE_OPERAND (var2, 0);
11357 gcc_assert (DECL_P (var2));
11358 var = var2;
11359 }
a8e785ba 11360
4954efd4 11361 if (offloaded
11362 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11363 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11364 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11365 {
11366 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11367 {
11368 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11369 && varpool_node::get_create (var)->offloadable)
11370 continue;
a8e785ba 11371
4954efd4 11372 tree type = build_pointer_type (TREE_TYPE (var));
11373 tree new_var = lookup_decl (var, ctx);
11374 x = create_tmp_var_raw (type, get_name (new_var));
11375 gimple_add_tmp_var (x);
11376 x = build_simple_mem_ref (x);
11377 SET_DECL_VALUE_EXPR (new_var, x);
11378 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11379 }
11380 continue;
11381 }
a8e785ba 11382
4954efd4 11383 if (!maybe_lookup_field (var, ctx))
11384 continue;
a8e785ba 11385
4954efd4 11386 /* Don't remap oacc parallel reduction variables, because the
11387 intermediate result must be local to each gang. */
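/* Illustrative example only: with
   "#pragma acc parallel copy(sum) reduction(+:sum)" the map clause for
   SUM is expected to carry OMP_CLAUSE_MAP_IN_REDUCTION, so SUM is left
   alone here and its per-gang private copy is created by
   lower_oacc_reductions further below instead.  */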
11388 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11389 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11390 {
11391 x = build_receiver_ref (var, true, ctx);
11392 tree new_var = lookup_decl (var, ctx);
a8e785ba 11393
4954efd4 11394 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11395 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11396 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11397 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11398 x = build_simple_mem_ref (x);
11399 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11400 {
11401 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
a209c8cc 11402 if (omp_is_reference (new_var)
11403 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
4954efd4 11404 {
11405 /* Create a local object to hold the instance
11406 value. */
11407 tree type = TREE_TYPE (TREE_TYPE (new_var));
11408 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11409 tree inst = create_tmp_var (type, id);
11410 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11411 x = build_fold_addr_expr (inst);
11412 }
11413 gimplify_assign (new_var, x, &fplist);
11414 }
11415 else if (DECL_P (new_var))
11416 {
11417 SET_DECL_VALUE_EXPR (new_var, x);
11418 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11419 }
11420 else
11421 gcc_unreachable ();
11422 }
11423 map_cnt++;
11424 break;
a8e785ba 11425
4954efd4 11426 case OMP_CLAUSE_FIRSTPRIVATE:
11427 if (is_oacc_parallel (ctx))
11428 goto oacc_firstprivate;
11429 map_cnt++;
11430 var = OMP_CLAUSE_DECL (c);
11431 if (!omp_is_reference (var)
11432 && !is_gimple_reg_type (TREE_TYPE (var)))
11433 {
11434 tree new_var = lookup_decl (var, ctx);
11435 if (is_variable_sized (var))
11436 {
11437 tree pvar = DECL_VALUE_EXPR (var);
11438 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11439 pvar = TREE_OPERAND (pvar, 0);
11440 gcc_assert (DECL_P (pvar));
11441 tree new_pvar = lookup_decl (pvar, ctx);
11442 x = build_fold_indirect_ref (new_pvar);
11443 TREE_THIS_NOTRAP (x) = 1;
11444 }
11445 else
11446 x = build_receiver_ref (var, true, ctx);
11447 SET_DECL_VALUE_EXPR (new_var, x);
11448 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11449 }
11450 break;
a8e785ba 11451
4954efd4 11452 case OMP_CLAUSE_PRIVATE:
11453 if (is_gimple_omp_oacc (ctx->stmt))
11454 break;
11455 var = OMP_CLAUSE_DECL (c);
11456 if (is_variable_sized (var))
11457 {
11458 tree new_var = lookup_decl (var, ctx);
11459 tree pvar = DECL_VALUE_EXPR (var);
11460 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11461 pvar = TREE_OPERAND (pvar, 0);
11462 gcc_assert (DECL_P (pvar));
11463 tree new_pvar = lookup_decl (pvar, ctx);
11464 x = build_fold_indirect_ref (new_pvar);
11465 TREE_THIS_NOTRAP (x) = 1;
11466 SET_DECL_VALUE_EXPR (new_var, x);
11467 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11468 }
11469 break;
a8e785ba 11470
4954efd4 11471 case OMP_CLAUSE_USE_DEVICE_PTR:
75145cfe 11472 case OMP_CLAUSE_USE_DEVICE_ADDR:
4954efd4 11473 case OMP_CLAUSE_IS_DEVICE_PTR:
11474 var = OMP_CLAUSE_DECL (c);
11475 map_cnt++;
11476 if (is_variable_sized (var))
11477 {
11478 tree new_var = lookup_decl (var, ctx);
11479 tree pvar = DECL_VALUE_EXPR (var);
11480 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11481 pvar = TREE_OPERAND (pvar, 0);
11482 gcc_assert (DECL_P (pvar));
11483 tree new_pvar = lookup_decl (pvar, ctx);
11484 x = build_fold_indirect_ref (new_pvar);
11485 TREE_THIS_NOTRAP (x) = 1;
11486 SET_DECL_VALUE_EXPR (new_var, x);
11487 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11488 }
75145cfe 11489 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11490 && !omp_is_reference (var))
11491 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
4954efd4 11492 {
11493 tree new_var = lookup_decl (var, ctx);
11494 tree type = build_pointer_type (TREE_TYPE (var));
11495 x = create_tmp_var_raw (type, get_name (new_var));
11496 gimple_add_tmp_var (x);
11497 x = build_simple_mem_ref (x);
11498 SET_DECL_VALUE_EXPR (new_var, x);
11499 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11500 }
11501 else
11502 {
11503 tree new_var = lookup_decl (var, ctx);
11504 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11505 gimple_add_tmp_var (x);
11506 SET_DECL_VALUE_EXPR (new_var, x);
11507 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11508 }
11509 break;
11510 }
a8e785ba 11511
4954efd4 11512 if (offloaded)
a8e785ba 11513 {
4954efd4 11514 target_nesting_level++;
11515 lower_omp (&tgt_body, ctx);
11516 target_nesting_level--;
a8e785ba 11517 }
4954efd4 11518 else if (data_region)
11519 lower_omp (&tgt_body, ctx);
a8e785ba 11520
4954efd4 11521 if (offloaded)
a8e785ba 11522 {
4954efd4 11523 /* Declare all the variables created by mapping and the variables
11524 declared in the scope of the target body. */
11525 record_vars_into (ctx->block_vars, child_fn);
2918f4e9 11526 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
4954efd4 11527 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
a8e785ba 11528 }
11529
4954efd4 11530 olist = NULL;
11531 ilist = NULL;
11532 if (ctx->record_type)
a8e785ba 11533 {
4954efd4 11534 ctx->sender_decl
11535 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11536 DECL_NAMELESS (ctx->sender_decl) = 1;
11537 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11538 t = make_tree_vec (3);
11539 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11540 TREE_VEC_ELT (t, 1)
11541 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11542 ".omp_data_sizes");
11543 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11544 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11545 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11546 tree tkind_type = short_unsigned_type_node;
11547 int talign_shift = 8;
11548 TREE_VEC_ELT (t, 2)
11549 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11550 ".omp_data_kinds");
11551 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11552 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11553 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11554 gimple_omp_target_set_data_arg (stmt, t);
1e8e9920 11555
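/* Rough example of what the three pieces hold (assuming the sole clause
   is map(tofrom: a) for "int a[100]" with 4-byte int): .omp_data_arr
   holds &a, .omp_data_sizes holds 400, and .omp_data_kinds holds the
   encoded GOMP_MAP_TOFROM kind; each array has MAP_CNT elements,
   filled in clause order via MAP_IDX below.  */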
4954efd4 11556 vec<constructor_elt, va_gc> *vsize;
11557 vec<constructor_elt, va_gc> *vkind;
11558 vec_alloc (vsize, map_cnt);
11559 vec_alloc (vkind, map_cnt);
11560 unsigned int map_idx = 0;
1e8e9920 11561
4954efd4 11562 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11563 switch (OMP_CLAUSE_CODE (c))
1e8e9920 11564 {
4954efd4 11565 tree ovar, nc, s, purpose, var, x, type;
11566 unsigned int talign;
1e8e9920 11567
4954efd4 11568 default:
11569 break;
1e8e9920 11570
4954efd4 11571 case OMP_CLAUSE_MAP:
11572 case OMP_CLAUSE_TO:
11573 case OMP_CLAUSE_FROM:
11574 oacc_firstprivate_map:
11575 nc = c;
11576 ovar = OMP_CLAUSE_DECL (c);
11577 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11578 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11579 || (OMP_CLAUSE_MAP_KIND (c)
11580 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11581 break;
11582 if (!DECL_P (ovar))
9884aaf8 11583 {
4954efd4 11584 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11585 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11586 {
11587 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11588 == get_base_address (ovar));
11589 nc = OMP_CLAUSE_CHAIN (c);
11590 ovar = OMP_CLAUSE_DECL (nc);
11591 }
11592 else
11593 {
11594 tree x = build_sender_ref (ovar, ctx);
11595 tree v
11596 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11597 gimplify_assign (x, v, &ilist);
11598 nc = NULL_TREE;
11599 }
11600 }
11601 else
11602 {
11603 if (DECL_SIZE (ovar)
11604 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11605 {
11606 tree ovar2 = DECL_VALUE_EXPR (ovar);
11607 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11608 ovar2 = TREE_OPERAND (ovar2, 0);
11609 gcc_assert (DECL_P (ovar2));
11610 ovar = ovar2;
11611 }
11612 if (!maybe_lookup_field (ovar, ctx))
11613 continue;
9884aaf8 11614 }
61e47ac8 11615
4954efd4 11616 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11617 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11618 talign = DECL_ALIGN_UNIT (ovar);
11619 if (nc)
11620 {
11621 var = lookup_decl_in_outer_ctx (ovar, ctx);
11622 x = build_sender_ref (ovar, ctx);
61e47ac8 11623
4954efd4 11624 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11625 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11626 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11627 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11628 {
11629 gcc_assert (offloaded);
11630 tree avar
11631 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11632 mark_addressable (avar);
11633 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11634 talign = DECL_ALIGN_UNIT (avar);
11635 avar = build_fold_addr_expr (avar);
11636 gimplify_assign (x, avar, &ilist);
11637 }
11638 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11639 {
11640 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11641 if (!omp_is_reference (var))
11642 {
11643 if (is_gimple_reg (var)
11644 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11645 TREE_NO_WARNING (var) = 1;
11646 var = build_fold_addr_expr (var);
11647 }
11648 else
11649 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11650 gimplify_assign (x, var, &ilist);
11651 }
11652 else if (is_gimple_reg (var))
11653 {
11654 gcc_assert (offloaded);
11655 tree avar = create_tmp_var (TREE_TYPE (var));
11656 mark_addressable (avar);
11657 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11658 if (GOMP_MAP_COPY_TO_P (map_kind)
11659 || map_kind == GOMP_MAP_POINTER
11660 || map_kind == GOMP_MAP_TO_PSET
11661 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11662 {
11663 /* If we need to initialize a temporary
11664 with VAR because it is not addressable, and
11665 the variable hasn't been initialized yet, then
11666 we'll get a warning for the store to avar.
 11667 Don't warn in that case; the mapping might
11668 be implicit. */
11669 TREE_NO_WARNING (var) = 1;
11670 gimplify_assign (avar, var, &ilist);
11671 }
11672 avar = build_fold_addr_expr (avar);
11673 gimplify_assign (x, avar, &ilist);
11674 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11675 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11676 && !TYPE_READONLY (TREE_TYPE (var)))
11677 {
11678 x = unshare_expr (x);
11679 x = build_simple_mem_ref (x);
11680 gimplify_assign (var, x, &olist);
11681 }
11682 }
11683 else
11684 {
11685 var = build_fold_addr_expr (var);
11686 gimplify_assign (x, var, &ilist);
11687 }
11688 }
11689 s = NULL_TREE;
11690 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11691 {
11692 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11693 s = TREE_TYPE (ovar);
11694 if (TREE_CODE (s) == REFERENCE_TYPE)
11695 s = TREE_TYPE (s);
11696 s = TYPE_SIZE_UNIT (s);
11697 }
11698 else
11699 s = OMP_CLAUSE_SIZE (c);
11700 if (s == NULL_TREE)
11701 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11702 s = fold_convert (size_type_node, s);
11703 purpose = size_int (map_idx++);
11704 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11705 if (TREE_CODE (s) != INTEGER_CST)
11706 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
61e47ac8 11707
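/* Worked example of the encoding built below: an 8-byte-aligned object
   mapped with kind GOMP_MAP_TOFROM gets talign = ceil_log2 (8) = 3, so
   its .omp_data_kinds entry is GOMP_MAP_TOFROM | (3 << talign_shift),
   i.e. the map kind in the low 8 bits and the log2 alignment above.  */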
4954efd4 11708 unsigned HOST_WIDE_INT tkind, tkind_zero;
11709 switch (OMP_CLAUSE_CODE (c))
11710 {
11711 case OMP_CLAUSE_MAP:
11712 tkind = OMP_CLAUSE_MAP_KIND (c);
11713 tkind_zero = tkind;
11714 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11715 switch (tkind)
11716 {
11717 case GOMP_MAP_ALLOC:
11718 case GOMP_MAP_TO:
11719 case GOMP_MAP_FROM:
11720 case GOMP_MAP_TOFROM:
11721 case GOMP_MAP_ALWAYS_TO:
11722 case GOMP_MAP_ALWAYS_FROM:
11723 case GOMP_MAP_ALWAYS_TOFROM:
11724 case GOMP_MAP_RELEASE:
11725 case GOMP_MAP_FORCE_TO:
11726 case GOMP_MAP_FORCE_FROM:
11727 case GOMP_MAP_FORCE_TOFROM:
11728 case GOMP_MAP_FORCE_PRESENT:
11729 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11730 break;
11731 case GOMP_MAP_DELETE:
11732 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11733 default:
11734 break;
11735 }
11736 if (tkind_zero != tkind)
11737 {
11738 if (integer_zerop (s))
11739 tkind = tkind_zero;
11740 else if (integer_nonzerop (s))
11741 tkind_zero = tkind;
11742 }
11743 break;
11744 case OMP_CLAUSE_FIRSTPRIVATE:
11745 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11746 tkind = GOMP_MAP_TO;
11747 tkind_zero = tkind;
11748 break;
11749 case OMP_CLAUSE_TO:
11750 tkind = GOMP_MAP_TO;
11751 tkind_zero = tkind;
11752 break;
11753 case OMP_CLAUSE_FROM:
11754 tkind = GOMP_MAP_FROM;
11755 tkind_zero = tkind;
11756 break;
11757 default:
11758 gcc_unreachable ();
11759 }
11760 gcc_checking_assert (tkind
11761 < (HOST_WIDE_INT_C (1U) << talign_shift));
11762 gcc_checking_assert (tkind_zero
11763 < (HOST_WIDE_INT_C (1U) << talign_shift));
11764 talign = ceil_log2 (talign);
11765 tkind |= talign << talign_shift;
11766 tkind_zero |= talign << talign_shift;
11767 gcc_checking_assert (tkind
11768 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11769 gcc_checking_assert (tkind_zero
11770 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11771 if (tkind == tkind_zero)
11772 x = build_int_cstu (tkind_type, tkind);
11773 else
11774 {
11775 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11776 x = build3 (COND_EXPR, tkind_type,
11777 fold_build2 (EQ_EXPR, boolean_type_node,
11778 unshare_expr (s), size_zero_node),
11779 build_int_cstu (tkind_type, tkind_zero),
11780 build_int_cstu (tkind_type, tkind));
11781 }
11782 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11783 if (nc && nc != c)
11784 c = nc;
11785 break;
3ec11c49 11786
4954efd4 11787 case OMP_CLAUSE_FIRSTPRIVATE:
11788 if (is_oacc_parallel (ctx))
11789 goto oacc_firstprivate_map;
11790 ovar = OMP_CLAUSE_DECL (c);
11791 if (omp_is_reference (ovar))
11792 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11793 else
11794 talign = DECL_ALIGN_UNIT (ovar);
11795 var = lookup_decl_in_outer_ctx (ovar, ctx);
11796 x = build_sender_ref (ovar, ctx);
11797 tkind = GOMP_MAP_FIRSTPRIVATE;
11798 type = TREE_TYPE (ovar);
11799 if (omp_is_reference (ovar))
11800 type = TREE_TYPE (type);
11801 if ((INTEGRAL_TYPE_P (type)
11802 && TYPE_PRECISION (type) <= POINTER_SIZE)
11803 || TREE_CODE (type) == POINTER_TYPE)
11804 {
11805 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11806 tree t = var;
11807 if (omp_is_reference (var))
11808 t = build_simple_mem_ref (var);
11809 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11810 TREE_NO_WARNING (var) = 1;
11811 if (TREE_CODE (type) != POINTER_TYPE)
11812 t = fold_convert (pointer_sized_int_node, t);
11813 t = fold_convert (TREE_TYPE (x), t);
11814 gimplify_assign (x, t, &ilist);
11815 }
11816 else if (omp_is_reference (var))
11817 gimplify_assign (x, var, &ilist);
11818 else if (is_gimple_reg (var))
11819 {
11820 tree avar = create_tmp_var (TREE_TYPE (var));
11821 mark_addressable (avar);
11822 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11823 TREE_NO_WARNING (var) = 1;
11824 gimplify_assign (avar, var, &ilist);
11825 avar = build_fold_addr_expr (avar);
11826 gimplify_assign (x, avar, &ilist);
11827 }
11828 else
11829 {
11830 var = build_fold_addr_expr (var);
11831 gimplify_assign (x, var, &ilist);
11832 }
11833 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11834 s = size_int (0);
11835 else if (omp_is_reference (ovar))
11836 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11837 else
11838 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11839 s = fold_convert (size_type_node, s);
11840 purpose = size_int (map_idx++);
11841 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11842 if (TREE_CODE (s) != INTEGER_CST)
11843 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
3ec11c49 11844
4954efd4 11845 gcc_checking_assert (tkind
11846 < (HOST_WIDE_INT_C (1U) << talign_shift));
11847 talign = ceil_log2 (talign);
11848 tkind |= talign << talign_shift;
11849 gcc_checking_assert (tkind
11850 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11851 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11852 build_int_cstu (tkind_type, tkind));
11853 break;
3ec11c49 11854
4954efd4 11855 case OMP_CLAUSE_USE_DEVICE_PTR:
75145cfe 11856 case OMP_CLAUSE_USE_DEVICE_ADDR:
4954efd4 11857 case OMP_CLAUSE_IS_DEVICE_PTR:
11858 ovar = OMP_CLAUSE_DECL (c);
11859 var = lookup_decl_in_outer_ctx (ovar, ctx);
75145cfe 11860 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
7082a9c7 11861 {
11862 tkind = GOMP_MAP_USE_DEVICE_PTR;
11863 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
11864 }
4954efd4 11865 else
7082a9c7 11866 {
11867 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11868 x = build_sender_ref (ovar, ctx);
11869 }
4954efd4 11870 type = TREE_TYPE (ovar);
75145cfe 11871 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11872 && !omp_is_reference (ovar))
11873 || TREE_CODE (type) == ARRAY_TYPE)
4954efd4 11874 var = build_fold_addr_expr (var);
11875 else
11876 {
11877 if (omp_is_reference (ovar))
11878 {
11879 type = TREE_TYPE (type);
75145cfe 11880 if (TREE_CODE (type) != ARRAY_TYPE
11881 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR)
4954efd4 11882 var = build_simple_mem_ref (var);
11883 var = fold_convert (TREE_TYPE (x), var);
11884 }
11885 }
11886 gimplify_assign (x, var, &ilist);
11887 s = size_int (0);
11888 purpose = size_int (map_idx++);
11889 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11890 gcc_checking_assert (tkind
11891 < (HOST_WIDE_INT_C (1U) << talign_shift));
11892 gcc_checking_assert (tkind
11893 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11894 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11895 build_int_cstu (tkind_type, tkind));
11896 break;
11897 }
3ec11c49 11898
4954efd4 11899 gcc_assert (map_idx == map_cnt);
2169f33b 11900
4954efd4 11901 DECL_INITIAL (TREE_VEC_ELT (t, 1))
11902 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
11903 DECL_INITIAL (TREE_VEC_ELT (t, 2))
11904 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
11905 for (int i = 1; i <= 2; i++)
11906 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
11907 {
11908 gimple_seq initlist = NULL;
11909 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
11910 TREE_VEC_ELT (t, i)),
11911 &initlist, true, NULL_TREE);
11912 gimple_seq_add_seq (&ilist, initlist);
2169f33b 11913
4954efd4 11914 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
11915 NULL);
11916 TREE_THIS_VOLATILE (clobber) = 1;
11917 gimple_seq_add_stmt (&olist,
11918 gimple_build_assign (TREE_VEC_ELT (t, i),
11919 clobber));
11920 }
3ec11c49 11921
4954efd4 11922 tree clobber = build_constructor (ctx->record_type, NULL);
11923 TREE_THIS_VOLATILE (clobber) = 1;
11924 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11925 clobber));
11926 }
3ec11c49 11927
4954efd4 11928 /* Once all the expansions are done, sequence all the different
11929 fragments inside gimple_omp_body. */
3ec11c49 11930
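/* The pieces are appended in this order: initialization of the receiver
   decl from the sender record (offloaded regions with a record type),
   the OpenACC firstprivate assignments collected in FPLIST, the
   firstprivate/private/use_device and GOMP_MAP_FIRSTPRIVATE_*
   initializers produced by the clause walks below, the OpenACC
   fork/reduction setup, the lowered region body, the matching join, and
   finally a GIMPLE_OMP_RETURN.  */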
4954efd4 11931 new_body = NULL;
3ec11c49 11932
4954efd4 11933 if (offloaded
11934 && ctx->record_type)
3ec11c49 11935 {
4954efd4 11936 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11937 /* fixup_child_record_type might have changed receiver_decl's type. */
11938 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11939 gimple_seq_add_stmt (&new_body,
11940 gimple_build_assign (ctx->receiver_decl, t));
3ec11c49 11941 }
4954efd4 11942 gimple_seq_add_seq (&new_body, fplist);
3ec11c49 11943
4954efd4 11944 if (offloaded || data_region)
7740abd8 11945 {
4954efd4 11946 tree prev = NULL_TREE;
11947 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11948 switch (OMP_CLAUSE_CODE (c))
7740abd8 11949 {
4954efd4 11950 tree var, x;
11951 default:
11952 break;
11953 case OMP_CLAUSE_FIRSTPRIVATE:
11954 if (is_gimple_omp_oacc (ctx->stmt))
11955 break;
11956 var = OMP_CLAUSE_DECL (c);
11957 if (omp_is_reference (var)
11958 || is_gimple_reg_type (TREE_TYPE (var)))
7740abd8 11959 {
4954efd4 11960 tree new_var = lookup_decl (var, ctx);
11961 tree type;
11962 type = TREE_TYPE (var);
11963 if (omp_is_reference (var))
11964 type = TREE_TYPE (type);
11965 if ((INTEGRAL_TYPE_P (type)
11966 && TYPE_PRECISION (type) <= POINTER_SIZE)
11967 || TREE_CODE (type) == POINTER_TYPE)
11968 {
11969 x = build_receiver_ref (var, false, ctx);
11970 if (TREE_CODE (type) != POINTER_TYPE)
11971 x = fold_convert (pointer_sized_int_node, x);
11972 x = fold_convert (type, x);
11973 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11974 fb_rvalue);
11975 if (omp_is_reference (var))
11976 {
11977 tree v = create_tmp_var_raw (type, get_name (var));
11978 gimple_add_tmp_var (v);
11979 TREE_ADDRESSABLE (v) = 1;
11980 gimple_seq_add_stmt (&new_body,
11981 gimple_build_assign (v, x));
11982 x = build_fold_addr_expr (v);
11983 }
11984 gimple_seq_add_stmt (&new_body,
11985 gimple_build_assign (new_var, x));
11986 }
11987 else
11988 {
11989 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
11990 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11991 fb_rvalue);
11992 gimple_seq_add_stmt (&new_body,
11993 gimple_build_assign (new_var, x));
11994 }
11995 }
11996 else if (is_variable_sized (var))
11997 {
11998 tree pvar = DECL_VALUE_EXPR (var);
11999 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12000 pvar = TREE_OPERAND (pvar, 0);
12001 gcc_assert (DECL_P (pvar));
12002 tree new_var = lookup_decl (pvar, ctx);
12003 x = build_receiver_ref (var, false, ctx);
12004 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12005 gimple_seq_add_stmt (&new_body,
12006 gimple_build_assign (new_var, x));
12007 }
12008 break;
12009 case OMP_CLAUSE_PRIVATE:
12010 if (is_gimple_omp_oacc (ctx->stmt))
12011 break;
12012 var = OMP_CLAUSE_DECL (c);
12013 if (omp_is_reference (var))
12014 {
12015 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12016 tree new_var = lookup_decl (var, ctx);
12017 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12018 if (TREE_CONSTANT (x))
12019 {
12020 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12021 get_name (var));
12022 gimple_add_tmp_var (x);
12023 TREE_ADDRESSABLE (x) = 1;
12024 x = build_fold_addr_expr_loc (clause_loc, x);
12025 }
12026 else
12027 break;
cbba99a0 12028
4954efd4 12029 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12030 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12031 gimple_seq_add_stmt (&new_body,
12032 gimple_build_assign (new_var, x));
12033 }
12034 break;
12035 case OMP_CLAUSE_USE_DEVICE_PTR:
75145cfe 12036 case OMP_CLAUSE_USE_DEVICE_ADDR:
4954efd4 12037 case OMP_CLAUSE_IS_DEVICE_PTR:
12038 var = OMP_CLAUSE_DECL (c);
75145cfe 12039 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
7082a9c7 12040 x = build_sender_ref ((splay_tree_key) &DECL_UID (var), ctx);
4954efd4 12041 else
12042 x = build_receiver_ref (var, false, ctx);
12043 if (is_variable_sized (var))
12044 {
12045 tree pvar = DECL_VALUE_EXPR (var);
12046 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12047 pvar = TREE_OPERAND (pvar, 0);
12048 gcc_assert (DECL_P (pvar));
12049 tree new_var = lookup_decl (pvar, ctx);
12050 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12051 gimple_seq_add_stmt (&new_body,
12052 gimple_build_assign (new_var, x));
12053 }
75145cfe 12054 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12055 && !omp_is_reference (var))
12056 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
4954efd4 12057 {
12058 tree new_var = lookup_decl (var, ctx);
12059 new_var = DECL_VALUE_EXPR (new_var);
12060 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12061 new_var = TREE_OPERAND (new_var, 0);
12062 gcc_assert (DECL_P (new_var));
12063 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12064 gimple_seq_add_stmt (&new_body,
12065 gimple_build_assign (new_var, x));
12066 }
cbba99a0 12067 else
4954efd4 12068 {
12069 tree type = TREE_TYPE (var);
12070 tree new_var = lookup_decl (var, ctx);
12071 if (omp_is_reference (var))
12072 {
12073 type = TREE_TYPE (type);
75145cfe 12074 if (TREE_CODE (type) != ARRAY_TYPE
12075 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR)
4954efd4 12076 {
12077 tree v = create_tmp_var_raw (type, get_name (var));
12078 gimple_add_tmp_var (v);
12079 TREE_ADDRESSABLE (v) = 1;
12080 x = fold_convert (type, x);
12081 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12082 fb_rvalue);
12083 gimple_seq_add_stmt (&new_body,
12084 gimple_build_assign (v, x));
12085 x = build_fold_addr_expr (v);
12086 }
12087 }
12088 new_var = DECL_VALUE_EXPR (new_var);
12089 x = fold_convert (TREE_TYPE (new_var), x);
12090 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12091 gimple_seq_add_stmt (&new_body,
12092 gimple_build_assign (new_var, x));
12093 }
12094 break;
cbba99a0 12095 }
4954efd4 12096 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
 12097 so that any firstprivate vars that hold the OMP_CLAUSE_SIZE bias
 12098 have already been handled. Likewise OMP_CLAUSE_PRIVATE for VLAs
 12099 or references to VLAs. */
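/* Sketch (declarations illustrative): for "int *p" mapped with
   map(tofrom: p[0:n]) the array-section map is followed by a
   GOMP_MAP_FIRSTPRIVATE_POINTER clause for P; PREV remembers the
   section clause so that the region's private copy of P can be set
   from that clause's receiver ref, adjusted by the bias recorded in
   OMP_CLAUSE_SIZE.  */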
12100 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12101 switch (OMP_CLAUSE_CODE (c))
12102 {
12103 tree var;
12104 default:
12105 break;
12106 case OMP_CLAUSE_MAP:
12107 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12108 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12109 {
12110 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
773078cb 12111 poly_int64 offset = 0;
4954efd4 12112 gcc_assert (prev);
12113 var = OMP_CLAUSE_DECL (c);
12114 if (DECL_P (var)
12115 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12116 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12117 ctx))
12118 && varpool_node::get_create (var)->offloadable)
12119 break;
12120 if (TREE_CODE (var) == INDIRECT_REF
12121 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12122 var = TREE_OPERAND (var, 0);
12123 if (TREE_CODE (var) == COMPONENT_REF)
12124 {
12125 var = get_addr_base_and_unit_offset (var, &offset);
12126 gcc_assert (var != NULL_TREE && DECL_P (var));
12127 }
12128 else if (DECL_SIZE (var)
12129 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12130 {
12131 tree var2 = DECL_VALUE_EXPR (var);
12132 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12133 var2 = TREE_OPERAND (var2, 0);
12134 gcc_assert (DECL_P (var2));
12135 var = var2;
12136 }
12137 tree new_var = lookup_decl (var, ctx), x;
12138 tree type = TREE_TYPE (new_var);
12139 bool is_ref;
12140 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12141 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12142 == COMPONENT_REF))
12143 {
12144 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12145 is_ref = true;
12146 new_var = build2 (MEM_REF, type,
12147 build_fold_addr_expr (new_var),
12148 build_int_cst (build_pointer_type (type),
12149 offset));
12150 }
12151 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12152 {
12153 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12154 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12155 new_var = build2 (MEM_REF, type,
12156 build_fold_addr_expr (new_var),
12157 build_int_cst (build_pointer_type (type),
12158 offset));
12159 }
12160 else
12161 is_ref = omp_is_reference (var);
12162 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12163 is_ref = false;
12164 bool ref_to_array = false;
12165 if (is_ref)
12166 {
12167 type = TREE_TYPE (type);
12168 if (TREE_CODE (type) == ARRAY_TYPE)
12169 {
12170 type = build_pointer_type (type);
12171 ref_to_array = true;
12172 }
12173 }
12174 else if (TREE_CODE (type) == ARRAY_TYPE)
12175 {
12176 tree decl2 = DECL_VALUE_EXPR (new_var);
12177 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12178 decl2 = TREE_OPERAND (decl2, 0);
12179 gcc_assert (DECL_P (decl2));
12180 new_var = decl2;
12181 type = TREE_TYPE (new_var);
12182 }
12183 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12184 x = fold_convert_loc (clause_loc, type, x);
12185 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12186 {
12187 tree bias = OMP_CLAUSE_SIZE (c);
12188 if (DECL_P (bias))
12189 bias = lookup_decl (bias, ctx);
12190 bias = fold_convert_loc (clause_loc, sizetype, bias);
12191 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12192 bias);
12193 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12194 TREE_TYPE (x), x, bias);
12195 }
12196 if (ref_to_array)
12197 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12198 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12199 if (is_ref && !ref_to_array)
12200 {
12201 tree t = create_tmp_var_raw (type, get_name (var));
12202 gimple_add_tmp_var (t);
12203 TREE_ADDRESSABLE (t) = 1;
12204 gimple_seq_add_stmt (&new_body,
12205 gimple_build_assign (t, x));
12206 x = build_fold_addr_expr_loc (clause_loc, t);
12207 }
12208 gimple_seq_add_stmt (&new_body,
12209 gimple_build_assign (new_var, x));
12210 prev = NULL_TREE;
12211 }
12212 else if (OMP_CLAUSE_CHAIN (c)
12213 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12214 == OMP_CLAUSE_MAP
12215 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12216 == GOMP_MAP_FIRSTPRIVATE_POINTER
12217 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12218 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12219 prev = c;
12220 break;
12221 case OMP_CLAUSE_PRIVATE:
12222 var = OMP_CLAUSE_DECL (c);
12223 if (is_variable_sized (var))
12224 {
12225 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12226 tree new_var = lookup_decl (var, ctx);
12227 tree pvar = DECL_VALUE_EXPR (var);
12228 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12229 pvar = TREE_OPERAND (pvar, 0);
12230 gcc_assert (DECL_P (pvar));
12231 tree new_pvar = lookup_decl (pvar, ctx);
12232 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12233 tree al = size_int (DECL_ALIGN (var));
12234 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12235 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12236 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12237 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12238 gimple_seq_add_stmt (&new_body,
12239 gimple_build_assign (new_pvar, x));
12240 }
12241 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12242 {
12243 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12244 tree new_var = lookup_decl (var, ctx);
12245 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12246 if (TREE_CONSTANT (x))
12247 break;
12248 else
12249 {
12250 tree atmp
12251 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12252 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12253 tree al = size_int (TYPE_ALIGN (rtype));
12254 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12255 }
cbba99a0 12256
4954efd4 12257 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12258 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12259 gimple_seq_add_stmt (&new_body,
12260 gimple_build_assign (new_var, x));
12261 }
12262 break;
12263 }
cbba99a0 12264
4954efd4 12265 gimple_seq fork_seq = NULL;
12266 gimple_seq join_seq = NULL;
cbba99a0 12267
4954efd4 12268 if (is_oacc_parallel (ctx))
cbba99a0 12269 {
4954efd4 12270 /* If there are reductions on the offloaded region itself, treat
12271 them as a dummy GANG loop. */
12272 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
cbba99a0 12273
4954efd4 12274 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12275 false, NULL, NULL, &fork_seq, &join_seq, ctx);
cbba99a0 12276 }
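/* E.g. (illustrative) for "#pragma acc parallel reduction(+:sum)"
   without an enclosing acc loop, the reduction setup lands in FORK_SEQ
   and the combination back into SUM in JOIN_SEQ, exactly as if SUM were
   reduced across a gang-level loop wrapping the whole region.  */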
cbba99a0 12277
4954efd4 12278 gimple_seq_add_seq (&new_body, fork_seq);
12279 gimple_seq_add_seq (&new_body, tgt_body);
12280 gimple_seq_add_seq (&new_body, join_seq);
cbba99a0 12281
4954efd4 12282 if (offloaded)
12283 new_body = maybe_catch_exception (new_body);
cbba99a0 12284
4954efd4 12285 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12286 gimple_omp_set_body (stmt, new_body);
cbba99a0 12287 }
12288
4954efd4 12289 bind = gimple_build_bind (NULL, NULL,
12290 tgt_bind ? gimple_bind_block (tgt_bind)
12291 : NULL_TREE);
12292 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12293 gimple_bind_add_seq (bind, ilist);
12294 gimple_bind_add_stmt (bind, stmt);
12295 gimple_bind_add_seq (bind, olist);
cbba99a0 12296
12297 pop_gimplify_context (NULL);
12298
4954efd4 12299 if (dep_bind)
948eee2f 12300 {
4954efd4 12301 gimple_bind_add_seq (dep_bind, dep_ilist);
12302 gimple_bind_add_stmt (dep_bind, bind);
12303 gimple_bind_add_seq (dep_bind, dep_olist);
12304 pop_gimplify_context (dep_bind);
948eee2f 12305 }
948eee2f 12306}
12307
4954efd4 12308/* Expand code for an OpenMP teams directive. */
0bb0f256 12309
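/* A minimal sketch of the result (values illustrative): for
   "#pragma omp teams num_teams(4) thread_limit(8)" the bind built below
   holds the data-sharing setup from the clauses, the teams statement, a
   call to GOMP_teams (4, 8) via BUILT_IN_GOMP_TEAMS, the lowered body,
   then the reduction and destructor sequences and a GIMPLE_OMP_RETURN.  */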
6f431819 12310static void
4954efd4 12311lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
0bb0f256 12312{
4954efd4 12313 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12314 push_gimplify_context ();
0bb0f256 12315
4954efd4 12316 tree block = make_node (BLOCK);
12317 gbind *bind = gimple_build_bind (NULL, NULL, block);
12318 gsi_replace (gsi_p, bind, true);
12319 gimple_seq bind_body = NULL;
12320 gimple_seq dlist = NULL;
12321 gimple_seq olist = NULL;
0bb0f256 12322
4954efd4 12323 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12324 OMP_CLAUSE_NUM_TEAMS);
12325 if (num_teams == NULL_TREE)
12326 num_teams = build_int_cst (unsigned_type_node, 0);
12327 else
0bb0f256 12328 {
4954efd4 12329 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12330 num_teams = fold_convert (unsigned_type_node, num_teams);
12331 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
0bb0f256 12332 }
4954efd4 12333 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12334 OMP_CLAUSE_THREAD_LIMIT);
12335 if (thread_limit == NULL_TREE)
12336 thread_limit = build_int_cst (unsigned_type_node, 0);
12337 else
0bb0f256 12338 {
4954efd4 12339 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12340 thread_limit = fold_convert (unsigned_type_node, thread_limit);
12341 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12342 fb_rvalue);
0bb0f256 12343 }
cbba99a0 12344
4954efd4 12345 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12346 &bind_body, &dlist, ctx, NULL);
12347 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
9a1d892b 12348 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12349 NULL, ctx);
4954efd4 12350 if (!gimple_omp_teams_grid_phony (teams_stmt))
cbba99a0 12351 {
4954efd4 12352 gimple_seq_add_stmt (&bind_body, teams_stmt);
12353 location_t loc = gimple_location (teams_stmt);
12354 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12355 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12356 gimple_set_location (call, loc);
12357 gimple_seq_add_stmt (&bind_body, call);
cbba99a0 12358 }
12359
4954efd4 12360 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12361 gimple_omp_set_body (teams_stmt, NULL);
12362 gimple_seq_add_seq (&bind_body, olist);
12363 gimple_seq_add_seq (&bind_body, dlist);
12364 if (!gimple_omp_teams_grid_phony (teams_stmt))
12365 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12366 gimple_bind_set_body (bind, bind_body);
cbba99a0 12367
4954efd4 12368 pop_gimplify_context (bind);
cbba99a0 12369
4954efd4 12370 gimple_bind_append_vars (bind, ctx->block_vars);
12371 BLOCK_VARS (block) = ctx->block_vars;
12372 if (BLOCK_VARS (block))
12373 TREE_USED (block) = 1;
cbba99a0 12374}
12375
4954efd4 12376/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
cbba99a0 12377
4954efd4 12378static void
12379lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
cbba99a0 12380{
4954efd4 12381 gimple *stmt = gsi_stmt (*gsi_p);
12382 lower_omp (gimple_omp_body_ptr (stmt), ctx);
12383 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
12384 gimple_build_omp_return (false));
cbba99a0 12385}
12386
cbba99a0 12387
4954efd4 12388/* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12389 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12390 of OMP context, but with task_shared_vars set. */
cbba99a0 12391
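/* For instance, a variable X shared in a task typically ends up with
   DECL_HAS_VALUE_EXPR_P set and a value-expr of the form
   .omp_data_i->x (field name illustrative); a condition like
   "if (x > 5)" then no longer has gimple-value operands, and returning
   non-NULL here asks the caller to regimplify the statement.  */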
4954efd4 12392static tree
12393lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12394 void *data)
cbba99a0 12395{
4954efd4 12396 tree t = *tp;
cbba99a0 12397
4954efd4 12398 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12399 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12400 return t;
cbba99a0 12401
4954efd4 12402 if (task_shared_vars
12403 && DECL_P (t)
12404 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12405 return t;
cbba99a0 12406
4954efd4 12407 /* If a global variable has been privatized, TREE_CONSTANT on
12408 ADDR_EXPR might be wrong. */
12409 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12410 recompute_tree_invariant_for_addr_expr (t);
cbba99a0 12411
4954efd4 12412 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12413 return NULL_TREE;
cbba99a0 12414}
12415
4954efd4 12416/* Data to be communicated between lower_omp_regimplify_operands and
12417 lower_omp_regimplify_operands_p. */
cbba99a0 12418
4954efd4 12419struct lower_omp_regimplify_operands_data
cbba99a0 12420{
4954efd4 12421 omp_context *ctx;
12422 vec<tree> *decls;
12423};
cbba99a0 12424
4954efd4 12425/* Helper function for lower_omp_regimplify_operands. Find
12426 omp_member_access_dummy_var vars and adjust temporarily their
12427 DECL_VALUE_EXPRs if needed. */
cbba99a0 12428
4954efd4 12429static tree
12430lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12431 void *data)
cbba99a0 12432{
4954efd4 12433 tree t = omp_member_access_dummy_var (*tp);
12434 if (t)
cbba99a0 12435 {
4954efd4 12436 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12437 lower_omp_regimplify_operands_data *ldata
12438 = (lower_omp_regimplify_operands_data *) wi->info;
12439 tree o = maybe_lookup_decl (t, ldata->ctx);
12440 if (o != t)
cbba99a0 12441 {
4954efd4 12442 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12443 ldata->decls->safe_push (*tp);
12444 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12445 SET_DECL_VALUE_EXPR (*tp, v);
cbba99a0 12446 }
cbba99a0 12447 }
4954efd4 12448 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12449 return NULL_TREE;
cbba99a0 12450}
12451
4954efd4 12452/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12453 of omp_member_access_dummy_var vars during regimplification. */
cbba99a0 12454
12455static void
4954efd4 12456lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12457 gimple_stmt_iterator *gsi_p)
cbba99a0 12458{
4954efd4 12459 auto_vec<tree, 10> decls;
12460 if (ctx)
12461 {
12462 struct walk_stmt_info wi;
12463 memset (&wi, '\0', sizeof (wi));
12464 struct lower_omp_regimplify_operands_data data;
12465 data.ctx = ctx;
12466 data.decls = &decls;
12467 wi.info = &data;
12468 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12469 }
12470 gimple_regimplify_operands (stmt, gsi_p);
12471 while (!decls.is_empty ())
12472 {
12473 tree t = decls.pop ();
12474 tree v = decls.pop ();
12475 SET_DECL_VALUE_EXPR (t, v);
12476 }
cbba99a0 12477}
12478
cbba99a0 12479static void
4954efd4 12480lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
cbba99a0 12481{
4954efd4 12482 gimple *stmt = gsi_stmt (*gsi_p);
12483 struct walk_stmt_info wi;
12484 gcall *call_stmt;
cbba99a0 12485
4954efd4 12486 if (gimple_has_location (stmt))
12487 input_location = gimple_location (stmt);
cbba99a0 12488
4954efd4 12489 if (task_shared_vars)
12490 memset (&wi, '\0', sizeof (wi));
cbba99a0 12491
4954efd4 12492 /* If we have issued syntax errors, avoid doing any heavy lifting.
12493 Just replace the OMP directives with a NOP to avoid
12494 confusing RTL expansion. */
12495 if (seen_error () && is_gimple_omp (stmt))
cbba99a0 12496 {
4954efd4 12497 gsi_replace (gsi_p, gimple_build_nop (), true);
12498 return;
12499 }
cbba99a0 12500
4954efd4 12501 switch (gimple_code (stmt))
12502 {
12503 case GIMPLE_COND:
12504 {
12505 gcond *cond_stmt = as_a <gcond *> (stmt);
12506 if ((ctx || task_shared_vars)
12507 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12508 lower_omp_regimplify_p,
12509 ctx ? NULL : &wi, NULL)
12510 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12511 lower_omp_regimplify_p,
12512 ctx ? NULL : &wi, NULL)))
12513 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12514 }
12515 break;
12516 case GIMPLE_CATCH:
12517 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
12518 break;
12519 case GIMPLE_EH_FILTER:
12520 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
12521 break;
12522 case GIMPLE_TRY:
12523 lower_omp (gimple_try_eval_ptr (stmt), ctx);
12524 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
12525 break;
12526 case GIMPLE_TRANSACTION:
7c6746c9 12527 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
4954efd4 12528 ctx);
12529 break;
12530 case GIMPLE_BIND:
12531 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
2918f4e9 12532 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
4954efd4 12533 break;
12534 case GIMPLE_OMP_PARALLEL:
12535 case GIMPLE_OMP_TASK:
12536 ctx = maybe_lookup_ctx (stmt);
12537 gcc_assert (ctx);
12538 if (ctx->cancellable)
12539 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12540 lower_omp_taskreg (gsi_p, ctx);
12541 break;
12542 case GIMPLE_OMP_FOR:
12543 ctx = maybe_lookup_ctx (stmt);
12544 gcc_assert (ctx);
12545 if (ctx->cancellable)
12546 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12547 lower_omp_for (gsi_p, ctx);
12548 break;
12549 case GIMPLE_OMP_SECTIONS:
12550 ctx = maybe_lookup_ctx (stmt);
12551 gcc_assert (ctx);
12552 if (ctx->cancellable)
12553 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12554 lower_omp_sections (gsi_p, ctx);
12555 break;
12556 case GIMPLE_OMP_SINGLE:
12557 ctx = maybe_lookup_ctx (stmt);
12558 gcc_assert (ctx);
12559 lower_omp_single (gsi_p, ctx);
12560 break;
12561 case GIMPLE_OMP_MASTER:
12562 ctx = maybe_lookup_ctx (stmt);
12563 gcc_assert (ctx);
12564 lower_omp_master (gsi_p, ctx);
12565 break;
12566 case GIMPLE_OMP_TASKGROUP:
12567 ctx = maybe_lookup_ctx (stmt);
12568 gcc_assert (ctx);
12569 lower_omp_taskgroup (gsi_p, ctx);
12570 break;
12571 case GIMPLE_OMP_ORDERED:
12572 ctx = maybe_lookup_ctx (stmt);
12573 gcc_assert (ctx);
12574 lower_omp_ordered (gsi_p, ctx);
12575 break;
70a6624c 12576 case GIMPLE_OMP_SCAN:
12577 ctx = maybe_lookup_ctx (stmt);
12578 gcc_assert (ctx);
da008d72 12579 lower_omp_scan (gsi_p, ctx);
70a6624c 12580 break;
4954efd4 12581 case GIMPLE_OMP_CRITICAL:
12582 ctx = maybe_lookup_ctx (stmt);
12583 gcc_assert (ctx);
12584 lower_omp_critical (gsi_p, ctx);
12585 break;
12586 case GIMPLE_OMP_ATOMIC_LOAD:
12587 if ((ctx || task_shared_vars)
12588 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12589 as_a <gomp_atomic_load *> (stmt)),
12590 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
12591 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12592 break;
12593 case GIMPLE_OMP_TARGET:
12594 ctx = maybe_lookup_ctx (stmt);
12595 gcc_assert (ctx);
12596 lower_omp_target (gsi_p, ctx);
12597 break;
12598 case GIMPLE_OMP_TEAMS:
12599 ctx = maybe_lookup_ctx (stmt);
12600 gcc_assert (ctx);
7e5a76c8 12601 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
12602 lower_omp_taskreg (gsi_p, ctx);
12603 else
12604 lower_omp_teams (gsi_p, ctx);
4954efd4 12605 break;
12606 case GIMPLE_OMP_GRID_BODY:
12607 ctx = maybe_lookup_ctx (stmt);
12608 gcc_assert (ctx);
12609 lower_omp_grid_body (gsi_p, ctx);
12610 break;
12611 case GIMPLE_CALL:
12612 tree fndecl;
12613 call_stmt = as_a <gcall *> (stmt);
12614 fndecl = gimple_call_fndecl (call_stmt);
12615 if (fndecl
a0e9bfbb 12616 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
4954efd4 12617 switch (DECL_FUNCTION_CODE (fndecl))
cbba99a0 12618 {
4954efd4 12619 case BUILT_IN_GOMP_BARRIER:
12620 if (ctx == NULL)
12621 break;
12622 /* FALLTHRU */
12623 case BUILT_IN_GOMP_CANCEL:
12624 case BUILT_IN_GOMP_CANCELLATION_POINT:
12625 omp_context *cctx;
12626 cctx = ctx;
12627 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
12628 cctx = cctx->outer;
12629 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
12630 if (!cctx->cancellable)
12631 {
12632 if (DECL_FUNCTION_CODE (fndecl)
12633 == BUILT_IN_GOMP_CANCELLATION_POINT)
12634 {
12635 stmt = gimple_build_nop ();
12636 gsi_replace (gsi_p, stmt, false);
12637 }
12638 break;
12639 }
12640 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
12641 {
12642 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
12643 gimple_call_set_fndecl (call_stmt, fndecl);
12644 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
12645 }
12646 tree lhs;
12647 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
12648 gimple_call_set_lhs (call_stmt, lhs);
12649 tree fallthru_label;
12650 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
12651 gimple *g;
12652 g = gimple_build_label (fallthru_label);
12653 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12654 g = gimple_build_cond (NE_EXPR, lhs,
12655 fold_convert (TREE_TYPE (lhs),
12656 boolean_false_node),
12657 cctx->cancel_label, fallthru_label);
12658 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12659 break;
12660 default:
12661 break;
cbba99a0 12662 }
9a1d892b 12663 goto regimplify;
12664
12665 case GIMPLE_ASSIGN:
09cf268c 12666 for (omp_context *up = ctx; up; up = up->outer)
9a1d892b 12667 {
09cf268c 12668 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
12669 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
12670 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
48152aa2 12671 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
70a6624c 12672 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
48152aa2 12673 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
12674 && (gimple_omp_target_kind (up->stmt)
12675 == GF_OMP_TARGET_KIND_DATA)))
09cf268c 12676 continue;
12677 else if (!up->lastprivate_conditional_map)
12678 break;
9a1d892b 12679 tree lhs = get_base_address (gimple_assign_lhs (stmt));
8259fae1 12680 if (TREE_CODE (lhs) == MEM_REF
12681 && DECL_P (TREE_OPERAND (lhs, 0))
12682 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
12683 0))) == REFERENCE_TYPE)
12684 lhs = TREE_OPERAND (lhs, 0);
9a1d892b 12685 if (DECL_P (lhs))
09cf268c 12686 if (tree *v = up->lastprivate_conditional_map->get (lhs))
9a1d892b 12687 {
eb7a699d 12688 tree clauses;
a0110ad7 12689 if (up->combined_into_simd_safelen1)
8d54bfe2 12690 {
12691 up = up->outer;
12692 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
12693 up = up->outer;
12694 }
eb7a699d 12695 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
12696 clauses = gimple_omp_for_clauses (up->stmt);
12697 else
12698 clauses = gimple_omp_sections_clauses (up->stmt);
9a1d892b 12699 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
4f4b92d8 12700 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
12701 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12702 OMP_CLAUSE__CONDTEMP_);
12703 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
9a1d892b 12704 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
12705 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12706 }
12707 }
4954efd4 12708 /* FALLTHRU */
9a1d892b 12709
4954efd4 12710 default:
9a1d892b 12711 regimplify:
4954efd4 12712 if ((ctx || task_shared_vars)
12713 && walk_gimple_op (stmt, lower_omp_regimplify_p,
12714 ctx ? NULL : &wi))
cbba99a0 12715 {
4954efd4 12716 /* Just remove clobbers; this should happen only if we have
 12717 "privatized" local addressable variables in SIMD regions.
 12718 The clobber isn't needed in that case, and gimplifying the address
 12719 of the ARRAY_REF into a pointer and creating a MEM_REF based
 12720 clobber would create worse code than we get with the clobber
 12721 dropped. */
12722 if (gimple_clobber_p (stmt))
584fba4b 12723 {
4954efd4 12724 gsi_replace (gsi_p, gimple_build_nop (), true);
12725 break;
cbba99a0 12726 }
4954efd4 12727 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
cbba99a0 12728 }
4954efd4 12729 break;
cbba99a0 12730 }
cbba99a0 12731}
12732
cbba99a0 12733static void
4954efd4 12734lower_omp (gimple_seq *body, omp_context *ctx)
cbba99a0 12735{
4954efd4 12736 location_t saved_location = input_location;
12737 gimple_stmt_iterator gsi;
12738 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12739 lower_omp_1 (&gsi, ctx);
12740 /* During gimplification, we haven't folded statments inside offloading
12741 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12742 if (target_nesting_level || taskreg_nesting_level)
12743 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12744 fold_stmt (&gsi);
12745 input_location = saved_location;
cbba99a0 12746}
12747
4954efd4 12748/* Main entry point. */
cbba99a0 12749
4954efd4 12750static unsigned int
12751execute_lower_omp (void)
cbba99a0 12752{
4954efd4 12753 gimple_seq body;
12754 int i;
12755 omp_context *ctx;
cbba99a0 12756
4954efd4 12757 /* This pass always runs, to provide PROP_gimple_lomp.
12758 But often, there is nothing to do. */
efa02472 12759 if (flag_openacc == 0 && flag_openmp == 0
4954efd4 12760 && flag_openmp_simd == 0)
12761 return 0;
cbba99a0 12762
4954efd4 12763 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
12764 delete_omp_context);
cbba99a0 12765
4954efd4 12766 body = gimple_body (current_function_decl);
cbba99a0 12767
4954efd4 12768 if (hsa_gen_requested_p ())
12769 omp_grid_gridify_all_targets (&body);
12770
12771 scan_omp (&body, NULL);
12772 gcc_assert (taskreg_nesting_level == 0);
12773 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
12774 finish_taskreg_scan (ctx);
12775 taskreg_contexts.release ();
cbba99a0 12776
4954efd4 12777 if (all_contexts->root)
12778 {
12779 if (task_shared_vars)
12780 push_gimplify_context ();
12781 lower_omp (&body, NULL);
12782 if (task_shared_vars)
12783 pop_gimplify_context (NULL);
12784 }
12785
12786 if (all_contexts)
12787 {
12788 splay_tree_delete (all_contexts);
12789 all_contexts = NULL;
cbba99a0 12790 }
4954efd4 12791 BITMAP_FREE (task_shared_vars);
70ef2520 12792 BITMAP_FREE (global_nonaddressable_vars);
2918f4e9 12793
12794 /* If current function is a method, remove artificial dummy VAR_DECL created
12795 for non-static data member privatization, they aren't needed for
12796 debuginfo nor anything else, have been already replaced everywhere in the
12797 IL and cause problems with LTO. */
12798 if (DECL_ARGUMENTS (current_function_decl)
12799 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
12800 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
12801 == POINTER_TYPE))
12802 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
4954efd4 12803 return 0;
cbba99a0 12804}
12805
4954efd4 12806namespace {
cbba99a0 12807
4954efd4 12808const pass_data pass_data_lower_omp =
cbba99a0 12809{
4954efd4 12810 GIMPLE_PASS, /* type */
12811 "omplower", /* name */
f57c8178 12812 OPTGROUP_OMP, /* optinfo_flags */
4954efd4 12813 TV_NONE, /* tv_id */
12814 PROP_gimple_any, /* properties_required */
12815 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
12816 0, /* properties_destroyed */
12817 0, /* todo_flags_start */
12818 0, /* todo_flags_finish */
12819};
cbba99a0 12820
4954efd4 12821class pass_lower_omp : public gimple_opt_pass
12822{
12823public:
12824 pass_lower_omp (gcc::context *ctxt)
12825 : gimple_opt_pass (pass_data_lower_omp, ctxt)
12826 {}
cbba99a0 12827
4954efd4 12828 /* opt_pass methods: */
12829 virtual unsigned int execute (function *) { return execute_lower_omp (); }
cbba99a0 12830
4954efd4 12831}; // class pass_lower_omp
cbba99a0 12832
4954efd4 12833} // anon namespace
cbba99a0 12834
4954efd4 12835gimple_opt_pass *
12836make_pass_lower_omp (gcc::context *ctxt)
12837{
12838 return new pass_lower_omp (ctxt);
cbba99a0 12839}
4954efd4 12840\f
12841/* The following is a utility to diagnose structured block violations.
12842 It is not part of the "omplower" pass, as that's invoked too late. It
12843 should be invoked by the respective front ends after gimplification. */
cbba99a0 12844
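/* Example of what gets diagnosed (sketch):

	#pragma omp parallel
	  {
	    ...
	    goto lab;
	  }
	lab: ;

   Here the goto leaves the structured block: its branch context is the
   GIMPLE_OMP_PARALLEL while the label context is NULL, so diagnose_sb_0
   reports an invalid branch to/from an OpenMP structured block.  */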
4954efd4 12845static splay_tree all_labels;
cbba99a0 12846
4954efd4 12847/* Check for mismatched contexts and generate an error if needed. Return
12848 true if an error is detected. */
cbba99a0 12849
4954efd4 12850static bool
12851diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
12852 gimple *branch_ctx, gimple *label_ctx)
12853{
12854 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
12855 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
cbba99a0 12856
4954efd4 12857 if (label_ctx == branch_ctx)
12858 return false;
cbba99a0 12859
4954efd4 12860 const char* kind = NULL;
cbba99a0 12861
4954efd4 12862 if (flag_openacc)
cbba99a0 12863 {
4954efd4 12864 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
12865 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
cbba99a0 12866 {
4954efd4 12867 gcc_checking_assert (kind == NULL);
12868 kind = "OpenACC";
cbba99a0 12869 }
12870 }
4954efd4 12871 if (kind == NULL)
28e869d0 12872 {
93c7cd0f 12873 gcc_checking_assert (flag_openmp || flag_openmp_simd);
4954efd4 12874 kind = "OpenMP";
28e869d0 12875 }
cbba99a0 12876
7c6746c9 12877 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
4954efd4 12878 so we could traverse it and issue a correct "exit" or "enter" error
12879 message upon a structured block violation.
f4f5b4b4 12880
4954efd4 12881 We built the context as a tree_cons'd list, but there is
12882 no easy counterpart in gimple tuples. It seems like far too much work
12883 for issuing exit/enter error messages. If someone really misses the
7c6746c9 12884 distinct error message... patches welcome. */
f4f5b4b4 12885
4954efd4 12886#if 0
12887 /* Try to avoid confusing the user by producing and error message
12888 with correct "exit" or "enter" verbiage. We prefer "exit"
12889 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
12890 if (branch_ctx == NULL)
12891 exit_p = false;
12892 else
28e869d0 12893 {
4954efd4 12894 while (label_ctx)
12895 {
12896 if (TREE_VALUE (label_ctx) == branch_ctx)
12897 {
12898 exit_p = false;
12899 break;
12900 }
12901 label_ctx = TREE_CHAIN (label_ctx);
12902 }
28e869d0 12903 }
12904
4954efd4 12905 if (exit_p)
12906 error ("invalid exit from %s structured block", kind);
12907 else
12908 error ("invalid entry to %s structured block", kind);
12909#endif
28e869d0 12910
4954efd4 12911 /* If it's obvious we have an invalid entry, be specific about the error. */
12912 if (branch_ctx == NULL)
12913 error ("invalid entry to %s structured block", kind);
12914 else
f4f5b4b4 12915 {
4954efd4 12916 /* Otherwise, be vague and lazy, but efficient. */
12917 error ("invalid branch to/from %s structured block", kind);
f4f5b4b4 12918 }
28e869d0 12919
4954efd4 12920 gsi_replace (gsi_p, gimple_build_nop (), false);
12921 return true;
f4f5b4b4 12922}
12923
4954efd4 12924/* Pass 1: Create a minimal tree of structured blocks, and record
12925 where each label is found. */
cbba99a0 12926
4954efd4 12927static tree
12928diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
12929 struct walk_stmt_info *wi)
cbba99a0 12930{
4954efd4 12931 gimple *context = (gimple *) wi->info;
12932 gimple *inner_context;
12933 gimple *stmt = gsi_stmt (*gsi_p);
cbba99a0 12934
4954efd4 12935 *handled_ops_p = true;
c25f1934 12936
4954efd4 12937 switch (gimple_code (stmt))
12938 {
12939 WALK_SUBSTMTS;
c25f1934 12940
4954efd4 12941 case GIMPLE_OMP_PARALLEL:
12942 case GIMPLE_OMP_TASK:
12943 case GIMPLE_OMP_SECTIONS:
12944 case GIMPLE_OMP_SINGLE:
12945 case GIMPLE_OMP_SECTION:
12946 case GIMPLE_OMP_MASTER:
12947 case GIMPLE_OMP_ORDERED:
70a6624c 12948 case GIMPLE_OMP_SCAN:
4954efd4 12949 case GIMPLE_OMP_CRITICAL:
12950 case GIMPLE_OMP_TARGET:
12951 case GIMPLE_OMP_TEAMS:
12952 case GIMPLE_OMP_TASKGROUP:
12953 /* The minimal context here is just the current OMP construct. */
12954 inner_context = stmt;
12955 wi->info = inner_context;
12956 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
12957 wi->info = context;
12958 break;
641a0fa1 12959
4954efd4 12960 case GIMPLE_OMP_FOR:
12961 inner_context = stmt;
12962 wi->info = inner_context;
12963 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
12964 walk them. */
12965 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12966 diagnose_sb_1, NULL, wi);
12967 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
12968 wi->info = context;
12969 break;
641a0fa1 12970
4954efd4 12971 case GIMPLE_LABEL:
12972 splay_tree_insert (all_labels,
12973 (splay_tree_key) gimple_label_label (
12974 as_a <glabel *> (stmt)),
12975 (splay_tree_value) context);
12976 break;
641a0fa1 12977
4954efd4 12978 default:
12979 break;
641a0fa1 12980 }
12981
4954efd4 12982 return NULL_TREE;
641a0fa1 12983}
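
/* Example added for exposition (not part of omp-low.c, kept under #if 0 so it
   is never compiled here): for a body like the one below, the walk above
   leaves all_labels with two entries keyed by the LABEL_DECLs: "in" maps to
   the enclosing GIMPLE_OMP_PARALLEL statement and "out" maps to a NULL
   context.  Those recorded contexts are what diagnose_sb_2/diagnose_sb_0
   later compare against each branch's own context; here both gotos stay in
   the same context as their labels, so nothing is diagnosed.  The function
   name is purely illustrative.  */
#if 0
void
label_context_example (void)
{
#pragma omp parallel
  {
    goto in;		/* Same context as "in": no diagnostic.  */
  in:;			/* all_labels: in  -> GIMPLE_OMP_PARALLEL.  */
  }
  goto out;		/* Same (NULL) context as "out": no diagnostic.  */
 out:;			/* all_labels: out -> NULL (no OMP context).  */
}
#endif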
12984
4954efd4 12985/* Pass 2: Check each branch and see if its context differs from that of
12986 the destination label's context. */
0bb0f256 12987
4954efd4 12988static tree
12989diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
12990 struct walk_stmt_info *wi)
0bb0f256 12991{
4954efd4 12992 gimple *context = (gimple *) wi->info;
12993 splay_tree_node n;
12994 gimple *stmt = gsi_stmt (*gsi_p);
6f431819 12995
4954efd4 12996 *handled_ops_p = true;
6f431819 12997
4954efd4 12998 switch (gimple_code (stmt))
cbba99a0 12999 {
4954efd4 13000 WALK_SUBSTMTS;
cbba99a0 13001
4954efd4 13002 case GIMPLE_OMP_PARALLEL:
13003 case GIMPLE_OMP_TASK:
13004 case GIMPLE_OMP_SECTIONS:
13005 case GIMPLE_OMP_SINGLE:
13006 case GIMPLE_OMP_SECTION:
13007 case GIMPLE_OMP_MASTER:
13008 case GIMPLE_OMP_ORDERED:
70a6624c 13009 case GIMPLE_OMP_SCAN:
4954efd4 13010 case GIMPLE_OMP_CRITICAL:
13011 case GIMPLE_OMP_TARGET:
13012 case GIMPLE_OMP_TEAMS:
13013 case GIMPLE_OMP_TASKGROUP:
13014 wi->info = stmt;
13015 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13016 wi->info = context;
13017 break;
641a0fa1 13018
4954efd4 13019 case GIMPLE_OMP_FOR:
13020 wi->info = stmt;
13021 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13022 walk them. */
13023 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
13024 diagnose_sb_2, NULL, wi);
13025 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13026 wi->info = context;
13027 break;
641a0fa1 13028
4954efd4 13029 case GIMPLE_COND:
13030 {
13031 gcond *cond_stmt = as_a <gcond *> (stmt);
13032 tree lab = gimple_cond_true_label (cond_stmt);
13033 if (lab)
cbba99a0 13034 {
4954efd4 13035 n = splay_tree_lookup (all_labels,
13036 (splay_tree_key) lab);
13037 diagnose_sb_0 (gsi_p, context,
13038 n ? (gimple *) n->value : NULL);
cbba99a0 13039 }
4954efd4 13040 lab = gimple_cond_false_label (cond_stmt);
13041 if (lab)
13042 {
13043 n = splay_tree_lookup (all_labels,
13044 (splay_tree_key) lab);
13045 diagnose_sb_0 (gsi_p, context,
13046 n ? (gimple *) n->value : NULL);
13047 }
13048 }
13049 break;
cbba99a0 13050
4954efd4 13051 case GIMPLE_GOTO:
13052 {
13053 tree lab = gimple_goto_dest (stmt);
13054 if (TREE_CODE (lab) != LABEL_DECL)
13055 break;
13056
13057 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13058 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
13059 }
13060 break;
cbba99a0 13061
4954efd4 13062 case GIMPLE_SWITCH:
13063 {
13064 gswitch *switch_stmt = as_a <gswitch *> (stmt);
13065 unsigned int i;
13066 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
cbba99a0 13067 {
4954efd4 13068 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
13069 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13070 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
13071 break;
cbba99a0 13072 }
cbba99a0 13073 }
4954efd4 13074 break;
cbba99a0 13075
4954efd4 13076 case GIMPLE_RETURN:
13077 diagnose_sb_0 (gsi_p, context, NULL);
13078 break;
0bb0f256 13079
4954efd4 13080 default:
13081 break;
0bb0f256 13082 }
13083
4954efd4 13084 return NULL_TREE;
e1037942 13085}
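
/* Example added for exposition (not part of omp-low.c, kept under #if 0 so it
   is never compiled here): the GIMPLE_RETURN case above treats a return
   inside a construct as a branch to a destination with no OMP context, so
   diagnose_sb_0 is called with a non-NULL branch_ctx and a NULL label_ctx,
   reports "invalid branch to/from OpenMP structured block", and replaces the
   return with a nop.  The function name is purely illustrative.  */
#if 0
void
invalid_exit_example (void)
{
#pragma omp parallel
  {
    return;		/* Diagnosed by diagnose_sb_0, then replaced by a nop.  */
  }
}
#endif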
13086
4954efd4 13087static unsigned int
13088diagnose_omp_structured_block_errors (void)
0bb0f256 13089{
4954efd4 13090 struct walk_stmt_info wi;
13091 gimple_seq body = gimple_body (current_function_decl);
7c727679 13092
4954efd4 13093 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
0bb0f256 13094
4954efd4 13095 memset (&wi, 0, sizeof (wi));
13096 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
0bb0f256 13097
4954efd4 13098 memset (&wi, 0, sizeof (wi));
13099 wi.want_locations = true;
13100 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
0bb0f256 13101
4954efd4 13102 gimple_set_body (current_function_decl, body);
bab6706a 13103
4954efd4 13104 splay_tree_delete (all_labels);
13105 all_labels = NULL;
bab6706a 13106
bab6706a 13107 return 0;
13108}
13109
13110namespace {
13111
4954efd4 13112const pass_data pass_data_diagnose_omp_blocks =
bab6706a 13113{
13114 GIMPLE_PASS, /* type */
4954efd4 13115 "*diagnose_omp_blocks", /* name */
f57c8178 13116 OPTGROUP_OMP, /* optinfo_flags */
bab6706a 13117 TV_NONE, /* tv_id */
4954efd4 13118 PROP_gimple_any, /* properties_required */
13119 0, /* properties_provided */
bab6706a 13120 0, /* properties_destroyed */
13121 0, /* todo_flags_start */
4954efd4 13122 0, /* todo_flags_finish */
bab6706a 13123};
13124
4954efd4 13125class pass_diagnose_omp_blocks : public gimple_opt_pass
bab6706a 13126{
13127public:
4954efd4 13128 pass_diagnose_omp_blocks (gcc::context *ctxt)
13129 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
bab6706a 13130 {}
13131
13132 /* opt_pass methods: */
4954efd4 13133 virtual bool gate (function *)
13134 {
efa02472 13135 return flag_openacc || flag_openmp || flag_openmp_simd;
4954efd4 13136 }
bab6706a 13137 virtual unsigned int execute (function *)
13138 {
4954efd4 13139 return diagnose_omp_structured_block_errors ();
c0998828 13140 }
13141
4954efd4 13142}; // class pass_diagnose_omp_blocks
c0998828 13143
13144} // anon namespace
13145
13146gimple_opt_pass *
4954efd4 13147make_pass_diagnose_omp_blocks (gcc::context *ctxt)
c0998828 13148{
4954efd4 13149 return new pass_diagnose_omp_blocks (ctxt);
c0998828 13150}
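
/* Note added for exposition: the two factory functions in this file,
   make_pass_lower_omp and make_pass_diagnose_omp_blocks, are the hooks the
   pass manager uses to instantiate these passes.  They are declared in
   tree-pass.h and referenced from gcc/passes.def among the early lowering
   passes, roughly as follows (schematic; surrounding passes omitted):

	NEXT_PASS (pass_diagnose_omp_blocks);
	...
	NEXT_PASS (pass_lower_omp);
*/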
4954efd4 13151\f
c0998828 13152
1e8e9920 13153#include "gt-omp-low.h"