ca4c3545 1/* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
1e8e9920 5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
fbd26352 7 Copyright (C) 2005-2019 Free Software Foundation, Inc.
1e8e9920 8
9This file is part of GCC.
10
11GCC is free software; you can redistribute it and/or modify it under
12the terms of the GNU General Public License as published by the Free
8c4c00c1 13Software Foundation; either version 3, or (at your option) any later
1e8e9920 14version.
15
16GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17WARRANTY; without even the implied warranty of MERCHANTABILITY or
18FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19for more details.
20
21You should have received a copy of the GNU General Public License
8c4c00c1 22along with GCC; see the file COPYING3. If not see
23<http://www.gnu.org/licenses/>. */
1e8e9920 24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
9ef16211 28#include "backend.h"
7c29e30e 29#include "target.h"
1e8e9920 30#include "tree.h"
9ef16211 31#include "gimple.h"
7c29e30e 32#include "tree-pass.h"
9ef16211 33#include "ssa.h"
7c29e30e 34#include "cgraph.h"
35#include "pretty-print.h"
36#include "diagnostic-core.h"
b20a8bb4 37#include "fold-const.h"
9ed99284 38#include "stor-layout.h"
bc61cadb 39#include "internal-fn.h"
40#include "gimple-fold.h"
a8783bee 41#include "gimplify.h"
dcf1a1ec 42#include "gimple-iterator.h"
e795d6e1 43#include "gimplify-me.h"
dcf1a1ec 44#include "gimple-walk.h"
75a70cf9 45#include "tree-iterator.h"
1e8e9920 46#include "tree-inline.h"
47#include "langhooks.h"
073c1fd5 48#include "tree-dfa.h"
69ee5dbb 49#include "tree-ssa.h"
e3022db7 50#include "splay-tree.h"
4954efd4 51#include "omp-general.h"
7740abd8 52#include "omp-low.h"
4954efd4 53#include "omp-grid.h"
424a4a92 54#include "gimple-low.h"
2cc80ac3 55#include "symbol-summary.h"
e797f49f 56#include "tree-nested.h"
b0c5e347 57#include "context.h"
ca4c3545 58#include "gomp-constants.h"
cbba99a0 59#include "gimple-pretty-print.h"
ef2beaf2 60#include "hsa-common.h"
30a86690 61#include "stringpool.h"
62#include "attribs.h"
1e8e9920 63
ca4c3545 64/* Lowering of OMP parallel and workshare constructs proceeds in two
1e8e9920 65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
334ec2d8 68 re-gimplifying things when variables have been replaced with complex
1e8e9920 69 expressions.
70
d134bccc 71 Final code generation is done by pass_expand_omp. The flowgraph is
ca4c3545 72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
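/* As a rough illustration (not part of the original comment): a construct
   such as

       #pragma omp parallel shared(a)
         a++;

   is eventually outlined into a child function (named with an "_omp_fn"
   suffix, see create_omp_child_function_name below) that receives a
   ".omp_data_i" pointer to a record carrying the shared variable A, while
   the construct itself is replaced by a call into libgomp such as
   GOMP_parallel.  */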
1e8e9920 74
75/* Context structure. Used to store information about each parallel
76 directive in the code. */
77
6dc50383 78struct omp_context
1e8e9920 79{
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
85
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
42acab1c 88 gimple *stmt;
1e8e9920 89
48e1416a 90 /* Map variables to fields in a structure that allows communication
1e8e9920 91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
96
fd6481cf 97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
 99 that encountered the task construct to the task firstprivate fn;
 100 record_type is allocated by GOMP_task, initialized by the task firstprivate
 101 fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
104
1e8e9920 105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
108
bc7bff74 109 /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
 110 barriers should jump during the omplower pass. */
111 tree cancel_label;
112
9cf7bec9 113 /* The sibling GIMPLE_OMP_FOR simd statement with a _simt_ clause, or
 114 NULL otherwise. */
115 gimple *simt_stmt;
116
7e5a76c8 117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
121
9a1d892b 122 /* A hash map from the reduction clauses to the registered array
7e5a76c8 123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
125
9a1d892b 126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
129
1e8e9920 130 /* Nesting depth of this context. Used to beautify error messages re
131 invalid gotos. The outermost ctx is depth 1, with depth 0 being
132 reserved for the main body of the function. */
133 int depth;
134
1e8e9920 135 /* True if this parallel directive is nested within another. */
136 bool is_nested;
bc7bff74 137
138 /* True if this construct can be cancelled. */
139 bool cancellable;
384aea12 140
141 /* True if lower_omp_1 should look up lastprivate conditional in parent
142 context. */
143 bool combined_into_simd_safelen0;
da008d72 144
 145 /* True if there is a nested scan context with an inclusive clause. */
146 bool scan_inclusive;
6dc50383 147};
1e8e9920 148
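/* File-scope state shared by the scan and lowering code below:
   ALL_CONTEXTS maps each OMP statement to its omp_context (contexts are
   destroyed through delete_omp_context), TASKREG_NESTING_LEVEL and
   TARGET_NESTING_LEVEL track the current nesting depth of task/parallel
   and target regions, TASK_SHARED_VARS records the DECL_UIDs of variables
   forced addressable for tasks (see use_pointer_for_field), and
   TASKREG_CONTEXTS collects the task/parallel contexts seen so far for
   later processing.  */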
1e8e9920 149static splay_tree all_contexts;
fd6481cf 150static int taskreg_nesting_level;
bc7bff74 151static int target_nesting_level;
fd6481cf 152static bitmap task_shared_vars;
37eaded9 153static vec<omp_context *> taskreg_contexts;
1e8e9920 154
ab129075 155static void scan_omp (gimple_seq *, omp_context *);
75a70cf9 156static tree scan_omp_1_op (tree *, int *, void *);
157
158#define WALK_SUBSTMTS \
159 case GIMPLE_BIND: \
160 case GIMPLE_TRY: \
161 case GIMPLE_CATCH: \
162 case GIMPLE_EH_FILTER: \
4c0315d0 163 case GIMPLE_TRANSACTION: \
75a70cf9 164 /* The sub-statements for these should be walked. */ \
165 *handled_ops_p = false; \
166 break;
167
a8e785ba 168/* Return true if CTX corresponds to an oacc parallel region. */
169
170static bool
171is_oacc_parallel (omp_context *ctx)
172{
173 enum gimple_code outer_type = gimple_code (ctx->stmt);
174 return ((outer_type == GIMPLE_OMP_TARGET)
175 && (gimple_omp_target_kind (ctx->stmt)
176 == GF_OMP_TARGET_KIND_OACC_PARALLEL));
177}
178
179/* Return true if CTX corresponds to an oacc kernels region. */
180
181static bool
182is_oacc_kernels (omp_context *ctx)
183{
184 enum gimple_code outer_type = gimple_code (ctx->stmt);
185 return ((outer_type == GIMPLE_OMP_TARGET)
186 && (gimple_omp_target_kind (ctx->stmt)
187 == GF_OMP_TARGET_KIND_OACC_KERNELS));
188}
189
43895be5 190/* If DECL is the artificial dummy VAR_DECL created for non-static
191 data member privatization, return the underlying "this" parameter,
192 otherwise return NULL. */
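/* (For instance, when a clause names a non-static data member inside a
   C++ member function, the front end creates a dummy VAR_DECL whose
   DECL_VALUE_EXPR is a COMPONENT_REF off the implicit "this"; the loop
   below strips that access path and returns the "this" PARM_DECL.)  */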
193
194tree
195omp_member_access_dummy_var (tree decl)
196{
197 if (!VAR_P (decl)
198 || !DECL_ARTIFICIAL (decl)
199 || !DECL_IGNORED_P (decl)
200 || !DECL_HAS_VALUE_EXPR_P (decl)
201 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
202 return NULL_TREE;
203
204 tree v = DECL_VALUE_EXPR (decl);
205 if (TREE_CODE (v) != COMPONENT_REF)
206 return NULL_TREE;
207
208 while (1)
209 switch (TREE_CODE (v))
210 {
211 case COMPONENT_REF:
212 case MEM_REF:
213 case INDIRECT_REF:
214 CASE_CONVERT:
215 case POINTER_PLUS_EXPR:
216 v = TREE_OPERAND (v, 0);
217 continue;
218 case PARM_DECL:
219 if (DECL_CONTEXT (v) == current_function_decl
220 && DECL_ARTIFICIAL (v)
221 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
222 return v;
223 return NULL_TREE;
224 default:
225 return NULL_TREE;
226 }
227}
228
229/* Helper for unshare_and_remap, called through walk_tree. */
230
231static tree
232unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
233{
234 tree *pair = (tree *) data;
235 if (*tp == pair[0])
236 {
237 *tp = unshare_expr (pair[1]);
238 *walk_subtrees = 0;
239 }
240 else if (IS_TYPE_OR_DECL_P (*tp))
241 *walk_subtrees = 0;
242 return NULL_TREE;
243}
244
245/* Return unshare_expr (X) with all occurrences of FROM
246 replaced with TO. */
247
248static tree
249unshare_and_remap (tree x, tree from, tree to)
250{
251 tree pair[2] = { from, to };
252 x = unshare_expr (x);
253 walk_tree (&x, unshare_and_remap_1, pair, NULL);
254 return x;
255}
256
75a70cf9 257/* Convenience function for calling scan_omp_1_op on tree operands. */
258
259static inline tree
260scan_omp_op (tree *tp, omp_context *ctx)
261{
262 struct walk_stmt_info wi;
263
264 memset (&wi, 0, sizeof (wi));
265 wi.info = ctx;
266 wi.want_locations = true;
267
268 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
269}
270
e3a19533 271static void lower_omp (gimple_seq *, omp_context *);
f49d7bb5 272static tree lookup_decl_in_outer_ctx (tree, omp_context *);
273static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
1e8e9920 274
1e8e9920 275/* Return true if CTX is for an omp parallel. */
276
277static inline bool
278is_parallel_ctx (omp_context *ctx)
279{
75a70cf9 280 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
1e8e9920 281}
282
773c5ba7 283
fd6481cf 284/* Return true if CTX is for an omp task. */
285
286static inline bool
287is_task_ctx (omp_context *ctx)
288{
75a70cf9 289 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
fd6481cf 290}
291
292
43895be5 293/* Return true if CTX is for an omp taskloop. */
294
295static inline bool
296is_taskloop_ctx (omp_context *ctx)
297{
298 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
299 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
300}
301
302
7e5a76c8 303/* Return true if CTX is for a host omp teams. */
304
305static inline bool
306is_host_teams_ctx (omp_context *ctx)
307{
308 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
309 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
310}
311
312/* Return true if CTX is for an omp parallel or omp task or host omp teams
313 (the last one is strictly not a task region in OpenMP speak, but we
314 need to treat it similarly). */
fd6481cf 315
316static inline bool
317is_taskreg_ctx (omp_context *ctx)
318{
7e5a76c8 319 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
fd6481cf 320}
321
1e8e9920 322/* Return true if EXPR is variable sized. */
323
324static inline bool
1f1872fd 325is_variable_sized (const_tree expr)
1e8e9920 326{
327 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
328}
329
ca4c3545 330/* Lookup variables. The "maybe" form
1e8e9920 331 allows the variable not to have been entered; otherwise we
 332 assert that the variable must have been entered. */
333
334static inline tree
335lookup_decl (tree var, omp_context *ctx)
336{
06ecf488 337 tree *n = ctx->cb.decl_map->get (var);
e3022db7 338 return *n;
1e8e9920 339}
340
341static inline tree
e8a588af 342maybe_lookup_decl (const_tree var, omp_context *ctx)
1e8e9920 343{
06ecf488 344 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
e3022db7 345 return n ? *n : NULL_TREE;
1e8e9920 346}
347
348static inline tree
349lookup_field (tree var, omp_context *ctx)
350{
351 splay_tree_node n;
352 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
353 return (tree) n->value;
354}
355
fd6481cf 356static inline tree
43895be5 357lookup_sfield (splay_tree_key key, omp_context *ctx)
fd6481cf 358{
359 splay_tree_node n;
360 n = splay_tree_lookup (ctx->sfield_map
43895be5 361 ? ctx->sfield_map : ctx->field_map, key);
fd6481cf 362 return (tree) n->value;
363}
364
1e8e9920 365static inline tree
43895be5 366lookup_sfield (tree var, omp_context *ctx)
367{
368 return lookup_sfield ((splay_tree_key) var, ctx);
369}
370
371static inline tree
372maybe_lookup_field (splay_tree_key key, omp_context *ctx)
1e8e9920 373{
374 splay_tree_node n;
43895be5 375 n = splay_tree_lookup (ctx->field_map, key);
1e8e9920 376 return n ? (tree) n->value : NULL_TREE;
377}
378
43895be5 379static inline tree
380maybe_lookup_field (tree var, omp_context *ctx)
381{
382 return maybe_lookup_field ((splay_tree_key) var, ctx);
383}
384
e8a588af 385/* Return true if DECL should be copied by pointer. SHARED_CTX is
386 the parallel context if DECL is to be shared. */
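/* (In short: plain scalars that are not addressable and not visible
   outside the region can use copy-in/copy-out; aggregates, globals,
   address-taken variables, variables shared into a task, and variables
   already shared by an enclosing parallel are passed by pointer.)  */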
1e8e9920 387
388static bool
fd6481cf 389use_pointer_for_field (tree decl, omp_context *shared_ctx)
1e8e9920 390{
0b80c4b2 391 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
392 || TYPE_ATOMIC (TREE_TYPE (decl)))
1e8e9920 393 return true;
394
554f2707 395 /* We can only use copy-in/copy-out semantics for shared variables
1e8e9920 396 when we know the value is not accessible from an outer scope. */
e8a588af 397 if (shared_ctx)
1e8e9920 398 {
ca4c3545 399 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
400
1e8e9920 401 /* ??? Trivially accessible from anywhere. But why would we even
402 be passing an address in this case? Should we simply assert
403 this to be false, or should we have a cleanup pass that removes
404 these from the list of mappings? */
7e5a76c8 405 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
1e8e9920 406 return true;
407
408 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
409 without analyzing the expression whether or not its location
410 is accessible to anyone else. In the case of nested parallel
411 regions it certainly may be. */
df2c34fc 412 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
1e8e9920 413 return true;
414
415 /* Do not use copy-in/copy-out for variables that have their
416 address taken. */
417 if (TREE_ADDRESSABLE (decl))
418 return true;
e8a588af 419
b8214689 420 /* lower_send_shared_vars only uses copy-in, but not copy-out
421 for these. */
422 if (TREE_READONLY (decl)
423 || ((TREE_CODE (decl) == RESULT_DECL
424 || TREE_CODE (decl) == PARM_DECL)
425 && DECL_BY_REFERENCE (decl)))
426 return false;
427
e8a588af 428 /* Disallow copy-in/out in nested parallel if
429 decl is shared in outer parallel, otherwise
430 each thread could store the shared variable
431 in its own copy-in location, making the
432 variable no longer really shared. */
b8214689 433 if (shared_ctx->is_nested)
e8a588af 434 {
435 omp_context *up;
436
437 for (up = shared_ctx->outer; up; up = up->outer)
0cb159ec 438 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
e8a588af 439 break;
440
0cb159ec 441 if (up)
e8a588af 442 {
443 tree c;
444
75a70cf9 445 for (c = gimple_omp_taskreg_clauses (up->stmt);
e8a588af 446 c; c = OMP_CLAUSE_CHAIN (c))
447 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
448 && OMP_CLAUSE_DECL (c) == decl)
449 break;
450
451 if (c)
784ad964 452 goto maybe_mark_addressable_and_ret;
e8a588af 453 }
454 }
fd6481cf 455
b8214689 456 /* For tasks avoid using copy-in/out. As tasks can be
fd6481cf 457 deferred or executed in a different thread, when GOMP_task
458 returns, the task hasn't necessarily terminated. */
b8214689 459 if (is_task_ctx (shared_ctx))
fd6481cf 460 {
784ad964 461 tree outer;
462 maybe_mark_addressable_and_ret:
463 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
43895be5 464 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
fd6481cf 465 {
466 /* Taking address of OUTER in lower_send_shared_vars
467 might need regimplification of everything that uses the
468 variable. */
469 if (!task_shared_vars)
470 task_shared_vars = BITMAP_ALLOC (NULL);
471 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
472 TREE_ADDRESSABLE (outer) = 1;
473 }
474 return true;
475 }
1e8e9920 476 }
477
478 return false;
479}
480
79acaae1 481/* Construct a new automatic decl similar to VAR. */
482
483static tree
484omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
485{
486 tree copy = copy_var_decl (var, name, type);
487
488 DECL_CONTEXT (copy) = current_function_decl;
1767a056 489 DECL_CHAIN (copy) = ctx->block_vars;
43895be5 490 /* If VAR is listed in task_shared_vars, it means it wasn't
 491 originally addressable and is only addressable because the task needs
 492 to take its address. But we don't need to take the address of
 493 privatized copies of that var. */
494 if (TREE_ADDRESSABLE (var)
495 && task_shared_vars
496 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
497 TREE_ADDRESSABLE (copy) = 0;
1e8e9920 498 ctx->block_vars = copy;
499
500 return copy;
501}
502
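/* Convenience wrapper around omp_copy_decl_2 that keeps VAR's name and
   type unchanged.  */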
503static tree
504omp_copy_decl_1 (tree var, omp_context *ctx)
505{
506 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
507}
508
445d06b6 509/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
510 as appropriate. */
511static tree
512omp_build_component_ref (tree obj, tree field)
513{
514 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
515 if (TREE_THIS_VOLATILE (field))
516 TREE_THIS_VOLATILE (ret) |= 1;
517 if (TREE_READONLY (field))
518 TREE_READONLY (ret) |= 1;
519 return ret;
520}
521
1e8e9920 522/* Build tree nodes to access the field for VAR on the receiver side. */
523
524static tree
525build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
526{
527 tree x, field = lookup_field (var, ctx);
528
529 /* If the receiver record type was remapped in the child function,
530 remap the field into the new record type. */
531 x = maybe_lookup_field (field, ctx);
532 if (x != NULL)
533 field = x;
534
182cf5a9 535 x = build_simple_mem_ref (ctx->receiver_decl);
75c20a2e 536 TREE_THIS_NOTRAP (x) = 1;
445d06b6 537 x = omp_build_component_ref (x, field);
1e8e9920 538 if (by_ref)
b16d27b9 539 {
540 x = build_simple_mem_ref (x);
541 TREE_THIS_NOTRAP (x) = 1;
542 }
1e8e9920 543
544 return x;
545}
546
547/* Build tree nodes to access VAR in the scope outer to CTX. In the case
548 of a parallel, this is a component reference; for workshare constructs
549 this is some variable. */
550
551static tree
1f355935 552build_outer_var_ref (tree var, omp_context *ctx,
553 enum omp_clause_code code = OMP_CLAUSE_ERROR)
1e8e9920 554{
555 tree x;
7e5a76c8 556 omp_context *outer = ctx->outer;
557 while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
558 outer = outer->outer;
1e8e9920 559
f49d7bb5 560 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
1e8e9920 561 x = var;
562 else if (is_variable_sized (var))
563 {
564 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
1f355935 565 x = build_outer_var_ref (x, ctx, code);
182cf5a9 566 x = build_simple_mem_ref (x);
1e8e9920 567 }
fd6481cf 568 else if (is_taskreg_ctx (ctx))
1e8e9920 569 {
e8a588af 570 bool by_ref = use_pointer_for_field (var, NULL);
1e8e9920 571 x = build_receiver_ref (var, by_ref, ctx);
572 }
1f355935 573 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
574 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
575 || (code == OMP_CLAUSE_PRIVATE
576 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
577 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
578 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
3d483a94 579 {
1f355935 580 /* #pragma omp simd isn't a worksharing construct, and can reference
581 even private vars in its linear etc. clauses.
582 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
583 to private vars in all worksharing constructs. */
3d483a94 584 x = NULL_TREE;
7e5a76c8 585 if (outer && is_taskreg_ctx (outer))
586 x = lookup_decl (var, outer);
587 else if (outer)
84cb1020 588 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
3d483a94 589 if (x == NULL_TREE)
590 x = var;
591 }
1f355935 592 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
43895be5 593 {
7e5a76c8 594 gcc_assert (outer);
43895be5 595 splay_tree_node n
7e5a76c8 596 = splay_tree_lookup (outer->field_map,
43895be5 597 (splay_tree_key) &DECL_UID (var));
598 if (n == NULL)
599 {
7e5a76c8 600 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
43895be5 601 x = var;
602 else
7e5a76c8 603 x = lookup_decl (var, outer);
43895be5 604 }
605 else
606 {
607 tree field = (tree) n->value;
608 /* If the receiver record type was remapped in the child function,
609 remap the field into the new record type. */
7e5a76c8 610 x = maybe_lookup_field (field, outer);
43895be5 611 if (x != NULL)
612 field = x;
613
7e5a76c8 614 x = build_simple_mem_ref (outer->receiver_decl);
43895be5 615 x = omp_build_component_ref (x, field);
7e5a76c8 616 if (use_pointer_for_field (var, outer))
43895be5 617 x = build_simple_mem_ref (x);
618 }
619 }
7e5a76c8 620 else if (outer)
56686608 621 {
56686608 622 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
623 {
624 outer = outer->outer;
625 gcc_assert (outer
626 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
627 }
1f355935 628 x = lookup_decl (var, outer);
56686608 629 }
4954efd4 630 else if (omp_is_reference (var))
9438af57 631 /* This can happen with orphaned constructs. If var is a reference, it is
632 possible it is shared and as such valid. */
633 x = var;
43895be5 634 else if (omp_member_access_dummy_var (var))
635 x = var;
1e8e9920 636 else
637 gcc_unreachable ();
638
43895be5 639 if (x == var)
640 {
641 tree t = omp_member_access_dummy_var (var);
642 if (t)
643 {
644 x = DECL_VALUE_EXPR (var);
645 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
646 if (o != t)
647 x = unshare_and_remap (x, t, o);
648 else
649 x = unshare_expr (x);
650 }
651 }
652
4954efd4 653 if (omp_is_reference (var))
182cf5a9 654 x = build_simple_mem_ref (x);
1e8e9920 655
656 return x;
657}
658
659/* Build tree nodes to access the field for VAR on the sender side. */
660
661static tree
43895be5 662build_sender_ref (splay_tree_key key, omp_context *ctx)
1e8e9920 663{
43895be5 664 tree field = lookup_sfield (key, ctx);
445d06b6 665 return omp_build_component_ref (ctx->sender_decl, field);
1e8e9920 666}
667
43895be5 668static tree
669build_sender_ref (tree var, omp_context *ctx)
670{
671 return build_sender_ref ((splay_tree_key) var, ctx);
672}
673
12dc9a16 674/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK
 675 selects the record type(s) and keying; see the note below. */
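/* (MASK encodes where and how the field is installed: bit 1 adds a field
   to CTX->RECORD_TYPE / FIELD_MAP, bit 2 adds one to CTX->SRECORD_TYPE /
   SFIELD_MAP, bit 4 turns an array into a pointer-to-pointer field, and
   bit 8 keys the splay-tree entries on &DECL_UID (VAR) rather than on VAR
   itself.)  */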
1e8e9920 676
677static void
737cc978 678install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
1e8e9920 679{
fd6481cf 680 tree field, type, sfield = NULL_TREE;
43895be5 681 splay_tree_key key = (splay_tree_key) var;
1e8e9920 682
43895be5 683 if ((mask & 8) != 0)
684 {
685 key = (splay_tree_key) &DECL_UID (var);
686 gcc_checking_assert (key != (splay_tree_key) var);
687 }
fd6481cf 688 gcc_assert ((mask & 1) == 0
43895be5 689 || !splay_tree_lookup (ctx->field_map, key));
fd6481cf 690 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
43895be5 691 || !splay_tree_lookup (ctx->sfield_map, key));
ca4c3545 692 gcc_assert ((mask & 3) == 3
693 || !is_gimple_omp_oacc (ctx->stmt));
1e8e9920 694
695 type = TREE_TYPE (var);
44c0112f 696 /* Prevent redeclaring the var in the split-off function with a restrict
 697 pointer type. Note that we only clear the type itself; restrict qualifiers in
 698 the pointed-to type will be ignored by points-to analysis. */
699 if (POINTER_TYPE_P (type)
700 && TYPE_RESTRICT (type))
701 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
702
bc7bff74 703 if (mask & 4)
704 {
705 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
706 type = build_pointer_type (build_pointer_type (type));
707 }
708 else if (by_ref)
737cc978 709 type = build_pointer_type (type);
4954efd4 710 else if ((mask & 3) == 1 && omp_is_reference (var))
fd6481cf 711 type = TREE_TYPE (type);
1e8e9920 712
e60a6f7b 713 field = build_decl (DECL_SOURCE_LOCATION (var),
714 FIELD_DECL, DECL_NAME (var), type);
1e8e9920 715
716 /* Remember what variable this field was created for. This does have a
717 side effect of making dwarf2out ignore this member, so for helpful
718 debugging we clear it later in delete_omp_context. */
719 DECL_ABSTRACT_ORIGIN (field) = var;
fd6481cf 720 if (type == TREE_TYPE (var))
721 {
5d4b30ea 722 SET_DECL_ALIGN (field, DECL_ALIGN (var));
fd6481cf 723 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
724 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
725 }
726 else
5d4b30ea 727 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
1e8e9920 728
fd6481cf 729 if ((mask & 3) == 3)
730 {
731 insert_field_into_struct (ctx->record_type, field);
732 if (ctx->srecord_type)
733 {
e60a6f7b 734 sfield = build_decl (DECL_SOURCE_LOCATION (var),
735 FIELD_DECL, DECL_NAME (var), type);
fd6481cf 736 DECL_ABSTRACT_ORIGIN (sfield) = var;
5d4b30ea 737 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
fd6481cf 738 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
739 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
740 insert_field_into_struct (ctx->srecord_type, sfield);
741 }
742 }
743 else
744 {
745 if (ctx->srecord_type == NULL_TREE)
746 {
747 tree t;
748
749 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
750 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
751 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
752 {
43895be5 753 sfield = build_decl (DECL_SOURCE_LOCATION (t),
e60a6f7b 754 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
fd6481cf 755 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
756 insert_field_into_struct (ctx->srecord_type, sfield);
757 splay_tree_insert (ctx->sfield_map,
758 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
759 (splay_tree_value) sfield);
760 }
761 }
762 sfield = field;
763 insert_field_into_struct ((mask & 1) ? ctx->record_type
764 : ctx->srecord_type, field);
765 }
1e8e9920 766
fd6481cf 767 if (mask & 1)
43895be5 768 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
fd6481cf 769 if ((mask & 2) && ctx->sfield_map)
43895be5 770 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
1e8e9920 771}
772
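/* Create the context-local copy of VAR in CTX and record the mapping in
   CTX's decl map.  */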
773static tree
774install_var_local (tree var, omp_context *ctx)
775{
776 tree new_var = omp_copy_decl_1 (var, ctx);
777 insert_decl_map (&ctx->cb, var, new_var);
778 return new_var;
779}
780
781/* Adjust the replacement for DECL in CTX for the new context. This means
782 copying the DECL_VALUE_EXPR, and fixing up the type. */
783
784static void
785fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
786{
787 tree new_decl, size;
788
789 new_decl = lookup_decl (decl, ctx);
790
791 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
792
793 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
794 && DECL_HAS_VALUE_EXPR_P (decl))
795 {
796 tree ve = DECL_VALUE_EXPR (decl);
75a70cf9 797 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
1e8e9920 798 SET_DECL_VALUE_EXPR (new_decl, ve);
799 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
800 }
801
802 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
803 {
804 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
805 if (size == error_mark_node)
806 size = TYPE_SIZE (TREE_TYPE (new_decl));
807 DECL_SIZE (new_decl) = size;
808
809 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
810 if (size == error_mark_node)
811 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
812 DECL_SIZE_UNIT (new_decl) = size;
813 }
814}
815
816/* The callback for remap_decl. Search all containing contexts for a
817 mapping of the variable; this avoids having to duplicate the splay
818 tree ahead of time. We know a mapping doesn't already exist in the
819 given context. Create new mappings to implement default semantics. */
820
821static tree
822omp_copy_decl (tree var, copy_body_data *cb)
823{
824 omp_context *ctx = (omp_context *) cb;
825 tree new_var;
826
1e8e9920 827 if (TREE_CODE (var) == LABEL_DECL)
828 {
ed5078db 829 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
830 return var;
e60a6f7b 831 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
773c5ba7 832 DECL_CONTEXT (new_var) = current_function_decl;
1e8e9920 833 insert_decl_map (&ctx->cb, var, new_var);
834 return new_var;
835 }
836
fd6481cf 837 while (!is_taskreg_ctx (ctx))
1e8e9920 838 {
839 ctx = ctx->outer;
840 if (ctx == NULL)
841 return var;
842 new_var = maybe_lookup_decl (var, ctx);
843 if (new_var)
844 return new_var;
845 }
846
f49d7bb5 847 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
848 return var;
849
1e8e9920 850 return error_mark_node;
851}
852
4954efd4 853/* Create a new context, with OUTER_CTX being the surrounding context. */
773c5ba7 854
4954efd4 855static omp_context *
856new_omp_context (gimple *stmt, omp_context *outer_ctx)
773c5ba7 857{
4954efd4 858 omp_context *ctx = XCNEW (omp_context);
773c5ba7 859
4954efd4 860 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
861 (splay_tree_value) ctx);
862 ctx->stmt = stmt;
773c5ba7 863
4954efd4 864 if (outer_ctx)
61e47ac8 865 {
4954efd4 866 ctx->outer = outer_ctx;
867 ctx->cb = outer_ctx->cb;
868 ctx->cb.block = NULL;
869 ctx->depth = outer_ctx->depth + 1;
1e8e9920 870 }
871 else
872 {
873 ctx->cb.src_fn = current_function_decl;
874 ctx->cb.dst_fn = current_function_decl;
415d1b9a 875 ctx->cb.src_node = cgraph_node::get (current_function_decl);
53f79206 876 gcc_checking_assert (ctx->cb.src_node);
1e8e9920 877 ctx->cb.dst_node = ctx->cb.src_node;
878 ctx->cb.src_cfun = cfun;
879 ctx->cb.copy_decl = omp_copy_decl;
e38def9c 880 ctx->cb.eh_lp_nr = 0;
1e8e9920 881 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
744a33a1 882 ctx->cb.adjust_array_error_bounds = true;
883 ctx->cb.dont_remap_vla_if_no_change = true;
1e8e9920 884 ctx->depth = 1;
885 }
886
06ecf488 887 ctx->cb.decl_map = new hash_map<tree, tree>;
1e8e9920 888
889 return ctx;
890}
891
75a70cf9 892static gimple_seq maybe_catch_exception (gimple_seq);
f6430caa 893
894/* Finalize task copyfn. */
895
896static void
1a91d914 897finalize_task_copyfn (gomp_task *task_stmt)
f6430caa 898{
899 struct function *child_cfun;
9078126c 900 tree child_fn;
e3a19533 901 gimple_seq seq = NULL, new_seq;
1a91d914 902 gbind *bind;
f6430caa 903
75a70cf9 904 child_fn = gimple_omp_task_copy_fn (task_stmt);
f6430caa 905 if (child_fn == NULL_TREE)
906 return;
907
908 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
82b40354 909 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
f6430caa 910
f6430caa 911 push_cfun (child_cfun);
7e3aae05 912 bind = gimplify_body (child_fn, false);
75a70cf9 913 gimple_seq_add_stmt (&seq, bind);
914 new_seq = maybe_catch_exception (seq);
915 if (new_seq != seq)
916 {
917 bind = gimple_build_bind (NULL, new_seq, NULL);
e3a19533 918 seq = NULL;
75a70cf9 919 gimple_seq_add_stmt (&seq, bind);
920 }
921 gimple_set_body (child_fn, seq);
f6430caa 922 pop_cfun ();
f6430caa 923
82b40354 924 /* Inform the callgraph about the new function. */
47300487 925 cgraph_node *node = cgraph_node::get_create (child_fn);
926 node->parallelized_function = 1;
415d1b9a 927 cgraph_node::add_new_function (child_fn, false);
f6430caa 928}
929
1e8e9920 930/* Destroy an omp_context data structure. Called through the splay tree
931 value delete callback. */
932
933static void
934delete_omp_context (splay_tree_value value)
935{
936 omp_context *ctx = (omp_context *) value;
937
06ecf488 938 delete ctx->cb.decl_map;
1e8e9920 939
940 if (ctx->field_map)
941 splay_tree_delete (ctx->field_map);
fd6481cf 942 if (ctx->sfield_map)
943 splay_tree_delete (ctx->sfield_map);
1e8e9920 944
945 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
946 it produces corrupt debug information. */
947 if (ctx->record_type)
948 {
949 tree t;
1767a056 950 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1e8e9920 951 DECL_ABSTRACT_ORIGIN (t) = NULL;
952 }
fd6481cf 953 if (ctx->srecord_type)
954 {
955 tree t;
1767a056 956 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
fd6481cf 957 DECL_ABSTRACT_ORIGIN (t) = NULL;
958 }
1e8e9920 959
f6430caa 960 if (is_task_ctx (ctx))
1a91d914 961 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
f6430caa 962
7e5a76c8 963 if (ctx->task_reduction_map)
964 {
965 ctx->task_reductions.release ();
966 delete ctx->task_reduction_map;
967 }
968
9a1d892b 969 delete ctx->lastprivate_conditional_map;
970
1e8e9920 971 XDELETE (ctx);
972}
973
974/* Fix up RECEIVER_DECL with a type that has been remapped to the child
975 context. */
976
977static void
978fixup_child_record_type (omp_context *ctx)
979{
980 tree f, type = ctx->record_type;
981
56686608 982 if (!ctx->receiver_decl)
983 return;
1e8e9920 984 /* ??? It isn't sufficient to just call remap_type here, because
985 variably_modified_type_p doesn't work the way we expect for
986 record types. Testing each field for whether it needs remapping
987 and creating a new record by hand works, however. */
1767a056 988 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1e8e9920 989 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
990 break;
991 if (f)
992 {
993 tree name, new_fields = NULL;
994
995 type = lang_hooks.types.make_type (RECORD_TYPE);
996 name = DECL_NAME (TYPE_NAME (ctx->record_type));
e60a6f7b 997 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
998 TYPE_DECL, name, type);
1e8e9920 999 TYPE_NAME (type) = name;
1000
1767a056 1001 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1e8e9920 1002 {
1003 tree new_f = copy_node (f);
1004 DECL_CONTEXT (new_f) = type;
1005 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1767a056 1006 DECL_CHAIN (new_f) = new_fields;
75a70cf9 1007 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1008 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1009 &ctx->cb, NULL);
1010 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1011 &ctx->cb, NULL);
1e8e9920 1012 new_fields = new_f;
1013
1014 /* Arrange to be able to look up the receiver field
1015 given the sender field. */
1016 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1017 (splay_tree_value) new_f);
1018 }
1019 TYPE_FIELDS (type) = nreverse (new_fields);
1020 layout_type (type);
1021 }
1022
43895be5 1023 /* In a target region we never modify any of the pointers in *.omp_data_i,
1024 so attempt to help the optimizers. */
1025 if (is_gimple_omp_offloaded (ctx->stmt))
1026 type = build_qualified_type (type, TYPE_QUAL_CONST);
1027
5455b100 1028 TREE_TYPE (ctx->receiver_decl)
1029 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1e8e9920 1030}
1031
1032/* Instantiate decls as necessary in CTX to satisfy the data sharing
737cc978 1033 specified by CLAUSES. */
1e8e9920 1034
1035static void
737cc978 1036scan_sharing_clauses (tree clauses, omp_context *ctx)
1e8e9920 1037{
1038 tree c, decl;
1039 bool scan_array_reductions = false;
1040
1041 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1042 {
1043 bool by_ref;
1044
55d6e7cd 1045 switch (OMP_CLAUSE_CODE (c))
1e8e9920 1046 {
1047 case OMP_CLAUSE_PRIVATE:
1048 decl = OMP_CLAUSE_DECL (c);
fd6481cf 1049 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1050 goto do_private;
1051 else if (!is_variable_sized (decl))
1e8e9920 1052 install_var_local (decl, ctx);
1053 break;
1054
1055 case OMP_CLAUSE_SHARED:
5fddcf34 1056 decl = OMP_CLAUSE_DECL (c);
7e5a76c8 1057 /* Ignore shared directives in teams construct inside of
1058 target construct. */
1059 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1060 && !is_host_teams_ctx (ctx))
5fddcf34 1061 {
1062 /* Global variables don't need to be copied,
1063 the receiver side will use them directly. */
1064 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1065 if (is_global_var (odecl))
1066 break;
1067 insert_decl_map (&ctx->cb, decl, odecl);
1068 break;
1069 }
fd6481cf 1070 gcc_assert (is_taskreg_ctx (ctx));
e7327393 1071 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1072 || !is_variable_sized (decl));
f49d7bb5 1073 /* Global variables don't need to be copied,
1074 the receiver side will use them directly. */
1075 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1076 break;
43895be5 1077 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
98588013 1078 {
1079 use_pointer_for_field (decl, ctx);
1080 break;
1081 }
1082 by_ref = use_pointer_for_field (decl, NULL);
1083 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1e8e9920 1084 || TREE_ADDRESSABLE (decl)
1085 || by_ref
4954efd4 1086 || omp_is_reference (decl))
1e8e9920 1087 {
98588013 1088 by_ref = use_pointer_for_field (decl, ctx);
fd6481cf 1089 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1090 install_var_local (decl, ctx);
1091 break;
1092 }
1093 /* We don't need to copy const scalar vars back. */
55d6e7cd 1094 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1e8e9920 1095 goto do_private;
1096
43895be5 1097 case OMP_CLAUSE_REDUCTION:
7e5a76c8 1098 case OMP_CLAUSE_IN_REDUCTION:
43895be5 1099 decl = OMP_CLAUSE_DECL (c);
7e5a76c8 1100 if (TREE_CODE (decl) == MEM_REF)
43895be5 1101 {
1102 tree t = TREE_OPERAND (decl, 0);
9561765e 1103 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1104 t = TREE_OPERAND (t, 0);
43895be5 1105 if (TREE_CODE (t) == INDIRECT_REF
1106 || TREE_CODE (t) == ADDR_EXPR)
1107 t = TREE_OPERAND (t, 0);
1108 install_var_local (t, ctx);
1109 if (is_taskreg_ctx (ctx)
7e5a76c8 1110 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1111 || (is_task_ctx (ctx)
1112 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1113 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1114 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1115 == POINTER_TYPE)))))
1116 && !is_variable_sized (t)
1117 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1118 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1119 && !is_task_ctx (ctx))))
43895be5 1120 {
7e5a76c8 1121 by_ref = use_pointer_for_field (t, NULL);
1122 if (is_task_ctx (ctx)
1123 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1124 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1125 {
1126 install_var_field (t, false, 1, ctx);
1127 install_var_field (t, by_ref, 2, ctx);
1128 }
1129 else
1130 install_var_field (t, by_ref, 3, ctx);
43895be5 1131 }
1132 break;
1133 }
7e5a76c8 1134 if (is_task_ctx (ctx)
1135 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1136 && OMP_CLAUSE_REDUCTION_TASK (c)
1137 && is_parallel_ctx (ctx)))
1138 {
1139 /* Global variables don't need to be copied,
1140 the receiver side will use them directly. */
1141 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1142 {
1143 by_ref = use_pointer_for_field (decl, ctx);
1144 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1145 install_var_field (decl, by_ref, 3, ctx);
1146 }
1147 install_var_local (decl, ctx);
1148 break;
1149 }
1150 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1151 && OMP_CLAUSE_REDUCTION_TASK (c))
1152 {
1153 install_var_local (decl, ctx);
1154 break;
1155 }
43895be5 1156 goto do_private;
1157
1e8e9920 1158 case OMP_CLAUSE_LASTPRIVATE:
1159 /* Let the corresponding firstprivate clause create
1160 the variable. */
1161 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1162 break;
1163 /* FALLTHRU */
1164
1165 case OMP_CLAUSE_FIRSTPRIVATE:
3d483a94 1166 case OMP_CLAUSE_LINEAR:
1e8e9920 1167 decl = OMP_CLAUSE_DECL (c);
1168 do_private:
43895be5 1169 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1170 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1171 && is_gimple_omp_offloaded (ctx->stmt))
1172 {
1173 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
4954efd4 1174 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
43895be5 1175 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1176 install_var_field (decl, true, 3, ctx);
1177 else
1178 install_var_field (decl, false, 3, ctx);
1179 }
1e8e9920 1180 if (is_variable_sized (decl))
1e8e9920 1181 {
fd6481cf 1182 if (is_task_ctx (ctx))
1183 install_var_field (decl, false, 1, ctx);
1184 break;
1185 }
1186 else if (is_taskreg_ctx (ctx))
1187 {
1188 bool global
1189 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
e8a588af 1190 by_ref = use_pointer_for_field (decl, NULL);
fd6481cf 1191
1192 if (is_task_ctx (ctx)
4954efd4 1193 && (global || by_ref || omp_is_reference (decl)))
fd6481cf 1194 {
1195 install_var_field (decl, false, 1, ctx);
1196 if (!global)
1197 install_var_field (decl, by_ref, 2, ctx);
1198 }
1199 else if (!global)
1200 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1201 }
1202 install_var_local (decl, ctx);
1203 break;
1204
43895be5 1205 case OMP_CLAUSE_USE_DEVICE_PTR:
1206 decl = OMP_CLAUSE_DECL (c);
1207 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1208 install_var_field (decl, true, 3, ctx);
1209 else
1210 install_var_field (decl, false, 3, ctx);
1211 if (DECL_SIZE (decl)
1212 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1213 {
1214 tree decl2 = DECL_VALUE_EXPR (decl);
1215 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1216 decl2 = TREE_OPERAND (decl2, 0);
1217 gcc_assert (DECL_P (decl2));
1218 install_var_local (decl2, ctx);
1219 }
1220 install_var_local (decl, ctx);
1221 break;
1222
1223 case OMP_CLAUSE_IS_DEVICE_PTR:
1224 decl = OMP_CLAUSE_DECL (c);
1225 goto do_private;
1226
bc7bff74 1227 case OMP_CLAUSE__LOOPTEMP_:
7e5a76c8 1228 case OMP_CLAUSE__REDUCTEMP_:
43895be5 1229 gcc_assert (is_taskreg_ctx (ctx));
bc7bff74 1230 decl = OMP_CLAUSE_DECL (c);
1231 install_var_field (decl, false, 3, ctx);
1232 install_var_local (decl, ctx);
1233 break;
1234
1e8e9920 1235 case OMP_CLAUSE_COPYPRIVATE:
1e8e9920 1236 case OMP_CLAUSE_COPYIN:
1237 decl = OMP_CLAUSE_DECL (c);
e8a588af 1238 by_ref = use_pointer_for_field (decl, NULL);
fd6481cf 1239 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1240 break;
1241
2169f33b 1242 case OMP_CLAUSE_FINAL:
1e8e9920 1243 case OMP_CLAUSE_IF:
1244 case OMP_CLAUSE_NUM_THREADS:
bc7bff74 1245 case OMP_CLAUSE_NUM_TEAMS:
1246 case OMP_CLAUSE_THREAD_LIMIT:
1247 case OMP_CLAUSE_DEVICE:
1e8e9920 1248 case OMP_CLAUSE_SCHEDULE:
bc7bff74 1249 case OMP_CLAUSE_DIST_SCHEDULE:
1250 case OMP_CLAUSE_DEPEND:
43895be5 1251 case OMP_CLAUSE_PRIORITY:
1252 case OMP_CLAUSE_GRAINSIZE:
1253 case OMP_CLAUSE_NUM_TASKS:
ca4c3545 1254 case OMP_CLAUSE_NUM_GANGS:
1255 case OMP_CLAUSE_NUM_WORKERS:
1256 case OMP_CLAUSE_VECTOR_LENGTH:
1e8e9920 1257 if (ctx->outer)
75a70cf9 1258 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1e8e9920 1259 break;
1260
bc7bff74 1261 case OMP_CLAUSE_TO:
1262 case OMP_CLAUSE_FROM:
1263 case OMP_CLAUSE_MAP:
1264 if (ctx->outer)
1265 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1266 decl = OMP_CLAUSE_DECL (c);
1267 /* Global variables with "omp declare target" attribute
1268 don't need to be copied, the receiver side will use them
c0998828 1269 directly. However, global variables with "omp declare target link"
c1eaba31 1270 attribute need to be copied, as do maps that use the ALWAYS modifier. */
bc7bff74 1271 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1272 && DECL_P (decl)
9561765e 1273 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1274 && (OMP_CLAUSE_MAP_KIND (c)
1275 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
43895be5 1276 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
c1eaba31 1277 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1278 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1279 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
bc7bff74 1280 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
c0998828 1281 && varpool_node::get_create (decl)->offloadable
1282 && !lookup_attribute ("omp declare target link",
1283 DECL_ATTRIBUTES (decl)))
bc7bff74 1284 break;
1285 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
ca4c3545 1286 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
bc7bff74 1287 {
ca4c3545 1288 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1289 not offloaded; there is nothing to map for those. */
1290 if (!is_gimple_omp_offloaded (ctx->stmt)
c1a114c2 1291 && !POINTER_TYPE_P (TREE_TYPE (decl))
1292 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
bc7bff74 1293 break;
1294 }
43895be5 1295 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9561765e 1296 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1297 || (OMP_CLAUSE_MAP_KIND (c)
1298 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
43895be5 1299 {
1300 if (TREE_CODE (decl) == COMPONENT_REF
1301 || (TREE_CODE (decl) == INDIRECT_REF
1302 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1303 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1304 == REFERENCE_TYPE)))
1305 break;
1306 if (DECL_SIZE (decl)
1307 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1308 {
1309 tree decl2 = DECL_VALUE_EXPR (decl);
1310 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1311 decl2 = TREE_OPERAND (decl2, 0);
1312 gcc_assert (DECL_P (decl2));
1313 install_var_local (decl2, ctx);
1314 }
1315 install_var_local (decl, ctx);
1316 break;
1317 }
bc7bff74 1318 if (DECL_P (decl))
1319 {
1320 if (DECL_SIZE (decl)
1321 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1322 {
1323 tree decl2 = DECL_VALUE_EXPR (decl);
1324 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1325 decl2 = TREE_OPERAND (decl2, 0);
1326 gcc_assert (DECL_P (decl2));
9561765e 1327 install_var_field (decl2, true, 3, ctx);
bc7bff74 1328 install_var_local (decl2, ctx);
1329 install_var_local (decl, ctx);
1330 }
1331 else
1332 {
1333 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
ca4c3545 1334 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
bc7bff74 1335 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1336 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1337 install_var_field (decl, true, 7, ctx);
1338 else
737cc978 1339 install_var_field (decl, true, 3, ctx);
0c302595 1340 if (is_gimple_omp_offloaded (ctx->stmt)
1341 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
bc7bff74 1342 install_var_local (decl, ctx);
1343 }
1344 }
1345 else
1346 {
1347 tree base = get_base_address (decl);
1348 tree nc = OMP_CLAUSE_CHAIN (c);
1349 if (DECL_P (base)
1350 && nc != NULL_TREE
1351 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1352 && OMP_CLAUSE_DECL (nc) == base
ca4c3545 1353 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
bc7bff74 1354 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1355 {
1356 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1357 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1358 }
1359 else
1360 {
691447ab 1361 if (ctx->outer)
1362 {
1363 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1364 decl = OMP_CLAUSE_DECL (c);
1365 }
bc7bff74 1366 gcc_assert (!splay_tree_lookup (ctx->field_map,
1367 (splay_tree_key) decl));
1368 tree field
1369 = build_decl (OMP_CLAUSE_LOCATION (c),
1370 FIELD_DECL, NULL_TREE, ptr_type_node);
5d4b30ea 1371 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
bc7bff74 1372 insert_field_into_struct (ctx->record_type, field);
1373 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1374 (splay_tree_value) field);
1375 }
1376 }
1377 break;
1378
56686608 1379 case OMP_CLAUSE__GRIDDIM_:
1380 if (ctx->outer)
1381 {
1382 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1383 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1384 }
1385 break;
1386
1e8e9920 1387 case OMP_CLAUSE_NOWAIT:
1388 case OMP_CLAUSE_ORDERED:
fd6481cf 1389 case OMP_CLAUSE_COLLAPSE:
1390 case OMP_CLAUSE_UNTIED:
2169f33b 1391 case OMP_CLAUSE_MERGEABLE:
bc7bff74 1392 case OMP_CLAUSE_PROC_BIND:
3d483a94 1393 case OMP_CLAUSE_SAFELEN:
43895be5 1394 case OMP_CLAUSE_SIMDLEN:
1395 case OMP_CLAUSE_THREADS:
1396 case OMP_CLAUSE_SIMD:
1397 case OMP_CLAUSE_NOGROUP:
1398 case OMP_CLAUSE_DEFAULTMAP:
ca4c3545 1399 case OMP_CLAUSE_ASYNC:
1400 case OMP_CLAUSE_WAIT:
1401 case OMP_CLAUSE_GANG:
1402 case OMP_CLAUSE_WORKER:
1403 case OMP_CLAUSE_VECTOR:
f4f5b4b4 1404 case OMP_CLAUSE_INDEPENDENT:
1405 case OMP_CLAUSE_AUTO:
1406 case OMP_CLAUSE_SEQ:
719a7570 1407 case OMP_CLAUSE_TILE:
57f872be 1408 case OMP_CLAUSE__SIMT_:
093c94dd 1409 case OMP_CLAUSE_DEFAULT:
7e5a76c8 1410 case OMP_CLAUSE_NONTEMPORAL:
737cc978 1411 case OMP_CLAUSE_IF_PRESENT:
1412 case OMP_CLAUSE_FINALIZE:
7e5a76c8 1413 case OMP_CLAUSE_TASK_REDUCTION:
1e8e9920 1414 break;
1415
bc7bff74 1416 case OMP_CLAUSE_ALIGNED:
1417 decl = OMP_CLAUSE_DECL (c);
1418 if (is_global_var (decl)
1419 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1420 install_var_local (decl, ctx);
1421 break;
1422
48152aa2 1423 case OMP_CLAUSE__CONDTEMP_:
4f4b92d8 1424 decl = OMP_CLAUSE_DECL (c);
48152aa2 1425 if (is_parallel_ctx (ctx))
1426 {
48152aa2 1427 install_var_field (decl, false, 3, ctx);
1428 install_var_local (decl, ctx);
1429 }
4f4b92d8 1430 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1431 && (gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
1432 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1433 install_var_local (decl, ctx);
48152aa2 1434 break;
1435
ca4c3545 1436 case OMP_CLAUSE__CACHE_:
1e8e9920 1437 default:
1438 gcc_unreachable ();
1439 }
1440 }
1441
1442 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1443 {
55d6e7cd 1444 switch (OMP_CLAUSE_CODE (c))
1e8e9920 1445 {
1446 case OMP_CLAUSE_LASTPRIVATE:
1447 /* Let the corresponding firstprivate clause create
1448 the variable. */
75a70cf9 1449 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
fd6481cf 1450 scan_array_reductions = true;
1e8e9920 1451 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1452 break;
1453 /* FALLTHRU */
1454
1e8e9920 1455 case OMP_CLAUSE_FIRSTPRIVATE:
ca4c3545 1456 case OMP_CLAUSE_PRIVATE:
3d483a94 1457 case OMP_CLAUSE_LINEAR:
43895be5 1458 case OMP_CLAUSE_IS_DEVICE_PTR:
1e8e9920 1459 decl = OMP_CLAUSE_DECL (c);
1460 if (is_variable_sized (decl))
43895be5 1461 {
1462 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1463 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1464 && is_gimple_omp_offloaded (ctx->stmt))
1465 {
1466 tree decl2 = DECL_VALUE_EXPR (decl);
1467 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1468 decl2 = TREE_OPERAND (decl2, 0);
1469 gcc_assert (DECL_P (decl2));
1470 install_var_local (decl2, ctx);
1471 fixup_remapped_decl (decl2, ctx, false);
1472 }
1473 install_var_local (decl, ctx);
1474 }
1e8e9920 1475 fixup_remapped_decl (decl, ctx,
55d6e7cd 1476 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1e8e9920 1477 && OMP_CLAUSE_PRIVATE_DEBUG (c));
43895be5 1478 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1479 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1e8e9920 1480 scan_array_reductions = true;
43895be5 1481 break;
1482
1483 case OMP_CLAUSE_REDUCTION:
7e5a76c8 1484 case OMP_CLAUSE_IN_REDUCTION:
43895be5 1485 decl = OMP_CLAUSE_DECL (c);
1486 if (TREE_CODE (decl) != MEM_REF)
1487 {
1488 if (is_variable_sized (decl))
1489 install_var_local (decl, ctx);
1490 fixup_remapped_decl (decl, ctx, false);
1491 }
1492 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
2b536a17 1493 scan_array_reductions = true;
1e8e9920 1494 break;
1495
7e5a76c8 1496 case OMP_CLAUSE_TASK_REDUCTION:
1497 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1498 scan_array_reductions = true;
1499 break;
1500
1e8e9920 1501 case OMP_CLAUSE_SHARED:
7e5a76c8 1502 /* Ignore shared directives in teams construct inside of
1503 target construct. */
1504 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1505 && !is_host_teams_ctx (ctx))
bc7bff74 1506 break;
1e8e9920 1507 decl = OMP_CLAUSE_DECL (c);
43895be5 1508 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1509 break;
1510 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1511 {
1512 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1513 ctx->outer)))
1514 break;
1515 bool by_ref = use_pointer_for_field (decl, ctx);
1516 install_var_field (decl, by_ref, 11, ctx);
1517 break;
1518 }
1519 fixup_remapped_decl (decl, ctx, false);
1e8e9920 1520 break;
1521
bc7bff74 1522 case OMP_CLAUSE_MAP:
ca4c3545 1523 if (!is_gimple_omp_offloaded (ctx->stmt))
bc7bff74 1524 break;
1525 decl = OMP_CLAUSE_DECL (c);
1526 if (DECL_P (decl)
9561765e 1527 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1528 && (OMP_CLAUSE_MAP_KIND (c)
1529 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
43895be5 1530 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
bc7bff74 1531 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
b0c5e347 1532 && varpool_node::get_create (decl)->offloadable)
bc7bff74 1533 break;
1534 if (DECL_P (decl))
1535 {
43895be5 1536 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1537 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
bc7bff74 1538 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1539 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1540 {
1541 tree new_decl = lookup_decl (decl, ctx);
1542 TREE_TYPE (new_decl)
1543 = remap_type (TREE_TYPE (decl), &ctx->cb);
1544 }
1545 else if (DECL_SIZE (decl)
1546 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1547 {
1548 tree decl2 = DECL_VALUE_EXPR (decl);
1549 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1550 decl2 = TREE_OPERAND (decl2, 0);
1551 gcc_assert (DECL_P (decl2));
1552 fixup_remapped_decl (decl2, ctx, false);
1553 fixup_remapped_decl (decl, ctx, true);
1554 }
1555 else
1556 fixup_remapped_decl (decl, ctx, false);
1557 }
1558 break;
1559
1e8e9920 1560 case OMP_CLAUSE_COPYPRIVATE:
1561 case OMP_CLAUSE_COPYIN:
1562 case OMP_CLAUSE_DEFAULT:
1563 case OMP_CLAUSE_IF:
1564 case OMP_CLAUSE_NUM_THREADS:
bc7bff74 1565 case OMP_CLAUSE_NUM_TEAMS:
1566 case OMP_CLAUSE_THREAD_LIMIT:
1567 case OMP_CLAUSE_DEVICE:
1e8e9920 1568 case OMP_CLAUSE_SCHEDULE:
bc7bff74 1569 case OMP_CLAUSE_DIST_SCHEDULE:
1e8e9920 1570 case OMP_CLAUSE_NOWAIT:
1571 case OMP_CLAUSE_ORDERED:
fd6481cf 1572 case OMP_CLAUSE_COLLAPSE:
1573 case OMP_CLAUSE_UNTIED:
2169f33b 1574 case OMP_CLAUSE_FINAL:
1575 case OMP_CLAUSE_MERGEABLE:
bc7bff74 1576 case OMP_CLAUSE_PROC_BIND:
3d483a94 1577 case OMP_CLAUSE_SAFELEN:
43895be5 1578 case OMP_CLAUSE_SIMDLEN:
bc7bff74 1579 case OMP_CLAUSE_ALIGNED:
1580 case OMP_CLAUSE_DEPEND:
1581 case OMP_CLAUSE__LOOPTEMP_:
7e5a76c8 1582 case OMP_CLAUSE__REDUCTEMP_:
bc7bff74 1583 case OMP_CLAUSE_TO:
1584 case OMP_CLAUSE_FROM:
43895be5 1585 case OMP_CLAUSE_PRIORITY:
1586 case OMP_CLAUSE_GRAINSIZE:
1587 case OMP_CLAUSE_NUM_TASKS:
1588 case OMP_CLAUSE_THREADS:
1589 case OMP_CLAUSE_SIMD:
1590 case OMP_CLAUSE_NOGROUP:
1591 case OMP_CLAUSE_DEFAULTMAP:
1592 case OMP_CLAUSE_USE_DEVICE_PTR:
7e5a76c8 1593 case OMP_CLAUSE_NONTEMPORAL:
ca4c3545 1594 case OMP_CLAUSE_ASYNC:
1595 case OMP_CLAUSE_WAIT:
1596 case OMP_CLAUSE_NUM_GANGS:
1597 case OMP_CLAUSE_NUM_WORKERS:
1598 case OMP_CLAUSE_VECTOR_LENGTH:
1599 case OMP_CLAUSE_GANG:
1600 case OMP_CLAUSE_WORKER:
1601 case OMP_CLAUSE_VECTOR:
f4f5b4b4 1602 case OMP_CLAUSE_INDEPENDENT:
1603 case OMP_CLAUSE_AUTO:
1604 case OMP_CLAUSE_SEQ:
719a7570 1605 case OMP_CLAUSE_TILE:
56686608 1606 case OMP_CLAUSE__GRIDDIM_:
57f872be 1607 case OMP_CLAUSE__SIMT_:
737cc978 1608 case OMP_CLAUSE_IF_PRESENT:
1609 case OMP_CLAUSE_FINALIZE:
48152aa2 1610 case OMP_CLAUSE__CONDTEMP_:
ca4c3545 1611 break;
1612
ca4c3545 1613 case OMP_CLAUSE__CACHE_:
1e8e9920 1614 default:
1615 gcc_unreachable ();
1616 }
1617 }
1618
ca4c3545 1619 gcc_checking_assert (!scan_array_reductions
1620 || !is_gimple_omp_oacc (ctx->stmt));
1e8e9920 1621 if (scan_array_reductions)
d3831f71 1622 {
1623 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7e5a76c8 1624 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1625 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1626 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
d3831f71 1627 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1628 {
1629 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1630 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1631 }
1632 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1633 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1634 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1635 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1636 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1637 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1638 }
1e8e9920 1639}
1640
efa02472 1641/* Create a new name for the omp child function. Returns an identifier. */
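/* (The generated names look like "foo._omp_fn.N", or "foo._omp_cpyfn.N"
   for a task copy function, courtesy of clone_function_name_numbered.)  */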
1e8e9920 1642
1e8e9920 1643static tree
efa02472 1644create_omp_child_function_name (bool task_copy)
1e8e9920 1645{
87943388 1646 return clone_function_name_numbered (current_function_decl,
1647 task_copy ? "_omp_cpyfn" : "_omp_fn");
40750995 1648}
1649
bab6706a 1650/* Return true if CTX may belong to offloaded code: either if the current
 1651 function is offloaded, or if any enclosing context corresponds to a target region. */
1652
1653static bool
1654omp_maybe_offloaded_ctx (omp_context *ctx)
1655{
1656 if (cgraph_node::get (current_function_decl)->offloadable)
1657 return true;
1658 for (; ctx; ctx = ctx->outer)
1659 if (is_gimple_omp_offloaded (ctx->stmt))
1660 return true;
1661 return false;
1662}
1663
1e8e9920 1664/* Build a decl for the omp child function. It won't contain a body
1665 yet, just the bare decl. */
1666
1667static void
fd6481cf 1668create_omp_child_function (omp_context *ctx, bool task_copy)
1e8e9920 1669{
1670 tree decl, type, name, t;
1671
efa02472 1672 name = create_omp_child_function_name (task_copy);
fd6481cf 1673 if (task_copy)
1674 type = build_function_type_list (void_type_node, ptr_type_node,
1675 ptr_type_node, NULL_TREE);
1676 else
1677 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1e8e9920 1678
40750995 1679 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1e8e9920 1680
ca4c3545 1681 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1682 || !task_copy);
fd6481cf 1683 if (!task_copy)
1684 ctx->cb.dst_fn = decl;
1685 else
75a70cf9 1686 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1e8e9920 1687
1688 TREE_STATIC (decl) = 1;
1689 TREE_USED (decl) = 1;
1690 DECL_ARTIFICIAL (decl) = 1;
1691 DECL_IGNORED_P (decl) = 0;
1692 TREE_PUBLIC (decl) = 0;
1693 DECL_UNINLINABLE (decl) = 1;
1694 DECL_EXTERNAL (decl) = 0;
1695 DECL_CONTEXT (decl) = NULL_TREE;
773c5ba7 1696 DECL_INITIAL (decl) = make_node (BLOCK);
2a066179 1697 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1a12ad87 1698 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
02e955b2 1699 /* Remove omp declare simd attribute from the new attributes. */
1700 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1701 {
1702 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1703 a = a2;
1704 a = TREE_CHAIN (a);
1705 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1706 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1707 *p = TREE_CHAIN (*p);
1708 else
1709 {
1710 tree chain = TREE_CHAIN (*p);
1711 *p = copy_node (*p);
1712 p = &TREE_CHAIN (*p);
1713 *p = chain;
1714 }
1715 }
1a12ad87 1716 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1717 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1718 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1719 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1720 DECL_FUNCTION_VERSIONED (decl)
1721 = DECL_FUNCTION_VERSIONED (current_function_decl);
1722
bab6706a 1723 if (omp_maybe_offloaded_ctx (ctx))
bc7bff74 1724 {
bab6706a 1725 cgraph_node::get_create (decl)->offloadable = 1;
1726 if (ENABLE_OFFLOADING)
1727 g->have_offload = true;
bc7bff74 1728 }
1e8e9920 1729
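  /* If the child is offloadable but the containing function is not already
     marked 'omp declare target', add the matching attribute: 'omp target
     entrypoint' when the construct itself is offloaded, 'omp declare target'
     otherwise.  */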
ec12b31a 1730 if (cgraph_node::get_create (decl)->offloadable
1731 && !lookup_attribute ("omp declare target",
1732 DECL_ATTRIBUTES (current_function_decl)))
bab6706a 1733 {
1734 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1735 ? "omp target entrypoint"
1736 : "omp declare target");
1737 DECL_ATTRIBUTES (decl)
1738 = tree_cons (get_identifier (target_attr),
1739 NULL_TREE, DECL_ATTRIBUTES (decl));
1740 }
ec12b31a 1741
e60a6f7b 1742 t = build_decl (DECL_SOURCE_LOCATION (decl),
1743 RESULT_DECL, NULL_TREE, void_type_node);
1e8e9920 1744 DECL_ARTIFICIAL (t) = 1;
1745 DECL_IGNORED_P (t) = 1;
8e5b4ed6 1746 DECL_CONTEXT (t) = decl;
1e8e9920 1747 DECL_RESULT (decl) = t;
1748
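  /* The child function takes a single pointer argument, .omp_data_i, which
     points at the .omp_data_s record used to pass shared and copied
     variables; a task copy function gets an additional .omp_data_o argument
     chained in front of it.  */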
40750995 1749 tree data_name = get_identifier (".omp_data_i");
1750 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1751 ptr_type_node);
1e8e9920 1752 DECL_ARTIFICIAL (t) = 1;
84bfaaeb 1753 DECL_NAMELESS (t) = 1;
1e8e9920 1754 DECL_ARG_TYPE (t) = ptr_type_node;
773c5ba7 1755 DECL_CONTEXT (t) = current_function_decl;
1e8e9920 1756 TREE_USED (t) = 1;
43895be5 1757 TREE_READONLY (t) = 1;
1e8e9920 1758 DECL_ARGUMENTS (decl) = t;
fd6481cf 1759 if (!task_copy)
1760 ctx->receiver_decl = t;
1761 else
1762 {
e60a6f7b 1763 t = build_decl (DECL_SOURCE_LOCATION (decl),
1764 PARM_DECL, get_identifier (".omp_data_o"),
fd6481cf 1765 ptr_type_node);
1766 DECL_ARTIFICIAL (t) = 1;
84bfaaeb 1767 DECL_NAMELESS (t) = 1;
fd6481cf 1768 DECL_ARG_TYPE (t) = ptr_type_node;
1769 DECL_CONTEXT (t) = current_function_decl;
1770 TREE_USED (t) = 1;
86f2ad37 1771 TREE_ADDRESSABLE (t) = 1;
1767a056 1772 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
fd6481cf 1773 DECL_ARGUMENTS (decl) = t;
1774 }
1e8e9920 1775
48e1416a 1776 /* Allocate memory for the function structure. The call to
773c5ba7 1777 allocate_struct_function clobbers CFUN, so we need to restore
1e8e9920 1778 it afterward. */
87d4aa85 1779 push_struct_function (decl);
75a70cf9 1780 cfun->function_end_locus = gimple_location (ctx->stmt);
9ae1b28a 1781 init_tree_ssa (cfun);
87d4aa85 1782 pop_cfun ();
1e8e9920 1783}
1784
bc7bff74 1785/* Callback for walk_gimple_seq. Check whether a combined parallel
 1786 contains an OMP_FOR for which gimple_omp_for_combined_into_p is true. */
1787
4954efd4 1788tree
1789omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1790 bool *handled_ops_p,
1791 struct walk_stmt_info *wi)
bc7bff74 1792{
42acab1c 1793 gimple *stmt = gsi_stmt (*gsi_p);
bc7bff74 1794
1795 *handled_ops_p = true;
1796 switch (gimple_code (stmt))
1797 {
1798 WALK_SUBSTMTS;
1799
1800 case GIMPLE_OMP_FOR:
1801 if (gimple_omp_for_combined_into_p (stmt)
43895be5 1802 && gimple_omp_for_kind (stmt)
1803 == *(const enum gf_mask *) (wi->info))
bc7bff74 1804 {
1805 wi->info = stmt;
1806 return integer_zero_node;
1807 }
1808 break;
1809 default:
1810 break;
1811 }
1812 return NULL;
1813}
1814
7e5a76c8 1815/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
43895be5 1816
1817static void
1818add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1819 omp_context *outer_ctx)
1820{
1821 struct walk_stmt_info wi;
1822
1823 memset (&wi, 0, sizeof (wi));
1824 wi.val_only = true;
1825 wi.info = (void *) &msk;
4954efd4 1826 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
43895be5 1827 if (wi.info != (void *) &msk)
1828 {
1829 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1830 struct omp_for_data fd;
4954efd4 1831 omp_extract_for_data (for_stmt, &fd, NULL);
43895be5 1832 /* We need two temporaries with fd.loop.v type (istart/iend)
1833 and then (fd.collapse - 1) temporaries with the same
1834 type for count2 ... countN-1 vars if not constant. */
1835 size_t count = 2, i;
1836 tree type = fd.iter_type;
1837 if (fd.collapse > 1
1838 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1839 {
1840 count += fd.collapse - 1;
9561765e 1841 /* If there are lastprivate clauses on the inner
43895be5 1842 GIMPLE_OMP_FOR, add one more temporary for the total number
1843 of iterations (product of count1 ... countN-1). */
4954efd4 1844 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
9561765e 1845 OMP_CLAUSE_LASTPRIVATE))
1846 count++;
1847 else if (msk == GF_OMP_FOR_KIND_FOR
4954efd4 1848 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
9561765e 1849 OMP_CLAUSE_LASTPRIVATE))
43895be5 1850 count++;
1851 }
1852 for (i = 0; i < count; i++)
1853 {
1854 tree temp = create_tmp_var (type);
1855 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1856 insert_decl_map (&outer_ctx->cb, temp, temp);
1857 OMP_CLAUSE_DECL (c) = temp;
1858 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1859 gimple_omp_taskreg_set_clauses (stmt, c);
1860 }
1861 }
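  /* A taskloop with reduction clauses additionally needs a _REDUCTEMP_
     temporary: a pointer through which the task-reduction bookkeeping data
     is passed.  */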
7e5a76c8 1862 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1863 && omp_find_clause (gimple_omp_task_clauses (stmt),
1864 OMP_CLAUSE_REDUCTION))
1865 {
1866 tree type = build_pointer_type (pointer_sized_int_node);
1867 tree temp = create_tmp_var (type);
1868 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1869 insert_decl_map (&outer_ctx->cb, temp, temp);
1870 OMP_CLAUSE_DECL (c) = temp;
1871 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1872 gimple_omp_task_set_clauses (stmt, c);
1873 }
43895be5 1874}
1875
1e8e9920 1876/* Scan an OpenMP parallel directive. */
1877
1878static void
75a70cf9 1879scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1e8e9920 1880{
1881 omp_context *ctx;
1882 tree name;
1a91d914 1883 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1e8e9920 1884
1885 /* Ignore parallel directives with empty bodies, unless there
1886 are copyin clauses. */
1887 if (optimize > 0
75a70cf9 1888 && empty_body_p (gimple_omp_body (stmt))
4954efd4 1889 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
75a70cf9 1890 OMP_CLAUSE_COPYIN) == NULL)
1e8e9920 1891 {
75a70cf9 1892 gsi_replace (gsi, gimple_build_nop (), false);
1e8e9920 1893 return;
1894 }
1895
bc7bff74 1896 if (gimple_omp_parallel_combined_p (stmt))
43895be5 1897 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
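  /* If any reduction clause uses the task modifier, add a _REDUCTEMP_ clause
     with a fresh temporary; GOMP_parallel_reductions expects the
     corresponding field at the start of the data record (see
     finish_taskreg_scan).  */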
7e5a76c8 1898 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1899 OMP_CLAUSE_REDUCTION);
1900 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1901 if (OMP_CLAUSE_REDUCTION_TASK (c))
1902 {
1903 tree type = build_pointer_type (pointer_sized_int_node);
1904 tree temp = create_tmp_var (type);
1905 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1906 if (outer_ctx)
1907 insert_decl_map (&outer_ctx->cb, temp, temp);
1908 OMP_CLAUSE_DECL (c) = temp;
1909 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1910 gimple_omp_parallel_set_clauses (stmt, c);
1911 break;
1912 }
1913 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1914 break;
bc7bff74 1915
75a70cf9 1916 ctx = new_omp_context (stmt, outer_ctx);
37eaded9 1917 taskreg_contexts.safe_push (ctx);
fd6481cf 1918 if (taskreg_nesting_level > 1)
773c5ba7 1919 ctx->is_nested = true;
1e8e9920 1920 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1e8e9920 1921 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1e8e9920 1922 name = create_tmp_var_name (".omp_data_s");
e60a6f7b 1923 name = build_decl (gimple_location (stmt),
1924 TYPE_DECL, name, ctx->record_type);
84bfaaeb 1925 DECL_ARTIFICIAL (name) = 1;
1926 DECL_NAMELESS (name) = 1;
1e8e9920 1927 TYPE_NAME (ctx->record_type) = name;
240131b5 1928 TYPE_ARTIFICIAL (ctx->record_type) = 1;
56686608 1929 if (!gimple_omp_parallel_grid_phony (stmt))
1930 {
1931 create_omp_child_function (ctx, false);
1932 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1933 }
1e8e9920 1934
75a70cf9 1935 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
ab129075 1936 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 1937
1938 if (TYPE_FIELDS (ctx->record_type) == NULL)
1939 ctx->record_type = ctx->receiver_decl = NULL;
1e8e9920 1940}
1941
fd6481cf 1942/* Scan an OpenMP task directive. */
1943
1944static void
75a70cf9 1945scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
fd6481cf 1946{
1947 omp_context *ctx;
75a70cf9 1948 tree name, t;
1a91d914 1949 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
fd6481cf 1950
e51b4c73 1951 /* Ignore task directives with empty bodies, unless they have a depend
1952 clause. */
fd6481cf 1953 if (optimize > 0
7e5a76c8 1954 && gimple_omp_body (stmt)
e51b4c73 1955 && empty_body_p (gimple_omp_body (stmt))
1956 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
fd6481cf 1957 {
75a70cf9 1958 gsi_replace (gsi, gimple_build_nop (), false);
fd6481cf 1959 return;
1960 }
1961
43895be5 1962 if (gimple_omp_task_taskloop_p (stmt))
1963 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1964
75a70cf9 1965 ctx = new_omp_context (stmt, outer_ctx);
7e5a76c8 1966
1967 if (gimple_omp_task_taskwait_p (stmt))
1968 {
1969 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1970 return;
1971 }
1972
37eaded9 1973 taskreg_contexts.safe_push (ctx);
fd6481cf 1974 if (taskreg_nesting_level > 1)
1975 ctx->is_nested = true;
1976 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
fd6481cf 1977 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1978 name = create_tmp_var_name (".omp_data_s");
e60a6f7b 1979 name = build_decl (gimple_location (stmt),
1980 TYPE_DECL, name, ctx->record_type);
84bfaaeb 1981 DECL_ARTIFICIAL (name) = 1;
1982 DECL_NAMELESS (name) = 1;
fd6481cf 1983 TYPE_NAME (ctx->record_type) = name;
240131b5 1984 TYPE_ARTIFICIAL (ctx->record_type) = 1;
fd6481cf 1985 create_omp_child_function (ctx, false);
75a70cf9 1986 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
fd6481cf 1987
75a70cf9 1988 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
fd6481cf 1989
1990 if (ctx->srecord_type)
1991 {
1992 name = create_tmp_var_name (".omp_data_a");
e60a6f7b 1993 name = build_decl (gimple_location (stmt),
1994 TYPE_DECL, name, ctx->srecord_type);
84bfaaeb 1995 DECL_ARTIFICIAL (name) = 1;
1996 DECL_NAMELESS (name) = 1;
fd6481cf 1997 TYPE_NAME (ctx->srecord_type) = name;
240131b5 1998 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
fd6481cf 1999 create_omp_child_function (ctx, true);
2000 }
2001
ab129075 2002 scan_omp (gimple_omp_body_ptr (stmt), ctx);
fd6481cf 2003
2004 if (TYPE_FIELDS (ctx->record_type) == NULL)
2005 {
2006 ctx->record_type = ctx->receiver_decl = NULL;
75a70cf9 2007 t = build_int_cst (long_integer_type_node, 0);
2008 gimple_omp_task_set_arg_size (stmt, t);
2009 t = build_int_cst (long_integer_type_node, 1);
2010 gimple_omp_task_set_arg_align (stmt, t);
fd6481cf 2011 }
37eaded9 2012}
2013
b9238860 2014/* Helper function for finish_taskreg_scan, called through walk_tree.
 2015 If maybe_lookup_decl_in_outer_ctx returns a different tree for some
 2016 VAR_DECL, replace it in the expression. */
2017
2018static tree
2019finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2020{
2021 if (VAR_P (*tp))
2022 {
2023 omp_context *ctx = (omp_context *) data;
2024 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2025 if (t != *tp)
2026 {
2027 if (DECL_HAS_VALUE_EXPR_P (t))
2028 t = unshare_expr (DECL_VALUE_EXPR (t));
2029 *tp = t;
2030 }
2031 *walk_subtrees = 0;
2032 }
2033 else if (IS_TYPE_OR_DECL_P (*tp))
2034 *walk_subtrees = 0;
2035 return NULL_TREE;
2036}
37eaded9 2037
2038/* If any decls have been made addressable during scan_omp,
 2039 adjust their fields if needed, and lay out record types
2040 of parallel/task constructs. */
2041
2042static void
2043finish_taskreg_scan (omp_context *ctx)
2044{
2045 if (ctx->record_type == NULL_TREE)
2046 return;
2047
 2048 /* If any task_shared_vars were needed, verify for all
7e5a76c8 2049 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
37eaded9 2050 statements whether use_pointer_for_field has changed
 2051 because of that. If it did, update the field types now. */
2052 if (task_shared_vars)
2053 {
2054 tree c;
2055
2056 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2057 c; c = OMP_CLAUSE_CHAIN (c))
43895be5 2058 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2059 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
37eaded9 2060 {
2061 tree decl = OMP_CLAUSE_DECL (c);
2062
2063 /* Global variables don't need to be copied,
2064 the receiver side will use them directly. */
2065 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2066 continue;
2067 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2068 || !use_pointer_for_field (decl, ctx))
2069 continue;
2070 tree field = lookup_field (decl, ctx);
2071 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2072 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2073 continue;
2074 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2075 TREE_THIS_VOLATILE (field) = 0;
2076 DECL_USER_ALIGN (field) = 0;
5d4b30ea 2077 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
37eaded9 2078 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
5d4b30ea 2079 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
37eaded9 2080 if (ctx->srecord_type)
2081 {
2082 tree sfield = lookup_sfield (decl, ctx);
2083 TREE_TYPE (sfield) = TREE_TYPE (field);
2084 TREE_THIS_VOLATILE (sfield) = 0;
2085 DECL_USER_ALIGN (sfield) = 0;
5d4b30ea 2086 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
37eaded9 2087 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
5d4b30ea 2088 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
37eaded9 2089 }
2090 }
2091 }
2092
2093 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
7e5a76c8 2094 {
2095 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2096 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2097 if (c)
2098 {
2099 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2100 expects to find it at the start of data. */
2101 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2102 tree *p = &TYPE_FIELDS (ctx->record_type);
2103 while (*p)
2104 if (*p == f)
2105 {
2106 *p = DECL_CHAIN (*p);
2107 break;
2108 }
2109 else
2110 p = &DECL_CHAIN (*p);
2111 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2112 TYPE_FIELDS (ctx->record_type) = f;
2113 }
2114 layout_type (ctx->record_type);
2115 fixup_child_record_type (ctx);
2116 }
2117 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
37eaded9 2118 {
2119 layout_type (ctx->record_type);
2120 fixup_child_record_type (ctx);
2121 }
fd6481cf 2122 else
2123 {
37eaded9 2124 location_t loc = gimple_location (ctx->stmt);
fd6481cf 2125 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2126 /* Move VLA fields to the end. */
2127 p = &TYPE_FIELDS (ctx->record_type);
2128 while (*p)
2129 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2130 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2131 {
2132 *q = *p;
2133 *p = TREE_CHAIN (*p);
2134 TREE_CHAIN (*q) = NULL_TREE;
2135 q = &TREE_CHAIN (*q);
2136 }
2137 else
1767a056 2138 p = &DECL_CHAIN (*p);
fd6481cf 2139 *p = vla_fields;
43895be5 2140 if (gimple_omp_task_taskloop_p (ctx->stmt))
2141 {
 2142 /* Move the fields corresponding to the first and second _looptemp_
 2143 clauses first. These are filled by GOMP_taskloop
2144 and thus need to be in specific positions. */
7e5a76c8 2145 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2146 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
4954efd4 2147 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
43895be5 2148 OMP_CLAUSE__LOOPTEMP_);
7e5a76c8 2149 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
43895be5 2150 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2151 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
7e5a76c8 2152 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
43895be5 2153 p = &TYPE_FIELDS (ctx->record_type);
2154 while (*p)
7e5a76c8 2155 if (*p == f1 || *p == f2 || *p == f3)
43895be5 2156 *p = DECL_CHAIN (*p);
2157 else
2158 p = &DECL_CHAIN (*p);
2159 DECL_CHAIN (f1) = f2;
7e5a76c8 2160 if (c3)
2161 {
2162 DECL_CHAIN (f2) = f3;
2163 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2164 }
2165 else
2166 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
43895be5 2167 TYPE_FIELDS (ctx->record_type) = f1;
2168 if (ctx->srecord_type)
2169 {
2170 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2171 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
7e5a76c8 2172 if (c3)
2173 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
43895be5 2174 p = &TYPE_FIELDS (ctx->srecord_type);
2175 while (*p)
7e5a76c8 2176 if (*p == f1 || *p == f2 || *p == f3)
43895be5 2177 *p = DECL_CHAIN (*p);
2178 else
2179 p = &DECL_CHAIN (*p);
2180 DECL_CHAIN (f1) = f2;
2181 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
7e5a76c8 2182 if (c3)
2183 {
2184 DECL_CHAIN (f2) = f3;
2185 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2186 }
2187 else
2188 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
43895be5 2189 TYPE_FIELDS (ctx->srecord_type) = f1;
2190 }
2191 }
fd6481cf 2192 layout_type (ctx->record_type);
2193 fixup_child_record_type (ctx);
2194 if (ctx->srecord_type)
2195 layout_type (ctx->srecord_type);
37eaded9 2196 tree t = fold_convert_loc (loc, long_integer_type_node,
2197 TYPE_SIZE_UNIT (ctx->record_type));
b9238860 2198 if (TREE_CODE (t) != INTEGER_CST)
2199 {
2200 t = unshare_expr (t);
2201 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2202 }
37eaded9 2203 gimple_omp_task_set_arg_size (ctx->stmt, t);
75a70cf9 2204 t = build_int_cst (long_integer_type_node,
fd6481cf 2205 TYPE_ALIGN_UNIT (ctx->record_type));
37eaded9 2206 gimple_omp_task_set_arg_align (ctx->stmt, t);
fd6481cf 2207 }
2208}
2209
a8e785ba 2210/* Find the enclosing offload context. */
1e8e9920 2211
ca4c3545 2212static omp_context *
2213enclosing_target_ctx (omp_context *ctx)
2214{
a8e785ba 2215 for (; ctx; ctx = ctx->outer)
2216 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2217 break;
2218
ca4c3545 2219 return ctx;
2220}
2221
a8e785ba 2222/* Return true if ctx is part of an oacc kernels region. */
2223
ca4c3545 2224static bool
a8e785ba 2225ctx_in_oacc_kernels_region (omp_context *ctx)
ca4c3545 2226{
a8e785ba 2227 for (;ctx != NULL; ctx = ctx->outer)
2228 {
2229 gimple *stmt = ctx->stmt;
2230 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2231 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2232 return true;
2233 }
2234
2235 return false;
2236}
2237
 2238/* Check the parallelism clauses inside a kernels region.
2239 Until kernels handling moves to use the same loop indirection
2240 scheme as parallel, we need to do this checking early. */
2241
2242static unsigned
2243check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2244{
2245 bool checking = true;
2246 unsigned outer_mask = 0;
2247 unsigned this_mask = 0;
2248 bool has_seq = false, has_auto = false;
2249
2250 if (ctx->outer)
2251 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
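  /* A NULL STMT means we have been called recursively for an enclosing
     context: only collect the parallelism mask there, without checking.  */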
2252 if (!stmt)
2253 {
2254 checking = false;
2255 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2256 return outer_mask;
2257 stmt = as_a <gomp_for *> (ctx->stmt);
2258 }
2259
2260 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2261 {
2262 switch (OMP_CLAUSE_CODE (c))
2263 {
2264 case OMP_CLAUSE_GANG:
2265 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2266 break;
2267 case OMP_CLAUSE_WORKER:
2268 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2269 break;
2270 case OMP_CLAUSE_VECTOR:
2271 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2272 break;
2273 case OMP_CLAUSE_SEQ:
2274 has_seq = true;
2275 break;
2276 case OMP_CLAUSE_AUTO:
2277 has_auto = true;
2278 break;
2279 default:
2280 break;
2281 }
2282 }
2283
2284 if (checking)
2285 {
2286 if (has_seq && (this_mask || has_auto))
2287 error_at (gimple_location (stmt), "%<seq%> overrides other"
2288 " OpenACC loop specifiers");
2289 else if (has_auto && this_mask)
2290 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2291 " OpenACC loop specifiers");
2292
2293 if (this_mask & outer_mask)
2294 error_at (gimple_location (stmt), "inner loop uses same"
2295 " OpenACC parallelism as containing loop");
2296 }
2297
2298 return outer_mask | this_mask;
ca4c3545 2299}
2300
2301/* Scan a GIMPLE_OMP_FOR. */
1e8e9920 2302
9cf7bec9 2303static omp_context *
1a91d914 2304scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
1e8e9920 2305{
773c5ba7 2306 omp_context *ctx;
75a70cf9 2307 size_t i;
ca4c3545 2308 tree clauses = gimple_omp_for_clauses (stmt);
2309
773c5ba7 2310 ctx = new_omp_context (stmt, outer_ctx);
1e8e9920 2311
ca4c3545 2312 if (is_gimple_omp_oacc (stmt))
2313 {
a8e785ba 2314 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2315
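      /* Inside an OpenACC parallel region (or an orphaned loop), the gang,
	 worker and vector clauses must not carry arguments.  */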
2316 if (!tgt || is_oacc_parallel (tgt))
2317 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2318 {
2319 char const *check = NULL;
2320
2321 switch (OMP_CLAUSE_CODE (c))
2322 {
2323 case OMP_CLAUSE_GANG:
2324 check = "gang";
2325 break;
2326
2327 case OMP_CLAUSE_WORKER:
2328 check = "worker";
2329 break;
2330
2331 case OMP_CLAUSE_VECTOR:
2332 check = "vector";
2333 break;
2334
2335 default:
2336 break;
2337 }
2338
2339 if (check && OMP_CLAUSE_OPERAND (c, 0))
2340 error_at (gimple_location (stmt),
2341 "argument not permitted on %qs clause in"
2342 " OpenACC %<parallel%>", check);
2343 }
2344
2345 if (tgt && is_oacc_kernels (tgt))
2346 {
7e5a76c8 2347 /* Strip out reductions, as they are not handled yet. */
a8e785ba 2348 tree *prev_ptr = &clauses;
2349
2350 while (tree probe = *prev_ptr)
ca4c3545 2351 {
a8e785ba 2352 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2353
2354 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2355 *prev_ptr = *next_ptr;
2356 else
2357 prev_ptr = next_ptr;
ca4c3545 2358 }
a8e785ba 2359
2360 gimple_omp_for_set_clauses (stmt, clauses);
2361 check_oacc_kernel_gwv (stmt, ctx);
ca4c3545 2362 }
2363 }
2364
2365 scan_sharing_clauses (clauses, ctx);
1e8e9920 2366
ab129075 2367 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
75a70cf9 2368 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
fd6481cf 2369 {
75a70cf9 2370 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2371 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2372 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2373 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
fd6481cf 2374 }
ab129075 2375 scan_omp (gimple_omp_body_ptr (stmt), ctx);
9cf7bec9 2376 return ctx;
1e8e9920 2377}
2378
57f872be 2379/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
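/* The construct is replaced by a GIMPLE_BIND of roughly this shape:

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1:
       <copy of the loop, carrying an artificial _simt_ clause>
       goto lab3;
     lab2:
       <original loop>
     lab3:

   Both loop copies are then scanned.  */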
2380
2381static void
2382scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2383 omp_context *outer_ctx)
2384{
2385 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2386 gsi_replace (gsi, bind, false);
2387 gimple_seq seq = NULL;
2388 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2389 tree cond = create_tmp_var_raw (integer_type_node);
2390 DECL_CONTEXT (cond) = current_function_decl;
2391 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2392 gimple_bind_set_vars (bind, cond);
2393 gimple_call_set_lhs (g, cond);
2394 gimple_seq_add_stmt (&seq, g);
2395 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2396 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2397 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2398 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2399 gimple_seq_add_stmt (&seq, g);
2400 g = gimple_build_label (lab1);
2401 gimple_seq_add_stmt (&seq, g);
2402 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2403 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2404 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2405 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2406 gimple_omp_for_set_clauses (new_stmt, clause);
2407 gimple_seq_add_stmt (&seq, new_stmt);
2408 g = gimple_build_goto (lab3);
2409 gimple_seq_add_stmt (&seq, g);
2410 g = gimple_build_label (lab2);
2411 gimple_seq_add_stmt (&seq, g);
2412 gimple_seq_add_stmt (&seq, stmt);
2413 g = gimple_build_label (lab3);
2414 gimple_seq_add_stmt (&seq, g);
2415 gimple_bind_set_body (bind, seq);
2416 update_stmt (bind);
2417 scan_omp_for (new_stmt, outer_ctx);
9cf7bec9 2418 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
57f872be 2419}
2420
1e8e9920 2421/* Scan an OpenMP sections directive. */
2422
2423static void
1a91d914 2424scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
1e8e9920 2425{
1e8e9920 2426 omp_context *ctx;
2427
2428 ctx = new_omp_context (stmt, outer_ctx);
75a70cf9 2429 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
ab129075 2430 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 2431}
2432
2433/* Scan an OpenMP single directive. */
2434
2435static void
1a91d914 2436scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
1e8e9920 2437{
1e8e9920 2438 omp_context *ctx;
2439 tree name;
2440
2441 ctx = new_omp_context (stmt, outer_ctx);
2442 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2443 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2444 name = create_tmp_var_name (".omp_copy_s");
e60a6f7b 2445 name = build_decl (gimple_location (stmt),
2446 TYPE_DECL, name, ctx->record_type);
1e8e9920 2447 TYPE_NAME (ctx->record_type) = name;
2448
75a70cf9 2449 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
ab129075 2450 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 2451
2452 if (TYPE_FIELDS (ctx->record_type) == NULL)
2453 ctx->record_type = NULL;
2454 else
2455 layout_type (ctx->record_type);
2456}
2457
ca4c3545 2458/* Scan a GIMPLE_OMP_TARGET. */
bc7bff74 2459
2460static void
1a91d914 2461scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
bc7bff74 2462{
2463 omp_context *ctx;
2464 tree name;
ca4c3545 2465 bool offloaded = is_gimple_omp_offloaded (stmt);
2466 tree clauses = gimple_omp_target_clauses (stmt);
bc7bff74 2467
2468 ctx = new_omp_context (stmt, outer_ctx);
2469 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
bc7bff74 2470 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2471 name = create_tmp_var_name (".omp_data_t");
2472 name = build_decl (gimple_location (stmt),
2473 TYPE_DECL, name, ctx->record_type);
2474 DECL_ARTIFICIAL (name) = 1;
2475 DECL_NAMELESS (name) = 1;
2476 TYPE_NAME (ctx->record_type) = name;
240131b5 2477 TYPE_ARTIFICIAL (ctx->record_type) = 1;
12dc9a16 2478
ca4c3545 2479 if (offloaded)
bc7bff74 2480 {
2481 create_omp_child_function (ctx, false);
2482 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2483 }
2484
737cc978 2485 scan_sharing_clauses (clauses, ctx);
bc7bff74 2486 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2487
2488 if (TYPE_FIELDS (ctx->record_type) == NULL)
2489 ctx->record_type = ctx->receiver_decl = NULL;
2490 else
2491 {
2492 TYPE_FIELDS (ctx->record_type)
2493 = nreverse (TYPE_FIELDS (ctx->record_type));
382ecba7 2494 if (flag_checking)
2495 {
2496 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2497 for (tree field = TYPE_FIELDS (ctx->record_type);
2498 field;
2499 field = DECL_CHAIN (field))
2500 gcc_assert (DECL_ALIGN (field) == align);
2501 }
bc7bff74 2502 layout_type (ctx->record_type);
ca4c3545 2503 if (offloaded)
bc7bff74 2504 fixup_child_record_type (ctx);
2505 }
2506}
2507
2508/* Scan an OpenMP teams directive. */
2509
2510static void
1a91d914 2511scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
bc7bff74 2512{
2513 omp_context *ctx = new_omp_context (stmt, outer_ctx);
7e5a76c8 2514
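  /* A teams construct nested inside a target region is only scanned; a host
     teams construct (one not nested inside target) gets its own child
     function and .omp_data_s record, much like a parallel.  */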
2515 if (!gimple_omp_teams_host (stmt))
2516 {
2517 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2518 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2519 return;
2520 }
2521 taskreg_contexts.safe_push (ctx);
2522 gcc_assert (taskreg_nesting_level == 1);
2523 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2524 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2525 tree name = create_tmp_var_name (".omp_data_s");
2526 name = build_decl (gimple_location (stmt),
2527 TYPE_DECL, name, ctx->record_type);
2528 DECL_ARTIFICIAL (name) = 1;
2529 DECL_NAMELESS (name) = 1;
2530 TYPE_NAME (ctx->record_type) = name;
2531 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2532 create_omp_child_function (ctx, false);
2533 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2534
bc7bff74 2535 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2536 scan_omp (gimple_omp_body_ptr (stmt), ctx);
7e5a76c8 2537
2538 if (TYPE_FIELDS (ctx->record_type) == NULL)
2539 ctx->record_type = ctx->receiver_decl = NULL;
bc7bff74 2540}
1e8e9920 2541
ca4c3545 2542/* Check nesting restrictions. */
ab129075 2543static bool
42acab1c 2544check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
c1d127dd 2545{
43895be5 2546 tree c;
2547
56686608 2548 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
 2549 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2550 the original copy of its contents. */
2551 return true;
2552
ca4c3545 2553 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2554 inside an OpenACC CTX. */
2555 if (!(is_gimple_omp (stmt)
32f692e2 2556 && is_gimple_omp_oacc (stmt))
2557 /* Except for atomic codes that we share with OpenMP. */
2558 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2559 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2560 {
4954efd4 2561 if (oacc_get_fn_attrib (cfun->decl) != NULL)
32f692e2 2562 {
2563 error_at (gimple_location (stmt),
2564 "non-OpenACC construct inside of OpenACC routine");
2565 return false;
2566 }
2567 else
2568 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2569 if (is_gimple_omp (octx->stmt)
2570 && is_gimple_omp_oacc (octx->stmt))
2571 {
2572 error_at (gimple_location (stmt),
2573 "non-OpenACC construct inside of OpenACC region");
2574 return false;
2575 }
ca4c3545 2576 }
2577
3d483a94 2578 if (ctx != NULL)
2579 {
70a6624c 2580 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2581 && ctx->outer
2582 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2583 ctx = ctx->outer;
3d483a94 2584 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
10c55644 2585 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3d483a94 2586 {
43895be5 2587 c = NULL_TREE;
2588 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2589 {
2590 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
4954efd4 2591 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
7821c1b5 2592 {
4954efd4 2593 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
7821c1b5 2594 && (ctx->outer == NULL
2595 || !gimple_omp_for_combined_into_p (ctx->stmt)
2596 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2597 || (gimple_omp_for_kind (ctx->outer->stmt)
2598 != GF_OMP_FOR_KIND_FOR)
2599 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2600 {
2601 error_at (gimple_location (stmt),
2602 "%<ordered simd threads%> must be closely "
2603 "nested inside of %<for simd%> region");
2604 return false;
2605 }
2606 return true;
2607 }
43895be5 2608 }
7e5a76c8 2609 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
70a6624c 2610 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2611 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
7e5a76c8 2612 return true;
3d483a94 2613 error_at (gimple_location (stmt),
43895be5 2614 "OpenMP constructs other than %<#pragma omp ordered simd%>"
7e5a76c8 2615 " or %<#pragma omp atomic%> may not be nested inside"
2616 " %<simd%> region");
3d483a94 2617 return false;
2618 }
bc7bff74 2619 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2620 {
2621 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
da80a82f 2622 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2623 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
bc7bff74 2624 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2625 {
2626 error_at (gimple_location (stmt),
7821c1b5 2627 "only %<distribute%> or %<parallel%> regions are "
2628 "allowed to be strictly nested inside %<teams%> "
2629 "region");
bc7bff74 2630 return false;
2631 }
2632 }
3d483a94 2633 }
75a70cf9 2634 switch (gimple_code (stmt))
c1d127dd 2635 {
75a70cf9 2636 case GIMPLE_OMP_FOR:
10c55644 2637 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
3d483a94 2638 return true;
bc7bff74 2639 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2640 {
2641 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2642 {
2643 error_at (gimple_location (stmt),
7821c1b5 2644 "%<distribute%> region must be strictly nested "
2645 "inside %<teams%> construct");
bc7bff74 2646 return false;
2647 }
2648 return true;
2649 }
43895be5 2650 /* We split a taskloop into a task with a nested taskloop inside it. */
2651 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2652 return true;
7a1ed40d 2653 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2654 {
2655 bool ok = false;
7c6746c9 2656
7a1ed40d 2657 if (ctx)
2658 switch (gimple_code (ctx->stmt))
2659 {
2660 case GIMPLE_OMP_FOR:
2661 ok = (gimple_omp_for_kind (ctx->stmt)
2662 == GF_OMP_FOR_KIND_OACC_LOOP);
2663 break;
2664
2665 case GIMPLE_OMP_TARGET:
2666 switch (gimple_omp_target_kind (ctx->stmt))
2667 {
2668 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2669 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2670 ok = true;
2671 break;
2672
2673 default:
2674 break;
2675 }
2676
2677 default:
2678 break;
2679 }
4954efd4 2680 else if (oacc_get_fn_attrib (current_function_decl))
7a1ed40d 2681 ok = true;
2682 if (!ok)
2683 {
2684 error_at (gimple_location (stmt),
2685 "OpenACC loop directive must be associated with"
2686 " an OpenACC compute region");
2687 return false;
2688 }
2689 }
bc7bff74 2690 /* FALLTHRU */
2691 case GIMPLE_CALL:
2692 if (is_gimple_call (stmt)
2693 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2694 == BUILT_IN_GOMP_CANCEL
2695 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2696 == BUILT_IN_GOMP_CANCELLATION_POINT))
2697 {
2698 const char *bad = NULL;
2699 const char *kind = NULL;
7821c1b5 2700 const char *construct
2701 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2702 == BUILT_IN_GOMP_CANCEL)
2703 ? "#pragma omp cancel"
2704 : "#pragma omp cancellation point";
bc7bff74 2705 if (ctx == NULL)
2706 {
2707 error_at (gimple_location (stmt), "orphaned %qs construct",
7821c1b5 2708 construct);
bc7bff74 2709 return false;
2710 }
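	  /* The first argument of GOMP_cancel / GOMP_cancellation_point
	     selects the construct being cancelled: 1 parallel, 2 for,
	     4 sections, 8 taskgroup.  */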
6b409616 2711 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2712 ? tree_to_shwi (gimple_call_arg (stmt, 0))
bc7bff74 2713 : 0)
2714 {
2715 case 1:
2716 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2717 bad = "#pragma omp parallel";
2718 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2719 == BUILT_IN_GOMP_CANCEL
2720 && !integer_zerop (gimple_call_arg (stmt, 1)))
2721 ctx->cancellable = true;
2722 kind = "parallel";
2723 break;
2724 case 2:
2725 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2726 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2727 bad = "#pragma omp for";
2728 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2729 == BUILT_IN_GOMP_CANCEL
2730 && !integer_zerop (gimple_call_arg (stmt, 1)))
2731 {
2732 ctx->cancellable = true;
4954efd4 2733 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
bc7bff74 2734 OMP_CLAUSE_NOWAIT))
2735 warning_at (gimple_location (stmt), 0,
2736 "%<#pragma omp cancel for%> inside "
2737 "%<nowait%> for construct");
4954efd4 2738 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
bc7bff74 2739 OMP_CLAUSE_ORDERED))
2740 warning_at (gimple_location (stmt), 0,
2741 "%<#pragma omp cancel for%> inside "
2742 "%<ordered%> for construct");
2743 }
2744 kind = "for";
2745 break;
2746 case 4:
2747 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2748 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2749 bad = "#pragma omp sections";
2750 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2751 == BUILT_IN_GOMP_CANCEL
2752 && !integer_zerop (gimple_call_arg (stmt, 1)))
2753 {
2754 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2755 {
2756 ctx->cancellable = true;
4954efd4 2757 if (omp_find_clause (gimple_omp_sections_clauses
bc7bff74 2758 (ctx->stmt),
2759 OMP_CLAUSE_NOWAIT))
2760 warning_at (gimple_location (stmt), 0,
2761 "%<#pragma omp cancel sections%> inside "
2762 "%<nowait%> sections construct");
2763 }
2764 else
2765 {
2766 gcc_assert (ctx->outer
2767 && gimple_code (ctx->outer->stmt)
2768 == GIMPLE_OMP_SECTIONS);
2769 ctx->outer->cancellable = true;
4954efd4 2770 if (omp_find_clause (gimple_omp_sections_clauses
bc7bff74 2771 (ctx->outer->stmt),
2772 OMP_CLAUSE_NOWAIT))
2773 warning_at (gimple_location (stmt), 0,
2774 "%<#pragma omp cancel sections%> inside "
2775 "%<nowait%> sections construct");
2776 }
2777 }
2778 kind = "sections";
2779 break;
2780 case 8:
69cc430f 2781 if (!is_task_ctx (ctx)
2782 && (!is_taskloop_ctx (ctx)
2783 || ctx->outer == NULL
2784 || !is_task_ctx (ctx->outer)))
bc7bff74 2785 bad = "#pragma omp task";
2786 else
7821c1b5 2787 {
2788 for (omp_context *octx = ctx->outer;
2789 octx; octx = octx->outer)
2790 {
2791 switch (gimple_code (octx->stmt))
2792 {
2793 case GIMPLE_OMP_TASKGROUP:
2794 break;
2795 case GIMPLE_OMP_TARGET:
2796 if (gimple_omp_target_kind (octx->stmt)
2797 != GF_OMP_TARGET_KIND_REGION)
2798 continue;
2799 /* FALLTHRU */
2800 case GIMPLE_OMP_PARALLEL:
2801 case GIMPLE_OMP_TEAMS:
2802 error_at (gimple_location (stmt),
2803 "%<%s taskgroup%> construct not closely "
2804 "nested inside of %<taskgroup%> region",
2805 construct);
2806 return false;
69cc430f 2807 case GIMPLE_OMP_TASK:
2808 if (gimple_omp_task_taskloop_p (octx->stmt)
2809 && octx->outer
2810 && is_taskloop_ctx (octx->outer))
2811 {
2812 tree clauses
2813 = gimple_omp_for_clauses (octx->outer->stmt);
2814 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2815 break;
2816 }
2817 continue;
7821c1b5 2818 default:
2819 continue;
2820 }
2821 break;
2822 }
2823 ctx->cancellable = true;
2824 }
bc7bff74 2825 kind = "taskgroup";
2826 break;
2827 default:
2828 error_at (gimple_location (stmt), "invalid arguments");
2829 return false;
2830 }
2831 if (bad)
2832 {
2833 error_at (gimple_location (stmt),
2834 "%<%s %s%> construct not closely nested inside of %qs",
7821c1b5 2835 construct, kind, bad);
bc7bff74 2836 return false;
2837 }
2838 }
3d483a94 2839 /* FALLTHRU */
75a70cf9 2840 case GIMPLE_OMP_SECTIONS:
2841 case GIMPLE_OMP_SINGLE:
c1d127dd 2842 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 2843 switch (gimple_code (ctx->stmt))
c1d127dd 2844 {
75a70cf9 2845 case GIMPLE_OMP_FOR:
7821c1b5 2846 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2847 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2848 break;
2849 /* FALLTHRU */
75a70cf9 2850 case GIMPLE_OMP_SECTIONS:
2851 case GIMPLE_OMP_SINGLE:
2852 case GIMPLE_OMP_ORDERED:
2853 case GIMPLE_OMP_MASTER:
2854 case GIMPLE_OMP_TASK:
bc7bff74 2855 case GIMPLE_OMP_CRITICAL:
75a70cf9 2856 if (is_gimple_call (stmt))
fd6481cf 2857 {
bc7bff74 2858 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2859 != BUILT_IN_GOMP_BARRIER)
2860 return true;
ab129075 2861 error_at (gimple_location (stmt),
2862 "barrier region may not be closely nested inside "
7821c1b5 2863 "of work-sharing, %<critical%>, %<ordered%>, "
2864 "%<master%>, explicit %<task%> or %<taskloop%> "
2865 "region");
ab129075 2866 return false;
fd6481cf 2867 }
ab129075 2868 error_at (gimple_location (stmt),
2869 "work-sharing region may not be closely nested inside "
7821c1b5 2870 "of work-sharing, %<critical%>, %<ordered%>, "
2871 "%<master%>, explicit %<task%> or %<taskloop%> region");
ab129075 2872 return false;
75a70cf9 2873 case GIMPLE_OMP_PARALLEL:
7821c1b5 2874 case GIMPLE_OMP_TEAMS:
ab129075 2875 return true;
7821c1b5 2876 case GIMPLE_OMP_TARGET:
2877 if (gimple_omp_target_kind (ctx->stmt)
2878 == GF_OMP_TARGET_KIND_REGION)
2879 return true;
2880 break;
c1d127dd 2881 default:
2882 break;
2883 }
2884 break;
75a70cf9 2885 case GIMPLE_OMP_MASTER:
c1d127dd 2886 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 2887 switch (gimple_code (ctx->stmt))
c1d127dd 2888 {
75a70cf9 2889 case GIMPLE_OMP_FOR:
7821c1b5 2890 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2891 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2892 break;
2893 /* FALLTHRU */
75a70cf9 2894 case GIMPLE_OMP_SECTIONS:
2895 case GIMPLE_OMP_SINGLE:
2896 case GIMPLE_OMP_TASK:
ab129075 2897 error_at (gimple_location (stmt),
7821c1b5 2898 "%<master%> region may not be closely nested inside "
2899 "of work-sharing, explicit %<task%> or %<taskloop%> "
2900 "region");
ab129075 2901 return false;
75a70cf9 2902 case GIMPLE_OMP_PARALLEL:
7821c1b5 2903 case GIMPLE_OMP_TEAMS:
ab129075 2904 return true;
7821c1b5 2905 case GIMPLE_OMP_TARGET:
2906 if (gimple_omp_target_kind (ctx->stmt)
2907 == GF_OMP_TARGET_KIND_REGION)
2908 return true;
2909 break;
c1d127dd 2910 default:
2911 break;
2912 }
2913 break;
43895be5 2914 case GIMPLE_OMP_TASK:
2915 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2916 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2917 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2918 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2919 {
2920 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2921 error_at (OMP_CLAUSE_LOCATION (c),
2922 "%<depend(%s)%> is only allowed in %<omp ordered%>",
2923 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2924 return false;
2925 }
2926 break;
75a70cf9 2927 case GIMPLE_OMP_ORDERED:
43895be5 2928 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2929 c; c = OMP_CLAUSE_CHAIN (c))
2930 {
2931 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2932 {
2933 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
7821c1b5 2934 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
43895be5 2935 continue;
2936 }
2937 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2938 if (kind == OMP_CLAUSE_DEPEND_SOURCE
2939 || kind == OMP_CLAUSE_DEPEND_SINK)
2940 {
2941 tree oclause;
2942 /* Look for containing ordered(N) loop. */
2943 if (ctx == NULL
2944 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2945 || (oclause
4954efd4 2946 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
43895be5 2947 OMP_CLAUSE_ORDERED)) == NULL_TREE)
2948 {
2949 error_at (OMP_CLAUSE_LOCATION (c),
7821c1b5 2950 "%<ordered%> construct with %<depend%> clause "
2951 "must be closely nested inside an %<ordered%> "
2952 "loop");
43895be5 2953 return false;
2954 }
2955 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2956 {
2957 error_at (OMP_CLAUSE_LOCATION (c),
7821c1b5 2958 "%<ordered%> construct with %<depend%> clause "
2959 "must be closely nested inside a loop with "
2960 "%<ordered%> clause with a parameter");
43895be5 2961 return false;
2962 }
2963 }
2964 else
2965 {
2966 error_at (OMP_CLAUSE_LOCATION (c),
7821c1b5 2967 "invalid depend kind in omp %<ordered%> %<depend%>");
2968 return false;
2969 }
2970 }
2971 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
4954efd4 2972 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
7821c1b5 2973 {
2974 /* ordered simd must be closely nested inside of simd region,
2975 and simd region must not encounter constructs other than
2976 ordered simd, therefore ordered simd may be either orphaned,
 2977 or ctx->stmt must be simd. The latter case was already handled
 2978 above. */
2979 if (ctx != NULL)
2980 {
2981 error_at (gimple_location (stmt),
2982 "%<ordered%> %<simd%> must be closely nested inside "
2983 "%<simd%> region");
43895be5 2984 return false;
2985 }
2986 }
c1d127dd 2987 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 2988 switch (gimple_code (ctx->stmt))
c1d127dd 2989 {
75a70cf9 2990 case GIMPLE_OMP_CRITICAL:
2991 case GIMPLE_OMP_TASK:
7821c1b5 2992 case GIMPLE_OMP_ORDERED:
2993 ordered_in_taskloop:
ab129075 2994 error_at (gimple_location (stmt),
7821c1b5 2995 "%<ordered%> region may not be closely nested inside "
2996 "of %<critical%>, %<ordered%>, explicit %<task%> or "
2997 "%<taskloop%> region");
ab129075 2998 return false;
75a70cf9 2999 case GIMPLE_OMP_FOR:
7821c1b5 3000 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3001 goto ordered_in_taskloop;
a0226394 3002 tree o;
3003 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3004 OMP_CLAUSE_ORDERED);
3005 if (o == NULL)
ab129075 3006 {
3007 error_at (gimple_location (stmt),
7821c1b5 3008 "%<ordered%> region must be closely nested inside "
3009 "a loop region with an %<ordered%> clause");
ab129075 3010 return false;
3011 }
a0226394 3012 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3013 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3014 {
3015 error_at (gimple_location (stmt),
3016 "%<ordered%> region without %<depend%> clause may "
3017 "not be closely nested inside a loop region with "
3018 "an %<ordered%> clause with a parameter");
3019 return false;
3020 }
ab129075 3021 return true;
7821c1b5 3022 case GIMPLE_OMP_TARGET:
3023 if (gimple_omp_target_kind (ctx->stmt)
3024 != GF_OMP_TARGET_KIND_REGION)
3025 break;
3026 /* FALLTHRU */
75a70cf9 3027 case GIMPLE_OMP_PARALLEL:
7821c1b5 3028 case GIMPLE_OMP_TEAMS:
bc7bff74 3029 error_at (gimple_location (stmt),
7821c1b5 3030 "%<ordered%> region must be closely nested inside "
3031 "a loop region with an %<ordered%> clause");
bc7bff74 3032 return false;
c1d127dd 3033 default:
3034 break;
3035 }
3036 break;
75a70cf9 3037 case GIMPLE_OMP_CRITICAL:
1a91d914 3038 {
3039 tree this_stmt_name
3040 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3041 for (; ctx != NULL; ctx = ctx->outer)
3042 if (gomp_critical *other_crit
3043 = dyn_cast <gomp_critical *> (ctx->stmt))
3044 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3045 {
3046 error_at (gimple_location (stmt),
7821c1b5 3047 "%<critical%> region may not be nested inside "
3048 "a %<critical%> region with the same name");
1a91d914 3049 return false;
3050 }
3051 }
c1d127dd 3052 break;
bc7bff74 3053 case GIMPLE_OMP_TEAMS:
7e5a76c8 3054 if (ctx == NULL)
3055 break;
3056 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3057 || (gimple_omp_target_kind (ctx->stmt)
3058 != GF_OMP_TARGET_KIND_REGION))
bc7bff74 3059 {
7e5a76c8 3060 /* Teams construct can appear either strictly nested inside of
3061 target construct with no intervening stmts, or can be encountered
3062 only by initial task (so must not appear inside any OpenMP
 3063 construct). */
bc7bff74 3064 error_at (gimple_location (stmt),
7e5a76c8 3065 "%<teams%> construct must be closely nested inside of "
3066 "%<target%> construct or not nested in any OpenMP "
3067 "construct");
bc7bff74 3068 return false;
3069 }
3070 break;
691447ab 3071 case GIMPLE_OMP_TARGET:
43895be5 3072 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3073 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3074 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3075 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3076 {
3077 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3078 error_at (OMP_CLAUSE_LOCATION (c),
3079 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3080 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3081 return false;
3082 }
32f692e2 3083 if (is_gimple_omp_offloaded (stmt)
4954efd4 3084 && oacc_get_fn_attrib (cfun->decl) != NULL)
32f692e2 3085 {
3086 error_at (gimple_location (stmt),
3087 "OpenACC region inside of OpenACC routine, nested "
3088 "parallelism not supported yet");
3089 return false;
3090 }
691447ab 3091 for (; ctx != NULL; ctx = ctx->outer)
ca4c3545 3092 {
3093 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3094 {
3095 if (is_gimple_omp (stmt)
3096 && is_gimple_omp_oacc (stmt)
3097 && is_gimple_omp (ctx->stmt))
3098 {
3099 error_at (gimple_location (stmt),
3100 "OpenACC construct inside of non-OpenACC region");
3101 return false;
3102 }
3103 continue;
3104 }
3105
3106 const char *stmt_name, *ctx_stmt_name;
3107 switch (gimple_omp_target_kind (stmt))
3108 {
3109 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3110 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3111 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
43895be5 3112 case GF_OMP_TARGET_KIND_ENTER_DATA:
3113 stmt_name = "target enter data"; break;
3114 case GF_OMP_TARGET_KIND_EXIT_DATA:
3115 stmt_name = "target exit data"; break;
ca4c3545 3116 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3117 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3118 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3119 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
43895be5 3120 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3121 stmt_name = "enter/exit data"; break;
acb41570 3122 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
571b3486 3123 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3124 break;
ca4c3545 3125 default: gcc_unreachable ();
3126 }
3127 switch (gimple_omp_target_kind (ctx->stmt))
3128 {
3129 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3130 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
43895be5 3131 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3132 ctx_stmt_name = "parallel"; break;
3133 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3134 ctx_stmt_name = "kernels"; break;
ca4c3545 3135 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
571b3486 3136 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3137 ctx_stmt_name = "host_data"; break;
ca4c3545 3138 default: gcc_unreachable ();
3139 }
3140
3141 /* OpenACC/OpenMP mismatch? */
3142 if (is_gimple_omp_oacc (stmt)
3143 != is_gimple_omp_oacc (ctx->stmt))
3144 {
3145 error_at (gimple_location (stmt),
7821c1b5 3146 "%s %qs construct inside of %s %qs region",
ca4c3545 3147 (is_gimple_omp_oacc (stmt)
3148 ? "OpenACC" : "OpenMP"), stmt_name,
3149 (is_gimple_omp_oacc (ctx->stmt)
3150 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3151 return false;
3152 }
3153 if (is_gimple_omp_offloaded (ctx->stmt))
3154 {
3155 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3156 if (is_gimple_omp_oacc (ctx->stmt))
3157 {
3158 error_at (gimple_location (stmt),
7821c1b5 3159 "%qs construct inside of %qs region",
ca4c3545 3160 stmt_name, ctx_stmt_name);
3161 return false;
3162 }
3163 else
3164 {
ca4c3545 3165 warning_at (gimple_location (stmt), 0,
7821c1b5 3166 "%qs construct inside of %qs region",
ca4c3545 3167 stmt_name, ctx_stmt_name);
3168 }
3169 }
3170 }
691447ab 3171 break;
c1d127dd 3172 default:
3173 break;
3174 }
ab129075 3175 return true;
c1d127dd 3176}
3177
3178
75a70cf9 3179/* Helper function for scan_omp.
3180
3181 Callback for walk_tree or operators in walk_gimple_stmt used to
ca4c3545 3182 scan for OMP directives in TP. */
1e8e9920 3183
3184static tree
75a70cf9 3185scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
1e8e9920 3186{
4077bf7a 3187 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3188 omp_context *ctx = (omp_context *) wi->info;
1e8e9920 3189 tree t = *tp;
3190
75a70cf9 3191 switch (TREE_CODE (t))
3192 {
3193 case VAR_DECL:
3194 case PARM_DECL:
3195 case LABEL_DECL:
3196 case RESULT_DECL:
3197 if (ctx)
56686608 3198 {
3199 tree repl = remap_decl (t, &ctx->cb);
3200 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3201 *tp = repl;
3202 }
75a70cf9 3203 break;
3204
3205 default:
3206 if (ctx && TYPE_P (t))
3207 *tp = remap_type (t, &ctx->cb);
3208 else if (!DECL_P (t))
7cf869dd 3209 {
3210 *walk_subtrees = 1;
3211 if (ctx)
182cf5a9 3212 {
3213 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3214 if (tem != TREE_TYPE (t))
3215 {
3216 if (TREE_CODE (t) == INTEGER_CST)
e3d0f65c 3217 *tp = wide_int_to_tree (tem, wi::to_wide (t));
182cf5a9 3218 else
3219 TREE_TYPE (t) = tem;
3220 }
3221 }
7cf869dd 3222 }
75a70cf9 3223 break;
3224 }
3225
3226 return NULL_TREE;
3227}
3228
f2697631 3229/* Return true if FNDECL is a setjmp or a longjmp. */
3230
3231static bool
3232setjmp_or_longjmp_p (const_tree fndecl)
3233{
a0e9bfbb 3234 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3235 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
f2697631 3236 return true;
3237
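  /* Also catch functions merely named setjmp or longjmp that were not
     recognized as built-ins.  */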
3238 tree declname = DECL_NAME (fndecl);
3239 if (!declname)
3240 return false;
3241 const char *name = IDENTIFIER_POINTER (declname);
3242 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3243}
3244
75a70cf9 3245
3246/* Helper function for scan_omp.
3247
ca4c3545 3248 Callback for walk_gimple_stmt used to scan for OMP directives in
75a70cf9 3249 the current statement in GSI. */
3250
3251static tree
3252scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3253 struct walk_stmt_info *wi)
3254{
42acab1c 3255 gimple *stmt = gsi_stmt (*gsi);
75a70cf9 3256 omp_context *ctx = (omp_context *) wi->info;
3257
3258 if (gimple_has_location (stmt))
3259 input_location = gimple_location (stmt);
1e8e9920 3260
ca4c3545 3261 /* Check the nesting restrictions. */
bc7bff74 3262 bool remove = false;
3263 if (is_gimple_omp (stmt))
3264 remove = !check_omp_nesting_restrictions (stmt, ctx);
3265 else if (is_gimple_call (stmt))
3266 {
3267 tree fndecl = gimple_call_fndecl (stmt);
f2697631 3268 if (fndecl)
3269 {
3270 if (setjmp_or_longjmp_p (fndecl)
3271 && ctx
3272 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
10c55644 3273 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
f2697631 3274 {
3275 remove = true;
3276 error_at (gimple_location (stmt),
3277 "setjmp/longjmp inside simd construct");
3278 }
3279 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3280 switch (DECL_FUNCTION_CODE (fndecl))
3281 {
3282 case BUILT_IN_GOMP_BARRIER:
3283 case BUILT_IN_GOMP_CANCEL:
3284 case BUILT_IN_GOMP_CANCELLATION_POINT:
3285 case BUILT_IN_GOMP_TASKYIELD:
3286 case BUILT_IN_GOMP_TASKWAIT:
3287 case BUILT_IN_GOMP_TASKGROUP_START:
3288 case BUILT_IN_GOMP_TASKGROUP_END:
3289 remove = !check_omp_nesting_restrictions (stmt, ctx);
3290 break;
3291 default:
3292 break;
3293 }
3294 }
bc7bff74 3295 }
3296 if (remove)
3297 {
3298 stmt = gimple_build_nop ();
3299 gsi_replace (gsi, stmt, false);
fd6481cf 3300 }
c1d127dd 3301
75a70cf9 3302 *handled_ops_p = true;
3303
3304 switch (gimple_code (stmt))
1e8e9920 3305 {
75a70cf9 3306 case GIMPLE_OMP_PARALLEL:
fd6481cf 3307 taskreg_nesting_level++;
75a70cf9 3308 scan_omp_parallel (gsi, ctx);
fd6481cf 3309 taskreg_nesting_level--;
3310 break;
3311
75a70cf9 3312 case GIMPLE_OMP_TASK:
fd6481cf 3313 taskreg_nesting_level++;
75a70cf9 3314 scan_omp_task (gsi, ctx);
fd6481cf 3315 taskreg_nesting_level--;
1e8e9920 3316 break;
3317
75a70cf9 3318 case GIMPLE_OMP_FOR:
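      /* A simd construct that may end up offloaded to a SIMT device is
	 duplicated by scan_omp_simd; all other loops are scanned directly.  */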
57f872be 3319 if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3320 & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3321 && omp_maybe_offloaded_ctx (ctx)
3322 && omp_max_simt_vf ())
3323 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3324 else
3325 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
1e8e9920 3326 break;
3327
75a70cf9 3328 case GIMPLE_OMP_SECTIONS:
1a91d914 3329 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
1e8e9920 3330 break;
3331
75a70cf9 3332 case GIMPLE_OMP_SINGLE:
1a91d914 3333 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
1e8e9920 3334 break;
3335
da008d72 3336 case GIMPLE_OMP_SCAN:
3337 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3338 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3339 ctx->scan_inclusive = true;
3340 /* FALLTHRU */
75a70cf9 3341 case GIMPLE_OMP_SECTION:
3342 case GIMPLE_OMP_MASTER:
3343 case GIMPLE_OMP_ORDERED:
3344 case GIMPLE_OMP_CRITICAL:
56686608 3345 case GIMPLE_OMP_GRID_BODY:
75a70cf9 3346 ctx = new_omp_context (stmt, ctx);
ab129075 3347 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 3348 break;
3349
7e5a76c8 3350 case GIMPLE_OMP_TASKGROUP:
3351 ctx = new_omp_context (stmt, ctx);
3352 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3353 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3354 break;
3355
bc7bff74 3356 case GIMPLE_OMP_TARGET:
1a91d914 3357 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
bc7bff74 3358 break;
3359
3360 case GIMPLE_OMP_TEAMS:
7e5a76c8 3361 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3362 {
3363 taskreg_nesting_level++;
3364 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3365 taskreg_nesting_level--;
3366 }
3367 else
3368 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
bc7bff74 3369 break;
3370
75a70cf9 3371 case GIMPLE_BIND:
1e8e9920 3372 {
3373 tree var;
1e8e9920 3374
75a70cf9 3375 *handled_ops_p = false;
3376 if (ctx)
1a91d914 3377 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3378 var ;
3379 var = DECL_CHAIN (var))
75a70cf9 3380 insert_decl_map (&ctx->cb, var, var);
1e8e9920 3381 }
3382 break;
1e8e9920 3383 default:
75a70cf9 3384 *handled_ops_p = false;
1e8e9920 3385 break;
3386 }
3387
3388 return NULL_TREE;
3389}
3390
3391
75a70cf9 3392/* Scan all the statements starting at the current statement. CTX
ca4c3545 3393 contains context information about the OMP directives and
75a70cf9 3394 clauses found during the scan. */
1e8e9920 3395
3396static void
ab129075 3397scan_omp (gimple_seq *body_p, omp_context *ctx)
1e8e9920 3398{
3399 location_t saved_location;
3400 struct walk_stmt_info wi;
3401
3402 memset (&wi, 0, sizeof (wi));
1e8e9920 3403 wi.info = ctx;
1e8e9920 3404 wi.want_locations = true;
3405
3406 saved_location = input_location;
ab129075 3407 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
1e8e9920 3408 input_location = saved_location;
3409}
3410\f
3411/* Re-gimplification and code generation routines. */
3412
2918f4e9 3413/* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3414 of BIND if in a method. */
3415
3416static void
3417maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3418{
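  /* A method is recognized by its implicit 'this' parameter, i.e. an
     artificial first argument of pointer type.  */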
3419 if (DECL_ARGUMENTS (current_function_decl)
3420 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3421 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3422 == POINTER_TYPE))
3423 {
3424 tree vars = gimple_bind_vars (bind);
3425 for (tree *pvar = &vars; *pvar; )
3426 if (omp_member_access_dummy_var (*pvar))
3427 *pvar = DECL_CHAIN (*pvar);
3428 else
3429 pvar = &DECL_CHAIN (*pvar);
3430 gimple_bind_set_vars (bind, vars);
3431 }
3432}
3433
3434/* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3435 block and its subblocks. */
3436
3437static void
3438remove_member_access_dummy_vars (tree block)
3439{
3440 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3441 if (omp_member_access_dummy_var (*pvar))
3442 *pvar = DECL_CHAIN (*pvar);
3443 else
3444 pvar = &DECL_CHAIN (*pvar);
3445
3446 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3447 remove_member_access_dummy_vars (block);
3448}
3449
1e8e9920 3450/* If a context was created for STMT when it was scanned, return it. */
3451
3452static omp_context *
42acab1c 3453maybe_lookup_ctx (gimple *stmt)
1e8e9920 3454{
3455 splay_tree_node n;
3456 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3457 return n ? (omp_context *) n->value : NULL;
3458}
3459
773c5ba7 3460
3461/* Find the mapping for DECL in CTX or the immediately enclosing
3462 context that has a mapping for DECL.
3463
3464 If CTX is a nested parallel directive, we may have to use the decl
3465 mappings created in CTX's parent context. Suppose that we have the
3466 following parallel nesting (variable UIDs shown for clarity):
3467
3468 iD.1562 = 0;
3469 #omp parallel shared(iD.1562) -> outer parallel
3470 iD.1562 = iD.1562 + 1;
3471
3472 #omp parallel shared (iD.1562) -> inner parallel
3473 iD.1562 = iD.1562 - 1;
3474
3475 Each parallel structure will create a distinct .omp_data_s structure
3476 for copying iD.1562 in/out of the directive:
3477
3478 outer parallel .omp_data_s.1.i -> iD.1562
3479 inner parallel .omp_data_s.2.i -> iD.1562
3480
3481 A shared variable mapping will produce a copy-out operation before
3482 the parallel directive and a copy-in operation after it. So, in
3483 this case we would have:
3484
3485 iD.1562 = 0;
3486 .omp_data_o.1.i = iD.1562;
3487 #omp parallel shared(iD.1562) -> outer parallel
3488 .omp_data_i.1 = &.omp_data_o.1
3489 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3490
3491 .omp_data_o.2.i = iD.1562; -> **
3492 #omp parallel shared(iD.1562) -> inner parallel
3493 .omp_data_i.2 = &.omp_data_o.2
3494 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3495
3496
3497 ** This is a problem. The symbol iD.1562 cannot be referenced
3498 inside the body of the outer parallel region. But since we are
3499 emitting this copy operation while expanding the inner parallel
3500 directive, we need to access the CTX structure of the outer
3501 parallel directive to get the correct mapping:
3502
3503 .omp_data_o.2.i = .omp_data_i.1->i
3504
3505 Since there may be other workshare or parallel directives enclosing
3506 the parallel directive, it may be necessary to walk up the context
3507 parent chain. This is not a problem in general because nested
3508 parallelism happens only rarely. */
3509
3510static tree
3511lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3512{
3513 tree t;
3514 omp_context *up;
3515
773c5ba7 3516 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3517 t = maybe_lookup_decl (decl, up);
3518
87b31375 3519 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
773c5ba7 3520
c37594c7 3521 return t ? t : decl;
773c5ba7 3522}
3523
3524
f49d7bb5 3525/* Similar to lookup_decl_in_outer_ctx, but simply return DECL, without
3526 asserting, if it is not found in any outer context. */
3527
3528static tree
3529maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3530{
3531 tree t = NULL;
3532 omp_context *up;
3533
87b31375 3534 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3535 t = maybe_lookup_decl (decl, up);
f49d7bb5 3536
3537 return t ? t : decl;
3538}
3539
3540
df67b98c 3541/* Construct the initialization value for reduction operation OP. */
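/* For instance, for a loop such as

       #pragma omp parallel for reduction (max:m)

   each thread's private M is seeded with the value computed here: -inf for
   a double when infinities are honored (otherwise the most negative finite
   value), INT_MIN for an int.  A '+', '-', '|' or '^' reduction is seeded
   with 0, '*' and '&&' with 1, '&' with ~0, and 'min' mirrors 'max'.
   (Illustrative summary of the switch below, not an exhaustive table.)  */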
1e8e9920 3542
3543tree
df67b98c 3544omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
1e8e9920 3545{
df67b98c 3546 switch (op)
1e8e9920 3547 {
3548 case PLUS_EXPR:
3549 case MINUS_EXPR:
3550 case BIT_IOR_EXPR:
3551 case BIT_XOR_EXPR:
3552 case TRUTH_OR_EXPR:
3553 case TRUTH_ORIF_EXPR:
3554 case TRUTH_XOR_EXPR:
3555 case NE_EXPR:
385f3f36 3556 return build_zero_cst (type);
1e8e9920 3557
3558 case MULT_EXPR:
3559 case TRUTH_AND_EXPR:
3560 case TRUTH_ANDIF_EXPR:
3561 case EQ_EXPR:
389dd41b 3562 return fold_convert_loc (loc, type, integer_one_node);
1e8e9920 3563
3564 case BIT_AND_EXPR:
389dd41b 3565 return fold_convert_loc (loc, type, integer_minus_one_node);
1e8e9920 3566
3567 case MAX_EXPR:
3568 if (SCALAR_FLOAT_TYPE_P (type))
3569 {
3570 REAL_VALUE_TYPE max, min;
fe994837 3571 if (HONOR_INFINITIES (type))
1e8e9920 3572 {
3573 real_inf (&max);
3574 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3575 }
3576 else
3577 real_maxval (&min, 1, TYPE_MODE (type));
3578 return build_real (type, min);
3579 }
5902cce5 3580 else if (POINTER_TYPE_P (type))
3581 {
3582 wide_int min
3583 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3584 return wide_int_to_tree (type, min);
3585 }
1e8e9920 3586 else
3587 {
3588 gcc_assert (INTEGRAL_TYPE_P (type));
3589 return TYPE_MIN_VALUE (type);
3590 }
3591
3592 case MIN_EXPR:
3593 if (SCALAR_FLOAT_TYPE_P (type))
3594 {
3595 REAL_VALUE_TYPE max;
fe994837 3596 if (HONOR_INFINITIES (type))
1e8e9920 3597 real_inf (&max);
3598 else
3599 real_maxval (&max, 0, TYPE_MODE (type));
3600 return build_real (type, max);
3601 }
5902cce5 3602 else if (POINTER_TYPE_P (type))
3603 {
3604 wide_int max
3605 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3606 return wide_int_to_tree (type, max);
3607 }
1e8e9920 3608 else
3609 {
3610 gcc_assert (INTEGRAL_TYPE_P (type));
3611 return TYPE_MAX_VALUE (type);
3612 }
3613
3614 default:
3615 gcc_unreachable ();
3616 }
3617}
3618
df67b98c 3619/* Construct the initialization value for reduction CLAUSE. */
3620
3621tree
3622omp_reduction_init (tree clause, tree type)
3623{
3624 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3625 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3626}
3627
bc7bff74 3628/* Return alignment to be assumed for var in CLAUSE, which should be
3629 OMP_CLAUSE_ALIGNED. */
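/* For instance, 'aligned (p : 64)' simply yields 64.  Without an explicit
   alignment the result is derived from the target's preferred SIMD modes;
   on a target whose widest supported vector mode is 256 bits this would
   typically come out as 32 bytes.  (Illustrative; the exact value follows
   from the autovectorization sizes queried below.)  */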
3630
3631static tree
3632omp_clause_aligned_alignment (tree clause)
3633{
3634 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3635 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3636
3637 /* Otherwise return an implementation-defined alignment. */
3638 unsigned int al = 1;
2b8f5b8a 3639 opt_scalar_mode mode_iter;
3106770a 3640 auto_vector_sizes sizes;
e7419472 3641 targetm.vectorize.autovectorize_vector_sizes (&sizes, true);
3106770a 3642 poly_uint64 vs = 0;
3643 for (unsigned int i = 0; i < sizes.length (); ++i)
3644 vs = ordered_max (vs, sizes[i]);
bc7bff74 3645 static enum mode_class classes[]
3646 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3647 for (int i = 0; i < 4; i += 2)
2b8f5b8a 3648 /* The for loop above dictates that we only walk through scalar classes. */
3649 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
bc7bff74 3650 {
2b8f5b8a 3651 scalar_mode mode = mode_iter.require ();
3652 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
bc7bff74 3653 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3654 continue;
3106770a 3655 while (maybe_ne (vs, 0U)
3656 && known_lt (GET_MODE_SIZE (vmode), vs)
28ebc73c 3657 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3658 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
7c6746c9 3659
bc7bff74 3660 tree type = lang_hooks.types.type_for_mode (mode, 1);
3661 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3662 continue;
52acb7ae 3663 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3664 GET_MODE_SIZE (mode));
3106770a 3665 type = build_vector_type (type, nelts);
bc7bff74 3666 if (TYPE_MODE (type) != vmode)
3667 continue;
3668 if (TYPE_ALIGN_UNIT (type) > al)
3669 al = TYPE_ALIGN_UNIT (type);
3670 }
3671 return build_int_cst (integer_type_node, al);
3672}
3673
8e818b28 3674
3675/* This structure is part of the interface between lower_rec_simd_input_clauses
3676 and lower_rec_input_clauses. */
3677
3678struct omplow_simd_context {
9d805ed8 3679 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
8e818b28 3680 tree idx;
3681 tree lane;
da008d72 3682 tree lastlane;
1b576300 3683 vec<tree, va_heap> simt_eargs;
3684 gimple_seq simt_dlist;
9d805ed8 3685 poly_uint64_pod max_vf;
8e818b28 3686 bool is_simt;
3687};
3688
3d483a94 3689/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3690 privatization. */
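/* Roughly: unless the maximum vectorization factor is 1, a privatized VAR
   gets one copy per SIMD lane via an "omp simd array" temporary,

       T D.simdarr[max_vf];

   IVAR is set to D.simdarr[sctx->idx] (the element the loop body works on)
   and LVAR to D.simdarr[sctx->lane]; when NEW_VAR is a DECL its
   DECL_VALUE_EXPR is pointed at LVAR.  On SIMT targets a per-lane variable
   marked "omp simt private" is used instead.  Returns false if max_vf
   turns out to be 1, in which case the caller falls back to ordinary
   privatization.  (Sketch of the code below; D.simdarr is only an
   illustrative name.)  */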
3691
3692static bool
8e818b28 3693lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
da008d72 3694 omplow_simd_context *sctx, tree &ivar,
3695 tree &lvar, tree *rvar = NULL)
3d483a94 3696{
9d805ed8 3697 if (known_eq (sctx->max_vf, 0U))
3d483a94 3698 {
8e818b28 3699 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
9d805ed8 3700 if (maybe_gt (sctx->max_vf, 1U))
3d483a94 3701 {
4954efd4 3702 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3d483a94 3703 OMP_CLAUSE_SAFELEN);
9d805ed8 3704 if (c)
3705 {
3706 poly_uint64 safe_len;
3707 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3708 || maybe_lt (safe_len, 1U))
3709 sctx->max_vf = 1;
3710 else
3711 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3712 }
3d483a94 3713 }
9d805ed8 3714 if (maybe_gt (sctx->max_vf, 1U))
3d483a94 3715 {
8e818b28 3716 sctx->idx = create_tmp_var (unsigned_type_node);
3717 sctx->lane = create_tmp_var (unsigned_type_node);
3d483a94 3718 }
3719 }
9d805ed8 3720 if (known_eq (sctx->max_vf, 1U))
3d483a94 3721 return false;
3722
1b576300 3723 if (sctx->is_simt)
3724 {
3725 if (is_gimple_reg (new_var))
3726 {
3727 ivar = lvar = new_var;
3728 return true;
3729 }
3730 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3731 ivar = lvar = create_tmp_var (type);
3732 TREE_ADDRESSABLE (ivar) = 1;
3733 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3734 NULL, DECL_ATTRIBUTES (ivar));
3735 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3736 tree clobber = build_constructor (type, NULL);
3737 TREE_THIS_VOLATILE (clobber) = 1;
3738 gimple *g = gimple_build_assign (ivar, clobber);
3739 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3740 }
3741 else
3742 {
3743 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3744 tree avar = create_tmp_var_raw (atype);
3745 if (TREE_ADDRESSABLE (new_var))
3746 TREE_ADDRESSABLE (avar) = 1;
3747 DECL_ATTRIBUTES (avar)
3748 = tree_cons (get_identifier ("omp simd array"), NULL,
3749 DECL_ATTRIBUTES (avar));
3750 gimple_add_tmp_var (avar);
da008d72 3751 tree iavar = avar;
3752 if (rvar)
3753 {
3754 /* For inscan reductions, create another array temporary,
3755 which will hold the reduced value. */
3756 iavar = create_tmp_var_raw (atype);
3757 if (TREE_ADDRESSABLE (new_var))
3758 TREE_ADDRESSABLE (iavar) = 1;
3759 DECL_ATTRIBUTES (iavar)
3760 = tree_cons (get_identifier ("omp simd array"), NULL,
3761 tree_cons (get_identifier ("omp simd inscan"), NULL,
3762 DECL_ATTRIBUTES (iavar)));
3763 gimple_add_tmp_var (iavar);
3764 ctx->cb.decl_map->put (avar, iavar);
3765 if (sctx->lastlane == NULL_TREE)
3766 sctx->lastlane = create_tmp_var (unsigned_type_node);
3767 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
3768 sctx->lastlane, NULL_TREE, NULL_TREE);
3769 TREE_THIS_NOTRAP (*rvar) = 1;
3770 }
3771 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
1b576300 3772 NULL_TREE, NULL_TREE);
3773 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3774 NULL_TREE, NULL_TREE);
443a28c5 3775 TREE_THIS_NOTRAP (ivar) = 1;
3776 TREE_THIS_NOTRAP (lvar) = 1;
1b576300 3777 }
bc7bff74 3778 if (DECL_P (new_var))
3779 {
3780 SET_DECL_VALUE_EXPR (new_var, lvar);
3781 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3782 }
3d483a94 3783 return true;
3784}
3785
2712b6de 3786/* Helper function of lower_rec_input_clauses. For a reference
3787 in simd reduction, add an underlying variable it will reference. */
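/* E.g. for a privatized reference R of type T &, NEW_VARD is the pointer
   standing in for R; when sizeof (T) is a compile-time constant this emits

       T D.tmp;
       NEW_VARD = &D.tmp;

   into ILIST so that later code can dereference NEW_VARD.  (Illustrative;
   D.tmp is only a placeholder name.)  */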
3788
3789static void
3790handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3791{
3792 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3793 if (TREE_CONSTANT (z))
3794 {
43895be5 3795 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3796 get_name (new_vard));
2712b6de 3797 gimple_add_tmp_var (z);
3798 TREE_ADDRESSABLE (z) = 1;
3799 z = build_fold_addr_expr_loc (loc, z);
3800 gimplify_assign (new_vard, z, ilist);
3801 }
3802}
3803
7e5a76c8 3804/* Helper function for lower_rec_input_clauses. Emit code into the ILIST
3805 sequence to compute (type) (tskred_temp[idx]). */
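/* In effect this emits roughly

       v$ = ((uintptr_t *) tskred_temp)[idx];
       v = (type) v$;

   i.e. it loads the pointer-sized slot at byte offset IDX * sizeof (void *)
   from the block TSKRED_TEMP points to and converts it to TYPE when the
   types differ.  (Illustrative C; the code below builds the equivalent
   MEM_REF/NOP_EXPR GIMPLE.)  */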
3806
3807static tree
3808task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3809 unsigned idx)
3810{
3811 unsigned HOST_WIDE_INT sz
3812 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3813 tree r = build2 (MEM_REF, pointer_sized_int_node,
3814 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3815 idx * sz));
3816 tree v = create_tmp_var (pointer_sized_int_node);
3817 gimple *g = gimple_build_assign (v, r);
3818 gimple_seq_add_stmt (ilist, g);
3819 if (!useless_type_conversion_p (type, pointer_sized_int_node))
3820 {
3821 v = create_tmp_var (type);
3822 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3823 gimple_seq_add_stmt (ilist, g);
3824 }
3825 return v;
3826}
3827
1e8e9920 3828/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3829 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3830 private variables. Initialization statements go in ILIST, while calls
3831 to destructors go in DLIST. */
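/* For instance, for 'firstprivate (x)' ILIST ends up with the
   copy-construction of the private X from the original value (a plain
   assignment for PODs, a copy-constructor call for C++ class types), and
   DLIST with the matching destructor call when one is needed.
   (Illustrative example of how ILIST and DLIST are used.)  */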
3832
3833static void
75a70cf9 3834lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
bc7bff74 3835 omp_context *ctx, struct omp_for_data *fd)
1e8e9920 3836{
c2f47e15 3837 tree c, dtor, copyin_seq, x, ptr;
1e8e9920 3838 bool copyin_by_ref = false;
f49d7bb5 3839 bool lastprivate_firstprivate = false;
bc7bff74 3840 bool reduction_omp_orig_ref = false;
1e8e9920 3841 int pass;
3d483a94 3842 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
10c55644 3843 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
8e818b28 3844 omplow_simd_context sctx = omplow_simd_context ();
1b576300 3845 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3846 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
da008d72 3847 gimple_seq llist[4] = { };
1d86b8dc 3848 tree nonconst_simd_if = NULL_TREE;
1e8e9920 3849
1e8e9920 3850 copyin_seq = NULL;
8e818b28 3851 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
1e8e9920 3852
3d483a94 3853 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3854 with data sharing clauses referencing variable sized vars. That
3855 is unnecessarily hard to support and very unlikely to result in
3856 vectorized code anyway. */
3857 if (is_simd)
3858 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3859 switch (OMP_CLAUSE_CODE (c))
3860 {
9580cb79 3861 case OMP_CLAUSE_LINEAR:
3862 if (OMP_CLAUSE_LINEAR_ARRAY (c))
8e818b28 3863 sctx.max_vf = 1;
9580cb79 3864 /* FALLTHRU */
3d483a94 3865 case OMP_CLAUSE_PRIVATE:
3866 case OMP_CLAUSE_FIRSTPRIVATE:
3867 case OMP_CLAUSE_LASTPRIVATE:
3d483a94 3868 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
8e818b28 3869 sctx.max_vf = 1;
e32d171e 3870 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3871 {
3872 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3873 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3874 sctx.max_vf = 1;
3875 }
3d483a94 3876 break;
43895be5 3877 case OMP_CLAUSE_REDUCTION:
7e5a76c8 3878 case OMP_CLAUSE_IN_REDUCTION:
43895be5 3879 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3880 || is_variable_sized (OMP_CLAUSE_DECL (c)))
8e818b28 3881 sctx.max_vf = 1;
e32d171e 3882 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3883 {
3884 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3885 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3886 sctx.max_vf = 1;
3887 }
43895be5 3888 break;
9144258a 3889 case OMP_CLAUSE_IF:
3890 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
3891 sctx.max_vf = 1;
1d86b8dc 3892 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
3893 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
9144258a 3894 break;
3895 case OMP_CLAUSE_SIMDLEN:
3896 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
3897 sctx.max_vf = 1;
3898 break;
4f4b92d8 3899 case OMP_CLAUSE__CONDTEMP_:
3900 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
3901 if (sctx.is_simt)
3902 sctx.max_vf = 1;
3903 break;
3d483a94 3904 default:
3905 continue;
3906 }
3907
1b576300 3908 /* Add a placeholder for simduid. */
9d805ed8 3909 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
1b576300 3910 sctx.simt_eargs.safe_push (NULL_TREE);
3911
7e5a76c8 3912 unsigned task_reduction_cnt = 0;
3913 unsigned task_reduction_cntorig = 0;
3914 unsigned task_reduction_cnt_full = 0;
3915 unsigned task_reduction_cntorig_full = 0;
3916 unsigned task_reduction_other_cnt = 0;
3917 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
3918 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
1e8e9920 3919 /* Do all the fixed-sized types in the first pass, and the variable-sized
 3920 types in the second pass. This makes sure that the scalar arguments to
48e1416a 3921 the variable-sized types are processed before we use them in the
7e5a76c8 3922 variable-sized operations. For task reductions we use 4 passes: in the
 3923 first two we ignore them, in the third we gather the arguments for the
 3924 GOMP_task_reduction_remap call, and in the last pass we actually handle
 3925 the task reductions. */
3926 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
3927 ? 4 : 2); ++pass)
3928 {
3929 if (pass == 2 && task_reduction_cnt)
3930 {
3931 tskred_atype
3932 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
3933 + task_reduction_cntorig);
3934 tskred_avar = create_tmp_var_raw (tskred_atype);
3935 gimple_add_tmp_var (tskred_avar);
3936 TREE_ADDRESSABLE (tskred_avar) = 1;
3937 task_reduction_cnt_full = task_reduction_cnt;
3938 task_reduction_cntorig_full = task_reduction_cntorig;
3939 }
3940 else if (pass == 3 && task_reduction_cnt)
3941 {
3942 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
3943 gimple *g
3944 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
3945 size_int (task_reduction_cntorig),
3946 build_fold_addr_expr (tskred_avar));
3947 gimple_seq_add_stmt (ilist, g);
3948 }
3949 if (pass == 3 && task_reduction_other_cnt)
3950 {
3951 /* For reduction clauses, build
3952 tskred_base = (void *) tskred_temp[2]
3953 + omp_get_thread_num () * tskred_temp[1]
3954 or if tskred_temp[1] is known to be constant, that constant
3955 directly. This is the start of the private reduction copy block
3956 for the current thread. */
3957 tree v = create_tmp_var (integer_type_node);
3958 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
3959 gimple *g = gimple_build_call (x, 0);
3960 gimple_call_set_lhs (g, v);
3961 gimple_seq_add_stmt (ilist, g);
3962 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
3963 tskred_temp = OMP_CLAUSE_DECL (c);
3964 if (is_taskreg_ctx (ctx))
3965 tskred_temp = lookup_decl (tskred_temp, ctx);
3966 tree v2 = create_tmp_var (sizetype);
3967 g = gimple_build_assign (v2, NOP_EXPR, v);
3968 gimple_seq_add_stmt (ilist, g);
3969 if (ctx->task_reductions[0])
3970 v = fold_convert (sizetype, ctx->task_reductions[0]);
3971 else
3972 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
3973 tree v3 = create_tmp_var (sizetype);
3974 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
3975 gimple_seq_add_stmt (ilist, g);
3976 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
3977 tskred_base = create_tmp_var (ptr_type_node);
3978 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
3979 gimple_seq_add_stmt (ilist, g);
3980 }
3981 task_reduction_cnt = 0;
3982 task_reduction_cntorig = 0;
3983 task_reduction_other_cnt = 0;
1e8e9920 3984 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3985 {
55d6e7cd 3986 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
1e8e9920 3987 tree var, new_var;
3988 bool by_ref;
389dd41b 3989 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7e5a76c8 3990 bool task_reduction_p = false;
3991 bool task_reduction_needs_orig_p = false;
3992 tree cond = NULL_TREE;
1e8e9920 3993
3994 switch (c_kind)
3995 {
3996 case OMP_CLAUSE_PRIVATE:
3997 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3998 continue;
3999 break;
4000 case OMP_CLAUSE_SHARED:
7e5a76c8 4001 /* Ignore shared directives in teams construct inside
4002 of target construct. */
4003 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4004 && !is_host_teams_ctx (ctx))
bc7bff74 4005 continue;
f49d7bb5 4006 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4007 {
43895be5 4008 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4009 || is_global_var (OMP_CLAUSE_DECL (c)));
f49d7bb5 4010 continue;
4011 }
1e8e9920 4012 case OMP_CLAUSE_FIRSTPRIVATE:
1e8e9920 4013 case OMP_CLAUSE_COPYIN:
43895be5 4014 break;
bc7bff74 4015 case OMP_CLAUSE_LINEAR:
43895be5 4016 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4017 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4018 lastprivate_firstprivate = true;
bc7bff74 4019 break;
1e8e9920 4020 case OMP_CLAUSE_REDUCTION:
7e5a76c8 4021 case OMP_CLAUSE_IN_REDUCTION:
4022 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4023 {
4024 task_reduction_p = true;
4025 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4026 {
4027 task_reduction_other_cnt++;
4028 if (pass == 2)
4029 continue;
4030 }
4031 else
4032 task_reduction_cnt++;
4033 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4034 {
4035 var = OMP_CLAUSE_DECL (c);
4036 /* If var is a global variable that isn't privatized
4037 in outer contexts, we don't need to look up the
4038 original address; it is always the address of the
4039 global variable itself. */
4040 if (!DECL_P (var)
4041 || omp_is_reference (var)
4042 || !is_global_var
4043 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4044 {
4045 task_reduction_needs_orig_p = true;
4046 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4047 task_reduction_cntorig++;
4048 }
4049 }
4050 }
4051 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
bc7bff74 4052 reduction_omp_orig_ref = true;
1e8e9920 4053 break;
7e5a76c8 4054 case OMP_CLAUSE__REDUCTEMP_:
4055 if (!is_taskreg_ctx (ctx))
4056 continue;
4057 /* FALLTHRU */
bc7bff74 4058 case OMP_CLAUSE__LOOPTEMP_:
7e5a76c8 4059 /* Handle _looptemp_/_reductemp_ clauses only on
4060 parallel/task. */
bc7bff74 4061 if (fd)
4062 continue;
3d483a94 4063 break;
df2c34fc 4064 case OMP_CLAUSE_LASTPRIVATE:
f49d7bb5 4065 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4066 {
4067 lastprivate_firstprivate = true;
43895be5 4068 if (pass != 0 || is_taskloop_ctx (ctx))
f49d7bb5 4069 continue;
4070 }
cf5f881f 4071 /* Even without corresponding firstprivate, if
4072 decl is Fortran allocatable, it needs outer var
4073 reference. */
4074 else if (pass == 0
4075 && lang_hooks.decls.omp_private_outer_ref
4076 (OMP_CLAUSE_DECL (c)))
4077 lastprivate_firstprivate = true;
df2c34fc 4078 break;
bc7bff74 4079 case OMP_CLAUSE_ALIGNED:
7e5a76c8 4080 if (pass != 1)
bc7bff74 4081 continue;
4082 var = OMP_CLAUSE_DECL (c);
4083 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4084 && !is_global_var (var))
4085 {
4086 new_var = maybe_lookup_decl (var, ctx);
4087 if (new_var == NULL_TREE)
4088 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4089 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
9cf2d600 4090 tree alarg = omp_clause_aligned_alignment (c);
4091 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4092 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
bc7bff74 4093 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4094 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4095 gimplify_and_add (x, ilist);
4096 }
4097 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4098 && is_global_var (var))
4099 {
4100 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4101 new_var = lookup_decl (var, ctx);
4102 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4103 t = build_fold_addr_expr_loc (clause_loc, t);
4104 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
9cf2d600 4105 tree alarg = omp_clause_aligned_alignment (c);
4106 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4107 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
bc7bff74 4108 t = fold_convert_loc (clause_loc, ptype, t);
f9e245b2 4109 x = create_tmp_var (ptype);
bc7bff74 4110 t = build2 (MODIFY_EXPR, ptype, x, t);
4111 gimplify_and_add (t, ilist);
4112 t = build_simple_mem_ref_loc (clause_loc, x);
4113 SET_DECL_VALUE_EXPR (new_var, t);
4114 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4115 }
4116 continue;
48152aa2 4117 case OMP_CLAUSE__CONDTEMP_:
4f4b92d8 4118 if (is_parallel_ctx (ctx)
4119 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
48152aa2 4120 break;
4121 continue;
1e8e9920 4122 default:
4123 continue;
4124 }
4125
7e5a76c8 4126 if (task_reduction_p != (pass >= 2))
4127 continue;
4128
1e8e9920 4129 new_var = var = OMP_CLAUSE_DECL (c);
7e5a76c8 4130 if ((c_kind == OMP_CLAUSE_REDUCTION
4131 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4132 && TREE_CODE (var) == MEM_REF)
43895be5 4133 {
4134 var = TREE_OPERAND (var, 0);
9561765e 4135 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4136 var = TREE_OPERAND (var, 0);
43895be5 4137 if (TREE_CODE (var) == INDIRECT_REF
4138 || TREE_CODE (var) == ADDR_EXPR)
4139 var = TREE_OPERAND (var, 0);
4140 if (is_variable_sized (var))
4141 {
4142 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4143 var = DECL_VALUE_EXPR (var);
4144 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4145 var = TREE_OPERAND (var, 0);
4146 gcc_assert (DECL_P (var));
4147 }
4148 new_var = var;
4149 }
1e8e9920 4150 if (c_kind != OMP_CLAUSE_COPYIN)
4151 new_var = lookup_decl (var, ctx);
4152
4153 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4154 {
4155 if (pass != 0)
4156 continue;
4157 }
43895be5 4158 /* C/C++ array section reductions. */
7e5a76c8 4159 else if ((c_kind == OMP_CLAUSE_REDUCTION
4160 || c_kind == OMP_CLAUSE_IN_REDUCTION)
43895be5 4161 && var != OMP_CLAUSE_DECL (c))
1e8e9920 4162 {
4163 if (pass == 0)
4164 continue;
4165
9561765e 4166 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
43895be5 4167 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
7e5a76c8 4168
9561765e 4169 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4170 {
4171 tree b = TREE_OPERAND (orig_var, 1);
4172 b = maybe_lookup_decl (b, ctx);
4173 if (b == NULL)
4174 {
4175 b = TREE_OPERAND (orig_var, 1);
4176 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4177 }
4178 if (integer_zerop (bias))
4179 bias = b;
4180 else
4181 {
4182 bias = fold_convert_loc (clause_loc,
4183 TREE_TYPE (b), bias);
4184 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4185 TREE_TYPE (b), b, bias);
4186 }
4187 orig_var = TREE_OPERAND (orig_var, 0);
4188 }
7e5a76c8 4189 if (pass == 2)
4190 {
4191 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4192 if (is_global_var (out)
4193 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4194 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4195 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4196 != POINTER_TYPE)))
4197 x = var;
4198 else
4199 {
4200 bool by_ref = use_pointer_for_field (var, NULL);
4201 x = build_receiver_ref (var, by_ref, ctx);
4202 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4203 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4204 == POINTER_TYPE))
4205 x = build_fold_addr_expr (x);
4206 }
4207 if (TREE_CODE (orig_var) == INDIRECT_REF)
4208 x = build_simple_mem_ref (x);
4209 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4210 {
4211 if (var == TREE_OPERAND (orig_var, 0))
4212 x = build_fold_addr_expr (x);
4213 }
4214 bias = fold_convert (sizetype, bias);
4215 x = fold_convert (ptr_type_node, x);
4216 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4217 TREE_TYPE (x), x, bias);
4218 unsigned cnt = task_reduction_cnt - 1;
4219 if (!task_reduction_needs_orig_p)
4220 cnt += (task_reduction_cntorig_full
4221 - task_reduction_cntorig);
4222 else
4223 cnt = task_reduction_cntorig - 1;
4224 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4225 size_int (cnt), NULL_TREE, NULL_TREE);
4226 gimplify_assign (r, x, ilist);
4227 continue;
4228 }
4229
43895be5 4230 if (TREE_CODE (orig_var) == INDIRECT_REF
4231 || TREE_CODE (orig_var) == ADDR_EXPR)
4232 orig_var = TREE_OPERAND (orig_var, 0);
4233 tree d = OMP_CLAUSE_DECL (c);
4234 tree type = TREE_TYPE (d);
4235 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4236 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4237 const char *name = get_name (orig_var);
7e5a76c8 4238 if (pass == 3)
4239 {
4240 tree xv = create_tmp_var (ptr_type_node);
4241 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4242 {
4243 unsigned cnt = task_reduction_cnt - 1;
4244 if (!task_reduction_needs_orig_p)
4245 cnt += (task_reduction_cntorig_full
4246 - task_reduction_cntorig);
4247 else
4248 cnt = task_reduction_cntorig - 1;
4249 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4250 size_int (cnt), NULL_TREE, NULL_TREE);
4251
4252 gimple *g = gimple_build_assign (xv, x);
4253 gimple_seq_add_stmt (ilist, g);
4254 }
4255 else
4256 {
4257 unsigned int idx = *ctx->task_reduction_map->get (c);
4258 tree off;
4259 if (ctx->task_reductions[1 + idx])
4260 off = fold_convert (sizetype,
4261 ctx->task_reductions[1 + idx]);
4262 else
4263 off = task_reduction_read (ilist, tskred_temp, sizetype,
4264 7 + 3 * idx + 1);
4265 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4266 tskred_base, off);
4267 gimple_seq_add_stmt (ilist, g);
4268 }
4269 x = fold_convert (build_pointer_type (boolean_type_node),
4270 xv);
4271 if (TREE_CONSTANT (v))
4272 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4273 TYPE_SIZE_UNIT (type));
4274 else
4275 {
4276 tree t = maybe_lookup_decl (v, ctx);
4277 if (t)
4278 v = t;
4279 else
4280 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4281 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4282 fb_rvalue);
4283 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4284 TREE_TYPE (v), v,
4285 build_int_cst (TREE_TYPE (v), 1));
4286 t = fold_build2_loc (clause_loc, MULT_EXPR,
4287 TREE_TYPE (v), t,
4288 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4289 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4290 }
4291 cond = create_tmp_var (TREE_TYPE (x));
4292 gimplify_assign (cond, x, ilist);
4293 x = xv;
4294 }
4295 else if (TREE_CONSTANT (v))
fd6481cf 4296 {
43895be5 4297 x = create_tmp_var_raw (type, name);
4298 gimple_add_tmp_var (x);
4299 TREE_ADDRESSABLE (x) = 1;
4300 x = build_fold_addr_expr_loc (clause_loc, x);
4301 }
4302 else
4303 {
4304 tree atmp
4305 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4306 tree t = maybe_lookup_decl (v, ctx);
4307 if (t)
4308 v = t;
4309 else
4310 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4311 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4312 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4313 TREE_TYPE (v), v,
4314 build_int_cst (TREE_TYPE (v), 1));
4315 t = fold_build2_loc (clause_loc, MULT_EXPR,
4316 TREE_TYPE (v), t,
4317 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4318 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4319 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4320 }
4321
4322 tree ptype = build_pointer_type (TREE_TYPE (type));
4323 x = fold_convert_loc (clause_loc, ptype, x);
4324 tree y = create_tmp_var (ptype, name);
4325 gimplify_assign (y, x, ilist);
4326 x = y;
9561765e 4327 tree yb = y;
4328
4329 if (!integer_zerop (bias))
4330 {
219e09fc 4331 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4332 bias);
4333 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4334 x);
4335 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4336 pointer_sized_int_node, yb, bias);
4337 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
9561765e 4338 yb = create_tmp_var (ptype, name);
4339 gimplify_assign (yb, x, ilist);
4340 x = yb;
4341 }
4342
4343 d = TREE_OPERAND (d, 0);
4344 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4345 d = TREE_OPERAND (d, 0);
4346 if (TREE_CODE (d) == ADDR_EXPR)
43895be5 4347 {
4348 if (orig_var != var)
4349 {
4350 gcc_assert (is_variable_sized (orig_var));
4351 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4352 x);
4353 gimplify_assign (new_var, x, ilist);
4354 tree new_orig_var = lookup_decl (orig_var, ctx);
4355 tree t = build_fold_indirect_ref (new_var);
4356 DECL_IGNORED_P (new_var) = 0;
7e5a76c8 4357 TREE_THIS_NOTRAP (t) = 1;
43895be5 4358 SET_DECL_VALUE_EXPR (new_orig_var, t);
4359 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4360 }
4361 else
4362 {
4363 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4364 build_int_cst (ptype, 0));
4365 SET_DECL_VALUE_EXPR (new_var, x);
4366 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4367 }
4368 }
4369 else
4370 {
4371 gcc_assert (orig_var == var);
9561765e 4372 if (TREE_CODE (d) == INDIRECT_REF)
43895be5 4373 {
4374 x = create_tmp_var (ptype, name);
4375 TREE_ADDRESSABLE (x) = 1;
9561765e 4376 gimplify_assign (x, yb, ilist);
43895be5 4377 x = build_fold_addr_expr_loc (clause_loc, x);
4378 }
4379 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4380 gimplify_assign (new_var, x, ilist);
4381 }
7e5a76c8 4382 /* GOMP_taskgroup_reduction_register memsets the whole
4383 array to zero. If the initializer is zero, we don't
4384 need to initialize it again, just mark it as ever
4385 used unconditionally, i.e. cond = true. */
4386 if (cond
4387 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4388 && initializer_zerop (omp_reduction_init (c,
4389 TREE_TYPE (type))))
4390 {
4391 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4392 boolean_true_node);
4393 gimple_seq_add_stmt (ilist, g);
4394 continue;
4395 }
4396 tree end = create_artificial_label (UNKNOWN_LOCATION);
4397 if (cond)
4398 {
4399 gimple *g;
4400 if (!is_parallel_ctx (ctx))
4401 {
4402 tree condv = create_tmp_var (boolean_type_node);
4403 g = gimple_build_assign (condv,
4404 build_simple_mem_ref (cond));
4405 gimple_seq_add_stmt (ilist, g);
4406 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4407 g = gimple_build_cond (NE_EXPR, condv,
4408 boolean_false_node, end, lab1);
4409 gimple_seq_add_stmt (ilist, g);
4410 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4411 }
4412 g = gimple_build_assign (build_simple_mem_ref (cond),
4413 boolean_true_node);
4414 gimple_seq_add_stmt (ilist, g);
4415 }
4416
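 /* Emit a loop walking the privatized array section element by element:
    Y1 is the element pointer, I counts from 0 up to V, and BODY/END label
    the loop; each element receives the reduction identity (or runs the
    user-declared initializer).  When IS_SIMD, for instance, a matching
    walk over Y2/Y4 is emitted into DLIST to merge the private elements
    back into the original array afterwards.  */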
4417 tree y1 = create_tmp_var (ptype);
43895be5 4418 gimplify_assign (y1, y, ilist);
4419 tree i2 = NULL_TREE, y2 = NULL_TREE;
4420 tree body2 = NULL_TREE, end2 = NULL_TREE;
4421 tree y3 = NULL_TREE, y4 = NULL_TREE;
7e5a76c8 4422 if (task_reduction_needs_orig_p)
43895be5 4423 {
7e5a76c8 4424 y3 = create_tmp_var (ptype);
4425 tree ref;
4426 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4427 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4428 size_int (task_reduction_cnt_full
4429 + task_reduction_cntorig - 1),
4430 NULL_TREE, NULL_TREE);
4431 else
43895be5 4432 {
7e5a76c8 4433 unsigned int idx = *ctx->task_reduction_map->get (c);
4434 ref = task_reduction_read (ilist, tskred_temp, ptype,
4435 7 + 3 * idx);
43895be5 4436 }
7e5a76c8 4437 gimplify_assign (y3, ref, ilist);
4438 }
4439 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4440 {
4441 if (pass != 3)
43895be5 4442 {
7e5a76c8 4443 y2 = create_tmp_var (ptype);
4444 gimplify_assign (y2, y, ilist);
4445 }
4446 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4447 {
4448 tree ref = build_outer_var_ref (var, ctx);
4449 /* For references, build_outer_var_ref already performs this address-taking. */
4450 if (TREE_CODE (d) == INDIRECT_REF)
4451 gcc_assert (omp_is_reference (var));
4452 else if (TREE_CODE (d) == ADDR_EXPR)
4453 ref = build_fold_addr_expr (ref);
4454 else if (omp_is_reference (var))
4455 ref = build_fold_addr_expr (ref);
4456 ref = fold_convert_loc (clause_loc, ptype, ref);
4457 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4458 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4459 {
4460 y3 = create_tmp_var (ptype);
4461 gimplify_assign (y3, unshare_expr (ref), ilist);
4462 }
4463 if (is_simd)
4464 {
4465 y4 = create_tmp_var (ptype);
4466 gimplify_assign (y4, ref, dlist);
4467 }
43895be5 4468 }
4469 }
7e5a76c8 4470 tree i = create_tmp_var (TREE_TYPE (v));
43895be5 4471 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4472 tree body = create_artificial_label (UNKNOWN_LOCATION);
43895be5 4473 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4474 if (y2)
4475 {
7e5a76c8 4476 i2 = create_tmp_var (TREE_TYPE (v));
43895be5 4477 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4478 body2 = create_artificial_label (UNKNOWN_LOCATION);
4479 end2 = create_artificial_label (UNKNOWN_LOCATION);
4480 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4481 }
4482 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4483 {
4484 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4485 tree decl_placeholder
4486 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4487 SET_DECL_VALUE_EXPR (decl_placeholder,
4488 build_simple_mem_ref (y1));
4489 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4490 SET_DECL_VALUE_EXPR (placeholder,
4491 y3 ? build_simple_mem_ref (y3)
4492 : error_mark_node);
4493 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4494 x = lang_hooks.decls.omp_clause_default_ctor
4495 (c, build_simple_mem_ref (y1),
4496 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4497 if (x)
4498 gimplify_and_add (x, ilist);
4499 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4500 {
4501 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4502 lower_omp (&tseq, ctx);
4503 gimple_seq_add_seq (ilist, tseq);
4504 }
4505 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4506 if (is_simd)
4507 {
4508 SET_DECL_VALUE_EXPR (decl_placeholder,
4509 build_simple_mem_ref (y2));
4510 SET_DECL_VALUE_EXPR (placeholder,
4511 build_simple_mem_ref (y4));
4512 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4513 lower_omp (&tseq, ctx);
4514 gimple_seq_add_seq (dlist, tseq);
4515 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4516 }
4517 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4518 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
7e5a76c8 4519 if (y2)
43895be5 4520 {
7e5a76c8 4521 x = lang_hooks.decls.omp_clause_dtor
4522 (c, build_simple_mem_ref (y2));
4523 if (x)
4524 {
4525 gimple_seq tseq = NULL;
4526 dtor = x;
4527 gimplify_stmt (&dtor, &tseq);
4528 gimple_seq_add_seq (dlist, tseq);
4529 }
43895be5 4530 }
4531 }
4532 else
4533 {
4534 x = omp_reduction_init (c, TREE_TYPE (type));
4535 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4536
4537 /* reduction(-:var) sums up the partial results, so it
4538 acts identically to reduction(+:var). */
4539 if (code == MINUS_EXPR)
4540 code = PLUS_EXPR;
4541
4542 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4543 if (is_simd)
4544 {
4545 x = build2 (code, TREE_TYPE (type),
4546 build_simple_mem_ref (y4),
4547 build_simple_mem_ref (y2));
4548 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4549 }
4550 }
4551 gimple *g
4552 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4553 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4554 gimple_seq_add_stmt (ilist, g);
4555 if (y3)
4556 {
4557 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4558 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4559 gimple_seq_add_stmt (ilist, g);
4560 }
4561 g = gimple_build_assign (i, PLUS_EXPR, i,
4562 build_int_cst (TREE_TYPE (i), 1));
4563 gimple_seq_add_stmt (ilist, g);
4564 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4565 gimple_seq_add_stmt (ilist, g);
4566 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4567 if (y2)
4568 {
4569 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4570 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4571 gimple_seq_add_stmt (dlist, g);
4572 if (y4)
4573 {
4574 g = gimple_build_assign
4575 (y4, POINTER_PLUS_EXPR, y4,
4576 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4577 gimple_seq_add_stmt (dlist, g);
4578 }
4579 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4580 build_int_cst (TREE_TYPE (i2), 1));
4581 gimple_seq_add_stmt (dlist, g);
4582 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4583 gimple_seq_add_stmt (dlist, g);
4584 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4585 }
4586 continue;
4587 }
7e5a76c8 4588 else if (pass == 2)
4589 {
4590 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4591 x = var;
4592 else
4593 {
4594 bool by_ref = use_pointer_for_field (var, ctx);
4595 x = build_receiver_ref (var, by_ref, ctx);
4596 }
4597 if (!omp_is_reference (var))
4598 x = build_fold_addr_expr (x);
4599 x = fold_convert (ptr_type_node, x);
4600 unsigned cnt = task_reduction_cnt - 1;
4601 if (!task_reduction_needs_orig_p)
4602 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4603 else
4604 cnt = task_reduction_cntorig - 1;
4605 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4606 size_int (cnt), NULL_TREE, NULL_TREE);
4607 gimplify_assign (r, x, ilist);
4608 continue;
4609 }
4610 else if (pass == 3)
4611 {
4612 tree type = TREE_TYPE (new_var);
4613 if (!omp_is_reference (var))
4614 type = build_pointer_type (type);
4615 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4616 {
4617 unsigned cnt = task_reduction_cnt - 1;
4618 if (!task_reduction_needs_orig_p)
4619 cnt += (task_reduction_cntorig_full
4620 - task_reduction_cntorig);
4621 else
4622 cnt = task_reduction_cntorig - 1;
4623 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4624 size_int (cnt), NULL_TREE, NULL_TREE);
4625 }
4626 else
4627 {
4628 unsigned int idx = *ctx->task_reduction_map->get (c);
4629 tree off;
4630 if (ctx->task_reductions[1 + idx])
4631 off = fold_convert (sizetype,
4632 ctx->task_reductions[1 + idx]);
4633 else
4634 off = task_reduction_read (ilist, tskred_temp, sizetype,
4635 7 + 3 * idx + 1);
4636 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4637 tskred_base, off);
4638 }
4639 x = fold_convert (type, x);
4640 tree t;
4641 if (omp_is_reference (var))
4642 {
4643 gimplify_assign (new_var, x, ilist);
4644 t = new_var;
4645 new_var = build_simple_mem_ref (new_var);
4646 }
4647 else
4648 {
4649 t = create_tmp_var (type);
4650 gimplify_assign (t, x, ilist);
4651 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4652 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4653 }
4654 t = fold_convert (build_pointer_type (boolean_type_node), t);
4655 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4656 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4657 cond = create_tmp_var (TREE_TYPE (t));
4658 gimplify_assign (cond, t, ilist);
4659 }
43895be5 4660 else if (is_variable_sized (var))
4661 {
4662 /* For variable sized types, we need to allocate the
4663 actual storage here. Call alloca and store the
4664 result in the pointer decl that we created elsewhere. */
4665 if (pass == 0)
4666 continue;
4667
4668 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4669 {
4670 gcall *stmt;
4671 tree tmp, atmp;
4672
4673 ptr = DECL_VALUE_EXPR (new_var);
4674 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4675 ptr = TREE_OPERAND (ptr, 0);
fd6481cf 4676 gcc_assert (DECL_P (ptr));
4677 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
75a70cf9 4678
4679 /* void *tmp = __builtin_alloca */
43895be5 4680 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4681 stmt = gimple_build_call (atmp, 2, x,
4682 size_int (DECL_ALIGN (var)));
f9e245b2 4683 tmp = create_tmp_var_raw (ptr_type_node);
75a70cf9 4684 gimple_add_tmp_var (tmp);
4685 gimple_call_set_lhs (stmt, tmp);
4686
4687 gimple_seq_add_stmt (ilist, stmt);
4688
389dd41b 4689 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
75a70cf9 4690 gimplify_assign (ptr, x, ilist);
fd6481cf 4691 }
1e8e9920 4692 }
7e5a76c8 4693 else if (omp_is_reference (var)
4694 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4695 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
1e8e9920 4696 {
773c5ba7 4697 /* For references that are being privatized for Fortran,
4698 allocate new backing storage for the new pointer
4699 variable. This allows us to avoid changing all the
4700 code that expects a pointer to something that expects
bc7bff74 4701 a direct variable. */
1e8e9920 4702 if (pass == 0)
4703 continue;
4704
4705 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
fd6481cf 4706 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4707 {
4708 x = build_receiver_ref (var, false, ctx);
389dd41b 4709 x = build_fold_addr_expr_loc (clause_loc, x);
fd6481cf 4710 }
4711 else if (TREE_CONSTANT (x))
1e8e9920 4712 {
2712b6de 4713 /* For a reduction in a SIMD loop, defer adding the
 4714 initialization of the reference, because if we decide
 4715 to use a SIMD array for it, the initialization could cause
e32d171e 4716 an expansion ICE. Ditto for other privatization clauses. */
4717 if (is_simd)
09d1c205 4718 x = NULL_TREE;
4719 else
4720 {
09d1c205 4721 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
43895be5 4722 get_name (var));
09d1c205 4723 gimple_add_tmp_var (x);
4724 TREE_ADDRESSABLE (x) = 1;
4725 x = build_fold_addr_expr_loc (clause_loc, x);
4726 }
1e8e9920 4727 }
4728 else
4729 {
43895be5 4730 tree atmp
4731 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4732 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4733 tree al = size_int (TYPE_ALIGN (rtype));
4734 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
1e8e9920 4735 }
4736
09d1c205 4737 if (x)
4738 {
4739 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4740 gimplify_assign (new_var, x, ilist);
4741 }
1e8e9920 4742
182cf5a9 4743 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
1e8e9920 4744 }
7e5a76c8 4745 else if ((c_kind == OMP_CLAUSE_REDUCTION
4746 || c_kind == OMP_CLAUSE_IN_REDUCTION)
1e8e9920 4747 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4748 {
4749 if (pass == 0)
4750 continue;
4751 }
4752 else if (pass != 0)
4753 continue;
4754
55d6e7cd 4755 switch (OMP_CLAUSE_CODE (c))
1e8e9920 4756 {
4757 case OMP_CLAUSE_SHARED:
7e5a76c8 4758 /* Ignore shared directives in teams construct inside
4759 target construct. */
4760 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4761 && !is_host_teams_ctx (ctx))
bc7bff74 4762 continue;
f49d7bb5 4763 /* Shared global vars are just accessed directly. */
4764 if (is_global_var (new_var))
4765 break;
43895be5 4766 /* For taskloop firstprivate/lastprivate, represented
4767 as firstprivate and shared clause on the task, new_var
4768 is the firstprivate var. */
4769 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4770 break;
1e8e9920 4771 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4772 needs to be delayed until after fixup_child_record_type so
4773 that we get the correct type during the dereference. */
e8a588af 4774 by_ref = use_pointer_for_field (var, ctx);
1e8e9920 4775 x = build_receiver_ref (var, by_ref, ctx);
4776 SET_DECL_VALUE_EXPR (new_var, x);
4777 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4778
4779 /* ??? If VAR is not passed by reference, and the variable
4780 hasn't been initialized yet, then we'll get a warning for
4781 the store into the omp_data_s structure. Ideally, we'd be
48e1416a 4782 able to notice this and not store anything at all, but
1e8e9920 4783 we're generating code too early. Suppress the warning. */
4784 if (!by_ref)
4785 TREE_NO_WARNING (var) = 1;
4786 break;
4787
48152aa2 4788 case OMP_CLAUSE__CONDTEMP_:
4789 if (is_parallel_ctx (ctx))
4790 {
4791 x = build_receiver_ref (var, false, ctx);
4792 SET_DECL_VALUE_EXPR (new_var, x);
4793 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4794 }
4f4b92d8 4795 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
4796 {
4797 x = build_zero_cst (TREE_TYPE (var));
4798 goto do_private;
4799 }
48152aa2 4800 break;
4801
1e8e9920 4802 case OMP_CLAUSE_LASTPRIVATE:
4803 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4804 break;
4805 /* FALLTHRU */
4806
4807 case OMP_CLAUSE_PRIVATE:
fd6481cf 4808 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4809 x = build_outer_var_ref (var, ctx);
4810 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4811 {
4812 if (is_task_ctx (ctx))
4813 x = build_receiver_ref (var, false, ctx);
4814 else
1f355935 4815 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
fd6481cf 4816 }
4817 else
4818 x = NULL;
3d483a94 4819 do_private:
bc7bff74 4820 tree nx;
43895be5 4821 nx = lang_hooks.decls.omp_clause_default_ctor
4822 (c, unshare_expr (new_var), x);
3d483a94 4823 if (is_simd)
4824 {
4825 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
bc7bff74 4826 if ((TREE_ADDRESSABLE (new_var) || nx || y
4f4b92d8 4827 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
e32d171e 4828 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
4829 || omp_is_reference (var))
8e818b28 4830 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4831 ivar, lvar))
3d483a94 4832 {
e32d171e 4833 if (omp_is_reference (var))
4834 {
4835 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4836 tree new_vard = TREE_OPERAND (new_var, 0);
4837 gcc_assert (DECL_P (new_vard));
4838 SET_DECL_VALUE_EXPR (new_vard,
4839 build_fold_addr_expr (lvar));
4840 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4841 }
4842
bc7bff74 4843 if (nx)
3d483a94 4844 x = lang_hooks.decls.omp_clause_default_ctor
4845 (c, unshare_expr (ivar), x);
4f4b92d8 4846 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
4847 {
4848 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
4849 unshare_expr (ivar), x);
4850 nx = x;
4851 }
bc7bff74 4852 if (nx && x)
3d483a94 4853 gimplify_and_add (x, &llist[0]);
4f4b92d8 4854 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4855 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
4856 {
8259fae1 4857 tree v = new_var;
4858 if (!DECL_P (v))
4859 {
4860 gcc_assert (TREE_CODE (v) == MEM_REF);
4861 v = TREE_OPERAND (v, 0);
4862 gcc_assert (DECL_P (v));
4863 }
4864 v = *ctx->lastprivate_conditional_map->get (v);
4f4b92d8 4865 tree t = create_tmp_var (TREE_TYPE (v));
4866 tree z = build_zero_cst (TREE_TYPE (v));
4867 tree orig_v
4868 = build_outer_var_ref (var, ctx,
4869 OMP_CLAUSE_LASTPRIVATE);
4870 gimple_seq_add_stmt (dlist,
4871 gimple_build_assign (t, z));
4872 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
4873 tree civar = DECL_VALUE_EXPR (v);
4874 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
4875 civar = unshare_expr (civar);
4876 TREE_OPERAND (civar, 1) = sctx.idx;
4877 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
4878 unshare_expr (civar));
4879 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
4880 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
4881 orig_v, unshare_expr (ivar)));
4882 tree cond = build2 (LT_EXPR, boolean_type_node, t,
4883 civar);
4884 x = build3 (COND_EXPR, void_type_node, cond, x,
4885 void_node);
4886 gimple_seq tseq = NULL;
4887 gimplify_and_add (x, &tseq);
384aea12 4888 if (ctx->outer)
4889 lower_omp (&tseq, ctx->outer);
4f4b92d8 4890 gimple_seq_add_seq (&llist[1], tseq);
4891 }
3d483a94 4892 if (y)
4893 {
4894 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4895 if (y)
4896 {
4897 gimple_seq tseq = NULL;
4898
4899 dtor = y;
4900 gimplify_stmt (&dtor, &tseq);
4901 gimple_seq_add_seq (&llist[1], tseq);
4902 }
4903 }
4904 break;
4905 }
e32d171e 4906 if (omp_is_reference (var))
4907 {
4908 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4909 tree new_vard = TREE_OPERAND (new_var, 0);
4910 gcc_assert (DECL_P (new_vard));
4911 tree type = TREE_TYPE (TREE_TYPE (new_vard));
4912 x = TYPE_SIZE_UNIT (type);
4913 if (TREE_CONSTANT (x))
4914 {
4915 x = create_tmp_var_raw (type, get_name (var));
4916 gimple_add_tmp_var (x);
4917 TREE_ADDRESSABLE (x) = 1;
4918 x = build_fold_addr_expr_loc (clause_loc, x);
4919 x = fold_convert_loc (clause_loc,
4920 TREE_TYPE (new_vard), x);
4921 gimplify_assign (new_vard, x, ilist);
4922 }
4923 }
3d483a94 4924 }
bc7bff74 4925 if (nx)
4926 gimplify_and_add (nx, ilist);
1e8e9920 4927 /* FALLTHRU */
4928
4929 do_dtor:
4930 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4931 if (x)
4932 {
75a70cf9 4933 gimple_seq tseq = NULL;
4934
1e8e9920 4935 dtor = x;
75a70cf9 4936 gimplify_stmt (&dtor, &tseq);
e3a19533 4937 gimple_seq_add_seq (dlist, tseq);
1e8e9920 4938 }
4939 break;
4940
3d483a94 4941 case OMP_CLAUSE_LINEAR:
4942 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4943 goto do_firstprivate;
4944 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4945 x = NULL;
4946 else
4947 x = build_outer_var_ref (var, ctx);
4948 goto do_private;
4949
1e8e9920 4950 case OMP_CLAUSE_FIRSTPRIVATE:
fd6481cf 4951 if (is_task_ctx (ctx))
4952 {
7e5a76c8 4953 if ((omp_is_reference (var)
4954 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
4955 || is_variable_sized (var))
fd6481cf 4956 goto do_dtor;
4957 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4958 ctx))
4959 || use_pointer_for_field (var, NULL))
4960 {
4961 x = build_receiver_ref (var, false, ctx);
4962 SET_DECL_VALUE_EXPR (new_var, x);
4963 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4964 goto do_dtor;
4965 }
4966 }
7e5a76c8 4967 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
4968 && omp_is_reference (var))
4969 {
4970 x = build_outer_var_ref (var, ctx);
4971 gcc_assert (TREE_CODE (x) == MEM_REF
4972 && integer_zerop (TREE_OPERAND (x, 1)));
4973 x = TREE_OPERAND (x, 0);
4974 x = lang_hooks.decls.omp_clause_copy_ctor
4975 (c, unshare_expr (new_var), x);
4976 gimplify_and_add (x, ilist);
4977 goto do_dtor;
4978 }
3d483a94 4979 do_firstprivate:
1e8e9920 4980 x = build_outer_var_ref (var, ctx);
3d483a94 4981 if (is_simd)
4982 {
bc7bff74 4983 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4984 && gimple_omp_for_combined_into_p (ctx->stmt))
4985 {
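 /* The construct has been combined into an outer worksharing loop, so
    the private linear copy should start not at the outer value X but at
    X advanced by the iterations handed out before this chunk: L below is
    the _looptemp_ carrying this chunk's first iteration, so roughly
    (L - N1) / STEP iterations precede it and X is advanced by that count
    times the linear step.  */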
9580cb79 4986 tree t = OMP_CLAUSE_LINEAR_STEP (c);
4987 tree stept = TREE_TYPE (t);
4954efd4 4988 tree ct = omp_find_clause (clauses,
9580cb79 4989 OMP_CLAUSE__LOOPTEMP_);
4990 gcc_assert (ct);
4991 tree l = OMP_CLAUSE_DECL (ct);
e471cc6f 4992 tree n1 = fd->loop.n1;
4993 tree step = fd->loop.step;
4994 tree itype = TREE_TYPE (l);
4995 if (POINTER_TYPE_P (itype))
4996 itype = signed_type_for (itype);
4997 l = fold_build2 (MINUS_EXPR, itype, l, n1);
4998 if (TYPE_UNSIGNED (itype)
4999 && fd->loop.cond_code == GT_EXPR)
5000 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5001 fold_build1 (NEGATE_EXPR, itype, l),
5002 fold_build1 (NEGATE_EXPR,
5003 itype, step));
5004 else
5005 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
bc7bff74 5006 t = fold_build2 (MULT_EXPR, stept,
5007 fold_convert (stept, l), t);
9580cb79 5008
5009 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5010 {
e32d171e 5011 if (omp_is_reference (var))
5012 {
5013 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5014 tree new_vard = TREE_OPERAND (new_var, 0);
5015 gcc_assert (DECL_P (new_vard));
5016 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5017 nx = TYPE_SIZE_UNIT (type);
5018 if (TREE_CONSTANT (nx))
5019 {
5020 nx = create_tmp_var_raw (type,
5021 get_name (var));
5022 gimple_add_tmp_var (nx);
5023 TREE_ADDRESSABLE (nx) = 1;
5024 nx = build_fold_addr_expr_loc (clause_loc,
5025 nx);
5026 nx = fold_convert_loc (clause_loc,
5027 TREE_TYPE (new_vard),
5028 nx);
5029 gimplify_assign (new_vard, nx, ilist);
5030 }
5031 }
5032
9580cb79 5033 x = lang_hooks.decls.omp_clause_linear_ctor
5034 (c, new_var, x, t);
5035 gimplify_and_add (x, ilist);
5036 goto do_dtor;
5037 }
5038
bc7bff74 5039 if (POINTER_TYPE_P (TREE_TYPE (x)))
5040 x = fold_build2 (POINTER_PLUS_EXPR,
5041 TREE_TYPE (x), x, t);
5042 else
5043 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5044 }
5045
3d483a94 5046 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
e32d171e 5047 || TREE_ADDRESSABLE (new_var)
5048 || omp_is_reference (var))
8e818b28 5049 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5050 ivar, lvar))
3d483a94 5051 {
e32d171e 5052 if (omp_is_reference (var))
5053 {
5054 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5055 tree new_vard = TREE_OPERAND (new_var, 0);
5056 gcc_assert (DECL_P (new_vard));
5057 SET_DECL_VALUE_EXPR (new_vard,
5058 build_fold_addr_expr (lvar));
5059 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5060 }
3d483a94 5061 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5062 {
f9e245b2 5063 tree iv = create_tmp_var (TREE_TYPE (new_var));
3d483a94 5064 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5065 gimplify_and_add (x, ilist);
5066 gimple_stmt_iterator gsi
5067 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
1a91d914 5068 gassign *g
3d483a94 5069 = gimple_build_assign (unshare_expr (lvar), iv);
5070 gsi_insert_before_without_update (&gsi, g,
5071 GSI_SAME_STMT);
9580cb79 5072 tree t = OMP_CLAUSE_LINEAR_STEP (c);
3d483a94 5073 enum tree_code code = PLUS_EXPR;
5074 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5075 code = POINTER_PLUS_EXPR;
e9cf809e 5076 g = gimple_build_assign (iv, code, iv, t);
3d483a94 5077 gsi_insert_before_without_update (&gsi, g,
5078 GSI_SAME_STMT);
5079 break;
5080 }
5081 x = lang_hooks.decls.omp_clause_copy_ctor
5082 (c, unshare_expr (ivar), x);
5083 gimplify_and_add (x, &llist[0]);
5084 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5085 if (x)
5086 {
5087 gimple_seq tseq = NULL;
5088
5089 dtor = x;
5090 gimplify_stmt (&dtor, &tseq);
5091 gimple_seq_add_seq (&llist[1], tseq);
5092 }
5093 break;
5094 }
e32d171e 5095 if (omp_is_reference (var))
5096 {
5097 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5098 tree new_vard = TREE_OPERAND (new_var, 0);
5099 gcc_assert (DECL_P (new_vard));
5100 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5101 nx = TYPE_SIZE_UNIT (type);
5102 if (TREE_CONSTANT (nx))
5103 {
5104 nx = create_tmp_var_raw (type, get_name (var));
5105 gimple_add_tmp_var (nx);
5106 TREE_ADDRESSABLE (nx) = 1;
5107 nx = build_fold_addr_expr_loc (clause_loc, nx);
5108 nx = fold_convert_loc (clause_loc,
5109 TREE_TYPE (new_vard), nx);
5110 gimplify_assign (new_vard, nx, ilist);
5111 }
5112 }
3d483a94 5113 }
43895be5 5114 x = lang_hooks.decls.omp_clause_copy_ctor
5115 (c, unshare_expr (new_var), x);
1e8e9920 5116 gimplify_and_add (x, ilist);
5117 goto do_dtor;
1e8e9920 5118
bc7bff74 5119 case OMP_CLAUSE__LOOPTEMP_:
7e5a76c8 5120 case OMP_CLAUSE__REDUCTEMP_:
43895be5 5121 gcc_assert (is_taskreg_ctx (ctx));
bc7bff74 5122 x = build_outer_var_ref (var, ctx);
5123 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5124 gimplify_and_add (x, ilist);
5125 break;
5126
1e8e9920 5127 case OMP_CLAUSE_COPYIN:
e8a588af 5128 by_ref = use_pointer_for_field (var, NULL);
1e8e9920 5129 x = build_receiver_ref (var, by_ref, ctx);
5130 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5131 append_to_statement_list (x, &copyin_seq);
5132 copyin_by_ref |= by_ref;
5133 break;
5134
5135 case OMP_CLAUSE_REDUCTION:
7e5a76c8 5136 case OMP_CLAUSE_IN_REDUCTION:
641a0fa1 5137 /* OpenACC reductions are initialized using the
5138 GOACC_REDUCTION internal function. */
5139 if (is_gimple_omp_oacc (ctx->stmt))
5140 break;
1e8e9920 5141 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5142 {
fd6481cf 5143 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
42acab1c 5144 gimple *tseq;
7e5a76c8 5145 tree ptype = TREE_TYPE (placeholder);
5146 if (cond)
5147 {
5148 x = error_mark_node;
5149 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5150 && !task_reduction_needs_orig_p)
5151 x = var;
5152 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5153 {
5154 tree pptype = build_pointer_type (ptype);
5155 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5156 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5157 size_int (task_reduction_cnt_full
5158 + task_reduction_cntorig - 1),
5159 NULL_TREE, NULL_TREE);
5160 else
5161 {
5162 unsigned int idx
5163 = *ctx->task_reduction_map->get (c);
5164 x = task_reduction_read (ilist, tskred_temp,
5165 pptype, 7 + 3 * idx);
5166 }
5167 x = fold_convert (pptype, x);
5168 x = build_simple_mem_ref (x);
5169 }
5170 }
5171 else
5172 {
5173 x = build_outer_var_ref (var, ctx);
fd6481cf 5174
7e5a76c8 5175 if (omp_is_reference (var)
5176 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5177 x = build_fold_addr_expr_loc (clause_loc, x);
5178 }
fd6481cf 5179 SET_DECL_VALUE_EXPR (placeholder, x);
5180 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
bc7bff74 5181 tree new_vard = new_var;
4954efd4 5182 if (omp_is_reference (var))
bc7bff74 5183 {
5184 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5185 new_vard = TREE_OPERAND (new_var, 0);
5186 gcc_assert (DECL_P (new_vard));
5187 }
da008d72 5188 tree rvar = NULL_TREE, *rvarp = NULL;
5189 if (is_simd
5190 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5191 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5192 rvarp = &rvar;
3d483a94 5193 if (is_simd
8e818b28 5194 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
da008d72 5195 ivar, lvar, rvarp))
3d483a94 5196 {
bc7bff74 5197 if (new_vard == new_var)
5198 {
5199 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5200 SET_DECL_VALUE_EXPR (new_var, ivar);
5201 }
5202 else
5203 {
5204 SET_DECL_VALUE_EXPR (new_vard,
5205 build_fold_addr_expr (ivar));
5206 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5207 }
5208 x = lang_hooks.decls.omp_clause_default_ctor
5209 (c, unshare_expr (ivar),
5210 build_outer_var_ref (var, ctx));
da008d72 5211 if (rvarp)
5212 {
5213 if (x)
5214 {
5215 gimplify_and_add (x, &llist[0]);
5216
5217 tree ivar2 = unshare_expr (lvar);
5218 TREE_OPERAND (ivar2, 1) = sctx.idx;
5219 x = lang_hooks.decls.omp_clause_default_ctor
5220 (c, ivar2, build_outer_var_ref (var, ctx));
5221 gimplify_and_add (x, &llist[0]);
5222
 5223				  /* For types that need construction, add another
 5224				     private var which will be default constructed
 5225				     and optionally initialized with
 5226				     OMP_CLAUSE_REDUCTION_GIMPLE_INIT; inside the
 5227				     loop we then want to assign this value instead
 5228				     of constructing and destructing it in each
 5229				     iteration.  */
5230 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5231 gimple_add_tmp_var (nv);
5232 ctx->cb.decl_map->put (TREE_OPERAND (ivar, 0),
5233 nv);
5234 x = lang_hooks.decls.omp_clause_default_ctor
5235 (c, nv, build_outer_var_ref (var, ctx));
5236 gimplify_and_add (x, ilist);
5237
5238 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5239 {
5240 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5241 x = DECL_VALUE_EXPR (new_var);
5242 SET_DECL_VALUE_EXPR (new_var, nv);
5243 lower_omp (&tseq, ctx);
5244 SET_DECL_VALUE_EXPR (new_var, x);
5245 gimple_seq_add_seq (ilist, tseq);
5246 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5247 }
5248
5249 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5250 if (x)
5251 {
5252 tseq = NULL;
5253 dtor = x;
5254 gimplify_stmt (&dtor, &tseq);
5255 gimple_seq_add_seq (dlist, tseq);
5256 }
5257 }
5258
5259 tree ref = build_outer_var_ref (var, ctx);
5260 x = unshare_expr (ivar);
5261 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5262 ref);
5263 gimplify_and_add (x, &llist[0]);
5264
5265 ref = build_outer_var_ref (var, ctx);
5266 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5267 rvar);
5268 gimplify_and_add (x, &llist[3]);
5269
5270 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5271 if (new_vard == new_var)
5272 SET_DECL_VALUE_EXPR (new_var, lvar);
5273 else
5274 SET_DECL_VALUE_EXPR (new_vard,
5275 build_fold_addr_expr (lvar));
5276
5277 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5278 if (x)
5279 {
5280 tseq = NULL;
5281 dtor = x;
5282 gimplify_stmt (&dtor, &tseq);
5283 gimple_seq_add_seq (&llist[1], tseq);
5284 }
5285
5286 tree ivar2 = unshare_expr (lvar);
5287 TREE_OPERAND (ivar2, 1) = sctx.idx;
5288 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5289 if (x)
5290 {
5291 tseq = NULL;
5292 dtor = x;
5293 gimplify_stmt (&dtor, &tseq);
5294 gimple_seq_add_seq (&llist[1], tseq);
5295 }
5296 break;
5297 }
bc7bff74 5298 if (x)
5299 gimplify_and_add (x, &llist[0]);
5300 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5301 {
5302 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5303 lower_omp (&tseq, ctx);
5304 gimple_seq_add_seq (&llist[0], tseq);
5305 }
5306 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5307 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5308 lower_omp (&tseq, ctx);
5309 gimple_seq_add_seq (&llist[1], tseq);
5310 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5311 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5312 if (new_vard == new_var)
5313 SET_DECL_VALUE_EXPR (new_var, lvar);
5314 else
5315 SET_DECL_VALUE_EXPR (new_vard,
5316 build_fold_addr_expr (lvar));
5317 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5318 if (x)
5319 {
5320 tseq = NULL;
5321 dtor = x;
5322 gimplify_stmt (&dtor, &tseq);
5323 gimple_seq_add_seq (&llist[1], tseq);
5324 }
5325 break;
5326 }
09d1c205 5327			/* If this is a reference to a constant-size reduction var
 5328			   with a placeholder, we haven't emitted the initializer
 5329			   for it, because that is undesirable if SIMD arrays are used.
 5330			   But if they aren't used, we need to emit the deferred
 5331			   initialization now.  */
4954efd4 5332 else if (omp_is_reference (var) && is_simd)
2712b6de 5333 handle_simd_reference (clause_loc, new_vard, ilist);
7e5a76c8 5334
5335 tree lab2 = NULL_TREE;
5336 if (cond)
5337 {
5338 gimple *g;
5339 if (!is_parallel_ctx (ctx))
5340 {
5341 tree condv = create_tmp_var (boolean_type_node);
5342 tree m = build_simple_mem_ref (cond);
5343 g = gimple_build_assign (condv, m);
5344 gimple_seq_add_stmt (ilist, g);
5345 tree lab1
5346 = create_artificial_label (UNKNOWN_LOCATION);
5347 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5348 g = gimple_build_cond (NE_EXPR, condv,
5349 boolean_false_node,
5350 lab2, lab1);
5351 gimple_seq_add_stmt (ilist, g);
5352 gimple_seq_add_stmt (ilist,
5353 gimple_build_label (lab1));
5354 }
5355 g = gimple_build_assign (build_simple_mem_ref (cond),
5356 boolean_true_node);
5357 gimple_seq_add_stmt (ilist, g);
5358 }
bc7bff74 5359 x = lang_hooks.decls.omp_clause_default_ctor
cf5f881f 5360 (c, unshare_expr (new_var),
7e5a76c8 5361 cond ? NULL_TREE
5362 : build_outer_var_ref (var, ctx));
bc7bff74 5363 if (x)
5364 gimplify_and_add (x, ilist);
da008d72 5365
5366 if (rvarp)
5367 {
5368 if (x)
5369 {
5370 tree nv = create_tmp_var_raw (TREE_TYPE (new_vard));
5371 gimple_add_tmp_var (nv);
5372 ctx->cb.decl_map->put (new_var, nv);
5373 x = lang_hooks.decls.omp_clause_default_ctor
5374 (c, nv, build_outer_var_ref (var, ctx));
5375 gimplify_and_add (x, ilist);
5376 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5377 {
5378 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5379 SET_DECL_VALUE_EXPR (new_var, nv);
5380 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5381 lower_omp (&tseq, ctx);
5382 SET_DECL_VALUE_EXPR (new_var, NULL_TREE);
5383 DECL_HAS_VALUE_EXPR_P (new_var) = 0;
5384 gimple_seq_add_seq (ilist, tseq);
5385 }
5386 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5387 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5388 if (x)
5389 {
5390 tseq = NULL;
5391 dtor = x;
5392 gimplify_stmt (&dtor, &tseq);
5393 gimple_seq_add_seq (dlist, tseq);
5394 }
5395 }
5396 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5397 goto do_dtor;
5398 }
5399
bc7bff74 5400 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5401 {
5402 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5403 lower_omp (&tseq, ctx);
5404 gimple_seq_add_seq (ilist, tseq);
5405 }
75a70cf9 5406 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
bc7bff74 5407 if (is_simd)
5408 {
5409 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5410 lower_omp (&tseq, ctx);
5411 gimple_seq_add_seq (dlist, tseq);
5412 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5413 }
fd6481cf 5414 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
7e5a76c8 5415 if (cond)
5416 {
5417 if (lab2)
5418 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5419 break;
5420 }
bc7bff74 5421 goto do_dtor;
1e8e9920 5422 }
5423 else
5424 {
5425 x = omp_reduction_init (c, TREE_TYPE (new_var));
5426 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
c22ad515 5427 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5428
7e5a76c8 5429 if (cond)
5430 {
5431 gimple *g;
5432 tree lab2 = NULL_TREE;
5433 /* GOMP_taskgroup_reduction_register memsets the whole
5434 array to zero. If the initializer is zero, we don't
5435 need to initialize it again, just mark it as ever
5436 used unconditionally, i.e. cond = true. */
5437 if (initializer_zerop (x))
5438 {
5439 g = gimple_build_assign (build_simple_mem_ref (cond),
5440 boolean_true_node);
5441 gimple_seq_add_stmt (ilist, g);
5442 break;
5443 }
5444
5445 /* Otherwise, emit
5446 if (!cond) { cond = true; new_var = x; } */
5447 if (!is_parallel_ctx (ctx))
5448 {
5449 tree condv = create_tmp_var (boolean_type_node);
5450 tree m = build_simple_mem_ref (cond);
5451 g = gimple_build_assign (condv, m);
5452 gimple_seq_add_stmt (ilist, g);
5453 tree lab1
5454 = create_artificial_label (UNKNOWN_LOCATION);
5455 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5456 g = gimple_build_cond (NE_EXPR, condv,
5457 boolean_false_node,
5458 lab2, lab1);
5459 gimple_seq_add_stmt (ilist, g);
5460 gimple_seq_add_stmt (ilist,
5461 gimple_build_label (lab1));
5462 }
5463 g = gimple_build_assign (build_simple_mem_ref (cond),
5464 boolean_true_node);
5465 gimple_seq_add_stmt (ilist, g);
5466 gimplify_assign (new_var, x, ilist);
5467 if (lab2)
5468 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5469 break;
5470 }
5471
c22ad515 5472 /* reduction(-:var) sums up the partial results, so it
5473 acts identically to reduction(+:var). */
5474 if (code == MINUS_EXPR)
5475 code = PLUS_EXPR;
5476
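		  /* Illustrative sketch, added for exposition and not part of
		     the original omp-low.c: for a loop such as

		       #pragma omp parallel for reduction (-:sum)
		       for (i = 0; i < n; i++)
			 sum -= a[i];

		     each thread accumulates into its private copy and the
		     partial results are combined with addition, which is why
		     the MINUS_EXPR reduction code is rewritten to PLUS_EXPR
		     just above.  */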
2712b6de 5477 tree new_vard = new_var;
4954efd4 5478 if (is_simd && omp_is_reference (var))
2712b6de 5479 {
5480 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5481 new_vard = TREE_OPERAND (new_var, 0);
5482 gcc_assert (DECL_P (new_vard));
5483 }
da008d72 5484 tree rvar = NULL_TREE, *rvarp = NULL;
5485 if (is_simd
5486 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5487 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5488 rvarp = &rvar;
3d483a94 5489 if (is_simd
8e818b28 5490 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
da008d72 5491 ivar, lvar, rvarp))
3d483a94 5492 {
da008d72 5493 if (new_vard != new_var)
5494 {
5495 SET_DECL_VALUE_EXPR (new_vard,
5496 build_fold_addr_expr (lvar));
5497 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5498 }
5499
3d483a94 5500 tree ref = build_outer_var_ref (var, ctx);
5501
da008d72 5502 if (rvarp)
5503 {
5504 gimplify_assign (ivar, ref, &llist[0]);
5505 ref = build_outer_var_ref (var, ctx);
5506 gimplify_assign (ref, rvar, &llist[3]);
5507 break;
5508 }
5509
3d483a94 5510 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5511
8e818b28 5512 if (sctx.is_simt)
bab6706a 5513 {
5514 if (!simt_lane)
5515 simt_lane = create_tmp_var (unsigned_type_node);
5516 x = build_call_expr_internal_loc
5517 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5518 TREE_TYPE (ivar), 2, ivar, simt_lane);
5519 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5520 gimplify_assign (ivar, x, &llist[2]);
5521 }
3d483a94 5522 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5523 ref = build_outer_var_ref (var, ctx);
5524 gimplify_assign (ref, x, &llist[1]);
2712b6de 5525
3d483a94 5526 }
da008d72 5527 else if (rvarp == NULL)
3d483a94 5528 {
4954efd4 5529 if (omp_is_reference (var) && is_simd)
2712b6de 5530 handle_simd_reference (clause_loc, new_vard, ilist);
3d483a94 5531 gimplify_assign (new_var, x, ilist);
5532 if (is_simd)
c22ad515 5533 {
5534 tree ref = build_outer_var_ref (var, ctx);
5535
5536 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5537 ref = build_outer_var_ref (var, ctx);
5538 gimplify_assign (ref, x, dlist);
5539 }
3d483a94 5540 }
1e8e9920 5541 }
5542 break;
5543
5544 default:
5545 gcc_unreachable ();
5546 }
5547 }
5548 }
7e5a76c8 5549 if (tskred_avar)
5550 {
5551 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5552 TREE_THIS_VOLATILE (clobber) = 1;
5553 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5554 }
1e8e9920 5555
9d805ed8 5556 if (known_eq (sctx.max_vf, 1U))
4f4b92d8 5557 {
5558 sctx.is_simt = false;
5559 if (ctx->lastprivate_conditional_map)
5560 {
384aea12 5561 if (gimple_omp_for_combined_into_p (ctx->stmt))
5562 {
5563 /* Signal to lower_omp_1 that it should use parent context. */
5564 ctx->combined_into_simd_safelen0 = true;
5565 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5566 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5567 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5568 {
5569 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5570 tree *v
5571 = ctx->lastprivate_conditional_map->get (o);
5572 tree po = lookup_decl (OMP_CLAUSE_DECL (c), ctx->outer);
5573 tree *pv
5574 = ctx->outer->lastprivate_conditional_map->get (po);
5575 *v = *pv;
5576 }
5577 }
5578 else
5579 {
5580 /* When not vectorized, treat lastprivate(conditional:) like
5581 normal lastprivate, as there will be just one simd lane
5582 writing the privatized variable. */
5583 delete ctx->lastprivate_conditional_map;
5584 ctx->lastprivate_conditional_map = NULL;
5585 }
4f4b92d8 5586 }
5587 }
1b576300 5588
1d86b8dc 5589 if (nonconst_simd_if)
5590 {
5591 if (sctx.lane == NULL_TREE)
5592 {
5593 sctx.idx = create_tmp_var (unsigned_type_node);
5594 sctx.lane = create_tmp_var (unsigned_type_node);
5595 }
5596 /* FIXME: For now. */
5597 sctx.is_simt = false;
5598 }
5599
1b576300 5600 if (sctx.lane || sctx.is_simt)
3d483a94 5601 {
1b576300 5602 uid = create_tmp_var (ptr_type_node, "simduid");
8e1a382d 5603      /* Don't want uninit warnings on simduid; it is always uninitialized,
 5604	 but we use it only for its DECL_UID, not for its value.  */
5605 TREE_NO_WARNING (uid) = 1;
1b576300 5606 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5607 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5608 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5609 gimple_omp_for_set_clauses (ctx->stmt, c);
5610 }
 5611  /* Emit calls denoting privatized variables and initializing a pointer to a
 5612     structure that holds private variables as fields after the ompdevlow pass.  */
5613 if (sctx.is_simt)
5614 {
5615 sctx.simt_eargs[0] = uid;
5616 gimple *g
5617 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5618 gimple_call_set_lhs (g, uid);
5619 gimple_seq_add_stmt (ilist, g);
5620 sctx.simt_eargs.release ();
5621
5622 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5623 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5624 gimple_call_set_lhs (g, simtrec);
5625 gimple_seq_add_stmt (ilist, g);
5626 }
5627 if (sctx.lane)
5628 {
1d86b8dc 5629 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
da008d72 5630 2 + (nonconst_simd_if != NULL),
5631 uid, integer_zero_node,
5632 nonconst_simd_if);
8e818b28 5633 gimple_call_set_lhs (g, sctx.lane);
3d483a94 5634 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5635 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
8e818b28 5636 g = gimple_build_assign (sctx.lane, INTEGER_CST,
e9cf809e 5637 build_int_cst (unsigned_type_node, 0));
3d483a94 5638 gimple_seq_add_stmt (ilist, g);
da008d72 5639 if (sctx.lastlane)
5640 {
5641 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
5642 2, uid, sctx.lane);
5643 gimple_call_set_lhs (g, sctx.lastlane);
5644 gimple_seq_add_stmt (dlist, g);
5645 gimple_seq_add_seq (dlist, llist[3]);
5646 }
bab6706a 5647 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
5648 if (llist[2])
5649 {
5650 tree simt_vf = create_tmp_var (unsigned_type_node);
5651 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5652 gimple_call_set_lhs (g, simt_vf);
5653 gimple_seq_add_stmt (dlist, g);
5654
5655 tree t = build_int_cst (unsigned_type_node, 1);
5656 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5657 gimple_seq_add_stmt (dlist, g);
5658
5659 t = build_int_cst (unsigned_type_node, 0);
8e818b28 5660 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
bab6706a 5661 gimple_seq_add_stmt (dlist, g);
5662
5663 tree body = create_artificial_label (UNKNOWN_LOCATION);
5664 tree header = create_artificial_label (UNKNOWN_LOCATION);
5665 tree end = create_artificial_label (UNKNOWN_LOCATION);
5666 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5667 gimple_seq_add_stmt (dlist, gimple_build_label (body));
5668
5669 gimple_seq_add_seq (dlist, llist[2]);
5670
5671 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5672 gimple_seq_add_stmt (dlist, g);
5673
5674 gimple_seq_add_stmt (dlist, gimple_build_label (header));
5675 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5676 gimple_seq_add_stmt (dlist, g);
5677
5678 gimple_seq_add_stmt (dlist, gimple_build_label (end));
5679 }
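	  /* Illustrative sketch, added for exposition and not part of the
	     original omp-low.c: the loop built above is a butterfly
	     reduction, roughly

	       for (delta = 1; delta < simt_vf; delta <<= 1)
		 ivar = ivar OP .GOMP_SIMT_XCHG_BFLY (ivar, delta);

	     so after log2 (simt_vf) exchange steps every SIMT lane holds
	     the combined value.  */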
3d483a94 5680 for (int i = 0; i < 2; i++)
5681 if (llist[i])
5682 {
f9e245b2 5683 tree vf = create_tmp_var (unsigned_type_node);
3d483a94 5684 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5685 gimple_call_set_lhs (g, vf);
5686 gimple_seq *seq = i == 0 ? ilist : dlist;
5687 gimple_seq_add_stmt (seq, g);
5688 tree t = build_int_cst (unsigned_type_node, 0);
8e818b28 5689 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
3d483a94 5690 gimple_seq_add_stmt (seq, g);
5691 tree body = create_artificial_label (UNKNOWN_LOCATION);
5692 tree header = create_artificial_label (UNKNOWN_LOCATION);
5693 tree end = create_artificial_label (UNKNOWN_LOCATION);
5694 gimple_seq_add_stmt (seq, gimple_build_goto (header));
5695 gimple_seq_add_stmt (seq, gimple_build_label (body));
5696 gimple_seq_add_seq (seq, llist[i]);
5697 t = build_int_cst (unsigned_type_node, 1);
8e818b28 5698 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
3d483a94 5699 gimple_seq_add_stmt (seq, g);
5700 gimple_seq_add_stmt (seq, gimple_build_label (header));
8e818b28 5701 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
3d483a94 5702 gimple_seq_add_stmt (seq, g);
5703 gimple_seq_add_stmt (seq, gimple_build_label (end));
5704 }
5705 }
1b576300 5706 if (sctx.is_simt)
5707 {
5708 gimple_seq_add_seq (dlist, sctx.simt_dlist);
5709 gimple *g
5710 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5711 gimple_seq_add_stmt (dlist, g);
5712 }
3d483a94 5713
1e8e9920 5714  /* The copyin sequence is not to be executed by the main thread, since
 5715     that would result in self-copies.  That might not be visible for scalars,
 5716     but it certainly is for C++ operator=.  */
5717 if (copyin_seq)
5718 {
b9a16870 5719 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5720 0);
1e8e9920 5721 x = build2 (NE_EXPR, boolean_type_node, x,
5722 build_int_cst (TREE_TYPE (x), 0));
5723 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5724 gimplify_and_add (x, ilist);
5725 }
5726
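  /* Illustrative sketch, added for exposition and not part of the original
     omp-low.c: with

       int tp;
       #pragma omp threadprivate (tp)
       ...
       #pragma omp parallel copyin (tp)
       { ... }

     the sequence built above effectively becomes

       if (omp_get_thread_num () != 0)
	 tp = <the master thread's tp>;

     so only the non-master threads perform the copy and self-copies in the
     master thread are avoided.  */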
 5727  /* If any copyin variable is passed by reference, we must ensure the
 5728     master thread doesn't modify it before it is copied over in all
f49d7bb5 5729     threads.  Similarly, for variables in both firstprivate and
 5730     lastprivate clauses we need to ensure that the lastprivate copying
bc7bff74 5731     happens after the firstprivate copying in all threads.  And similarly
 5732     for UDRs if the initializer expression refers to omp_orig.  */
5733 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
3d483a94 5734 {
5735 /* Don't add any barrier for #pragma omp simd or
5736 #pragma omp distribute. */
7e5a76c8 5737 if (!is_task_ctx (ctx)
5738 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5739 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
4954efd4 5740 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
3d483a94 5741 }
5742
5743 /* If max_vf is non-zero, then we can use only a vectorization factor
5744 up to the max_vf we chose. So stick it into the safelen clause. */
9d805ed8 5745 if (maybe_ne (sctx.max_vf, 0U))
3d483a94 5746 {
4954efd4 5747 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3d483a94 5748 OMP_CLAUSE_SAFELEN);
9d805ed8 5749 poly_uint64 safe_len;
3d483a94 5750 if (c == NULL_TREE
9d805ed8 5751 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5752 && maybe_gt (safe_len, sctx.max_vf)))
3d483a94 5753 {
5754 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5755 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
8e818b28 5756 sctx.max_vf);
3d483a94 5757 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5758 gimple_omp_for_set_clauses (ctx->stmt, c);
5759 }
5760 }
1e8e9920 5761}
5762
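/* Illustrative sketch, added for exposition and not part of the original
   omp-low.c: if the privatization above settled on, say, max_vf == 16 for

     #pragma omp simd
     for (i = 0; i < n; i++)
       ...

   and the loop has no explicit safelen clause (or a larger one), the code
   above adds the equivalent of safelen(16), so the vectorizer never picks
   a vectorization factor larger than the "omp simd array" copies created
   for the privatized variables.  */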
9a1d892b 5763/* Create temporary variables for lastprivate(conditional:) implementation
5764 in context CTX with CLAUSES. */
5765
5766static void
5767lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
5768{
9a1d892b 5769 tree iter_type = NULL_TREE;
5770 tree cond_ptr = NULL_TREE;
5771 tree iter_var = NULL_TREE;
4f4b92d8 5772 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5773 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
5774 tree next = *clauses;
9a1d892b 5775 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
5776 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5777 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5778 {
4f4b92d8 5779 if (is_simd)
5780 {
5781 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
5782 gcc_assert (cc);
5783 if (iter_type == NULL_TREE)
5784 {
5785 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
5786 iter_var = create_tmp_var_raw (iter_type);
5787 DECL_CONTEXT (iter_var) = current_function_decl;
5788 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
5789 DECL_CHAIN (iter_var) = ctx->block_vars;
5790 ctx->block_vars = iter_var;
5791 tree c3
5792 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
5793 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
5794 OMP_CLAUSE_DECL (c3) = iter_var;
5795 OMP_CLAUSE_CHAIN (c3) = *clauses;
5796 *clauses = c3;
5797 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
5798 }
5799 next = OMP_CLAUSE_CHAIN (cc);
5800 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5801 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
5802 ctx->lastprivate_conditional_map->put (o, v);
5803 continue;
5804 }
9a1d892b 5805 if (iter_type == NULL)
5806 {
eb7a699d 5807 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
5808 {
5809 struct omp_for_data fd;
5810 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
5811 NULL);
5812 iter_type = unsigned_type_for (fd.iter_type);
5813 }
5814 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
5815 iter_type = unsigned_type_node;
48152aa2 5816 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
5817 if (c2)
5818 {
5819 cond_ptr
5820 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
5821 OMP_CLAUSE_DECL (c2) = cond_ptr;
5822 }
5823 else
5824 {
5825 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
5826 DECL_CONTEXT (cond_ptr) = current_function_decl;
5827 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
5828 DECL_CHAIN (cond_ptr) = ctx->block_vars;
5829 ctx->block_vars = cond_ptr;
5830 c2 = build_omp_clause (UNKNOWN_LOCATION,
5831 OMP_CLAUSE__CONDTEMP_);
5832 OMP_CLAUSE_DECL (c2) = cond_ptr;
5833 OMP_CLAUSE_CHAIN (c2) = *clauses;
5834 *clauses = c2;
5835 }
9a1d892b 5836 iter_var = create_tmp_var_raw (iter_type);
5837 DECL_CONTEXT (iter_var) = current_function_decl;
5838 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
5839 DECL_CHAIN (iter_var) = ctx->block_vars;
5840 ctx->block_vars = iter_var;
9a1d892b 5841 tree c3
5842 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
4f4b92d8 5843 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
9a1d892b 5844 OMP_CLAUSE_DECL (c3) = iter_var;
48152aa2 5845 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
9a1d892b 5846 OMP_CLAUSE_CHAIN (c2) = c3;
9a1d892b 5847 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
5848 }
5849 tree v = create_tmp_var_raw (iter_type);
5850 DECL_CONTEXT (v) = current_function_decl;
5851 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
5852 DECL_CHAIN (v) = ctx->block_vars;
5853 ctx->block_vars = v;
5854 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5855 ctx->lastprivate_conditional_map->put (o, v);
5856 }
5857}
5858
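/* Illustrative sketch, added for exposition and not part of the original
   omp-low.c: for

     #pragma omp parallel for lastprivate (conditional: x)
     for (i = 0; i < n; i++)
       if (p[i])
	 x = i;

   the helper above creates an iteration-counter-typed temporary (tracked
   via an artificial _condtemp_ clause) for each conditional lastprivate
   variable; the lowered body records at which iteration x was last
   assigned, so that only the assignment from the sequentially last such
   iteration is copied back to the original x.  */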
773c5ba7 5859
1e8e9920 5860/* Generate code to implement the LASTPRIVATE clauses. This is used for
5861 both parallel and workshare constructs. PREDICATE may be NULL if it's
9a1d892b 5862   always true.  BODY_P is the sequence into which to insert early
 5863   initialization if needed, STMT_LIST is where the non-conditional
 5864   lastprivate handling goes, and CSTMT_LIST is a sequence that needs to be
 5865   run in a critical section.  */
1e8e9920 5866
5867static void
9a1d892b 5868lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
5869 gimple_seq *stmt_list, gimple_seq *cstmt_list,
bc7bff74 5870 omp_context *ctx)
1e8e9920 5871{
3d483a94 5872 tree x, c, label = NULL, orig_clauses = clauses;
fd6481cf 5873 bool par_clauses = false;
bab6706a 5874 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
9a1d892b 5875 unsigned HOST_WIDE_INT conditional_off = 0;
1e8e9920 5876
3d483a94 5877 /* Early exit if there are no lastprivate or linear clauses. */
5878 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
5879 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
5880 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
5881 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
5882 break;
1e8e9920 5883 if (clauses == NULL)
5884 {
5885 /* If this was a workshare clause, see if it had been combined
5886 with its parallel. In that case, look for the clauses on the
5887 parallel statement itself. */
5888 if (is_parallel_ctx (ctx))
5889 return;
5890
5891 ctx = ctx->outer;
5892 if (ctx == NULL || !is_parallel_ctx (ctx))
5893 return;
5894
4954efd4 5895 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
1e8e9920 5896 OMP_CLAUSE_LASTPRIVATE);
5897 if (clauses == NULL)
5898 return;
fd6481cf 5899 par_clauses = true;
1e8e9920 5900 }
5901
bab6706a 5902 bool maybe_simt = false;
5903 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5904 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5905 {
4954efd4 5906 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
5907 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
bab6706a 5908 if (simduid)
5909 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
5910 }
5911
75a70cf9 5912 if (predicate)
5913 {
1a91d914 5914 gcond *stmt;
75a70cf9 5915 tree label_true, arm1, arm2;
da80a82f 5916 enum tree_code pred_code = TREE_CODE (predicate);
75a70cf9 5917
e60a6f7b 5918 label = create_artificial_label (UNKNOWN_LOCATION);
5919 label_true = create_artificial_label (UNKNOWN_LOCATION);
da80a82f 5920 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
5921 {
5922 arm1 = TREE_OPERAND (predicate, 0);
5923 arm2 = TREE_OPERAND (predicate, 1);
5924 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5925 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
5926 }
5927 else
5928 {
5929 arm1 = predicate;
5930 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5931 arm2 = boolean_false_node;
5932 pred_code = NE_EXPR;
5933 }
bab6706a 5934 if (maybe_simt)
5935 {
da80a82f 5936 c = build2 (pred_code, boolean_type_node, arm1, arm2);
bab6706a 5937 c = fold_convert (integer_type_node, c);
5938 simtcond = create_tmp_var (integer_type_node);
5939 gimplify_assign (simtcond, c, stmt_list);
5940 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
5941 1, simtcond);
5942 c = create_tmp_var (integer_type_node);
5943 gimple_call_set_lhs (g, c);
5944 gimple_seq_add_stmt (stmt_list, g);
5945 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
5946 label_true, label);
5947 }
5948 else
da80a82f 5949 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
75a70cf9 5950 gimple_seq_add_stmt (stmt_list, stmt);
5951 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
5952 }
1e8e9920 5953
9a1d892b 5954 tree cond_ptr = NULL_TREE;
fd6481cf 5955 for (c = clauses; c ;)
1e8e9920 5956 {
5957 tree var, new_var;
389dd41b 5958 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9a1d892b 5959 gimple_seq *this_stmt_list = stmt_list;
5960 tree lab2 = NULL_TREE;
5961
5962 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4f4b92d8 5963 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
384aea12 5964 && ctx->lastprivate_conditional_map
5965 && !ctx->combined_into_simd_safelen0)
9a1d892b 5966 {
4f4b92d8 5967 gcc_assert (body_p);
5968 if (simduid)
5969 goto next;
9a1d892b 5970 if (cond_ptr == NULL_TREE)
5971 {
5972 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
5973 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
5974 }
5975 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
5976 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5977 tree v = *ctx->lastprivate_conditional_map->get (o);
5978 gimplify_assign (v, build_zero_cst (type), body_p);
5979 this_stmt_list = cstmt_list;
48152aa2 5980 tree mem;
5981 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
5982 {
5983 mem = build2 (MEM_REF, type, cond_ptr,
5984 build_int_cst (TREE_TYPE (cond_ptr),
5985 conditional_off));
5986 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
5987 }
5988 else
5989 mem = build4 (ARRAY_REF, type, cond_ptr,
5990 size_int (conditional_off++), NULL_TREE, NULL_TREE);
9a1d892b 5991 tree mem2 = copy_node (mem);
9a1d892b 5992 gimple_seq seq = NULL;
5993 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
5994 gimple_seq_add_seq (this_stmt_list, seq);
5995 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5996 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5997 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
5998 gimple_seq_add_stmt (this_stmt_list, g);
5999 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6000 gimplify_assign (mem2, v, this_stmt_list);
6001 }
1e8e9920 6002
3d483a94 6003 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6004 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6005 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
fd6481cf 6006 {
6007 var = OMP_CLAUSE_DECL (c);
43895be5 6008 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6009 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6010 && is_taskloop_ctx (ctx))
6011 {
6012 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6013 new_var = lookup_decl (var, ctx->outer);
6014 }
6015 else
aa1ddb66 6016 {
6017 new_var = lookup_decl (var, ctx);
6018 /* Avoid uninitialized warnings for lastprivate and
6019 for linear iterators. */
6020 if (predicate
6021 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6022 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6023 TREE_NO_WARNING (new_var) = 1;
6024 }
1e8e9920 6025
d66340f7 6026 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
3d483a94 6027 {
6028 tree val = DECL_VALUE_EXPR (new_var);
d66340f7 6029 if (TREE_CODE (val) == ARRAY_REF
3d483a94 6030 && VAR_P (TREE_OPERAND (val, 0))
6031 && lookup_attribute ("omp simd array",
6032 DECL_ATTRIBUTES (TREE_OPERAND (val,
6033 0))))
6034 {
6035 if (lastlane == NULL)
6036 {
f9e245b2 6037 lastlane = create_tmp_var (unsigned_type_node);
1a91d914 6038 gcall *g
3d483a94 6039 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6040 2, simduid,
6041 TREE_OPERAND (val, 1));
6042 gimple_call_set_lhs (g, lastlane);
9a1d892b 6043 gimple_seq_add_stmt (this_stmt_list, g);
3d483a94 6044 }
6045 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6046 TREE_OPERAND (val, 0), lastlane,
6047 NULL_TREE, NULL_TREE);
da008d72 6048 TREE_THIS_NOTRAP (new_var) = 1;
1b576300 6049 }
d66340f7 6050 }
6051 else if (maybe_simt)
6052 {
6053 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6054 ? DECL_VALUE_EXPR (new_var)
6055 : new_var);
6056 if (simtlast == NULL)
1b576300 6057 {
d66340f7 6058 simtlast = create_tmp_var (unsigned_type_node);
6059 gcall *g = gimple_build_call_internal
6060 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6061 gimple_call_set_lhs (g, simtlast);
9a1d892b 6062 gimple_seq_add_stmt (this_stmt_list, g);
3d483a94 6063 }
d66340f7 6064 x = build_call_expr_internal_loc
6065 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6066 TREE_TYPE (val), 2, val, simtlast);
6067 new_var = unshare_expr (new_var);
9a1d892b 6068 gimplify_assign (new_var, x, this_stmt_list);
d66340f7 6069 new_var = unshare_expr (new_var);
3d483a94 6070 }
6071
6072 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6073 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
75a70cf9 6074 {
e3a19533 6075 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
9a1d892b 6076 gimple_seq_add_seq (this_stmt_list,
75a70cf9 6077 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
3d483a94 6078 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
75a70cf9 6079 }
2b536a17 6080 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6081 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6082 {
6083 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
9a1d892b 6084 gimple_seq_add_seq (this_stmt_list,
2b536a17 6085 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6086 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6087 }
1e8e9920 6088
43895be5 6089 x = NULL_TREE;
6090 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6091 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
6092 {
6093 gcc_checking_assert (is_taskloop_ctx (ctx));
6094 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6095 ctx->outer->outer);
6096 if (is_global_var (ovar))
6097 x = ovar;
6098 }
6099 if (!x)
1f355935 6100 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4954efd4 6101 if (omp_is_reference (var))
182cf5a9 6102 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
fd6481cf 6103 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
9a1d892b 6104 gimplify_and_add (x, this_stmt_list);
6105
6106 if (lab2)
6107 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
fd6481cf 6108 }
9a1d892b 6109
4f4b92d8 6110 next:
fd6481cf 6111 c = OMP_CLAUSE_CHAIN (c);
6112 if (c == NULL && !par_clauses)
6113 {
6114 /* If this was a workshare clause, see if it had been combined
6115 with its parallel. In that case, continue looking for the
6116 clauses also on the parallel statement itself. */
6117 if (is_parallel_ctx (ctx))
6118 break;
6119
6120 ctx = ctx->outer;
6121 if (ctx == NULL || !is_parallel_ctx (ctx))
6122 break;
6123
4954efd4 6124 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
fd6481cf 6125 OMP_CLAUSE_LASTPRIVATE);
6126 par_clauses = true;
6127 }
1e8e9920 6128 }
6129
75a70cf9 6130 if (label)
6131 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
1e8e9920 6132}
6133
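/* Illustrative sketch, added for exposition and not part of the original
   omp-low.c: for

     #pragma omp parallel for lastprivate (x)
     for (i = 0; i < n; i++)
       x = f (i);

   lower_lastprivate_clauses emits, guarded by the PREDICATE that tests
   whether this thread executed the sequentially last iteration, roughly

     if (<this thread ran the last iteration>)
       original_x = private_x;

   so the original variable ends up with the value from the last
   iteration.  */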
641a0fa1 6134/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6135 (which might be a placeholder). INNER is true if this is an inner
6136 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6137 join markers. Generate the before-loop forking sequence in
6138 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6139 general form of these sequences is
6140
6141 GOACC_REDUCTION_SETUP
6142 GOACC_FORK
6143 GOACC_REDUCTION_INIT
6144 ...
6145 GOACC_REDUCTION_FINI
6146 GOACC_JOIN
6147 GOACC_REDUCTION_TEARDOWN. */
6148
ca4c3545 6149static void
641a0fa1 6150lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6151 gcall *fork, gcall *join, gimple_seq *fork_seq,
6152 gimple_seq *join_seq, omp_context *ctx)
ca4c3545 6153{
641a0fa1 6154 gimple_seq before_fork = NULL;
6155 gimple_seq after_fork = NULL;
6156 gimple_seq before_join = NULL;
6157 gimple_seq after_join = NULL;
6158 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6159 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6160 unsigned offset = 0;
6161
6162 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6163 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6164 {
6165 tree orig = OMP_CLAUSE_DECL (c);
6166 tree var = maybe_lookup_decl (orig, ctx);
6167 tree ref_to_res = NULL_TREE;
0c302595 6168 tree incoming, outgoing, v1, v2, v3;
6169 bool is_private = false;
641a0fa1 6170
6171 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6172 if (rcode == MINUS_EXPR)
6173 rcode = PLUS_EXPR;
6174 else if (rcode == TRUTH_ANDIF_EXPR)
6175 rcode = BIT_AND_EXPR;
6176 else if (rcode == TRUTH_ORIF_EXPR)
6177 rcode = BIT_IOR_EXPR;
6178 tree op = build_int_cst (unsigned_type_node, rcode);
6179
6180 if (!var)
6181 var = orig;
641a0fa1 6182
6183 incoming = outgoing = var;
7c6746c9 6184
641a0fa1 6185 if (!inner)
6186 {
6187 /* See if an outer construct also reduces this variable. */
6188 omp_context *outer = ctx;
ca4c3545 6189
641a0fa1 6190 while (omp_context *probe = outer->outer)
6191 {
6192 enum gimple_code type = gimple_code (probe->stmt);
6193 tree cls;
ca4c3545 6194
641a0fa1 6195 switch (type)
6196 {
6197 case GIMPLE_OMP_FOR:
6198 cls = gimple_omp_for_clauses (probe->stmt);
6199 break;
ca4c3545 6200
641a0fa1 6201 case GIMPLE_OMP_TARGET:
6202 if (gimple_omp_target_kind (probe->stmt)
6203 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6204 goto do_lookup;
ca4c3545 6205
641a0fa1 6206 cls = gimple_omp_target_clauses (probe->stmt);
6207 break;
ca4c3545 6208
641a0fa1 6209 default:
6210 goto do_lookup;
6211 }
7c6746c9 6212
641a0fa1 6213 outer = probe;
6214 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6215 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6216 && orig == OMP_CLAUSE_DECL (cls))
0c302595 6217 {
6218 incoming = outgoing = lookup_decl (orig, probe);
6219 goto has_outer_reduction;
6220 }
6221 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6222 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6223 && orig == OMP_CLAUSE_DECL (cls))
6224 {
6225 is_private = true;
6226 goto do_lookup;
6227 }
641a0fa1 6228 }
ca4c3545 6229
641a0fa1 6230 do_lookup:
 6231	    /* This is the outermost construct with this reduction;
 6232	       see if there's a mapping for it.  */
6233 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
0c302595 6234 && maybe_lookup_field (orig, outer) && !is_private)
641a0fa1 6235 {
6236 ref_to_res = build_receiver_ref (orig, false, outer);
4954efd4 6237 if (omp_is_reference (orig))
641a0fa1 6238 ref_to_res = build_simple_mem_ref (ref_to_res);
ca4c3545 6239
0c302595 6240 tree type = TREE_TYPE (var);
6241 if (POINTER_TYPE_P (type))
6242 type = TREE_TYPE (type);
6243
641a0fa1 6244 outgoing = var;
0c302595 6245 incoming = omp_reduction_init_op (loc, rcode, type);
641a0fa1 6246 }
6247 else
f7896ff0 6248 {
 6249	      /* Try to find the reduction var in enclosing contexts;
 6250		 use the original if no mapping is found.  */
6251 tree t = NULL_TREE;
6252 omp_context *c = ctx->outer;
6253 while (c && !t)
6254 {
6255 t = maybe_lookup_decl (orig, c);
6256 c = c->outer;
6257 }
6258 incoming = outgoing = (t ? t : orig);
6259 }
7c6746c9 6260
641a0fa1 6261 has_outer_reduction:;
6262 }
ca4c3545 6263
641a0fa1 6264 if (!ref_to_res)
6265 ref_to_res = integer_zero_node;
ca4c3545 6266
7c6746c9 6267 if (omp_is_reference (orig))
0c302595 6268 {
6269 tree type = TREE_TYPE (var);
6270 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6271
6272 if (!inner)
6273 {
6274 tree x = create_tmp_var (TREE_TYPE (type), id);
6275 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6276 }
6277
6278 v1 = create_tmp_var (type, id);
6279 v2 = create_tmp_var (type, id);
6280 v3 = create_tmp_var (type, id);
6281
6282 gimplify_assign (v1, var, fork_seq);
6283 gimplify_assign (v2, var, fork_seq);
6284 gimplify_assign (v3, var, fork_seq);
6285
6286 var = build_simple_mem_ref (var);
6287 v1 = build_simple_mem_ref (v1);
6288 v2 = build_simple_mem_ref (v2);
6289 v3 = build_simple_mem_ref (v3);
6290 outgoing = build_simple_mem_ref (outgoing);
6291
eae3d589 6292 if (!TREE_CONSTANT (incoming))
0c302595 6293 incoming = build_simple_mem_ref (incoming);
6294 }
6295 else
6296 v1 = v2 = v3 = var;
6297
641a0fa1 6298      /* Determine the position in the reduction buffer, which may be
d2401312 6299	 used by the target.  The parser has ensured that this is not a
 6300	 variable-sized type.  */
6301 fixed_size_mode mode
6302 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
641a0fa1 6303 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6304 offset = (offset + align - 1) & ~(align - 1);
6305 tree off = build_int_cst (sizetype, offset);
6306 offset += GET_MODE_SIZE (mode);
ca4c3545 6307
641a0fa1 6308 if (!init_code)
6309 {
6310 init_code = build_int_cst (integer_type_node,
6311 IFN_GOACC_REDUCTION_INIT);
6312 fini_code = build_int_cst (integer_type_node,
6313 IFN_GOACC_REDUCTION_FINI);
6314 setup_code = build_int_cst (integer_type_node,
6315 IFN_GOACC_REDUCTION_SETUP);
6316 teardown_code = build_int_cst (integer_type_node,
6317 IFN_GOACC_REDUCTION_TEARDOWN);
6318 }
6319
6320 tree setup_call
6321 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6322 TREE_TYPE (var), 6, setup_code,
6323 unshare_expr (ref_to_res),
6324 incoming, level, op, off);
6325 tree init_call
6326 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6327 TREE_TYPE (var), 6, init_code,
6328 unshare_expr (ref_to_res),
0c302595 6329 v1, level, op, off);
641a0fa1 6330 tree fini_call
6331 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6332 TREE_TYPE (var), 6, fini_code,
6333 unshare_expr (ref_to_res),
0c302595 6334 v2, level, op, off);
641a0fa1 6335 tree teardown_call
6336 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6337 TREE_TYPE (var), 6, teardown_code,
0c302595 6338 ref_to_res, v3, level, op, off);
641a0fa1 6339
0c302595 6340 gimplify_assign (v1, setup_call, &before_fork);
6341 gimplify_assign (v2, init_call, &after_fork);
6342 gimplify_assign (v3, fini_call, &before_join);
641a0fa1 6343 gimplify_assign (outgoing, teardown_call, &after_join);
6344 }
6345
6346 /* Now stitch things together. */
6347 gimple_seq_add_seq (fork_seq, before_fork);
6348 if (fork)
6349 gimple_seq_add_stmt (fork_seq, fork);
6350 gimple_seq_add_seq (fork_seq, after_fork);
6351
6352 gimple_seq_add_seq (join_seq, before_join);
6353 if (join)
6354 gimple_seq_add_stmt (join_seq, join);
6355 gimple_seq_add_seq (join_seq, after_join);
ca4c3545 6356}
773c5ba7 6357
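/* Illustrative sketch, added for exposition and not part of the original
   omp-low.c: for an OpenACC loop such as

     #pragma acc parallel loop reduction (+:sum)
     for (i = 0; i < n; i++)
       sum += a[i];

   lower_oacc_reductions brackets the loop with IFN_GOACC_REDUCTION calls
   using the SETUP/INIT/FINI/TEARDOWN codes around the GOACC fork/join
   markers; a later OpenACC device-lowering pass expands those calls for
   the compute level (gang, worker or vector) actually used.  */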
9a1d892b 6358/* Generate code to implement the REDUCTION clauses, append it
6359 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
6360 that should be emitted also inside of the critical section,
6361 in that case clear *CLIST afterwards, otherwise leave it as is
6362 and let the caller emit it itself. */
1e8e9920 6363
6364static void
9a1d892b 6365lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6366 gimple_seq *clist, omp_context *ctx)
1e8e9920 6367{
75a70cf9 6368 gimple_seq sub_seq = NULL;
42acab1c 6369 gimple *stmt;
f69b8a4c 6370 tree x, c;
1e8e9920 6371 int count = 0;
6372
641a0fa1 6373 /* OpenACC loop reductions are handled elsewhere. */
6374 if (is_gimple_omp_oacc (ctx->stmt))
6375 return;
6376
3d483a94 6377 /* SIMD reductions are handled in lower_rec_input_clauses. */
6378 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
10c55644 6379 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3d483a94 6380 return;
6381
1e8e9920 6382 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
6383 update in that case, otherwise use a lock. */
6384 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7e5a76c8 6385 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6386 && !OMP_CLAUSE_REDUCTION_TASK (c))
1e8e9920 6387 {
43895be5 6388 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6389 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
1e8e9920 6390 {
bc7bff74 6391 /* Never use OMP_ATOMIC for array reductions or UDRs. */
1e8e9920 6392 count = -1;
6393 break;
6394 }
6395 count++;
6396 }
6397
6398 if (count == 0)
6399 return;
6400
6401 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6402 {
43895be5 6403 tree var, ref, new_var, orig_var;
1e8e9920 6404 enum tree_code code;
389dd41b 6405 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 6406
7e5a76c8 6407 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6408 || OMP_CLAUSE_REDUCTION_TASK (c))
1e8e9920 6409 continue;
6410
d96999c6 6411 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
43895be5 6412 orig_var = var = OMP_CLAUSE_DECL (c);
6413 if (TREE_CODE (var) == MEM_REF)
6414 {
6415 var = TREE_OPERAND (var, 0);
9561765e 6416 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6417 var = TREE_OPERAND (var, 0);
d96999c6 6418 if (TREE_CODE (var) == ADDR_EXPR)
43895be5 6419 var = TREE_OPERAND (var, 0);
d96999c6 6420 else
6421 {
 6422		  /* If this is a pointer- or reference-based array
 6423		     section, the var could be private in the outer
 6424		     context, e.g. on an orphaned loop construct.  Pretend
 6425		     this is the private variable's outer reference.  */
6426 ccode = OMP_CLAUSE_PRIVATE;
6427 if (TREE_CODE (var) == INDIRECT_REF)
6428 var = TREE_OPERAND (var, 0);
6429 }
43895be5 6430 orig_var = var;
6431 if (is_variable_sized (var))
6432 {
6433 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6434 var = DECL_VALUE_EXPR (var);
6435 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6436 var = TREE_OPERAND (var, 0);
6437 gcc_assert (DECL_P (var));
6438 }
6439 }
1e8e9920 6440 new_var = lookup_decl (var, ctx);
4954efd4 6441 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
182cf5a9 6442 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
d96999c6 6443 ref = build_outer_var_ref (var, ctx, ccode);
1e8e9920 6444 code = OMP_CLAUSE_REDUCTION_CODE (c);
773c5ba7 6445
6446 /* reduction(-:var) sums up the partial results, so it acts
6447 identically to reduction(+:var). */
1e8e9920 6448 if (code == MINUS_EXPR)
6449 code = PLUS_EXPR;
6450
641a0fa1 6451 if (count == 1)
1e8e9920 6452 {
389dd41b 6453 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
1e8e9920 6454
6455 addr = save_expr (addr);
6456 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
389dd41b 6457 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
1e8e9920 6458 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7e5a76c8 6459 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
75a70cf9 6460 gimplify_and_add (x, stmt_seqp);
1e8e9920 6461 return;
6462 }
43895be5 6463 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6464 {
6465 tree d = OMP_CLAUSE_DECL (c);
6466 tree type = TREE_TYPE (d);
6467 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7e5a76c8 6468 tree i = create_tmp_var (TREE_TYPE (v));
43895be5 6469 tree ptype = build_pointer_type (TREE_TYPE (type));
9561765e 6470 tree bias = TREE_OPERAND (d, 1);
6471 d = TREE_OPERAND (d, 0);
6472 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6473 {
6474 tree b = TREE_OPERAND (d, 1);
6475 b = maybe_lookup_decl (b, ctx);
6476 if (b == NULL)
6477 {
6478 b = TREE_OPERAND (d, 1);
6479 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6480 }
6481 if (integer_zerop (bias))
6482 bias = b;
6483 else
6484 {
6485 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
6486 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
6487 TREE_TYPE (b), b, bias);
6488 }
6489 d = TREE_OPERAND (d, 0);
6490 }
43895be5 6491	  /* For ref, build_outer_var_ref already performs this, so
 6492	     only new_var needs a dereference.  */
9561765e 6493 if (TREE_CODE (d) == INDIRECT_REF)
43895be5 6494 {
6495 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4954efd4 6496 gcc_assert (omp_is_reference (var) && var == orig_var);
43895be5 6497 }
9561765e 6498 else if (TREE_CODE (d) == ADDR_EXPR)
43895be5 6499 {
6500 if (orig_var == var)
6501 {
6502 new_var = build_fold_addr_expr (new_var);
6503 ref = build_fold_addr_expr (ref);
6504 }
6505 }
6506 else
6507 {
6508 gcc_assert (orig_var == var);
4954efd4 6509 if (omp_is_reference (var))
43895be5 6510 ref = build_fold_addr_expr (ref);
6511 }
6512 if (DECL_P (v))
6513 {
6514 tree t = maybe_lookup_decl (v, ctx);
6515 if (t)
6516 v = t;
6517 else
6518 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
6519 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
6520 }
9561765e 6521 if (!integer_zerop (bias))
6522 {
6523 bias = fold_convert_loc (clause_loc, sizetype, bias);
6524 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6525 TREE_TYPE (new_var), new_var,
6526 unshare_expr (bias));
6527 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6528 TREE_TYPE (ref), ref, bias);
6529 }
43895be5 6530 new_var = fold_convert_loc (clause_loc, ptype, new_var);
6531 ref = fold_convert_loc (clause_loc, ptype, ref);
7e5a76c8 6532 tree m = create_tmp_var (ptype);
43895be5 6533 gimplify_assign (m, new_var, stmt_seqp);
6534 new_var = m;
7e5a76c8 6535 m = create_tmp_var (ptype);
43895be5 6536 gimplify_assign (m, ref, stmt_seqp);
6537 ref = m;
6538 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
6539 tree body = create_artificial_label (UNKNOWN_LOCATION);
6540 tree end = create_artificial_label (UNKNOWN_LOCATION);
6541 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
6542 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
6543 tree out = build_simple_mem_ref_loc (clause_loc, ref);
6544 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6545 {
6546 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6547 tree decl_placeholder
6548 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
6549 SET_DECL_VALUE_EXPR (placeholder, out);
6550 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6551 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
6552 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
6553 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6554 gimple_seq_add_seq (&sub_seq,
6555 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6556 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6557 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6558 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
6559 }
6560 else
6561 {
6562 x = build2 (code, TREE_TYPE (out), out, priv);
6563 out = unshare_expr (out);
6564 gimplify_assign (out, x, &sub_seq);
6565 }
6566 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
6567 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6568 gimple_seq_add_stmt (&sub_seq, g);
6569 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
6570 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6571 gimple_seq_add_stmt (&sub_seq, g);
6572 g = gimple_build_assign (i, PLUS_EXPR, i,
6573 build_int_cst (TREE_TYPE (i), 1));
6574 gimple_seq_add_stmt (&sub_seq, g);
6575 g = gimple_build_cond (LE_EXPR, i, v, body, end);
6576 gimple_seq_add_stmt (&sub_seq, g);
6577 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
6578 }
ca4c3545 6579 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1e8e9920 6580 {
6581 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6582
4954efd4 6583 if (omp_is_reference (var)
bc7bff74 6584 && !useless_type_conversion_p (TREE_TYPE (placeholder),
6585 TREE_TYPE (ref)))
389dd41b 6586 ref = build_fold_addr_expr_loc (clause_loc, ref);
1e8e9920 6587 SET_DECL_VALUE_EXPR (placeholder, ref);
6588 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
e3a19533 6589 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
75a70cf9 6590 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6591 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
1e8e9920 6592 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6593 }
6594 else
6595 {
6596 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6597 ref = build_outer_var_ref (var, ctx);
75a70cf9 6598 gimplify_assign (ref, x, &sub_seq);
1e8e9920 6599 }
6600 }
6601
b9a16870 6602 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
6603 0);
75a70cf9 6604 gimple_seq_add_stmt (stmt_seqp, stmt);
1e8e9920 6605
75a70cf9 6606 gimple_seq_add_seq (stmt_seqp, sub_seq);
1e8e9920 6607
9a1d892b 6608 if (clist)
6609 {
6610 gimple_seq_add_seq (stmt_seqp, *clist);
6611 *clist = NULL;
6612 }
6613
b9a16870 6614 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
6615 0);
75a70cf9 6616 gimple_seq_add_stmt (stmt_seqp, stmt);
1e8e9920 6617}
6618
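/* Illustrative sketch, added for exposition and not part of the original
   omp-low.c: for

     #pragma omp parallel for reduction (+:sum)
     for (i = 0; i < n; i++)
       sum += a[i];

   the single-clause case above merges each private copy with an atomic
   update, conceptually

     #pragma omp atomic
     sum += sum_private;

   whereas with more than one reduction clause (or UDRs and array
   reductions) the merges are emitted between GOMP_atomic_start () and
   GOMP_atomic_end () calls, i.e. under one global lock.  */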
773c5ba7 6619
1e8e9920 6620/* Generate code to implement the COPYPRIVATE clauses. */
6621
6622static void
75a70cf9 6623lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
1e8e9920 6624 omp_context *ctx)
6625{
6626 tree c;
6627
6628 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6629 {
cb561506 6630 tree var, new_var, ref, x;
1e8e9920 6631 bool by_ref;
389dd41b 6632 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 6633
55d6e7cd 6634 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
1e8e9920 6635 continue;
6636
6637 var = OMP_CLAUSE_DECL (c);
e8a588af 6638 by_ref = use_pointer_for_field (var, NULL);
1e8e9920 6639
6640 ref = build_sender_ref (var, ctx);
cb561506 6641 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
6642 if (by_ref)
6643 {
6644 x = build_fold_addr_expr_loc (clause_loc, new_var);
6645 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
6646 }
75a70cf9 6647 gimplify_assign (ref, x, slist);
1e8e9920 6648
cb561506 6649 ref = build_receiver_ref (var, false, ctx);
6650 if (by_ref)
6651 {
6652 ref = fold_convert_loc (clause_loc,
6653 build_pointer_type (TREE_TYPE (new_var)),
6654 ref);
6655 ref = build_fold_indirect_ref_loc (clause_loc, ref);
6656 }
4954efd4 6657 if (omp_is_reference (var))
1e8e9920 6658 {
cb561506 6659 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
182cf5a9 6660 ref = build_simple_mem_ref_loc (clause_loc, ref);
6661 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
1e8e9920 6662 }
cb561506 6663 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
1e8e9920 6664 gimplify_and_add (x, rlist);
6665 }
6666}
6667
773c5ba7 6668
1e8e9920 6669/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6670 and REDUCTION from the sender (aka parent) side. */
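/* As a sketch (illustrative only, names made up):

     int x = 1;
     #pragma omp parallel firstprivate (x)
       use (x);

   causes ILIST to receive a store of X (or of &X when the field is
   passed by reference) into the matching field of the sender record
   handed to the runtime, while OLIST receives the copy-back stores for
   clauses that have an "out" direction such as lastprivate.  */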
6671
6672static void
75a70cf9 6673lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
6674 omp_context *ctx)
1e8e9920 6675{
43895be5 6676 tree c, t;
6677 int ignored_looptemp = 0;
6678 bool is_taskloop = false;
6679
6680 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
6681 by GOMP_taskloop. */
6682 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
6683 {
6684 ignored_looptemp = 2;
6685 is_taskloop = true;
6686 }
1e8e9920 6687
6688 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6689 {
773c5ba7 6690 tree val, ref, x, var;
1e8e9920 6691 bool by_ref, do_in = false, do_out = false;
389dd41b 6692 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 6693
55d6e7cd 6694 switch (OMP_CLAUSE_CODE (c))
1e8e9920 6695 {
fd6481cf 6696 case OMP_CLAUSE_PRIVATE:
6697 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
6698 break;
6699 continue;
1e8e9920 6700 case OMP_CLAUSE_FIRSTPRIVATE:
6701 case OMP_CLAUSE_COPYIN:
6702 case OMP_CLAUSE_LASTPRIVATE:
7e5a76c8 6703 case OMP_CLAUSE_IN_REDUCTION:
6704 case OMP_CLAUSE__REDUCTEMP_:
6705 break;
1e8e9920 6706 case OMP_CLAUSE_REDUCTION:
7e5a76c8 6707 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
6708 continue;
43895be5 6709 break;
6710 case OMP_CLAUSE_SHARED:
6711 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6712 break;
6713 continue;
bc7bff74 6714 case OMP_CLAUSE__LOOPTEMP_:
43895be5 6715 if (ignored_looptemp)
6716 {
6717 ignored_looptemp--;
6718 continue;
6719 }
1e8e9920 6720 break;
6721 default:
6722 continue;
6723 }
6724
87b31375 6725 val = OMP_CLAUSE_DECL (c);
7e5a76c8 6726 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6727 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
43895be5 6728 && TREE_CODE (val) == MEM_REF)
6729 {
6730 val = TREE_OPERAND (val, 0);
9561765e 6731 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
6732 val = TREE_OPERAND (val, 0);
43895be5 6733 if (TREE_CODE (val) == INDIRECT_REF
6734 || TREE_CODE (val) == ADDR_EXPR)
6735 val = TREE_OPERAND (val, 0);
6736 if (is_variable_sized (val))
6737 continue;
6738 }
6739
6740 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
6741 outer taskloop region. */
6742 omp_context *ctx_for_o = ctx;
6743 if (is_taskloop
6744 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
6745 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6746 ctx_for_o = ctx->outer;
6747
6748 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
773c5ba7 6749
f49d7bb5 6750 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7e5a76c8 6751 && is_global_var (var)
6752 && (val == OMP_CLAUSE_DECL (c)
6753 || !is_task_ctx (ctx)
6754 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
6755 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
6756 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
6757 != POINTER_TYPE)))))
f49d7bb5 6758 continue;
43895be5 6759
6760 t = omp_member_access_dummy_var (var);
6761 if (t)
6762 {
6763 var = DECL_VALUE_EXPR (var);
6764 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
6765 if (o != t)
6766 var = unshare_and_remap (var, t, o);
6767 else
6768 var = unshare_expr (var);
6769 }
6770
6771 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
6772 {
6773 /* Handle taskloop firstprivate/lastprivate, where the
6774 lastprivate on GIMPLE_OMP_TASK is represented as
6775 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
6776 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
6777 x = omp_build_component_ref (ctx->sender_decl, f);
6778 if (use_pointer_for_field (val, ctx))
6779 var = build_fold_addr_expr (var);
6780 gimplify_assign (x, var, ilist);
6781 DECL_ABSTRACT_ORIGIN (f) = NULL;
6782 continue;
6783 }
6784
7e5a76c8 6785 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6786 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
43895be5 6787 || val == OMP_CLAUSE_DECL (c))
6788 && is_variable_sized (val))
1e8e9920 6789 continue;
e8a588af 6790 by_ref = use_pointer_for_field (val, NULL);
1e8e9920 6791
55d6e7cd 6792 switch (OMP_CLAUSE_CODE (c))
1e8e9920 6793 {
6794 case OMP_CLAUSE_FIRSTPRIVATE:
585aefbb 6795 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
6796 && !by_ref
6797 && is_task_ctx (ctx))
6798 TREE_NO_WARNING (var) = 1;
6799 do_in = true;
6800 break;
6801
6802 case OMP_CLAUSE_PRIVATE:
1e8e9920 6803 case OMP_CLAUSE_COPYIN:
bc7bff74 6804 case OMP_CLAUSE__LOOPTEMP_:
7e5a76c8 6805 case OMP_CLAUSE__REDUCTEMP_:
1e8e9920 6806 do_in = true;
6807 break;
6808
6809 case OMP_CLAUSE_LASTPRIVATE:
4954efd4 6810 if (by_ref || omp_is_reference (val))
1e8e9920 6811 {
6812 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
6813 continue;
6814 do_in = true;
6815 }
6816 else
fd6481cf 6817 {
6818 do_out = true;
6819 if (lang_hooks.decls.omp_private_outer_ref (val))
6820 do_in = true;
6821 }
1e8e9920 6822 break;
6823
6824 case OMP_CLAUSE_REDUCTION:
7e5a76c8 6825 case OMP_CLAUSE_IN_REDUCTION:
1e8e9920 6826 do_in = true;
43895be5 6827 if (val == OMP_CLAUSE_DECL (c))
7e5a76c8 6828 {
6829 if (is_task_ctx (ctx))
6830 by_ref = use_pointer_for_field (val, ctx);
6831 else
6832 do_out = !(by_ref || omp_is_reference (val));
6833 }
43895be5 6834 else
6835 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
1e8e9920 6836 break;
6837
6838 default:
6839 gcc_unreachable ();
6840 }
6841
6842 if (do_in)
6843 {
6844 ref = build_sender_ref (val, ctx);
389dd41b 6845 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
75a70cf9 6846 gimplify_assign (ref, x, ilist);
fd6481cf 6847 if (is_task_ctx (ctx))
6848 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
1e8e9920 6849 }
773c5ba7 6850
1e8e9920 6851 if (do_out)
6852 {
6853 ref = build_sender_ref (val, ctx);
75a70cf9 6854 gimplify_assign (var, ref, olist);
1e8e9920 6855 }
6856 }
6857}
6858
75a70cf9 6859/* Generate code to implement SHARED from the sender (aka parent)
6860 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6861 list things that got automatically shared. */
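/* E.g. (illustrative) in

     int a, b;
     #pragma omp parallel shared (a)
       { use (a); use (b); }

   B is shared without appearing on any clause, so we walk the fields of
   the sender record type instead of the clause list.  */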
1e8e9920 6862
6863static void
75a70cf9 6864lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
1e8e9920 6865{
43895be5 6866 tree var, ovar, nvar, t, f, x, record_type;
1e8e9920 6867
6868 if (ctx->record_type == NULL)
6869 return;
773c5ba7 6870
fd6481cf 6871 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
1767a056 6872 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
1e8e9920 6873 {
6874 ovar = DECL_ABSTRACT_ORIGIN (f);
43895be5 6875 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
6876 continue;
6877
1e8e9920 6878 nvar = maybe_lookup_decl (ovar, ctx);
6879 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
6880 continue;
6881
773c5ba7 6882 /* If CTX is a nested parallel directive, find the immediately
6883 enclosing parallel or workshare construct that contains a
6884 mapping for OVAR. */
87b31375 6885 var = lookup_decl_in_outer_ctx (ovar, ctx);
773c5ba7 6886
43895be5 6887 t = omp_member_access_dummy_var (var);
6888 if (t)
6889 {
6890 var = DECL_VALUE_EXPR (var);
6891 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
6892 if (o != t)
6893 var = unshare_and_remap (var, t, o);
6894 else
6895 var = unshare_expr (var);
6896 }
6897
e8a588af 6898 if (use_pointer_for_field (ovar, ctx))
1e8e9920 6899 {
6900 x = build_sender_ref (ovar, ctx);
48152aa2 6901 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
6902 && TREE_TYPE (f) == TREE_TYPE (ovar))
6903 {
6904 gcc_assert (is_parallel_ctx (ctx)
6905 && DECL_ARTIFICIAL (ovar));
6906 /* _condtemp_ clause. */
6907 var = build_constructor (TREE_TYPE (x), NULL);
6908 }
6909 else
6910 var = build_fold_addr_expr (var);
75a70cf9 6911 gimplify_assign (x, var, ilist);
1e8e9920 6912 }
6913 else
6914 {
6915 x = build_sender_ref (ovar, ctx);
75a70cf9 6916 gimplify_assign (x, var, ilist);
1e8e9920 6917
d2263ebb 6918 if (!TREE_READONLY (var)
6919 /* We don't need to receive a new reference to a result
6920 or parm decl. In fact we may not store to it as we will
6921 invalidate any pending RSO and generate wrong gimple
6922 during inlining. */
6923 && !((TREE_CODE (var) == RESULT_DECL
6924 || TREE_CODE (var) == PARM_DECL)
6925 && DECL_BY_REFERENCE (var)))
fd6481cf 6926 {
6927 x = build_sender_ref (ovar, ctx);
75a70cf9 6928 gimplify_assign (var, x, olist);
fd6481cf 6929 }
1e8e9920 6930 }
6931 }
6932}
6933
a8e785ba 6934/* Emit an OpenACC head marker call, encapsulating the partitioning and
6935 other information that must be processed by the target compiler.
6936 Return the maximum number of dimensions the associated loop might
6937 be partitioned over. */
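/* Concretely, the information is packed into one internal-function call
   of the shape (illustrative)

     .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep, levels, tag [, gang_static]);

   with the arguments pushed in that order below.  */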
6938
6939static unsigned
6940lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
6941 gimple_seq *seq, omp_context *ctx)
6942{
6943 unsigned levels = 0;
6944 unsigned tag = 0;
6945 tree gang_static = NULL_TREE;
6946 auto_vec<tree, 5> args;
6947
6948 args.quick_push (build_int_cst
6949 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
6950 args.quick_push (ddvar);
6951 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6952 {
6953 switch (OMP_CLAUSE_CODE (c))
6954 {
6955 case OMP_CLAUSE_GANG:
6956 tag |= OLF_DIM_GANG;
6957 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
6958 /* static:* is represented by -1, and we can ignore it, as
6959 scheduling is always static. */
6960 if (gang_static && integer_minus_onep (gang_static))
6961 gang_static = NULL_TREE;
6962 levels++;
6963 break;
6964
6965 case OMP_CLAUSE_WORKER:
6966 tag |= OLF_DIM_WORKER;
6967 levels++;
6968 break;
6969
6970 case OMP_CLAUSE_VECTOR:
6971 tag |= OLF_DIM_VECTOR;
6972 levels++;
6973 break;
6974
6975 case OMP_CLAUSE_SEQ:
6976 tag |= OLF_SEQ;
6977 break;
6978
6979 case OMP_CLAUSE_AUTO:
6980 tag |= OLF_AUTO;
6981 break;
6982
6983 case OMP_CLAUSE_INDEPENDENT:
6984 tag |= OLF_INDEPENDENT;
6985 break;
6986
719a7570 6987 case OMP_CLAUSE_TILE:
6988 tag |= OLF_TILE;
6989 break;
6990
a8e785ba 6991 default:
6992 continue;
6993 }
6994 }
6995
6996 if (gang_static)
6997 {
6998 if (DECL_P (gang_static))
6999 gang_static = build_outer_var_ref (gang_static, ctx);
7000 tag |= OLF_GANG_STATIC;
7001 }
7002
7003 /* In a parallel region, loops are implicitly INDEPENDENT. */
7004 omp_context *tgt = enclosing_target_ctx (ctx);
7005 if (!tgt || is_oacc_parallel (tgt))
7006 tag |= OLF_INDEPENDENT;
7007
719a7570 7008 if (tag & OLF_TILE)
7009 /* Tiling could use all 3 levels. */
7010 levels = 3;
7011 else
7012 {
7013 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7014 Ensure at least one level, or 2 for possible auto
7015 partitioning.  */
7016 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7017 << OLF_DIM_BASE) | OLF_SEQ));
7018
7019 if (levels < 1u + maybe_auto)
7020 levels = 1u + maybe_auto;
7021 }
a8e785ba 7022
7023 args.quick_push (build_int_cst (integer_type_node, levels));
7024 args.quick_push (build_int_cst (integer_type_node, tag));
7025 if (gang_static)
7026 args.quick_push (gang_static);
7027
7028 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7029 gimple_set_location (call, loc);
7030 gimple_set_lhs (call, ddvar);
7031 gimple_seq_add_stmt (seq, call);
7032
7033 return levels;
7034}
7035
7036/* Emit an OpenACC loop head or tail marker to SEQ. LEVEL is the
7037 partitioning level of the enclosed region. */
7038
7039static void
7040lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7041 tree tofollow, gimple_seq *seq)
7042{
7043 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7044 : IFN_UNIQUE_OACC_TAIL_MARK);
7045 tree marker = build_int_cst (integer_type_node, marker_kind);
7046 int nargs = 2 + (tofollow != NULL_TREE);
7047 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7048 marker, ddvar, tofollow);
7049 gimple_set_location (call, loc);
7050 gimple_set_lhs (call, ddvar);
7051 gimple_seq_add_stmt (seq, call);
7052}
7053
7054/* Generate the before and after OpenACC loop sequences. CLAUSES are
7055 the loop clauses, from which we extract reductions. Initialize
7056 HEAD and TAIL. */
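/* The sequences are built level by level: each iteration of the loop
   below appends a FORK (plus any reduction setup) to HEAD and prepends
   the matching JOIN (plus reduction teardown) to TAIL, delimited by
   HEAD_MARK/TAIL_MARK calls, so the two sequences stay properly nested
   around the loop body.  (A summary of the code below, not additional
   semantics.)  */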
7057
7058static void
7059lower_oacc_head_tail (location_t loc, tree clauses,
7060 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7061{
7062 bool inner = false;
7063 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7064 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7065
7066 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
a8e785ba 7067 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7068 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7069
cff84c1a 7070 gcc_assert (count);
a8e785ba 7071 for (unsigned done = 1; count; count--, done++)
7072 {
7073 gimple_seq fork_seq = NULL;
7074 gimple_seq join_seq = NULL;
7075
7076 tree place = build_int_cst (integer_type_node, -1);
7077 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7078 fork_kind, ddvar, place);
7079 gimple_set_location (fork, loc);
7080 gimple_set_lhs (fork, ddvar);
7081
7082 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7083 join_kind, ddvar, place);
7084 gimple_set_location (join, loc);
7085 gimple_set_lhs (join, ddvar);
7086
7087 /* Mark the beginning of this level sequence. */
7088 if (inner)
7089 lower_oacc_loop_marker (loc, ddvar, true,
7090 build_int_cst (integer_type_node, count),
7091 &fork_seq);
7092 lower_oacc_loop_marker (loc, ddvar, false,
7093 build_int_cst (integer_type_node, done),
7094 &join_seq);
7095
641a0fa1 7096 lower_oacc_reductions (loc, clauses, place, inner,
7097 fork, join, &fork_seq, &join_seq, ctx);
a8e785ba 7098
7099 /* Append this level to head. */
7100 gimple_seq_add_seq (head, fork_seq);
7101 /* Prepend it to tail. */
7102 gimple_seq_add_seq (&join_seq, *tail);
7103 *tail = join_seq;
7104
7105 inner = true;
7106 }
7107
7108 /* Mark the end of the sequence. */
7109 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7110 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7111}
75a70cf9 7112
4954efd4 7113/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7114 catch handler and return it. This prevents programs from violating the
7115 structured block semantics with throws. */
75a70cf9 7116
4954efd4 7117static gimple_seq
7118maybe_catch_exception (gimple_seq body)
75a70cf9 7119{
4954efd4 7120 gimple *g;
7121 tree decl;
56686608 7122
4954efd4 7123 if (!flag_exceptions)
7124 return body;
56686608 7125
4954efd4 7126 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7127 decl = lang_hooks.eh_protect_cleanup_actions ();
7128 else
7129 decl = builtin_decl_explicit (BUILT_IN_TRAP);
56686608 7130
4954efd4 7131 g = gimple_build_eh_must_not_throw (decl);
7132 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7133 GIMPLE_TRY_CATCH);
56686608 7134
4954efd4 7135 return gimple_seq_alloc_with_stmt (g);
56686608 7136}
7137
4954efd4 7138\f
7139/* Routines to lower OMP directives into OMP-GIMPLE. */
75a70cf9 7140
4954efd4 7141/* If ctx is a worksharing context inside of a cancellable parallel
7142 region and it isn't nowait, add an lhs to its GIMPLE_OMP_RETURN
7143 and a conditional branch to the parallel's cancel_label to handle
7144 cancellation in the implicit barrier. */
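/* Schematically (illustrative only), the barrier's result is tested as

     lhs = OMP_RETURN <...>;      /* cancellable implicit barrier  */
     if (lhs != false) goto cancel_label; else goto fallthru;

   where LHS has the bool return type of GOMP_cancel.  */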
1e8e9920 7145
7146static void
7e5a76c8 7147maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7148 gimple_seq *body)
1e8e9920 7149{
4954efd4 7150 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7151 if (gimple_omp_return_nowait_p (omp_return))
7152 return;
7e5a76c8 7153 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7154 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7155 && outer->cancellable)
7156 {
7157 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7158 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7159 tree lhs = create_tmp_var (c_bool_type);
7160 gimple_omp_return_set_lhs (omp_return, lhs);
7161 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7162 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7163 fold_convert (c_bool_type,
7164 boolean_false_node),
7165 outer->cancel_label, fallthru_label);
7166 gimple_seq_add_stmt (body, g);
7167 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7168 }
7169 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7170 return;
7171}
7172
7173/* Find the first task_reduction or reduction clause or return NULL
7174 if there are none. */
7175
7176static inline tree
7177omp_task_reductions_find_first (tree clauses, enum tree_code code,
7178 enum omp_clause_code ccode)
7179{
7180 while (1)
7181 {
7182 clauses = omp_find_clause (clauses, ccode);
7183 if (clauses == NULL_TREE)
7184 return NULL_TREE;
7185 if (ccode != OMP_CLAUSE_REDUCTION
7186 || code == OMP_TASKLOOP
7187 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7188 return clauses;
7189 clauses = OMP_CLAUSE_CHAIN (clauses);
773c5ba7 7190 }
4954efd4 7191}
1e8e9920 7192
7e5a76c8 7193static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7194 gimple_seq *, gimple_seq *);
7195
4954efd4 7196/* Lower the OpenMP sections directive in the current statement in GSI_P.
7197 CTX is the enclosing OMP context for the current statement. */
1e8e9920 7198
4954efd4 7199static void
7200lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7201{
7202 tree block, control;
7203 gimple_stmt_iterator tgsi;
7204 gomp_sections *stmt;
7205 gimple *t;
7206 gbind *new_stmt, *bind;
eb7a699d 7207 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
1e8e9920 7208
4954efd4 7209 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
1e8e9920 7210
4954efd4 7211 push_gimplify_context ();
bc7bff74 7212
4954efd4 7213 dlist = NULL;
7214 ilist = NULL;
7e5a76c8 7215
7216 tree rclauses
7217 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7218 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7219 tree rtmp = NULL_TREE;
7220 if (rclauses)
7221 {
7222 tree type = build_pointer_type (pointer_sized_int_node);
7223 tree temp = create_tmp_var (type);
7224 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7225 OMP_CLAUSE_DECL (c) = temp;
7226 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7227 gimple_omp_sections_set_clauses (stmt, c);
7228 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7229 gimple_omp_sections_clauses (stmt),
7230 &ilist, &tred_dlist);
7231 rclauses = c;
7232 rtmp = make_ssa_name (type);
7233 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7234 }
7235
4f4b92d8 7236 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7237 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7238
4954efd4 7239 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7240 &ilist, &dlist, ctx, NULL);
1e8e9920 7241
eb7a699d 7242 control = create_tmp_var (unsigned_type_node, ".section");
7243 gimple_omp_sections_set_control (stmt, control);
7244
4954efd4 7245 new_body = gimple_omp_body (stmt);
7246 gimple_omp_set_body (stmt, NULL);
7247 tgsi = gsi_start (new_body);
7248 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
1e8e9920 7249 {
4954efd4 7250 omp_context *sctx;
7251 gimple *sec_start;
773c5ba7 7252
4954efd4 7253 sec_start = gsi_stmt (tgsi);
7254 sctx = maybe_lookup_ctx (sec_start);
7255 gcc_assert (sctx);
7256
7257 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7258 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7259 GSI_CONTINUE_LINKING);
7260 gimple_omp_set_body (sec_start, NULL);
7261
7262 if (gsi_one_before_end_p (tgsi))
773c5ba7 7263 {
4954efd4 7264 gimple_seq l = NULL;
7265 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
eb7a699d 7266 &ilist, &l, &clist, ctx);
4954efd4 7267 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7268 gimple_omp_section_set_last (sec_start);
7269 }
79acaae1 7270
4954efd4 7271 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7272 GSI_CONTINUE_LINKING);
7273 }
773c5ba7 7274
4954efd4 7275 block = make_node (BLOCK);
7276 bind = gimple_build_bind (NULL, new_body, block);
773c5ba7 7277
4954efd4 7278 olist = NULL;
9a1d892b 7279 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
eb7a699d 7280 &clist, ctx);
7281 if (clist)
7282 {
7283 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7284 gcall *g = gimple_build_call (fndecl, 0);
7285 gimple_seq_add_stmt (&olist, g);
7286 gimple_seq_add_seq (&olist, clist);
7287 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7288 g = gimple_build_call (fndecl, 0);
7289 gimple_seq_add_stmt (&olist, g);
7290 }
773c5ba7 7291
4954efd4 7292 block = make_node (BLOCK);
7293 new_stmt = gimple_build_bind (NULL, NULL, block);
7294 gsi_replace (gsi_p, new_stmt, true);
773c5ba7 7295
4954efd4 7296 pop_gimplify_context (new_stmt);
7297 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7298 BLOCK_VARS (block) = gimple_bind_vars (bind);
7299 if (BLOCK_VARS (block))
7300 TREE_USED (block) = 1;
773c5ba7 7301
4954efd4 7302 new_body = NULL;
7303 gimple_seq_add_seq (&new_body, ilist);
7304 gimple_seq_add_stmt (&new_body, stmt);
7305 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7306 gimple_seq_add_stmt (&new_body, bind);
773c5ba7 7307
4954efd4 7308 t = gimple_build_omp_continue (control, control);
4954efd4 7309 gimple_seq_add_stmt (&new_body, t);
773c5ba7 7310
4954efd4 7311 gimple_seq_add_seq (&new_body, olist);
7312 if (ctx->cancellable)
7313 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7314 gimple_seq_add_seq (&new_body, dlist);
79acaae1 7315
4954efd4 7316 new_body = maybe_catch_exception (new_body);
773c5ba7 7317
7c6746c9 7318 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7319 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7320 t = gimple_build_omp_return (nowait);
4954efd4 7321 gimple_seq_add_stmt (&new_body, t);
7e5a76c8 7322 gimple_seq_add_seq (&new_body, tred_dlist);
7323 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7324
7325 if (rclauses)
7326 OMP_CLAUSE_DECL (rclauses) = rtmp;
1e8e9920 7327
4954efd4 7328 gimple_bind_set_body (new_stmt, new_body);
1e8e9920 7329}
7330
40750995 7331
4954efd4 7332/* A subroutine of lower_omp_single. Expand the simple form of
7333 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
40750995 7334
4954efd4 7335 if (GOMP_single_start ())
7336 BODY;
7337 [ GOMP_barrier (); ] -> unless 'nowait' is present.
40750995 7338
4954efd4 7339 FIXME. It may be better to delay expanding the logic of this until
7340 pass_expand_omp. The expanded logic may make the job more difficult
7341 for a synchronization analysis pass. */
fd6481cf 7342
7343static void
4954efd4 7344lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
fd6481cf 7345{
4954efd4 7346 location_t loc = gimple_location (single_stmt);
7347 tree tlabel = create_artificial_label (loc);
7348 tree flabel = create_artificial_label (loc);
7349 gimple *call, *cond;
7350 tree lhs, decl;
2169f33b 7351
4954efd4 7352 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7353 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7354 call = gimple_build_call (decl, 0);
7355 gimple_call_set_lhs (call, lhs);
7356 gimple_seq_add_stmt (pre_p, call);
fd6481cf 7357
4954efd4 7358 cond = gimple_build_cond (EQ_EXPR, lhs,
7359 fold_convert_loc (loc, TREE_TYPE (lhs),
7360 boolean_true_node),
7361 tlabel, flabel);
7362 gimple_seq_add_stmt (pre_p, cond);
7363 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7364 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7365 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
fd6481cf 7366}
7367
7368
4954efd4 7369/* A subroutine of lower_omp_single. Expand the simple form of
7370 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
1e8e9920 7371
4954efd4 7372 #pragma omp single copyprivate (a, b, c)
1e8e9920 7373
4954efd4 7374 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
1e8e9920 7375
4954efd4 7376 {
7377 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7378 {
7379 BODY;
7380 copyout.a = a;
7381 copyout.b = b;
7382 copyout.c = c;
7383 GOMP_single_copy_end (&copyout);
7384 }
7385 else
7386 {
7387 a = copyout_p->a;
7388 b = copyout_p->b;
7389 c = copyout_p->c;
7390 }
7391 GOMP_barrier ();
7392 }
75a70cf9 7393
4954efd4 7394 FIXME. It may be better to delay expanding the logic of this until
7395 pass_expand_omp. The expanded logic may make the job more difficult
7396 for a synchronization analysis pass. */
1e8e9920 7397
4954efd4 7398static void
7399lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7400 omp_context *ctx)
7401{
7402 tree ptr_type, t, l0, l1, l2, bfn_decl;
7403 gimple_seq copyin_seq;
7404 location_t loc = gimple_location (single_stmt);
1e8e9920 7405
4954efd4 7406 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
1e8e9920 7407
4954efd4 7408 ptr_type = build_pointer_type (ctx->record_type);
7409 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
1e8e9920 7410
4954efd4 7411 l0 = create_artificial_label (loc);
7412 l1 = create_artificial_label (loc);
7413 l2 = create_artificial_label (loc);
1e8e9920 7414
4954efd4 7415 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7416 t = build_call_expr_loc (loc, bfn_decl, 0);
7417 t = fold_convert_loc (loc, ptr_type, t);
7418 gimplify_assign (ctx->receiver_decl, t, pre_p);
1e8e9920 7419
4954efd4 7420 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7421 build_int_cst (ptr_type, 0));
7422 t = build3 (COND_EXPR, void_type_node, t,
7423 build_and_jump (&l0), build_and_jump (&l1));
7424 gimplify_and_add (t, pre_p);
1e8e9920 7425
4954efd4 7426 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
1e8e9920 7427
4954efd4 7428 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
1e8e9920 7429
4954efd4 7430 copyin_seq = NULL;
7431 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7432 &copyin_seq, ctx);
1e8e9920 7433
4954efd4 7434 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7435 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7436 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7437 gimplify_and_add (t, pre_p);
5056ba1a 7438
4954efd4 7439 t = build_and_jump (&l2);
7440 gimplify_and_add (t, pre_p);
1e8e9920 7441
4954efd4 7442 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
1e8e9920 7443
4954efd4 7444 gimple_seq_add_seq (pre_p, copyin_seq);
61e47ac8 7445
4954efd4 7446 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
61e47ac8 7447}
773c5ba7 7448
4954efd4 7449
7450/* Expand code for an OpenMP single directive. */
658b4427 7451
7452static void
4954efd4 7453lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
658b4427 7454{
4954efd4 7455 tree block;
4954efd4 7456 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7457 gbind *bind;
7458 gimple_seq bind_body, bind_body_tail = NULL, dlist;
658b4427 7459
4954efd4 7460 push_gimplify_context ();
658b4427 7461
4954efd4 7462 block = make_node (BLOCK);
7463 bind = gimple_build_bind (NULL, NULL, block);
7464 gsi_replace (gsi_p, bind, true);
7465 bind_body = NULL;
7466 dlist = NULL;
7467 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7468 &bind_body, &dlist, ctx, NULL);
7469 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
658b4427 7470
4954efd4 7471 gimple_seq_add_stmt (&bind_body, single_stmt);
658b4427 7472
4954efd4 7473 if (ctx->record_type)
7474 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7475 else
7476 lower_omp_single_simple (single_stmt, &bind_body);
658b4427 7477
4954efd4 7478 gimple_omp_set_body (single_stmt, NULL);
658b4427 7479
4954efd4 7480 gimple_seq_add_seq (&bind_body, dlist);
8e6b4515 7481
4954efd4 7482 bind_body = maybe_catch_exception (bind_body);
8e6b4515 7483
7c6746c9 7484 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
7485 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7486 gimple *g = gimple_build_omp_return (nowait);
7487 gimple_seq_add_stmt (&bind_body_tail, g);
7e5a76c8 7488 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
4954efd4 7489 if (ctx->record_type)
7490 {
7491 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
7492 tree clobber = build_constructor (ctx->record_type, NULL);
7493 TREE_THIS_VOLATILE (clobber) = 1;
7494 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
7495 clobber), GSI_SAME_STMT);
7496 }
7497 gimple_seq_add_seq (&bind_body, bind_body_tail);
7498 gimple_bind_set_body (bind, bind_body);
8e6b4515 7499
4954efd4 7500 pop_gimplify_context (bind);
8e6b4515 7501
4954efd4 7502 gimple_bind_append_vars (bind, ctx->block_vars);
7503 BLOCK_VARS (block) = ctx->block_vars;
7504 if (BLOCK_VARS (block))
7505 TREE_USED (block) = 1;
8e6b4515 7506}
7507
3d483a94 7508
4954efd4 7509/* Expand code for an OpenMP master directive. */
1e8e9920 7510
7511static void
4954efd4 7512lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 7513{
4954efd4 7514 tree block, lab = NULL, x, bfn_decl;
7515 gimple *stmt = gsi_stmt (*gsi_p);
7516 gbind *bind;
7517 location_t loc = gimple_location (stmt);
7518 gimple_seq tseq;
773c5ba7 7519
4954efd4 7520 push_gimplify_context ();
773c5ba7 7521
4954efd4 7522 block = make_node (BLOCK);
7523 bind = gimple_build_bind (NULL, NULL, block);
7524 gsi_replace (gsi_p, bind, true);
7525 gimple_bind_add_stmt (bind, stmt);
773c5ba7 7526
4954efd4 7527 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7528 x = build_call_expr_loc (loc, bfn_decl, 0);
7529 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
7530 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
7531 tseq = NULL;
7532 gimplify_and_add (x, &tseq);
7533 gimple_bind_add_seq (bind, tseq);
40750995 7534
4954efd4 7535 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7536 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7537 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7538 gimple_omp_set_body (stmt, NULL);
1d22f541 7539
4954efd4 7540 gimple_bind_add_stmt (bind, gimple_build_label (lab));
fbe86b1b 7541
4954efd4 7542 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9561765e 7543
4954efd4 7544 pop_gimplify_context (bind);
48e1416a 7545
4954efd4 7546 gimple_bind_append_vars (bind, ctx->block_vars);
7547 BLOCK_VARS (block) = ctx->block_vars;
1e8e9920 7548}
7549
7e5a76c8 7550/* Helper function for lower_omp_task_reductions. For a specific PASS
7551 find the next clause that should be processed, or return false
7552 if all have been processed already. */
7553
7554static inline bool
7555omp_task_reduction_iterate (int pass, enum tree_code code,
7556 enum omp_clause_code ccode, tree *c, tree *decl,
7557 tree *type, tree *next)
7558{
7559 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7560 {
7561 if (ccode == OMP_CLAUSE_REDUCTION
7562 && code != OMP_TASKLOOP
7563 && !OMP_CLAUSE_REDUCTION_TASK (*c))
7564 continue;
7565 *decl = OMP_CLAUSE_DECL (*c);
7566 *type = TREE_TYPE (*decl);
7567 if (TREE_CODE (*decl) == MEM_REF)
7568 {
7569 if (pass != 1)
7570 continue;
7571 }
7572 else
7573 {
7574 if (omp_is_reference (*decl))
7575 *type = TREE_TYPE (*type);
7576 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7577 continue;
7578 }
7579 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7580 return true;
7581 }
7582 *decl = NULL_TREE;
7583 *type = NULL_TREE;
7584 *next = NULL_TREE;
7585 return false;
7586}
7587
7588/* Lower task_reduction and reduction clauses (the latter unless CODE is
7589 OMP_TASKGROUP only with task modifier). Register the mapping of those in the
7590 START sequence, and reduce and unregister them in the END sequence. */
7591
7592static void
7593lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
7594 gimple_seq *start, gimple_seq *end)
7595{
7596 enum omp_clause_code ccode
7597 = (code == OMP_TASKGROUP
7598 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
7599 tree cancellable = NULL_TREE;
7600 clauses = omp_task_reductions_find_first (clauses, code, ccode);
7601 if (clauses == NULL_TREE)
7602 return;
7603 if (code == OMP_FOR || code == OMP_SECTIONS)
7604 {
7605 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7606 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7607 && outer->cancellable)
7608 {
7609 cancellable = error_mark_node;
7610 break;
7611 }
7612 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7613 break;
7614 }
7615 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
7616 tree *last = &TYPE_FIELDS (record_type);
7617 unsigned cnt = 0;
7618 if (cancellable)
7619 {
7620 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7621 ptr_type_node);
7622 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7623 integer_type_node);
7624 *last = field;
7625 DECL_CHAIN (field) = ifield;
7626 last = &DECL_CHAIN (ifield);
c75a1c19 7627 DECL_CONTEXT (field) = record_type;
7628 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7629 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7630 DECL_CONTEXT (ifield) = record_type;
7631 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
7632 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
7e5a76c8 7633 }
7634 for (int pass = 0; pass < 2; pass++)
7635 {
7636 tree decl, type, next;
7637 for (tree c = clauses;
7638 omp_task_reduction_iterate (pass, code, ccode,
7639 &c, &decl, &type, &next); c = next)
7640 {
7641 ++cnt;
7642 tree new_type = type;
7643 if (ctx->outer)
7644 new_type = remap_type (type, &ctx->outer->cb);
7645 tree field
7646 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
7647 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
7648 new_type);
7649 if (DECL_P (decl) && type == TREE_TYPE (decl))
7650 {
7651 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
7652 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
7653 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
7654 }
7655 else
7656 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
7657 DECL_CONTEXT (field) = record_type;
c75a1c19 7658 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7659 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7e5a76c8 7660 *last = field;
7661 last = &DECL_CHAIN (field);
7662 tree bfield
7663 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
7664 boolean_type_node);
7665 DECL_CONTEXT (bfield) = record_type;
c75a1c19 7666 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
7667 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
7e5a76c8 7668 *last = bfield;
7669 last = &DECL_CHAIN (bfield);
7670 }
7671 }
7672 *last = NULL_TREE;
7673 layout_type (record_type);
7674
7675 /* Build up an array which registers with the runtime all the reductions
7676 and deregisters them at the end. Format documented in libgomp/task.c. */
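  /* A summary of the stores emitted below (see libgomp/task.c for the
     authoritative layout): [0] = number of reductions, [1] = size of the
     per-thread record rounded up to a cache line, [2] = required
     alignment, [3] = -1, [4] = 0, and for each reduction a group
     starting at [7 + i*3] holding the address of the original variable
     and the byte offset of its field within the per-thread record.  */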
7677 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
7678 tree avar = create_tmp_var_raw (atype);
7679 gimple_add_tmp_var (avar);
7680 TREE_ADDRESSABLE (avar) = 1;
7681 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
7682 NULL_TREE, NULL_TREE);
7683 tree t = build_int_cst (pointer_sized_int_node, cnt);
7684 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7685 gimple_seq seq = NULL;
7686 tree sz = fold_convert (pointer_sized_int_node,
7687 TYPE_SIZE_UNIT (record_type));
7688 int cachesz = 64;
7689 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
7690 build_int_cst (pointer_sized_int_node, cachesz - 1));
7691 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
7692 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
7693 ctx->task_reductions.create (1 + cnt);
7694 ctx->task_reduction_map = new hash_map<tree, unsigned>;
7695 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
7696 ? sz : NULL_TREE);
7697 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
7698 gimple_seq_add_seq (start, seq);
7699 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
7700 NULL_TREE, NULL_TREE);
7701 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
7702 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7703 NULL_TREE, NULL_TREE);
7704 t = build_int_cst (pointer_sized_int_node,
7705 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
7706 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7707 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
7708 NULL_TREE, NULL_TREE);
7709 t = build_int_cst (pointer_sized_int_node, -1);
7710 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7711 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
7712 NULL_TREE, NULL_TREE);
7713 t = build_int_cst (pointer_sized_int_node, 0);
7714 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7715
7716 /* In END, build a loop that iterates from 0 to < omp_get_num_threads ()
7717 and for each task reduction checks a bool right after the private variable
7718 within that thread's chunk; if the bool is clear, it hasn't been
7719 initialized and thus isn't going to be reduced nor destructed, otherwise
7720 reduce and destruct it. */
7721 tree idx = create_tmp_var (size_type_node);
7722 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
7723 tree num_thr_sz = create_tmp_var (size_type_node);
7724 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7725 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
7726 tree lab3 = NULL_TREE;
7727 gimple *g;
7728 if (code == OMP_FOR || code == OMP_SECTIONS)
7729 {
7730 /* For worksharing constructs, only perform it in the master thread,
7731 with the exception of cancelled implicit barriers - then only handle
7732 the current thread. */
7733 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7734 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7735 tree thr_num = create_tmp_var (integer_type_node);
7736 g = gimple_build_call (t, 0);
7737 gimple_call_set_lhs (g, thr_num);
7738 gimple_seq_add_stmt (end, g);
7739 if (cancellable)
7740 {
7741 tree c;
7742 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7743 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
7744 lab3 = create_artificial_label (UNKNOWN_LOCATION);
7745 if (code == OMP_FOR)
7746 c = gimple_omp_for_clauses (ctx->stmt);
0bd73648 7747 else /* if (code == OMP_SECTIONS) */
7e5a76c8 7748 c = gimple_omp_sections_clauses (ctx->stmt);
7749 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
7750 cancellable = c;
7751 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
7752 lab5, lab6);
7753 gimple_seq_add_stmt (end, g);
7754 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7755 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
7756 gimple_seq_add_stmt (end, g);
7757 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
7758 build_one_cst (TREE_TYPE (idx)));
7759 gimple_seq_add_stmt (end, g);
7760 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
7761 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7762 }
7763 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
7764 gimple_seq_add_stmt (end, g);
7765 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7766 }
7767 if (code != OMP_PARALLEL)
7768 {
7769 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
7770 tree num_thr = create_tmp_var (integer_type_node);
7771 g = gimple_build_call (t, 0);
7772 gimple_call_set_lhs (g, num_thr);
7773 gimple_seq_add_stmt (end, g);
7774 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
7775 gimple_seq_add_stmt (end, g);
7776 if (cancellable)
7777 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7778 }
7779 else
7780 {
7781 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7782 OMP_CLAUSE__REDUCTEMP_);
7783 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
7784 t = fold_convert (size_type_node, t);
7785 gimplify_assign (num_thr_sz, t, end);
7786 }
7787 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7788 NULL_TREE, NULL_TREE);
7789 tree data = create_tmp_var (pointer_sized_int_node);
7790 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
7791 gimple_seq_add_stmt (end, gimple_build_label (lab1));
7792 tree ptr;
7793 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
7794 ptr = create_tmp_var (build_pointer_type (record_type));
7795 else
7796 ptr = create_tmp_var (ptr_type_node);
7797 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
7798
7799 tree field = TYPE_FIELDS (record_type);
7800 cnt = 0;
7801 if (cancellable)
7802 field = DECL_CHAIN (DECL_CHAIN (field));
7803 for (int pass = 0; pass < 2; pass++)
7804 {
7805 tree decl, type, next;
7806 for (tree c = clauses;
7807 omp_task_reduction_iterate (pass, code, ccode,
7808 &c, &decl, &type, &next); c = next)
7809 {
7810 tree var = decl, ref;
7811 if (TREE_CODE (decl) == MEM_REF)
7812 {
7813 var = TREE_OPERAND (var, 0);
7814 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7815 var = TREE_OPERAND (var, 0);
7816 tree v = var;
7817 if (TREE_CODE (var) == ADDR_EXPR)
7818 var = TREE_OPERAND (var, 0);
7819 else if (TREE_CODE (var) == INDIRECT_REF)
7820 var = TREE_OPERAND (var, 0);
7821 tree orig_var = var;
7822 if (is_variable_sized (var))
7823 {
7824 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7825 var = DECL_VALUE_EXPR (var);
7826 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7827 var = TREE_OPERAND (var, 0);
7828 gcc_assert (DECL_P (var));
7829 }
7830 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7831 if (orig_var != var)
7832 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
7833 else if (TREE_CODE (v) == ADDR_EXPR)
7834 t = build_fold_addr_expr (t);
7835 else if (TREE_CODE (v) == INDIRECT_REF)
7836 t = build_fold_indirect_ref (t);
7837 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
7838 {
7839 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
7840 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7841 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
7842 }
7843 if (!integer_zerop (TREE_OPERAND (decl, 1)))
7844 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
7845 fold_convert (size_type_node,
7846 TREE_OPERAND (decl, 1)));
7847 }
7848 else
7849 {
7850 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7851 if (!omp_is_reference (decl))
7852 t = build_fold_addr_expr (t);
7853 }
7854 t = fold_convert (pointer_sized_int_node, t);
7855 seq = NULL;
7856 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7857 gimple_seq_add_seq (start, seq);
7858 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7859 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7860 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7861 t = unshare_expr (byte_position (field));
7862 t = fold_convert (pointer_sized_int_node, t);
7863 ctx->task_reduction_map->put (c, cnt);
7864 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
7865 ? t : NULL_TREE);
7866 seq = NULL;
7867 t = force_gimple_operand (t, &seq, true, NULL_TREE);
7868 gimple_seq_add_seq (start, seq);
7869 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7870 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
7871 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7872
7873 tree bfield = DECL_CHAIN (field);
7874 tree cond;
7875 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
7876 /* In parallel or worksharing all threads unconditionally
7877 initialize all their task reduction private variables. */
7878 cond = boolean_true_node;
7879 else if (TREE_TYPE (ptr) == ptr_type_node)
7880 {
7881 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7882 unshare_expr (byte_position (bfield)));
7883 seq = NULL;
7884 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
7885 gimple_seq_add_seq (end, seq);
7886 tree pbool = build_pointer_type (TREE_TYPE (bfield));
7887 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
7888 build_int_cst (pbool, 0));
7889 }
7890 else
7891 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
7892 build_simple_mem_ref (ptr), bfield, NULL_TREE);
7893 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
7894 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7895 tree condv = create_tmp_var (boolean_type_node);
7896 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
7897 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
7898 lab3, lab4);
7899 gimple_seq_add_stmt (end, g);
7900 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7901 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
7902 {
7903 /* If this reduction doesn't need destruction and parallel
7904 has been cancelled, there is nothing to do for this
7905 reduction, so jump around the merge operation. */
7906 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7907 g = gimple_build_cond (NE_EXPR, cancellable,
7908 build_zero_cst (TREE_TYPE (cancellable)),
7909 lab4, lab5);
7910 gimple_seq_add_stmt (end, g);
7911 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7912 }
7913
7914 tree new_var;
7915 if (TREE_TYPE (ptr) == ptr_type_node)
7916 {
7917 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7918 unshare_expr (byte_position (field)));
7919 seq = NULL;
7920 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
7921 gimple_seq_add_seq (end, seq);
7922 tree pbool = build_pointer_type (TREE_TYPE (field));
7923 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
7924 build_int_cst (pbool, 0));
7925 }
7926 else
7927 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
7928 build_simple_mem_ref (ptr), field, NULL_TREE);
7929
7930 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7931 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
7932 ref = build_simple_mem_ref (ref);
7933 /* reduction(-:var) sums up the partial results, so it acts
7934 identically to reduction(+:var). */
7935 if (rcode == MINUS_EXPR)
7936 rcode = PLUS_EXPR;
7937 if (TREE_CODE (decl) == MEM_REF)
7938 {
7939 tree type = TREE_TYPE (new_var);
7940 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7941 tree i = create_tmp_var (TREE_TYPE (v));
7942 tree ptype = build_pointer_type (TREE_TYPE (type));
7943 if (DECL_P (v))
7944 {
7945 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7946 tree vv = create_tmp_var (TREE_TYPE (v));
7947 gimplify_assign (vv, v, start);
7948 v = vv;
7949 }
7950 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7951 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7952 new_var = build_fold_addr_expr (new_var);
7953 new_var = fold_convert (ptype, new_var);
7954 ref = fold_convert (ptype, ref);
7955 tree m = create_tmp_var (ptype);
7956 gimplify_assign (m, new_var, end);
7957 new_var = m;
7958 m = create_tmp_var (ptype);
7959 gimplify_assign (m, ref, end);
7960 ref = m;
7961 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
7962 tree body = create_artificial_label (UNKNOWN_LOCATION);
7963 tree endl = create_artificial_label (UNKNOWN_LOCATION);
7964 gimple_seq_add_stmt (end, gimple_build_label (body));
7965 tree priv = build_simple_mem_ref (new_var);
7966 tree out = build_simple_mem_ref (ref);
7967 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7968 {
7969 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7970 tree decl_placeholder
7971 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7972 tree lab6 = NULL_TREE;
7973 if (cancellable)
7974 {
7975 /* If this reduction needs destruction and parallel
7976 has been cancelled, jump around the merge operation
7977 to the destruction. */
7978 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7979 lab6 = create_artificial_label (UNKNOWN_LOCATION);
7980 tree zero = build_zero_cst (TREE_TYPE (cancellable));
7981 g = gimple_build_cond (NE_EXPR, cancellable, zero,
7982 lab6, lab5);
7983 gimple_seq_add_stmt (end, g);
7984 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7985 }
7986 SET_DECL_VALUE_EXPR (placeholder, out);
7987 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7988 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7989 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7990 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7991 gimple_seq_add_seq (end,
7992 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7993 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7994 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7995 {
7996 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7997 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7998 }
7999 if (cancellable)
8000 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8001 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8002 if (x)
8003 {
8004 gimple_seq tseq = NULL;
8005 gimplify_stmt (&x, &tseq);
8006 gimple_seq_add_seq (end, tseq);
8007 }
8008 }
8009 else
8010 {
8011 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8012 out = unshare_expr (out);
8013 gimplify_assign (out, x, end);
8014 }
8015 gimple *g
8016 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8017 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8018 gimple_seq_add_stmt (end, g);
8019 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8020 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8021 gimple_seq_add_stmt (end, g);
8022 g = gimple_build_assign (i, PLUS_EXPR, i,
8023 build_int_cst (TREE_TYPE (i), 1));
8024 gimple_seq_add_stmt (end, g);
8025 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8026 gimple_seq_add_stmt (end, g);
8027 gimple_seq_add_stmt (end, gimple_build_label (endl));
8028 }
8029 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8030 {
8031 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8032 tree oldv = NULL_TREE;
8033 tree lab6 = NULL_TREE;
8034 if (cancellable)
8035 {
8036 /* If this reduction needs destruction and parallel
8037 has been cancelled, jump around the merge operation
8038 to the destruction. */
8039 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8040 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8041 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8042 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8043 lab6, lab5);
8044 gimple_seq_add_stmt (end, g);
8045 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8046 }
8047 if (omp_is_reference (decl)
8048 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8049 TREE_TYPE (ref)))
8050 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8051 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8052 tree refv = create_tmp_var (TREE_TYPE (ref));
8053 gimplify_assign (refv, ref, end);
8054 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8055 SET_DECL_VALUE_EXPR (placeholder, ref);
8056 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8057 tree d = maybe_lookup_decl (decl, ctx);
8058 gcc_assert (d);
8059 if (DECL_HAS_VALUE_EXPR_P (d))
8060 oldv = DECL_VALUE_EXPR (d);
8061 if (omp_is_reference (var))
8062 {
8063 tree v = fold_convert (TREE_TYPE (d),
8064 build_fold_addr_expr (new_var));
8065 SET_DECL_VALUE_EXPR (d, v);
8066 }
8067 else
8068 SET_DECL_VALUE_EXPR (d, new_var);
8069 DECL_HAS_VALUE_EXPR_P (d) = 1;
8070 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8071 if (oldv)
8072 SET_DECL_VALUE_EXPR (d, oldv);
8073 else
8074 {
8075 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8076 DECL_HAS_VALUE_EXPR_P (d) = 0;
8077 }
8078 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8079 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8080 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8081 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8082 if (cancellable)
8083 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8084 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8085 if (x)
8086 {
8087 gimple_seq tseq = NULL;
8088 gimplify_stmt (&x, &tseq);
8089 gimple_seq_add_seq (end, tseq);
8090 }
8091 }
8092 else
8093 {
8094 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8095 ref = unshare_expr (ref);
8096 gimplify_assign (ref, x, end);
8097 }
8098 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8099 ++cnt;
8100 field = DECL_CHAIN (bfield);
8101 }
8102 }
8103
8104 if (code == OMP_TASKGROUP)
8105 {
8106 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8107 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8108 gimple_seq_add_stmt (start, g);
8109 }
8110 else
8111 {
8112 tree c;
8113 if (code == OMP_FOR)
8114 c = gimple_omp_for_clauses (ctx->stmt);
8115 else if (code == OMP_SECTIONS)
8116 c = gimple_omp_sections_clauses (ctx->stmt);
8117 else
8118 c = gimple_omp_taskreg_clauses (ctx->stmt);
8119 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8120 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8121 build_fold_addr_expr (avar));
8122 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8123 }
8124
8125 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8126 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8127 size_one_node));
8128 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8129 gimple_seq_add_stmt (end, g);
8130 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8131 if (code == OMP_FOR || code == OMP_SECTIONS)
8132 {
8133 enum built_in_function bfn
8134 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8135 t = builtin_decl_explicit (bfn);
8136 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8137 tree arg;
8138 if (cancellable)
8139 {
8140 arg = create_tmp_var (c_bool_type);
8141 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8142 cancellable));
8143 }
8144 else
8145 arg = build_int_cst (c_bool_type, 0);
8146 g = gimple_build_call (t, 1, arg);
8147 }
8148 else
8149 {
8150 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8151 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8152 }
8153 gimple_seq_add_stmt (end, g);
8154 t = build_constructor (atype, NULL);
8155 TREE_THIS_VOLATILE (t) = 1;
8156 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8157}
a8e785ba 8158
4954efd4 8159/* Expand code for an OpenMP taskgroup directive. */
a8e785ba 8160
4954efd4 8161static void
8162lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
a8e785ba 8163{
4954efd4 8164 gimple *stmt = gsi_stmt (*gsi_p);
8165 gcall *x;
8166 gbind *bind;
7e5a76c8 8167 gimple_seq dseq = NULL;
4954efd4 8168 tree block = make_node (BLOCK);
a8e785ba 8169
4954efd4 8170 bind = gimple_build_bind (NULL, NULL, block);
8171 gsi_replace (gsi_p, bind, true);
8172 gimple_bind_add_stmt (bind, stmt);
a8e785ba 8173
7e5a76c8 8174 push_gimplify_context ();
8175
4954efd4 8176 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8177 0);
8178 gimple_bind_add_stmt (bind, x);
a8e785ba 8179
7e5a76c8 8180 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8181 gimple_omp_taskgroup_clauses (stmt),
8182 gimple_bind_body_ptr (bind), &dseq);
8183
4954efd4 8184 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8185 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8186 gimple_omp_set_body (stmt, NULL);
a8e785ba 8187
4954efd4 8188 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7e5a76c8 8189 gimple_bind_add_seq (bind, dseq);
8190
8191 pop_gimplify_context (bind);
a8e785ba 8192
4954efd4 8193 gimple_bind_append_vars (bind, ctx->block_vars);
8194 BLOCK_VARS (block) = ctx->block_vars;
a8e785ba 8195}
8196
773c5ba7 8197
4954efd4 8198/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
3d483a94 8199
8200static void
4954efd4 8201lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8202 omp_context *ctx)
3d483a94 8203{
4954efd4 8204 struct omp_for_data fd;
8205 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8206 return;
3d483a94 8207
4954efd4 8208 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8209 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8210 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8211 if (!fd.ordered)
8212 return;
bc7bff74 8213
4954efd4 8214 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8215 tree c = gimple_omp_ordered_clauses (ord_stmt);
8216 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8217 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
3d483a94 8218 {
4954efd4 8219 /* Merge depend clauses from multiple adjacent
8220 #pragma omp ordered depend(sink:...) constructs
8221 into one #pragma omp ordered depend(sink:...), so that
8222 we can optimize them together. */
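	  /* For illustration (a hypothetical sketch, not tied to any
	     particular testcase), two adjacent constructs such as

	       #pragma omp ordered depend(sink: i-1,j)
	       #pragma omp ordered depend(sink: i,j-1)

	     end up as a single GIMPLE_OMP_ORDERED carrying both sink
	     clauses, which the folding code below can then combine.  */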
8223 gimple_stmt_iterator gsi = *gsi_p;
8224 gsi_next (&gsi);
8225 while (!gsi_end_p (gsi))
3d483a94 8226 {
4954efd4 8227 gimple *stmt = gsi_stmt (gsi);
8228 if (is_gimple_debug (stmt)
8229 || gimple_code (stmt) == GIMPLE_NOP)
3d483a94 8230 {
4954efd4 8231 gsi_next (&gsi);
8232 continue;
3d483a94 8233 }
4954efd4 8234 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8235 break;
8236 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8237 c = gimple_omp_ordered_clauses (ord_stmt2);
8238 if (c == NULL_TREE
8239 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8240 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8241 break;
8242 while (*list_p)
8243 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8244 *list_p = c;
8245 gsi_remove (&gsi, true);
3d483a94 8246 }
8247 }
3d483a94 8248
4954efd4 8249 /* Canonicalize sink dependence clauses into one folded clause if
8250 possible.
3d483a94 8251
4954efd4 8252 The basic algorithm is to create a sink vector whose first
8253 element is the GCD of all the first elements, and whose remaining
8254 elements are the minimum of the subsequent columns.
3d483a94 8255
4954efd4 8256 We ignore dependence vectors whose first element is zero because
8257 such dependencies are known to be executed by the same thread.
bc7bff74 8258
4954efd4 8259 We take into account the direction of the loop, so a minimum
8260 becomes a maximum if the loop is iterating forwards. We also
8261 ignore sink clauses where the loop direction is unknown, or where
8262 the offsets are clearly invalid because they are not a multiple
8263 of the loop increment.
8264
8265 For example:
8266
8267 #pragma omp for ordered(2)
8268 for (i=0; i < N; ++i)
8269 for (j=0; j < M; ++j)
bc7bff74 8270 {
4954efd4 8271 #pragma omp ordered \
8272 depend(sink:i-8,j-2) \
8273 depend(sink:i,j-1) \ // Completely ignored because i+0.
8274 depend(sink:i-4,j-3) \
8275 depend(sink:i-6,j-4)
8276 #pragma omp ordered depend(source)
bc7bff74 8277 }
bc7bff74 8278
4954efd4 8279 Folded clause is:
3d483a94 8280
4954efd4 8281 depend(sink:-gcd(8,4,6),-min(2,3,4))
8282 -or-
8283 depend(sink:-2,-2)
8284 */
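     /* Worked out for the example above (plain arithmetic, for clarity
	only): gcd (8, 4, 6) = 2 and min (2, 3, 4) = 2, which yields the
	folded clause depend(sink: -2, -2) shown above.  */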
3d483a94 8285
4954efd4 8286 /* FIXME: Computing GCD's where the first element is zero is
8287 non-trivial in the presence of collapsed loops. Do this later. */
8288 if (fd.collapse > 1)
8289 return;
3d483a94 8290
4954efd4 8291 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
a324786b 8292
8293 /* wide_int is not a POD so it must be default-constructed. */
8294 for (unsigned i = 0; i != 2 * len - 1; ++i)
8295 new (static_cast<void*>(folded_deps + i)) wide_int ();
8296
4954efd4 8297 tree folded_dep = NULL_TREE;
8298 /* TRUE if the first dimension's offset is negative. */
8299 bool neg_offset_p = false;
3d483a94 8300
4954efd4 8301 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8302 unsigned int i;
8303 while ((c = *list_p) != NULL)
3d483a94 8304 {
4954efd4 8305 bool remove = false;
3d483a94 8306
4954efd4 8307 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8308 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8309 goto next_ordered_clause;
3d483a94 8310
4954efd4 8311 tree vec;
8312 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8313 vec && TREE_CODE (vec) == TREE_LIST;
8314 vec = TREE_CHAIN (vec), ++i)
3d483a94 8315 {
4954efd4 8316 gcc_assert (i < len);
3d483a94 8317
4954efd4 8318 /* omp_extract_for_data has canonicalized the condition. */
8319 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8320 || fd.loops[i].cond_code == GT_EXPR);
8321 bool forward = fd.loops[i].cond_code == LT_EXPR;
8322 bool maybe_lexically_later = true;
1e8e9920 8323
4954efd4 8324 /* While the committee makes up its mind, bail if we have any
8325 non-constant steps. */
8326 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8327 goto lower_omp_ordered_ret;
1e8e9920 8328
4954efd4 8329 tree itype = TREE_TYPE (TREE_VALUE (vec));
8330 if (POINTER_TYPE_P (itype))
8331 itype = sizetype;
e3d0f65c 8332 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
4954efd4 8333 TYPE_PRECISION (itype),
8334 TYPE_SIGN (itype));
fd6481cf 8335
4954efd4 8336 /* Ignore invalid offsets that are not multiples of the step. */
e3d0f65c 8337 if (!wi::multiple_of_p (wi::abs (offset),
8338 wi::abs (wi::to_wide (fd.loops[i].step)),
8339 UNSIGNED))
44b49e6b 8340 {
4954efd4 8341 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8342 "ignoring sink clause with offset that is not "
8343 "a multiple of the loop step");
8344 remove = true;
8345 goto next_ordered_clause;
44b49e6b 8346 }
43895be5 8347
4954efd4 8348 /* Calculate the first dimension. The first dimension of
8349 the folded dependency vector is the GCD of the first
8350 elements, while ignoring any first elements whose offset
8351 is 0. */
8352 if (i == 0)
44b49e6b 8353 {
4954efd4 8354 /* Ignore dependence vectors whose first dimension is 0. */
8355 if (offset == 0)
44b49e6b 8356 {
4954efd4 8357 remove = true;
8358 goto next_ordered_clause;
44b49e6b 8359 }
43895be5 8360 else
4954efd4 8361 {
8362 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8363 {
8364 error_at (OMP_CLAUSE_LOCATION (c),
8365 "first offset must be in opposite direction "
8366 "of loop iterations");
8367 goto lower_omp_ordered_ret;
8368 }
8369 if (forward)
8370 offset = -offset;
8371 neg_offset_p = forward;
8372 /* Initialize the first time around. */
8373 if (folded_dep == NULL_TREE)
8374 {
8375 folded_dep = c;
8376 folded_deps[0] = offset;
8377 }
8378 else
8379 folded_deps[0] = wi::gcd (folded_deps[0],
8380 offset, UNSIGNED);
8381 }
43895be5 8382 }
4954efd4 8383 /* Calculate minimum for the remaining dimensions. */
43895be5 8384 else
43895be5 8385 {
4954efd4 8386 folded_deps[len + i - 1] = offset;
8387 if (folded_dep == c)
8388 folded_deps[i] = offset;
8389 else if (maybe_lexically_later
8390 && !wi::eq_p (folded_deps[i], offset))
8391 {
8392 if (forward ^ wi::gts_p (folded_deps[i], offset))
8393 {
8394 unsigned int j;
8395 folded_dep = c;
8396 for (j = 1; j <= i; j++)
8397 folded_deps[j] = folded_deps[len + j - 1];
8398 }
8399 else
8400 maybe_lexically_later = false;
8401 }
43895be5 8402 }
43895be5 8403 }
4954efd4 8404 gcc_assert (i == len);
43895be5 8405
4954efd4 8406 remove = true;
8407
8408 next_ordered_clause:
8409 if (remove)
8410 *list_p = OMP_CLAUSE_CHAIN (c);
43895be5 8411 else
4954efd4 8412 list_p = &OMP_CLAUSE_CHAIN (c);
43895be5 8413 }
43895be5 8414
4954efd4 8415 if (folded_dep)
43895be5 8416 {
4954efd4 8417 if (neg_offset_p)
8418 folded_deps[0] = -folded_deps[0];
43895be5 8419
4954efd4 8420 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8421 if (POINTER_TYPE_P (itype))
8422 itype = sizetype;
8423
8424 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8425 = wide_int_to_tree (itype, folded_deps[0]);
8426 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8427 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
43895be5 8428 }
8429
4954efd4 8430 lower_omp_ordered_ret:
43895be5 8431
4954efd4 8432  /* Ordered without clauses means #pragma omp ordered threads, while we
 8433	 want a nop instead if we have removed all clauses here.  */
8434 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8435 gsi_replace (gsi_p, gimple_build_nop (), true);
43895be5 8436}
8437
8438
4954efd4 8439/* Lower code for an OpenMP ordered directive.  */
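/* For the non-SIMD, non-depend case the lowering below essentially wraps the
   body as (a simplified sketch):

     __builtin_GOMP_ordered_start ();
     <lowered ordered body>
     __builtin_GOMP_ordered_end ();
     GIMPLE_OMP_RETURN

   For simd the internal functions IFN_GOMP_SIMD_ORDERED_START/_END are used
   instead, and under SIMT an additional per-lane loop is generated around
   the body.  */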
1e8e9920 8440
61e47ac8 8441static void
4954efd4 8442lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 8443{
4954efd4 8444 tree block;
8445 gimple *stmt = gsi_stmt (*gsi_p), *g;
8446 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8447 gcall *x;
8448 gbind *bind;
8449 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8450 OMP_CLAUSE_SIMD);
8451 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8452 loop. */
8453 bool maybe_simt
8454 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8455 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8456 OMP_CLAUSE_THREADS);
43895be5 8457
4954efd4 8458 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8459 OMP_CLAUSE_DEPEND))
43895be5 8460 {
4954efd4 8461      /* FIXME: This needs to be moved to the expansion, to verify various
 8462	 conditions that are only testable on a cfg with dominators computed;
 8463	 also, all the depend clauses to be merged might still need to be
 8464	 available for the runtime checks.  */
8465 if (0)
8466 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8467 return;
fd6481cf 8468 }
43895be5 8469
4954efd4 8470 push_gimplify_context ();
8471
8472 block = make_node (BLOCK);
8473 bind = gimple_build_bind (NULL, NULL, block);
8474 gsi_replace (gsi_p, bind, true);
8475 gimple_bind_add_stmt (bind, stmt);
43895be5 8476
4954efd4 8477 if (simd)
79acaae1 8478 {
4954efd4 8479 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8480 build_int_cst (NULL_TREE, threads));
8481 cfun->has_simduid_loops = true;
79acaae1 8482 }
8483 else
4954efd4 8484 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8485 0);
8486 gimple_bind_add_stmt (bind, x);
8487
8488 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
8489 if (maybe_simt)
1e8e9920 8490 {
4954efd4 8491 counter = create_tmp_var (integer_type_node);
8492 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
8493 gimple_call_set_lhs (g, counter);
8494 gimple_bind_add_stmt (bind, g);
43895be5 8495
4954efd4 8496 body = create_artificial_label (UNKNOWN_LOCATION);
8497 test = create_artificial_label (UNKNOWN_LOCATION);
8498 gimple_bind_add_stmt (bind, gimple_build_label (body));
1e8e9920 8499
4954efd4 8500 tree simt_pred = create_tmp_var (integer_type_node);
8501 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
8502 gimple_call_set_lhs (g, simt_pred);
8503 gimple_bind_add_stmt (bind, g);
43895be5 8504
4954efd4 8505 tree t = create_artificial_label (UNKNOWN_LOCATION);
8506 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
8507 gimple_bind_add_stmt (bind, g);
3d483a94 8508
4954efd4 8509 gimple_bind_add_stmt (bind, gimple_build_label (t));
bc7bff74 8510 }
4954efd4 8511 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8512 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8513 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8514 gimple_omp_set_body (stmt, NULL);
bc7bff74 8515
4954efd4 8516 if (maybe_simt)
43895be5 8517 {
4954efd4 8518 gimple_bind_add_stmt (bind, gimple_build_label (test));
8519 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
8520 gimple_bind_add_stmt (bind, g);
773c5ba7 8521
4954efd4 8522 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
8523 tree nonneg = create_tmp_var (integer_type_node);
8524 gimple_seq tseq = NULL;
8525 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
8526 gimple_bind_add_seq (bind, tseq);
43895be5 8527
4954efd4 8528 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
8529 gimple_call_set_lhs (g, nonneg);
8530 gimple_bind_add_stmt (bind, g);
43895be5 8531
4954efd4 8532 tree end = create_artificial_label (UNKNOWN_LOCATION);
8533 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
8534 gimple_bind_add_stmt (bind, g);
773c5ba7 8535
4954efd4 8536 gimple_bind_add_stmt (bind, gimple_build_label (end));
ac6e3339 8537 }
4954efd4 8538 if (simd)
8539 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
8540 build_int_cst (NULL_TREE, threads));
61e47ac8 8541 else
4954efd4 8542 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
8543 0);
8544 gimple_bind_add_stmt (bind, x);
79acaae1 8545
4954efd4 8546 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
79acaae1 8547
4954efd4 8548 pop_gimplify_context (bind);
79acaae1 8549
4954efd4 8550 gimple_bind_append_vars (bind, ctx->block_vars);
8551 BLOCK_VARS (block) = gimple_bind_vars (bind);
8552}
2131a1a9 8553
2131a1a9 8554
da008d72 8555/* Lower code for an OpenMP scan directive and the structured block
8556 before the scan directive. */
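/* Purely for illustration, the kind of source construct handled here is an
   inscan reduction such as (a hypothetical example):

     #pragma omp simd reduction (inscan, +: r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];			/+ input phase +/
	 #pragma omp scan inclusive (r)
	 b[i] = r;			/+ scan phase +/
       }
*/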
8557
8558static void
8559lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8560{
8561 gimple *stmt = gsi_stmt (*gsi_p);
8562 bool has_clauses
8563 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
8564 tree lane = NULL_TREE;
8565 gimple_seq before = NULL;
8566 omp_context *octx = ctx->outer;
8567 gcc_assert (octx);
8568 bool input_phase = has_clauses ^ octx->scan_inclusive;
8569 if (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8570 && (gimple_omp_for_kind (octx->stmt) & GF_OMP_FOR_SIMD)
8571 && !gimple_omp_for_combined_into_p (octx->stmt)
8572 && octx->scan_inclusive)
8573 {
8574 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
8575 OMP_CLAUSE__SIMDUID_))
8576 {
8577 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
8578 lane = create_tmp_var (unsigned_type_node);
8579 tree t = build_int_cst (integer_type_node, 1 + !input_phase);
8580 gimple *g
8581 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
8582 gimple_call_set_lhs (g, lane);
8583 gimple_seq_add_stmt (&before, g);
8584 }
8585 for (tree c = gimple_omp_for_clauses (octx->stmt);
8586 c; c = OMP_CLAUSE_CHAIN (c))
8587 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8588 && OMP_CLAUSE_REDUCTION_INSCAN (c))
8589 {
8590 tree var = OMP_CLAUSE_DECL (c);
8591 tree new_var = lookup_decl (var, octx);
8592 tree val = new_var;
8593 tree var2 = NULL_TREE;
8594 tree var3 = NULL_TREE;
8595 if (DECL_HAS_VALUE_EXPR_P (new_var))
8596 {
8597 val = DECL_VALUE_EXPR (new_var);
8598 if (TREE_CODE (val) == ARRAY_REF
8599 && VAR_P (TREE_OPERAND (val, 0)))
8600 {
8601 tree v = TREE_OPERAND (val, 0);
8602 if (lookup_attribute ("omp simd array",
8603 DECL_ATTRIBUTES (v)))
8604 {
8605 val = unshare_expr (val);
8606 TREE_OPERAND (val, 1) = lane;
8607 var2 = lookup_decl (v, octx);
8608 if (input_phase
8609 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8610 var3 = maybe_lookup_decl (var2, octx);
8611 if (!input_phase)
8612 {
8613 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
8614 var2, lane, NULL_TREE, NULL_TREE);
8615 TREE_THIS_NOTRAP (var2) = 1;
8616 }
8617 else
8618 var2 = val;
8619 }
8620 }
8621 }
8622 else
8623 {
8624 var2 = build_outer_var_ref (var, octx);
8625 if (input_phase && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8626 {
8627 var3 = maybe_lookup_decl (new_var, octx);
8628 if (var3 == new_var)
8629 var3 = NULL_TREE;
8630 }
8631 }
8632 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8633 {
8634 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8635 if (input_phase)
8636 {
8637 if (var3)
8638 {
8639 /* If we've added a separate identity element
8640 variable, copy it over into val. */
8641 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
8642 var3);
8643 gimplify_and_add (x, &before);
8644 }
8645 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
8646 {
8647 /* Otherwise, assign to it the identity element. */
8648 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
8649 tree x = (DECL_HAS_VALUE_EXPR_P (new_var)
8650 ? DECL_VALUE_EXPR (new_var) : NULL_TREE);
8651 tree ref = build_outer_var_ref (var, octx);
8652 SET_DECL_VALUE_EXPR (new_var, val);
8653 SET_DECL_VALUE_EXPR (placeholder, ref);
8654 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8655 lower_omp (&tseq, octx);
8656 SET_DECL_VALUE_EXPR (new_var, x);
8657 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
8658 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
8659 if (x == NULL_TREE)
8660 DECL_HAS_VALUE_EXPR_P (new_var) = 0;
8661 gimple_seq_add_seq (&before, tseq);
8662 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8663 }
8664 }
8665 else
8666 {
8667 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
8668 tree x = (DECL_HAS_VALUE_EXPR_P (new_var)
8669 ? DECL_VALUE_EXPR (new_var) : NULL_TREE);
8670 SET_DECL_VALUE_EXPR (new_var, val);
8671 SET_DECL_VALUE_EXPR (placeholder, var2);
8672 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8673 lower_omp (&tseq, octx);
8674 gimple_seq_add_seq (&before, tseq);
8675 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8676 SET_DECL_VALUE_EXPR (new_var, x);
8677 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
8678 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
8679 x = lang_hooks.decls.omp_clause_assign_op (c, val, var2);
8680 gimplify_and_add (x, &before);
8681 }
8682 }
8683 else
8684 {
8685 if (input_phase)
8686 {
8687 /* input phase. Set val to initializer before
8688 the body. */
8689 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
8690 gimplify_assign (val, x, &before);
8691 }
8692 else
8693 {
8694 /* scan phase. */
8695 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
8696 if (code == MINUS_EXPR)
8697 code = PLUS_EXPR;
8698
8699 tree x = build2 (code, TREE_TYPE (var2),
8700 unshare_expr (var2), unshare_expr (val));
8701 gimplify_assign (unshare_expr (var2), x, &before);
8702 gimplify_assign (val, var2, &before);
8703 }
8704 }
8705 }
8706 }
8707 else if (has_clauses)
8708 sorry_at (gimple_location (stmt),
8709 "%<#pragma omp scan%> not supported yet");
8710 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
8711 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
8712 gsi_replace (gsi_p, gimple_build_nop (), true);
8713}
8714
8715
4954efd4 8716/* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
 8717   substitution of a couple of function calls.  But in the NAMED case, it
 8718   requires that languages coordinate a symbol name.  It is therefore
8719 best put here in common code. */
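/* For illustration (a sketch only), a named construct

     #pragma omp critical (xyz)
       body;

   is lowered to runtime calls bracketing the body:

     __builtin_GOMP_critical_name_start (&.gomp_critical_user_xyz);
     body;
     __builtin_GOMP_critical_name_end (&.gomp_critical_user_xyz);

   while the unnamed form uses GOMP_critical_start/GOMP_critical_end with no
   argument.  */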
2131a1a9 8720
4954efd4 8721static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
2131a1a9 8722
4954efd4 8723static void
8724lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8725{
8726 tree block;
8727 tree name, lock, unlock;
8728 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
8729 gbind *bind;
8730 location_t loc = gimple_location (stmt);
8731 gimple_seq tbody;
2131a1a9 8732
4954efd4 8733 name = gimple_omp_critical_name (stmt);
8734 if (name)
8735 {
8736 tree decl;
2131a1a9 8737
4954efd4 8738 if (!critical_name_mutexes)
8739 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
2131a1a9 8740
4954efd4 8741 tree *n = critical_name_mutexes->get (name);
8742 if (n == NULL)
3d483a94 8743 {
4954efd4 8744 char *new_str;
1e8e9920 8745
4954efd4 8746 decl = create_tmp_var_raw (ptr_type_node);
1e8e9920 8747
4954efd4 8748 new_str = ACONCAT ((".gomp_critical_user_",
8749 IDENTIFIER_POINTER (name), NULL));
8750 DECL_NAME (decl) = get_identifier (new_str);
8751 TREE_PUBLIC (decl) = 1;
8752 TREE_STATIC (decl) = 1;
8753 DECL_COMMON (decl) = 1;
8754 DECL_ARTIFICIAL (decl) = 1;
8755 DECL_IGNORED_P (decl) = 1;
1e8e9920 8756
4954efd4 8757 varpool_node::finalize_decl (decl);
1e8e9920 8758
4954efd4 8759 critical_name_mutexes->put (name, decl);
8760 }
8761 else
8762 decl = *n;
1e8e9920 8763
4954efd4 8764      /* If '#pragma omp critical' is inside an offloaded region or
 8765	 inside a function marked as offloadable, the symbol must be
8766 marked as offloadable too. */
8767 omp_context *octx;
8768 if (cgraph_node::get (current_function_decl)->offloadable)
8769 varpool_node::get_create (decl)->offloadable = 1;
8770 else
8771 for (octx = ctx->outer; octx; octx = octx->outer)
8772 if (is_gimple_omp_offloaded (octx->stmt))
8773 {
8774 varpool_node::get_create (decl)->offloadable = 1;
8775 break;
8776 }
61e47ac8 8777
4954efd4 8778 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
7c6746c9 8779 lock = build_call_expr_loc (loc, lock, 1,
8780 build_fold_addr_expr_loc (loc, decl));
61e47ac8 8781
4954efd4 8782 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
8783 unlock = build_call_expr_loc (loc, unlock, 1,
8784 build_fold_addr_expr_loc (loc, decl));
bc7bff74 8785 }
bc7bff74 8786 else
8e6b4515 8787 {
4954efd4 8788 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
8789 lock = build_call_expr_loc (loc, lock, 0);
8e6b4515 8790
4954efd4 8791 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
8792 unlock = build_call_expr_loc (loc, unlock, 0);
bc7bff74 8793 }
1e8e9920 8794
4954efd4 8795 push_gimplify_context ();
31712e83 8796
4954efd4 8797 block = make_node (BLOCK);
8798 bind = gimple_build_bind (NULL, NULL, block);
8799 gsi_replace (gsi_p, bind, true);
8800 gimple_bind_add_stmt (bind, stmt);
31712e83 8801
4954efd4 8802 tbody = gimple_bind_body (bind);
8803 gimplify_and_add (lock, &tbody);
8804 gimple_bind_set_body (bind, tbody);
31712e83 8805
4954efd4 8806 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8807 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8808 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8809 gimple_omp_set_body (stmt, NULL);
1e8e9920 8810
4954efd4 8811 tbody = gimple_bind_body (bind);
8812 gimplify_and_add (unlock, &tbody);
8813 gimple_bind_set_body (bind, tbody);
1e8e9920 8814
4954efd4 8815 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
79acaae1 8816
4954efd4 8817 pop_gimplify_context (bind);
8818 gimple_bind_append_vars (bind, ctx->block_vars);
8819 BLOCK_VARS (block) = gimple_bind_vars (bind);
8820}
773c5ba7 8821
4954efd4 8822/* A subroutine of lower_omp_for. Generate code to emit the predicate
8823 for a lastprivate clause. Given a loop control predicate of (V
8824 cond N2), we gate the clause on (!(V cond N2)). The lowered form
8825 is appended to *DLIST, iterator initialization is appended to
9a1d892b 8826 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
8827 to be emitted in a critical section. */
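/* For example (a sketch): with a loop header (V = 0; V < N2; V++) the
   lastprivate code ends up guarded by (V >= N2); because the step is +1,
   the code below strengthens that to the cheaper test (V == N2), and V is
   pre-initialized so threads that execute no iterations still see a
   well-defined value.  */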
773c5ba7 8828
4954efd4 8829static void
8830lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9a1d892b 8831 gimple_seq *dlist, gimple_seq *clist,
8832 struct omp_context *ctx)
4954efd4 8833{
8834 tree clauses, cond, vinit;
8835 enum tree_code cond_code;
8836 gimple_seq stmts;
1e8e9920 8837
4954efd4 8838 cond_code = fd->loop.cond_code;
8839 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
bc7bff74 8840
4954efd4 8841  /* When possible, use a strict equality expression.  This can let
 8842     VRP-type optimizations deduce the value and remove a copy.  */
8843 if (tree_fits_shwi_p (fd->loop.step))
bc7bff74 8844 {
4954efd4 8845 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
8846 if (step == 1 || step == -1)
8847 cond_code = EQ_EXPR;
bc7bff74 8848 }
4954efd4 8849
8850 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
8851 || gimple_omp_for_grid_phony (fd->for_stmt))
8852 cond = omp_grid_lastprivate_predicate (fd);
fd6481cf 8853 else
bc7bff74 8854 {
4954efd4 8855 tree n2 = fd->loop.n2;
8856 if (fd->collapse > 1
8857 && TREE_CODE (n2) != INTEGER_CST
8858 && gimple_omp_for_combined_into_p (fd->for_stmt))
43895be5 8859 {
4954efd4 8860 struct omp_context *taskreg_ctx = NULL;
8861 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
43895be5 8862 {
4954efd4 8863 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
8864 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
8865 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
43895be5 8866 {
4954efd4 8867 if (gimple_omp_for_combined_into_p (gfor))
8868 {
8869 gcc_assert (ctx->outer->outer
8870 && is_parallel_ctx (ctx->outer->outer));
8871 taskreg_ctx = ctx->outer->outer;
8872 }
8873 else
8874 {
8875 struct omp_for_data outer_fd;
8876 omp_extract_for_data (gfor, &outer_fd, NULL);
8877 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
8878 }
43895be5 8879 }
4954efd4 8880 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
8881 taskreg_ctx = ctx->outer->outer;
8882 }
8883 else if (is_taskreg_ctx (ctx->outer))
8884 taskreg_ctx = ctx->outer;
8885 if (taskreg_ctx)
8886 {
8887 int i;
8888 tree taskreg_clauses
8889 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
8890 tree innerc = omp_find_clause (taskreg_clauses,
8891 OMP_CLAUSE__LOOPTEMP_);
8892 gcc_assert (innerc);
8893 for (i = 0; i < fd->collapse; i++)
8894 {
8895 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
8896 OMP_CLAUSE__LOOPTEMP_);
8897 gcc_assert (innerc);
8898 }
8899 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
8900 OMP_CLAUSE__LOOPTEMP_);
8901 if (innerc)
8902 n2 = fold_convert (TREE_TYPE (n2),
8903 lookup_decl (OMP_CLAUSE_DECL (innerc),
8904 taskreg_ctx));
43895be5 8905 }
bc7bff74 8906 }
4954efd4 8907 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
bc7bff74 8908 }
773c5ba7 8909
4954efd4 8910 clauses = gimple_omp_for_clauses (fd->for_stmt);
8911 stmts = NULL;
9a1d892b 8912 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
4954efd4 8913 if (!gimple_seq_empty_p (stmts))
bc7bff74 8914 {
4954efd4 8915 gimple_seq_add_seq (&stmts, *dlist);
8916 *dlist = stmts;
04c2922b 8917
4954efd4 8918 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
8919 vinit = fd->loop.n1;
8920 if (cond_code == EQ_EXPR
8921 && tree_fits_shwi_p (fd->loop.n2)
8922 && ! integer_zerop (fd->loop.n2))
8923 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
8924 else
8925 vinit = unshare_expr (vinit);
86a932e0 8926
4954efd4 8927 /* Initialize the iterator variable, so that threads that don't execute
8928 any iterations don't execute the lastprivate clauses by accident. */
8929 gimplify_assign (fd->loop.v, vinit, body_p);
bc7bff74 8930 }
1e8e9920 8931}
8932
281001a9 8933
4954efd4 8934/* Lower code for an OMP loop directive. */
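/* In outline, the sequence assembled below is (a simplified sketch):
   privatization/input-clause code, lowered header temporaries, the
   GIMPLE_OMP_FOR itself followed by its lowered body and a
   GIMPLE_OMP_CONTINUE, then reduction and lastprivate finalization, and
   finally the GIMPLE_OMP_RETURN region exit marker, with OpenACC head/tail
   sequences and task-reduction setup/teardown wrapped around as needed.  */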
773c5ba7 8935
4954efd4 8936static void
8937lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8938{
8939 tree *rhs_p, block;
8940 struct omp_for_data fd, *fdp = NULL;
8941 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
8942 gbind *new_stmt;
7e5a76c8 8943 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
9a1d892b 8944 gimple_seq cnt_list = NULL, clist = NULL;
4954efd4 8945 gimple_seq oacc_head = NULL, oacc_tail = NULL;
8946 size_t i;
1e8e9920 8947
4954efd4 8948 push_gimplify_context ();
1e8e9920 8949
4954efd4 8950 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
1e8e9920 8951
4954efd4 8952 block = make_node (BLOCK);
8953 new_stmt = gimple_build_bind (NULL, NULL, block);
 8954  /* Replace at gsi right away, so that 'stmt' is no longer a member
 8955     of a sequence, as we're going to add it to a different
 8956     one below.  */
8957 gsi_replace (gsi_p, new_stmt, true);
1e8e9920 8958
4954efd4 8959 /* Move declaration of temporaries in the loop body before we make
8960 it go away. */
8961 omp_for_body = gimple_omp_body (stmt);
8962 if (!gimple_seq_empty_p (omp_for_body)
8963 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
bc7bff74 8964 {
4954efd4 8965 gbind *inner_bind
8966 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
8967 tree vars = gimple_bind_vars (inner_bind);
8968 gimple_bind_append_vars (new_stmt, vars);
8969 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
 8970	 keep them on the inner_bind and its block.  */
8971 gimple_bind_set_vars (inner_bind, NULL_TREE);
8972 if (gimple_bind_block (inner_bind))
8973 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
bc7bff74 8974 }
773c5ba7 8975
4954efd4 8976 if (gimple_omp_for_combined_into_p (stmt))
8e6b4515 8977 {
4954efd4 8978 omp_extract_for_data (stmt, &fd, NULL);
8979 fdp = &fd;
8980
8981 /* We need two temporaries with fd.loop.v type (istart/iend)
8982 and then (fd.collapse - 1) temporaries with the same
8983 type for count2 ... countN-1 vars if not constant. */
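      /* For example, a hypothetical combined collapse(3) loop whose overall
	 bound is not a compile-time constant needs count = 2 + (3 - 1) = 4
	 such temporaries.  */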
8984 size_t count = 2;
8985 tree type = fd.iter_type;
8986 if (fd.collapse > 1
8987 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
8988 count += fd.collapse - 1;
8989 bool taskreg_for
8990 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
8991 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
8992 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
9cf7bec9 8993 tree simtc = NULL;
4954efd4 8994 tree clauses = *pc;
8995 if (taskreg_for)
8996 outerc
8997 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
8998 OMP_CLAUSE__LOOPTEMP_);
9cf7bec9 8999 if (ctx->simt_stmt)
9000 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
9001 OMP_CLAUSE__LOOPTEMP_);
4954efd4 9002 for (i = 0; i < count; i++)
8e6b4515 9003 {
4954efd4 9004 tree temp;
9005 if (taskreg_for)
9006 {
9007 gcc_assert (outerc);
9008 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
9009 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
9010 OMP_CLAUSE__LOOPTEMP_);
9011 }
9012 else
8e6b4515 9013 {
9cf7bec9 9014	      /* If there are two adjacent SIMD stmts, one with a _simt_
 9015		 clause and one without, make sure they have the same
9016 decls in _looptemp_ clauses, because the outer stmt
9017 they are combined into will look up just one inner_stmt. */
9018 if (ctx->simt_stmt)
9019 temp = OMP_CLAUSE_DECL (simtc);
9020 else
9021 temp = create_tmp_var (type);
4954efd4 9022 insert_decl_map (&ctx->outer->cb, temp, temp);
8e6b4515 9023 }
4954efd4 9024 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
9025 OMP_CLAUSE_DECL (*pc) = temp;
9026 pc = &OMP_CLAUSE_CHAIN (*pc);
9cf7bec9 9027 if (ctx->simt_stmt)
9028 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
9029 OMP_CLAUSE__LOOPTEMP_);
8e6b4515 9030 }
4954efd4 9031 *pc = clauses;
8e6b4515 9032 }
9033
4954efd4 9034 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
9035 dlist = NULL;
9036 body = NULL;
7e5a76c8 9037 tree rclauses
9038 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
9039 OMP_CLAUSE_REDUCTION);
9040 tree rtmp = NULL_TREE;
9041 if (rclauses)
9042 {
9043 tree type = build_pointer_type (pointer_sized_int_node);
9044 tree temp = create_tmp_var (type);
9045 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
9046 OMP_CLAUSE_DECL (c) = temp;
9047 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
9048 gimple_omp_for_set_clauses (stmt, c);
9049 lower_omp_task_reductions (ctx, OMP_FOR,
9050 gimple_omp_for_clauses (stmt),
9051 &tred_ilist, &tred_dlist);
9052 rclauses = c;
9053 rtmp = make_ssa_name (type);
9054 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
9055 }
9056
4f4b92d8 9057 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
9058 ctx);
9059
4954efd4 9060 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
9061 fdp);
7e5a76c8 9062 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
9063 gimple_omp_for_pre_body (stmt));
79acaae1 9064
4954efd4 9065 lower_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 9066
4954efd4 9067 /* Lower the header expressions. At this point, we can assume that
9068 the header is of the form:
773c5ba7 9069
4954efd4 9070 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
79acaae1 9071
4954efd4 9072 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
9073 using the .omp_data_s mapping, if needed. */
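  /* For instance (a sketch): if VAL2 refers to a variable that was remapped
     for this construct, the expression is no longer a gimple invariant here,
     so the loop below evaluates it into a formal temporary emitted ahead of
     the GIMPLE_OMP_FOR (into CNT_LIST), keeping the header itself in the
     canonical form shown above.  */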
9074 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
9075 {
9076 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
9077 if (!is_gimple_min_invariant (*rhs_p))
7e5a76c8 9078 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
36ac6a87 9079 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
9080 recompute_tree_invariant_for_addr_expr (*rhs_p);
773c5ba7 9081
4954efd4 9082 rhs_p = gimple_omp_for_final_ptr (stmt, i);
9083 if (!is_gimple_min_invariant (*rhs_p))
7e5a76c8 9084 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
36ac6a87 9085 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
9086 recompute_tree_invariant_for_addr_expr (*rhs_p);
43895be5 9087
4954efd4 9088 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
9089 if (!is_gimple_min_invariant (*rhs_p))
7e5a76c8 9090 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
4954efd4 9091 }
7e5a76c8 9092 if (rclauses)
9093 gimple_seq_add_seq (&tred_ilist, cnt_list);
9094 else
9095 gimple_seq_add_seq (&body, cnt_list);
1e8e9920 9096
4954efd4 9097 /* Once lowered, extract the bounds and clauses. */
9098 omp_extract_for_data (stmt, &fd, NULL);
1e8e9920 9099
4954efd4 9100 if (is_gimple_omp_oacc (ctx->stmt)
9101 && !ctx_in_oacc_kernels_region (ctx))
9102 lower_oacc_head_tail (gimple_location (stmt),
9103 gimple_omp_for_clauses (stmt),
9104 &oacc_head, &oacc_tail, ctx);
1e8e9920 9105
7c6746c9 9106 /* Add OpenACC partitioning and reduction markers just before the loop. */
4954efd4 9107 if (oacc_head)
9108 gimple_seq_add_seq (&body, oacc_head);
7c6746c9 9109
9a1d892b 9110 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
bc7bff74 9111
4954efd4 9112 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
9113 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
43895be5 9114 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9115 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
9116 {
4954efd4 9117 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
9118 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
9119 OMP_CLAUSE_LINEAR_STEP (c)
9120 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
9121 ctx);
43895be5 9122 }
bc7bff74 9123
4954efd4 9124 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
9125 && gimple_omp_for_grid_phony (stmt));
9126 if (!phony_loop)
9127 gimple_seq_add_stmt (&body, stmt);
9128 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
9129
9130 if (!phony_loop)
9131 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
9132 fd.loop.v));
79acaae1 9133
4954efd4 9134 /* After the loop, add exit clauses. */
9a1d892b 9135 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
9136
9137 if (clist)
9138 {
9139 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
9140 gcall *g = gimple_build_call (fndecl, 0);
9141 gimple_seq_add_stmt (&body, g);
9142 gimple_seq_add_seq (&body, clist);
9143 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
9144 g = gimple_build_call (fndecl, 0);
9145 gimple_seq_add_stmt (&body, g);
9146 }
48e1416a 9147
4954efd4 9148 if (ctx->cancellable)
9149 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
773c5ba7 9150
4954efd4 9151 gimple_seq_add_seq (&body, dlist);
1e8e9920 9152
7e5a76c8 9153 if (rclauses)
9154 {
9155 gimple_seq_add_seq (&tred_ilist, body);
9156 body = tred_ilist;
9157 }
9158
4954efd4 9159 body = maybe_catch_exception (body);
1e8e9920 9160
4954efd4 9161 if (!phony_loop)
bc7bff74 9162 {
4954efd4 9163 /* Region exit marker goes at the end of the loop body. */
7e5a76c8 9164 gimple *g = gimple_build_omp_return (fd.have_nowait);
9165 gimple_seq_add_stmt (&body, g);
9166
9167 gimple_seq_add_seq (&body, tred_dlist);
9168
9169 maybe_add_implicit_barrier_cancel (ctx, g, &body);
9170
9171 if (rclauses)
9172 OMP_CLAUSE_DECL (rclauses) = rtmp;
bc7bff74 9173 }
1e8e9920 9174
4954efd4 9175 /* Add OpenACC joining and reduction markers just after the loop. */
9176 if (oacc_tail)
9177 gimple_seq_add_seq (&body, oacc_tail);
79acaae1 9178
4954efd4 9179 pop_gimplify_context (new_stmt);
79acaae1 9180
4954efd4 9181 gimple_bind_append_vars (new_stmt, ctx->block_vars);
2918f4e9 9182 maybe_remove_omp_member_access_dummy_vars (new_stmt);
4954efd4 9183 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
9184 if (BLOCK_VARS (block))
9185 TREE_USED (block) = 1;
79acaae1 9186
4954efd4 9187 gimple_bind_set_body (new_stmt, body);
9188 gimple_omp_set_body (stmt, NULL);
9189 gimple_omp_for_set_pre_body (stmt, NULL);
9190}
264aa959 9191
4954efd4 9192/* Callback for walk_stmts. Check if the current statement only contains
9193 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
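/* Used by lower_omp_taskreg to detect the combined case: for a body that is
   (a sketch)

     #pragma omp parallel
       {
	 #pragma omp for
	 for (...) ...
       }

   the walk leaves *INFO == 1 and the parallel is marked combined; any other
   statement in the body forces *INFO to -1.  */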
79acaae1 9194
4954efd4 9195static tree
9196check_combined_parallel (gimple_stmt_iterator *gsi_p,
9197 bool *handled_ops_p,
9198 struct walk_stmt_info *wi)
9199{
9200 int *info = (int *) wi->info;
9201 gimple *stmt = gsi_stmt (*gsi_p);
79acaae1 9202
4954efd4 9203 *handled_ops_p = true;
9204 switch (gimple_code (stmt))
bc7bff74 9205 {
4954efd4 9206 WALK_SUBSTMTS;
fc1d58e3 9207
bce107d7 9208 case GIMPLE_DEBUG:
9209 break;
4954efd4 9210 case GIMPLE_OMP_FOR:
9211 case GIMPLE_OMP_SECTIONS:
9212 *info = *info == 0 ? 1 : -1;
9213 break;
9214 default:
9215 *info = -1;
9216 break;
bc7bff74 9217 }
4954efd4 9218 return NULL;
1e8e9920 9219}
9220
4954efd4 9221struct omp_taskcopy_context
9222{
9223 /* This field must be at the beginning, as we do "inheritance": Some
9224 callback functions for tree-inline.c (e.g., omp_copy_decl)
9225 receive a copy_body_data pointer that is up-casted to an
9226 omp_context pointer. */
9227 copy_body_data cb;
9228 omp_context *ctx;
9229};
40750995 9230
4954efd4 9231static tree
9232task_copyfn_copy_decl (tree var, copy_body_data *cb)
9233{
9234 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
40750995 9235
4954efd4 9236 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
9237 return create_tmp_var (TREE_TYPE (var));
40750995 9238
4954efd4 9239 return var;
9240}
40750995 9241
4954efd4 9242static tree
9243task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
40750995 9244{
4954efd4 9245 tree name, new_fields = NULL, type, f;
40750995 9246
4954efd4 9247 type = lang_hooks.types.make_type (RECORD_TYPE);
9248 name = DECL_NAME (TYPE_NAME (orig_type));
9249 name = build_decl (gimple_location (tcctx->ctx->stmt),
9250 TYPE_DECL, name, type);
9251 TYPE_NAME (type) = name;
40750995 9252
4954efd4 9253 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
40750995 9254 {
4954efd4 9255 tree new_f = copy_node (f);
9256 DECL_CONTEXT (new_f) = type;
9257 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
9258 TREE_CHAIN (new_f) = new_fields;
9259 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9260 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9261 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
9262 &tcctx->cb, NULL);
9263 new_fields = new_f;
9264 tcctx->cb.decl_map->put (f, new_f);
40750995 9265 }
4954efd4 9266 TYPE_FIELDS (type) = nreverse (new_fields);
9267 layout_type (type);
9268 return type;
9269}
40750995 9270
4954efd4 9271/* Create task copyfn. */
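/* A brief sketch of the function built here: it receives two record
   pointers (ARG, pointing at the task's own data block, and SARG, pointing
   at the sender block filled in by the parent) and copies shared variable
   pointers and firstprivate values from SARG's record into ARG's, using the
   language copy constructors where required and handling VLAs last.  */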
40750995 9272
4954efd4 9273static void
9274create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
9275{
9276 struct function *child_cfun;
9277 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
9278 tree record_type, srecord_type, bind, list;
9279 bool record_needs_remap = false, srecord_needs_remap = false;
9280 splay_tree_node n;
9281 struct omp_taskcopy_context tcctx;
9282 location_t loc = gimple_location (task_stmt);
99d30117 9283 size_t looptempno = 0;
40750995 9284
4954efd4 9285 child_fn = gimple_omp_task_copy_fn (task_stmt);
9286 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
9287 gcc_assert (child_cfun->cfg == NULL);
9288 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
40750995 9289
4954efd4 9290 /* Reset DECL_CONTEXT on function arguments. */
9291 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
9292 DECL_CONTEXT (t) = child_fn;
40750995 9293
4954efd4 9294 /* Populate the function. */
9295 push_gimplify_context ();
9296 push_cfun (child_cfun);
40750995 9297
4954efd4 9298 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
9299 TREE_SIDE_EFFECTS (bind) = 1;
9300 list = NULL;
9301 DECL_SAVED_TREE (child_fn) = bind;
9302 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
40750995 9303
4954efd4 9304 /* Remap src and dst argument types if needed. */
9305 record_type = ctx->record_type;
9306 srecord_type = ctx->srecord_type;
9307 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
9308 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9309 {
9310 record_needs_remap = true;
9311 break;
9312 }
9313 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
9314 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9315 {
9316 srecord_needs_remap = true;
9317 break;
9318 }
40750995 9319
4954efd4 9320 if (record_needs_remap || srecord_needs_remap)
40750995 9321 {
4954efd4 9322 memset (&tcctx, '\0', sizeof (tcctx));
9323 tcctx.cb.src_fn = ctx->cb.src_fn;
9324 tcctx.cb.dst_fn = child_fn;
9325 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
9326 gcc_checking_assert (tcctx.cb.src_node);
9327 tcctx.cb.dst_node = tcctx.cb.src_node;
9328 tcctx.cb.src_cfun = ctx->cb.src_cfun;
9329 tcctx.cb.copy_decl = task_copyfn_copy_decl;
9330 tcctx.cb.eh_lp_nr = 0;
9331 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
9332 tcctx.cb.decl_map = new hash_map<tree, tree>;
9333 tcctx.ctx = ctx;
40750995 9334
4954efd4 9335 if (record_needs_remap)
9336 record_type = task_copyfn_remap_type (&tcctx, record_type);
9337 if (srecord_needs_remap)
9338 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
40750995 9339 }
9340 else
4954efd4 9341 tcctx.cb.decl_map = NULL;
40750995 9342
4954efd4 9343 arg = DECL_ARGUMENTS (child_fn);
9344 TREE_TYPE (arg) = build_pointer_type (record_type);
9345 sarg = DECL_CHAIN (arg);
9346 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
40750995 9347
4954efd4 9348 /* First pass: initialize temporaries used in record_type and srecord_type
9349 sizes and field offsets. */
9350 if (tcctx.cb.decl_map)
9351 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9352 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9353 {
9354 tree *p;
40750995 9355
4954efd4 9356 decl = OMP_CLAUSE_DECL (c);
9357 p = tcctx.cb.decl_map->get (decl);
9358 if (p == NULL)
9359 continue;
9360 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9361 sf = (tree) n->value;
9362 sf = *tcctx.cb.decl_map->get (sf);
9363 src = build_simple_mem_ref_loc (loc, sarg);
9364 src = omp_build_component_ref (src, sf);
9365 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
9366 append_to_statement_list (t, &list);
9367 }
40750995 9368
4954efd4 9369 /* Second pass: copy shared var pointers and copy construct non-VLA
9370 firstprivate vars. */
9371 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9372 switch (OMP_CLAUSE_CODE (c))
9373 {
9374 splay_tree_key key;
9375 case OMP_CLAUSE_SHARED:
9376 decl = OMP_CLAUSE_DECL (c);
9377 key = (splay_tree_key) decl;
9378 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
9379 key = (splay_tree_key) &DECL_UID (decl);
9380 n = splay_tree_lookup (ctx->field_map, key);
9381 if (n == NULL)
9382 break;
9383 f = (tree) n->value;
9384 if (tcctx.cb.decl_map)
9385 f = *tcctx.cb.decl_map->get (f);
9386 n = splay_tree_lookup (ctx->sfield_map, key);
9387 sf = (tree) n->value;
9388 if (tcctx.cb.decl_map)
9389 sf = *tcctx.cb.decl_map->get (sf);
9390 src = build_simple_mem_ref_loc (loc, sarg);
9391 src = omp_build_component_ref (src, sf);
9392 dst = build_simple_mem_ref_loc (loc, arg);
9393 dst = omp_build_component_ref (dst, f);
9394 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9395 append_to_statement_list (t, &list);
9396 break;
7e5a76c8 9397 case OMP_CLAUSE_REDUCTION:
9398 case OMP_CLAUSE_IN_REDUCTION:
9399 decl = OMP_CLAUSE_DECL (c);
9400 if (TREE_CODE (decl) == MEM_REF)
9401 {
9402 decl = TREE_OPERAND (decl, 0);
9403 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
9404 decl = TREE_OPERAND (decl, 0);
9405 if (TREE_CODE (decl) == INDIRECT_REF
9406 || TREE_CODE (decl) == ADDR_EXPR)
9407 decl = TREE_OPERAND (decl, 0);
9408 }
9409 key = (splay_tree_key) decl;
9410 n = splay_tree_lookup (ctx->field_map, key);
9411 if (n == NULL)
9412 break;
9413 f = (tree) n->value;
9414 if (tcctx.cb.decl_map)
9415 f = *tcctx.cb.decl_map->get (f);
9416 n = splay_tree_lookup (ctx->sfield_map, key);
9417 sf = (tree) n->value;
9418 if (tcctx.cb.decl_map)
9419 sf = *tcctx.cb.decl_map->get (sf);
9420 src = build_simple_mem_ref_loc (loc, sarg);
9421 src = omp_build_component_ref (src, sf);
9422 if (decl != OMP_CLAUSE_DECL (c)
9423 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9424 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9425 src = build_simple_mem_ref_loc (loc, src);
9426 dst = build_simple_mem_ref_loc (loc, arg);
9427 dst = omp_build_component_ref (dst, f);
9428 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9429 append_to_statement_list (t, &list);
9430 break;
99d30117 9431 case OMP_CLAUSE__LOOPTEMP_:
9432 /* Fields for first two _looptemp_ clauses are initialized by
9433 GOMP_taskloop*, the rest are handled like firstprivate. */
9434 if (looptempno < 2)
9435 {
9436 looptempno++;
9437 break;
9438 }
9439 /* FALLTHRU */
7e5a76c8 9440 case OMP_CLAUSE__REDUCTEMP_:
4954efd4 9441 case OMP_CLAUSE_FIRSTPRIVATE:
9442 decl = OMP_CLAUSE_DECL (c);
9443 if (is_variable_sized (decl))
9444 break;
9445 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9446 if (n == NULL)
9447 break;
9448 f = (tree) n->value;
9449 if (tcctx.cb.decl_map)
9450 f = *tcctx.cb.decl_map->get (f);
9451 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9452 if (n != NULL)
9453 {
9454 sf = (tree) n->value;
9455 if (tcctx.cb.decl_map)
9456 sf = *tcctx.cb.decl_map->get (sf);
9457 src = build_simple_mem_ref_loc (loc, sarg);
9458 src = omp_build_component_ref (src, sf);
9459 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
9460 src = build_simple_mem_ref_loc (loc, src);
9461 }
9462 else
9463 src = decl;
9464 dst = build_simple_mem_ref_loc (loc, arg);
9465 dst = omp_build_component_ref (dst, f);
7e5a76c8 9466 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
99d30117 9467 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9468 else
9469 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
4954efd4 9470 append_to_statement_list (t, &list);
9471 break;
9472 case OMP_CLAUSE_PRIVATE:
9473 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
9474 break;
9475 decl = OMP_CLAUSE_DECL (c);
9476 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9477 f = (tree) n->value;
9478 if (tcctx.cb.decl_map)
9479 f = *tcctx.cb.decl_map->get (f);
9480 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9481 if (n != NULL)
9482 {
9483 sf = (tree) n->value;
9484 if (tcctx.cb.decl_map)
9485 sf = *tcctx.cb.decl_map->get (sf);
9486 src = build_simple_mem_ref_loc (loc, sarg);
9487 src = omp_build_component_ref (src, sf);
9488 if (use_pointer_for_field (decl, NULL))
9489 src = build_simple_mem_ref_loc (loc, src);
9490 }
9491 else
9492 src = decl;
9493 dst = build_simple_mem_ref_loc (loc, arg);
9494 dst = omp_build_component_ref (dst, f);
9495 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9496 append_to_statement_list (t, &list);
9497 break;
9498 default:
9499 break;
9500 }
3d483a94 9501
4954efd4 9502 /* Last pass: handle VLA firstprivates. */
9503 if (tcctx.cb.decl_map)
9504 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9505 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9506 {
9507 tree ind, ptr, df;
3d483a94 9508
4954efd4 9509 decl = OMP_CLAUSE_DECL (c);
9510 if (!is_variable_sized (decl))
9511 continue;
9512 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9513 if (n == NULL)
9514 continue;
9515 f = (tree) n->value;
9516 f = *tcctx.cb.decl_map->get (f);
9517 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
9518 ind = DECL_VALUE_EXPR (decl);
9519 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
9520 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
9521 n = splay_tree_lookup (ctx->sfield_map,
9522 (splay_tree_key) TREE_OPERAND (ind, 0));
9523 sf = (tree) n->value;
9524 sf = *tcctx.cb.decl_map->get (sf);
9525 src = build_simple_mem_ref_loc (loc, sarg);
9526 src = omp_build_component_ref (src, sf);
9527 src = build_simple_mem_ref_loc (loc, src);
9528 dst = build_simple_mem_ref_loc (loc, arg);
9529 dst = omp_build_component_ref (dst, f);
9530 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9531 append_to_statement_list (t, &list);
9532 n = splay_tree_lookup (ctx->field_map,
9533 (splay_tree_key) TREE_OPERAND (ind, 0));
9534 df = (tree) n->value;
9535 df = *tcctx.cb.decl_map->get (df);
9536 ptr = build_simple_mem_ref_loc (loc, arg);
9537 ptr = omp_build_component_ref (ptr, df);
9538 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
9539 build_fold_addr_expr_loc (loc, dst));
9540 append_to_statement_list (t, &list);
9541 }
3d483a94 9542
4954efd4 9543 t = build1 (RETURN_EXPR, void_type_node, NULL);
9544 append_to_statement_list (t, &list);
3d483a94 9545
4954efd4 9546 if (tcctx.cb.decl_map)
9547 delete tcctx.cb.decl_map;
9548 pop_gimplify_context (NULL);
9549 BIND_EXPR_BODY (bind) = list;
9550 pop_cfun ();
9551}
3d483a94 9552
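/* Lower depend clauses into the flat pointer array handed to the runtime.
   A sketch of the layout built below: with only in/out/inout dependences
   the array is

     { total-count, out/inout-count, addr, addr, ... }

   while the presence of mutexinoutset or depobj dependences switches to a
   five-entry header { 0, total-count, out/inout-count, mutexinoutset-count,
   in-count } followed by the addresses grouped by kind.  A new
   OMP_CLAUSE_DEPEND clause of kind OMP_CLAUSE_DEPEND_LAST pointing at the
   array is then prepended to the clause list.  */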
9553static void
4954efd4 9554lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
3d483a94 9555{
4954efd4 9556 tree c, clauses;
9557 gimple *g;
7e5a76c8 9558 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
4954efd4 9559
9560 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
9561 gcc_assert (clauses);
9562 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9563 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
9564 switch (OMP_CLAUSE_DEPEND_KIND (c))
9565 {
7e5a76c8 9566 case OMP_CLAUSE_DEPEND_LAST:
9567 /* Lowering already done at gimplification. */
9568 return;
4954efd4 9569 case OMP_CLAUSE_DEPEND_IN:
7e5a76c8 9570 cnt[2]++;
4954efd4 9571 break;
9572 case OMP_CLAUSE_DEPEND_OUT:
9573 case OMP_CLAUSE_DEPEND_INOUT:
7e5a76c8 9574 cnt[0]++;
9575 break;
9576 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
9577 cnt[1]++;
9578 break;
9579 case OMP_CLAUSE_DEPEND_DEPOBJ:
9580 cnt[3]++;
4954efd4 9581 break;
9582 case OMP_CLAUSE_DEPEND_SOURCE:
9583 case OMP_CLAUSE_DEPEND_SINK:
9584 /* FALLTHRU */
9585 default:
9586 gcc_unreachable ();
9587 }
7e5a76c8 9588 if (cnt[1] || cnt[3])
9589 idx = 5;
9590 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
9591 tree type = build_array_type_nelts (ptr_type_node, total + idx);
4954efd4 9592 tree array = create_tmp_var (type);
9593 TREE_ADDRESSABLE (array) = 1;
9594 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
9595 NULL_TREE);
7e5a76c8 9596 if (idx == 5)
9597 {
9598 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
9599 gimple_seq_add_stmt (iseq, g);
9600 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
9601 NULL_TREE);
9602 }
9603 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
4954efd4 9604 gimple_seq_add_stmt (iseq, g);
7e5a76c8 9605 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
9606 {
9607 r = build4 (ARRAY_REF, ptr_type_node, array,
9608 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
9609 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
9610 gimple_seq_add_stmt (iseq, g);
9611 }
9612 for (i = 0; i < 4; i++)
3d483a94 9613 {
7e5a76c8 9614 if (cnt[i] == 0)
4954efd4 9615 continue;
9616 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7e5a76c8 9617 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
9618 continue;
9619 else
4954efd4 9620 {
7e5a76c8 9621 switch (OMP_CLAUSE_DEPEND_KIND (c))
9622 {
9623 case OMP_CLAUSE_DEPEND_IN:
9624 if (i != 2)
9625 continue;
9626 break;
9627 case OMP_CLAUSE_DEPEND_OUT:
9628 case OMP_CLAUSE_DEPEND_INOUT:
9629 if (i != 0)
9630 continue;
9631 break;
9632 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
9633 if (i != 1)
9634 continue;
9635 break;
9636 case OMP_CLAUSE_DEPEND_DEPOBJ:
9637 if (i != 3)
9638 continue;
9639 break;
9640 default:
9641 gcc_unreachable ();
9642 }
4954efd4 9643 tree t = OMP_CLAUSE_DECL (c);
9644 t = fold_convert (ptr_type_node, t);
9645 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
9646 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
9647 NULL_TREE, NULL_TREE);
9648 g = gimple_build_assign (r, t);
9649 gimple_seq_add_stmt (iseq, g);
9650 }
3d483a94 9651 }
4954efd4 9652 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7e5a76c8 9653 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
4954efd4 9654 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9655 OMP_CLAUSE_CHAIN (c) = *pclauses;
9656 *pclauses = c;
9657 tree clobber = build_constructor (type, NULL);
9658 TREE_THIS_VOLATILE (clobber) = 1;
9659 g = gimple_build_assign (array, clobber);
9660 gimple_seq_add_stmt (oseq, g);
9661}
9662
9663/* Lower the OpenMP parallel or task directive in the current statement
9664 in GSI_P. CTX holds context information for the directive. */
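/* In outline (a sketch that elides the special cases handled below): values
   for the shared and firstprivate clauses are gathered into the .omp_data_o
   sender record built here, its address becomes the data argument of the
   outlined child function, and inside the child the receiver declaration is
   initialized from that pointer before the lowered body executes.  */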
3d483a94 9665
4954efd4 9666static void
9667lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9668{
9669 tree clauses;
9670 tree child_fn, t;
9671 gimple *stmt = gsi_stmt (*gsi_p);
9672 gbind *par_bind, *bind, *dep_bind = NULL;
7e5a76c8 9673 gimple_seq par_body;
4954efd4 9674 location_t loc = gimple_location (stmt);
3d483a94 9675
4954efd4 9676 clauses = gimple_omp_taskreg_clauses (stmt);
7e5a76c8 9677 if (gimple_code (stmt) == GIMPLE_OMP_TASK
9678 && gimple_omp_task_taskwait_p (stmt))
9679 {
9680 par_bind = NULL;
9681 par_body = NULL;
9682 }
9683 else
9684 {
9685 par_bind
9686 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
9687 par_body = gimple_bind_body (par_bind);
9688 }
4954efd4 9689 child_fn = ctx->cb.dst_fn;
9690 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
9691 && !gimple_omp_parallel_combined_p (stmt))
3d483a94 9692 {
4954efd4 9693 struct walk_stmt_info wi;
9694 int ws_num = 0;
3d483a94 9695
4954efd4 9696 memset (&wi, 0, sizeof (wi));
9697 wi.info = &ws_num;
9698 wi.val_only = true;
9699 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
9700 if (ws_num == 1)
9701 gimple_omp_parallel_set_combined_p (stmt, true);
3d483a94 9702 }
4954efd4 9703 gimple_seq dep_ilist = NULL;
9704 gimple_seq dep_olist = NULL;
9705 if (gimple_code (stmt) == GIMPLE_OMP_TASK
9706 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
bc7bff74 9707 {
4954efd4 9708 push_gimplify_context ();
9709 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9710 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
9711 &dep_ilist, &dep_olist);
bab6706a 9712 }
bab6706a 9713
7e5a76c8 9714 if (gimple_code (stmt) == GIMPLE_OMP_TASK
9715 && gimple_omp_task_taskwait_p (stmt))
9716 {
9717 if (dep_bind)
9718 {
9719 gsi_replace (gsi_p, dep_bind, true);
9720 gimple_bind_add_seq (dep_bind, dep_ilist);
9721 gimple_bind_add_stmt (dep_bind, stmt);
9722 gimple_bind_add_seq (dep_bind, dep_olist);
9723 pop_gimplify_context (dep_bind);
9724 }
9725 return;
9726 }
9727
4954efd4 9728 if (ctx->srecord_type)
9729 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
bab6706a 9730
7e5a76c8 9731 gimple_seq tskred_ilist = NULL;
9732 gimple_seq tskred_olist = NULL;
9733 if ((is_task_ctx (ctx)
9734 && gimple_omp_task_taskloop_p (ctx->stmt)
9735 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
9736 OMP_CLAUSE_REDUCTION))
9737 || (is_parallel_ctx (ctx)
9738 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
9739 OMP_CLAUSE__REDUCTEMP_)))
9740 {
9741 if (dep_bind == NULL)
9742 {
9743 push_gimplify_context ();
9744 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9745 }
9746 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
9747 : OMP_PARALLEL,
9748 gimple_omp_taskreg_clauses (ctx->stmt),
9749 &tskred_ilist, &tskred_olist);
9750 }
9751
4954efd4 9752 push_gimplify_context ();
3d483a94 9753
7e5a76c8 9754 gimple_seq par_olist = NULL;
9755 gimple_seq par_ilist = NULL;
9756 gimple_seq par_rlist = NULL;
4954efd4 9757 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
9758 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
9759 if (phony_construct && ctx->record_type)
bab6706a 9760 {
4954efd4 9761 gcc_checking_assert (!ctx->receiver_decl);
9762 ctx->receiver_decl = create_tmp_var
9763 (build_reference_type (ctx->record_type), ".omp_rec");
bab6706a 9764 }
4954efd4 9765 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
9766 lower_omp (&par_body, ctx);
9767 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
9a1d892b 9768 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
bab6706a 9769
4954efd4 9770 /* Declare all the variables created by mapping and the variables
9771 declared in the scope of the parallel body. */
9772 record_vars_into (ctx->block_vars, child_fn);
2918f4e9 9773 maybe_remove_omp_member_access_dummy_vars (par_bind);
4954efd4 9774 record_vars_into (gimple_bind_vars (par_bind), child_fn);
3d483a94 9775
4954efd4 9776 if (ctx->record_type)
3d483a94 9777 {
4954efd4 9778 ctx->sender_decl
9779 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
9780 : ctx->record_type, ".omp_data_o");
9781 DECL_NAMELESS (ctx->sender_decl) = 1;
9782 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9783 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
3d483a94 9784 }
3d483a94 9785
7e5a76c8 9786 gimple_seq olist = NULL;
9787 gimple_seq ilist = NULL;
4954efd4 9788 lower_send_clauses (clauses, &ilist, &olist, ctx);
9789 lower_send_shared_vars (&ilist, &olist, ctx);
bab6706a 9790
4954efd4 9791 if (ctx->record_type)
3d483a94 9792 {
4954efd4 9793 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
9794 TREE_THIS_VOLATILE (clobber) = 1;
9795 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9796 clobber));
43895be5 9797 }
43895be5 9798
4954efd4 9799 /* Once all the expansions are done, sequence all the different
9800 fragments inside gimple_omp_body. */
43895be5 9801
7e5a76c8 9802 gimple_seq new_body = NULL;
43895be5 9803
4954efd4 9804 if (ctx->record_type)
43895be5 9805 {
4954efd4 9806 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9807 /* fixup_child_record_type might have changed receiver_decl's type. */
9808 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9809 gimple_seq_add_stmt (&new_body,
9810 gimple_build_assign (ctx->receiver_decl, t));
43895be5 9811 }
9812
4954efd4 9813 gimple_seq_add_seq (&new_body, par_ilist);
9814 gimple_seq_add_seq (&new_body, par_body);
9815 gimple_seq_add_seq (&new_body, par_rlist);
9816 if (ctx->cancellable)
9817 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
9818 gimple_seq_add_seq (&new_body, par_olist);
9819 new_body = maybe_catch_exception (new_body);
9820 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
9821 gimple_seq_add_stmt (&new_body,
9822 gimple_build_omp_continue (integer_zero_node,
9823 integer_zero_node));
9824 if (!phony_construct)
43895be5 9825 {
4954efd4 9826 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9827 gimple_omp_set_body (stmt, new_body);
43895be5 9828 }
9829
7e5a76c8 9830 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
9831 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9832 else
9833 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
4954efd4 9834 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
9835 gimple_bind_add_seq (bind, ilist);
9836 if (!phony_construct)
9837 gimple_bind_add_stmt (bind, stmt);
43895be5 9838 else
4954efd4 9839 gimple_bind_add_seq (bind, new_body);
9840 gimple_bind_add_seq (bind, olist);
43895be5 9841
4954efd4 9842 pop_gimplify_context (NULL);
9843
9844 if (dep_bind)
43895be5 9845 {
4954efd4 9846 gimple_bind_add_seq (dep_bind, dep_ilist);
7e5a76c8 9847 gimple_bind_add_seq (dep_bind, tskred_ilist);
4954efd4 9848 gimple_bind_add_stmt (dep_bind, bind);
7e5a76c8 9849 gimple_bind_add_seq (dep_bind, tskred_olist);
4954efd4 9850 gimple_bind_add_seq (dep_bind, dep_olist);
9851 pop_gimplify_context (dep_bind);
43895be5 9852 }
43895be5 9853}
9854
4954efd4 9855/* Lower the GIMPLE_OMP_TARGET in the current statement
9856 in GSI_P. CTX holds context information for the directive. */
43895be5 9857
9858static void
4954efd4 9859lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
43895be5 9860{
4954efd4 9861 tree clauses;
9862 tree child_fn, t, c;
9863 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
9864 gbind *tgt_bind, *bind, *dep_bind = NULL;
9865 gimple_seq tgt_body, olist, ilist, fplist, new_body;
9866 location_t loc = gimple_location (stmt);
9867 bool offloaded, data_region;
9868 unsigned int map_cnt = 0;
43895be5 9869
4954efd4 9870 offloaded = is_gimple_omp_offloaded (stmt);
9871 switch (gimple_omp_target_kind (stmt))
43895be5 9872 {
4954efd4 9873 case GF_OMP_TARGET_KIND_REGION:
9874 case GF_OMP_TARGET_KIND_UPDATE:
9875 case GF_OMP_TARGET_KIND_ENTER_DATA:
9876 case GF_OMP_TARGET_KIND_EXIT_DATA:
9877 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
9878 case GF_OMP_TARGET_KIND_OACC_KERNELS:
9879 case GF_OMP_TARGET_KIND_OACC_UPDATE:
9880 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
9881 case GF_OMP_TARGET_KIND_OACC_DECLARE:
9882 data_region = false;
9883 break;
9884 case GF_OMP_TARGET_KIND_DATA:
9885 case GF_OMP_TARGET_KIND_OACC_DATA:
9886 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
9887 data_region = true;
9888 break;
9889 default:
9890 gcc_unreachable ();
3d483a94 9891 }
3d483a94 9892
4954efd4 9893 clauses = gimple_omp_target_clauses (stmt);
43895be5 9894
4954efd4 9895 gimple_seq dep_ilist = NULL;
9896 gimple_seq dep_olist = NULL;
9897 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
43895be5 9898 {
4954efd4 9899 push_gimplify_context ();
9900 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9901 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
9902 &dep_ilist, &dep_olist);
43895be5 9903 }
1e8e9920 9904
4954efd4 9905 tgt_bind = NULL;
9906 tgt_body = NULL;
9907 if (offloaded)
a8e785ba 9908 {
4954efd4 9909 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
9910 tgt_body = gimple_bind_body (tgt_bind);
a8e785ba 9911 }
4954efd4 9912 else if (data_region)
9913 tgt_body = gimple_omp_body (stmt);
9914 child_fn = ctx->cb.dst_fn;
a8e785ba 9915
4954efd4 9916 push_gimplify_context ();
9917 fplist = NULL;
a8e785ba 9918
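  /* First pass over the clauses: count how many entries the map arrays
     will need (MAP_CNT) and install DECL_VALUE_EXPRs for variables that
     are remapped inside the target region.  */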
4954efd4 9919 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9920 switch (OMP_CLAUSE_CODE (c))
9921 {
9922 tree var, x;
a8e785ba 9923
4954efd4 9924 default:
9925 break;
9926 case OMP_CLAUSE_MAP:
9927#if CHECKING_P
9928 /* First check what we're prepared to handle in the following. */
9929 switch (OMP_CLAUSE_MAP_KIND (c))
9930 {
9931 case GOMP_MAP_ALLOC:
9932 case GOMP_MAP_TO:
9933 case GOMP_MAP_FROM:
9934 case GOMP_MAP_TOFROM:
9935 case GOMP_MAP_POINTER:
9936 case GOMP_MAP_TO_PSET:
9937 case GOMP_MAP_DELETE:
9938 case GOMP_MAP_RELEASE:
9939 case GOMP_MAP_ALWAYS_TO:
9940 case GOMP_MAP_ALWAYS_FROM:
9941 case GOMP_MAP_ALWAYS_TOFROM:
9942 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9943 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9944 case GOMP_MAP_STRUCT:
9945 case GOMP_MAP_ALWAYS_POINTER:
9946 break;
9947 case GOMP_MAP_FORCE_ALLOC:
9948 case GOMP_MAP_FORCE_TO:
9949 case GOMP_MAP_FORCE_FROM:
9950 case GOMP_MAP_FORCE_TOFROM:
9951 case GOMP_MAP_FORCE_PRESENT:
9952 case GOMP_MAP_FORCE_DEVICEPTR:
9953 case GOMP_MAP_DEVICE_RESIDENT:
9954 case GOMP_MAP_LINK:
9955 gcc_assert (is_gimple_omp_oacc (stmt));
9956 break;
9957 default:
9958 gcc_unreachable ();
9959 }
9960#endif
9961 /* FALLTHRU */
9962 case OMP_CLAUSE_TO:
9963 case OMP_CLAUSE_FROM:
9964 oacc_firstprivate:
9965 var = OMP_CLAUSE_DECL (c);
9966 if (!DECL_P (var))
9967 {
9968 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
9969 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9970 && (OMP_CLAUSE_MAP_KIND (c)
9971 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
9972 map_cnt++;
9973 continue;
9974 }
a8e785ba 9975
4954efd4 9976 if (DECL_SIZE (var)
9977 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9978 {
9979 tree var2 = DECL_VALUE_EXPR (var);
9980 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9981 var2 = TREE_OPERAND (var2, 0);
9982 gcc_assert (DECL_P (var2));
9983 var = var2;
9984 }
a8e785ba 9985
4954efd4 9986 if (offloaded
9987 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9988 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9989 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9990 {
9991 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9992 {
9993 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
9994 && varpool_node::get_create (var)->offloadable)
9995 continue;
a8e785ba 9996
4954efd4 9997 tree type = build_pointer_type (TREE_TYPE (var));
9998 tree new_var = lookup_decl (var, ctx);
9999 x = create_tmp_var_raw (type, get_name (new_var));
10000 gimple_add_tmp_var (x);
10001 x = build_simple_mem_ref (x);
10002 SET_DECL_VALUE_EXPR (new_var, x);
10003 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10004 }
10005 continue;
10006 }
a8e785ba 10007
4954efd4 10008 if (!maybe_lookup_field (var, ctx))
10009 continue;
a8e785ba 10010
4954efd4 10011 /* Don't remap oacc parallel reduction variables, because the
10012 intermediate result must be local to each gang. */
10013 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10014 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
10015 {
10016 x = build_receiver_ref (var, true, ctx);
10017 tree new_var = lookup_decl (var, ctx);
a8e785ba 10018
4954efd4 10019 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10020 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
10021 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
10022 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10023 x = build_simple_mem_ref (x);
10024 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10025 {
10026 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
a209c8cc 10027 if (omp_is_reference (new_var)
10028 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
4954efd4 10029 {
10030 /* Create a local object to hold the instance
10031 value. */
10032 tree type = TREE_TYPE (TREE_TYPE (new_var));
10033 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
10034 tree inst = create_tmp_var (type, id);
10035 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
10036 x = build_fold_addr_expr (inst);
10037 }
10038 gimplify_assign (new_var, x, &fplist);
10039 }
10040 else if (DECL_P (new_var))
10041 {
10042 SET_DECL_VALUE_EXPR (new_var, x);
10043 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10044 }
10045 else
10046 gcc_unreachable ();
10047 }
10048 map_cnt++;
10049 break;
a8e785ba 10050
4954efd4 10051 case OMP_CLAUSE_FIRSTPRIVATE:
10052 if (is_oacc_parallel (ctx))
10053 goto oacc_firstprivate;
10054 map_cnt++;
10055 var = OMP_CLAUSE_DECL (c);
10056 if (!omp_is_reference (var)
10057 && !is_gimple_reg_type (TREE_TYPE (var)))
10058 {
10059 tree new_var = lookup_decl (var, ctx);
10060 if (is_variable_sized (var))
10061 {
10062 tree pvar = DECL_VALUE_EXPR (var);
10063 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10064 pvar = TREE_OPERAND (pvar, 0);
10065 gcc_assert (DECL_P (pvar));
10066 tree new_pvar = lookup_decl (pvar, ctx);
10067 x = build_fold_indirect_ref (new_pvar);
10068 TREE_THIS_NOTRAP (x) = 1;
10069 }
10070 else
10071 x = build_receiver_ref (var, true, ctx);
10072 SET_DECL_VALUE_EXPR (new_var, x);
10073 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10074 }
10075 break;
a8e785ba 10076
4954efd4 10077 case OMP_CLAUSE_PRIVATE:
10078 if (is_gimple_omp_oacc (ctx->stmt))
10079 break;
10080 var = OMP_CLAUSE_DECL (c);
10081 if (is_variable_sized (var))
10082 {
10083 tree new_var = lookup_decl (var, ctx);
10084 tree pvar = DECL_VALUE_EXPR (var);
10085 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10086 pvar = TREE_OPERAND (pvar, 0);
10087 gcc_assert (DECL_P (pvar));
10088 tree new_pvar = lookup_decl (pvar, ctx);
10089 x = build_fold_indirect_ref (new_pvar);
10090 TREE_THIS_NOTRAP (x) = 1;
10091 SET_DECL_VALUE_EXPR (new_var, x);
10092 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10093 }
10094 break;
a8e785ba 10095
4954efd4 10096 case OMP_CLAUSE_USE_DEVICE_PTR:
10097 case OMP_CLAUSE_IS_DEVICE_PTR:
10098 var = OMP_CLAUSE_DECL (c);
10099 map_cnt++;
10100 if (is_variable_sized (var))
10101 {
10102 tree new_var = lookup_decl (var, ctx);
10103 tree pvar = DECL_VALUE_EXPR (var);
10104 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10105 pvar = TREE_OPERAND (pvar, 0);
10106 gcc_assert (DECL_P (pvar));
10107 tree new_pvar = lookup_decl (pvar, ctx);
10108 x = build_fold_indirect_ref (new_pvar);
10109 TREE_THIS_NOTRAP (x) = 1;
10110 SET_DECL_VALUE_EXPR (new_var, x);
10111 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10112 }
10113 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10114 {
10115 tree new_var = lookup_decl (var, ctx);
10116 tree type = build_pointer_type (TREE_TYPE (var));
10117 x = create_tmp_var_raw (type, get_name (new_var));
10118 gimple_add_tmp_var (x);
10119 x = build_simple_mem_ref (x);
10120 SET_DECL_VALUE_EXPR (new_var, x);
10121 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10122 }
10123 else
10124 {
10125 tree new_var = lookup_decl (var, ctx);
10126 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
10127 gimple_add_tmp_var (x);
10128 SET_DECL_VALUE_EXPR (new_var, x);
10129 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
10130 }
10131 break;
10132 }
a8e785ba 10133
4954efd4 10134 if (offloaded)
a8e785ba 10135 {
4954efd4 10136 target_nesting_level++;
10137 lower_omp (&tgt_body, ctx);
10138 target_nesting_level--;
a8e785ba 10139 }
4954efd4 10140 else if (data_region)
10141 lower_omp (&tgt_body, ctx);
a8e785ba 10142
4954efd4 10143 if (offloaded)
a8e785ba 10144 {
4954efd4 10145 /* Declare all the variables created by mapping and the variables
10146 declared in the scope of the target body. */
10147 record_vars_into (ctx->block_vars, child_fn);
2918f4e9 10148 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
4954efd4 10149 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
a8e785ba 10150 }
10151
4954efd4 10152 olist = NULL;
10153 ilist = NULL;
10154 if (ctx->record_type)
a8e785ba 10155 {
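      /* Build three parallel arrays describing the mapping: the host
	 addresses (".omp_data_arr"), the sizes (".omp_data_sizes") and
	 the map kinds (".omp_data_kinds"), collected in a TREE_VEC that
	 becomes the target's data argument.  */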
4954efd4 10156 ctx->sender_decl
10157 = create_tmp_var (ctx->record_type, ".omp_data_arr");
10158 DECL_NAMELESS (ctx->sender_decl) = 1;
10159 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
10160 t = make_tree_vec (3);
10161 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
10162 TREE_VEC_ELT (t, 1)
10163 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
10164 ".omp_data_sizes");
10165 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
10166 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
10167 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
10168 tree tkind_type = short_unsigned_type_node;
10169 int talign_shift = 8;
10170 TREE_VEC_ELT (t, 2)
10171 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
10172 ".omp_data_kinds");
10173 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
10174 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
10175 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
10176 gimple_omp_target_set_data_arg (stmt, t);
1e8e9920 10177
4954efd4 10178 vec<constructor_elt, va_gc> *vsize;
10179 vec<constructor_elt, va_gc> *vkind;
10180 vec_alloc (vsize, map_cnt);
10181 vec_alloc (vkind, map_cnt);
10182 unsigned int map_idx = 0;
1e8e9920 10183
4954efd4 10184 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
10185 switch (OMP_CLAUSE_CODE (c))
1e8e9920 10186 {
4954efd4 10187 tree ovar, nc, s, purpose, var, x, type;
10188 unsigned int talign;
1e8e9920 10189
4954efd4 10190 default:
10191 break;
1e8e9920 10192
4954efd4 10193 case OMP_CLAUSE_MAP:
10194 case OMP_CLAUSE_TO:
10195 case OMP_CLAUSE_FROM:
10196 oacc_firstprivate_map:
10197 nc = c;
10198 ovar = OMP_CLAUSE_DECL (c);
10199 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10200 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10201 || (OMP_CLAUSE_MAP_KIND (c)
10202 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
10203 break;
10204 if (!DECL_P (ovar))
9884aaf8 10205 {
4954efd4 10206 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10207 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
10208 {
10209 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
10210 == get_base_address (ovar));
10211 nc = OMP_CLAUSE_CHAIN (c);
10212 ovar = OMP_CLAUSE_DECL (nc);
10213 }
10214 else
10215 {
10216 tree x = build_sender_ref (ovar, ctx);
10217 tree v
10218 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
10219 gimplify_assign (x, v, &ilist);
10220 nc = NULL_TREE;
10221 }
10222 }
10223 else
10224 {
10225 if (DECL_SIZE (ovar)
10226 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
10227 {
10228 tree ovar2 = DECL_VALUE_EXPR (ovar);
10229 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
10230 ovar2 = TREE_OPERAND (ovar2, 0);
10231 gcc_assert (DECL_P (ovar2));
10232 ovar = ovar2;
10233 }
10234 if (!maybe_lookup_field (ovar, ctx))
10235 continue;
9884aaf8 10236 }
61e47ac8 10237
4954efd4 10238 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
10239 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
10240 talign = DECL_ALIGN_UNIT (ovar);
10241 if (nc)
10242 {
10243 var = lookup_decl_in_outer_ctx (ovar, ctx);
10244 x = build_sender_ref (ovar, ctx);
61e47ac8 10245
4954efd4 10246 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10247 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
10248 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
10249 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
10250 {
10251 gcc_assert (offloaded);
10252 tree avar
10253 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
10254 mark_addressable (avar);
10255 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
10256 talign = DECL_ALIGN_UNIT (avar);
10257 avar = build_fold_addr_expr (avar);
10258 gimplify_assign (x, avar, &ilist);
10259 }
10260 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10261 {
10262 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
10263 if (!omp_is_reference (var))
10264 {
10265 if (is_gimple_reg (var)
10266 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10267 TREE_NO_WARNING (var) = 1;
10268 var = build_fold_addr_expr (var);
10269 }
10270 else
10271 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
10272 gimplify_assign (x, var, &ilist);
10273 }
10274 else if (is_gimple_reg (var))
10275 {
10276 gcc_assert (offloaded);
10277 tree avar = create_tmp_var (TREE_TYPE (var));
10278 mark_addressable (avar);
10279 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
10280 if (GOMP_MAP_COPY_TO_P (map_kind)
10281 || map_kind == GOMP_MAP_POINTER
10282 || map_kind == GOMP_MAP_TO_PSET
10283 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
10284 {
10285 /* If we need to initialize a temporary
10286 with VAR because it is not addressable, and
10287 the variable hasn't been initialized yet, then
10288 we'll get a warning for the store to avar.
 10289			 Don't warn in that case; the mapping might
10290 be implicit. */
10291 TREE_NO_WARNING (var) = 1;
10292 gimplify_assign (avar, var, &ilist);
10293 }
10294 avar = build_fold_addr_expr (avar);
10295 gimplify_assign (x, avar, &ilist);
10296 if ((GOMP_MAP_COPY_FROM_P (map_kind)
10297 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
10298 && !TYPE_READONLY (TREE_TYPE (var)))
10299 {
10300 x = unshare_expr (x);
10301 x = build_simple_mem_ref (x);
10302 gimplify_assign (var, x, &olist);
10303 }
10304 }
10305 else
10306 {
10307 var = build_fold_addr_expr (var);
10308 gimplify_assign (x, var, &ilist);
10309 }
10310 }
10311 s = NULL_TREE;
10312 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10313 {
10314 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
10315 s = TREE_TYPE (ovar);
10316 if (TREE_CODE (s) == REFERENCE_TYPE)
10317 s = TREE_TYPE (s);
10318 s = TYPE_SIZE_UNIT (s);
10319 }
10320 else
10321 s = OMP_CLAUSE_SIZE (c);
10322 if (s == NULL_TREE)
10323 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
10324 s = fold_convert (size_type_node, s);
10325 purpose = size_int (map_idx++);
10326 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
10327 if (TREE_CODE (s) != INTEGER_CST)
10328 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
61e47ac8 10329
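	    /* TKIND is the map kind used for a non-empty mapping;
	       TKIND_ZERO is the kind used if the array section turns
	       out to be zero-length.  When the size isn't a compile-time
	       constant, a COND_EXPR built below selects between the two
	       at run time.  */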
4954efd4 10330 unsigned HOST_WIDE_INT tkind, tkind_zero;
10331 switch (OMP_CLAUSE_CODE (c))
10332 {
10333 case OMP_CLAUSE_MAP:
10334 tkind = OMP_CLAUSE_MAP_KIND (c);
10335 tkind_zero = tkind;
10336 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
10337 switch (tkind)
10338 {
10339 case GOMP_MAP_ALLOC:
10340 case GOMP_MAP_TO:
10341 case GOMP_MAP_FROM:
10342 case GOMP_MAP_TOFROM:
10343 case GOMP_MAP_ALWAYS_TO:
10344 case GOMP_MAP_ALWAYS_FROM:
10345 case GOMP_MAP_ALWAYS_TOFROM:
10346 case GOMP_MAP_RELEASE:
10347 case GOMP_MAP_FORCE_TO:
10348 case GOMP_MAP_FORCE_FROM:
10349 case GOMP_MAP_FORCE_TOFROM:
10350 case GOMP_MAP_FORCE_PRESENT:
10351 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
10352 break;
10353 case GOMP_MAP_DELETE:
10354 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
10355 default:
10356 break;
10357 }
10358 if (tkind_zero != tkind)
10359 {
10360 if (integer_zerop (s))
10361 tkind = tkind_zero;
10362 else if (integer_nonzerop (s))
10363 tkind_zero = tkind;
10364 }
10365 break;
10366 case OMP_CLAUSE_FIRSTPRIVATE:
10367 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
10368 tkind = GOMP_MAP_TO;
10369 tkind_zero = tkind;
10370 break;
10371 case OMP_CLAUSE_TO:
10372 tkind = GOMP_MAP_TO;
10373 tkind_zero = tkind;
10374 break;
10375 case OMP_CLAUSE_FROM:
10376 tkind = GOMP_MAP_FROM;
10377 tkind_zero = tkind;
10378 break;
10379 default:
10380 gcc_unreachable ();
10381 }
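	    /* Each kind entry also encodes the alignment: the low bits
	       hold the map kind, and the bits from TALIGN_SHIFT upwards
	       hold ceil_log2 of the alignment.  */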
10382 gcc_checking_assert (tkind
10383 < (HOST_WIDE_INT_C (1U) << talign_shift));
10384 gcc_checking_assert (tkind_zero
10385 < (HOST_WIDE_INT_C (1U) << talign_shift));
10386 talign = ceil_log2 (talign);
10387 tkind |= talign << talign_shift;
10388 tkind_zero |= talign << talign_shift;
10389 gcc_checking_assert (tkind
10390 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
10391 gcc_checking_assert (tkind_zero
10392 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
10393 if (tkind == tkind_zero)
10394 x = build_int_cstu (tkind_type, tkind);
10395 else
10396 {
10397 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
10398 x = build3 (COND_EXPR, tkind_type,
10399 fold_build2 (EQ_EXPR, boolean_type_node,
10400 unshare_expr (s), size_zero_node),
10401 build_int_cstu (tkind_type, tkind_zero),
10402 build_int_cstu (tkind_type, tkind));
10403 }
10404 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
10405 if (nc && nc != c)
10406 c = nc;
10407 break;
3ec11c49 10408
4954efd4 10409 case OMP_CLAUSE_FIRSTPRIVATE:
10410 if (is_oacc_parallel (ctx))
10411 goto oacc_firstprivate_map;
10412 ovar = OMP_CLAUSE_DECL (c);
10413 if (omp_is_reference (ovar))
10414 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
10415 else
10416 talign = DECL_ALIGN_UNIT (ovar);
10417 var = lookup_decl_in_outer_ctx (ovar, ctx);
10418 x = build_sender_ref (ovar, ctx);
10419 tkind = GOMP_MAP_FIRSTPRIVATE;
10420 type = TREE_TYPE (ovar);
10421 if (omp_is_reference (ovar))
10422 type = TREE_TYPE (type);
10423 if ((INTEGRAL_TYPE_P (type)
10424 && TYPE_PRECISION (type) <= POINTER_SIZE)
10425 || TREE_CODE (type) == POINTER_TYPE)
10426 {
10427 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
10428 tree t = var;
10429 if (omp_is_reference (var))
10430 t = build_simple_mem_ref (var);
10431 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10432 TREE_NO_WARNING (var) = 1;
10433 if (TREE_CODE (type) != POINTER_TYPE)
10434 t = fold_convert (pointer_sized_int_node, t);
10435 t = fold_convert (TREE_TYPE (x), t);
10436 gimplify_assign (x, t, &ilist);
10437 }
10438 else if (omp_is_reference (var))
10439 gimplify_assign (x, var, &ilist);
10440 else if (is_gimple_reg (var))
10441 {
10442 tree avar = create_tmp_var (TREE_TYPE (var));
10443 mark_addressable (avar);
10444 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10445 TREE_NO_WARNING (var) = 1;
10446 gimplify_assign (avar, var, &ilist);
10447 avar = build_fold_addr_expr (avar);
10448 gimplify_assign (x, avar, &ilist);
10449 }
10450 else
10451 {
10452 var = build_fold_addr_expr (var);
10453 gimplify_assign (x, var, &ilist);
10454 }
10455 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
10456 s = size_int (0);
10457 else if (omp_is_reference (ovar))
10458 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
10459 else
10460 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
10461 s = fold_convert (size_type_node, s);
10462 purpose = size_int (map_idx++);
10463 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
10464 if (TREE_CODE (s) != INTEGER_CST)
10465 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
3ec11c49 10466
4954efd4 10467 gcc_checking_assert (tkind
10468 < (HOST_WIDE_INT_C (1U) << talign_shift));
10469 talign = ceil_log2 (talign);
10470 tkind |= talign << talign_shift;
10471 gcc_checking_assert (tkind
10472 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
10473 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
10474 build_int_cstu (tkind_type, tkind));
10475 break;
3ec11c49 10476
4954efd4 10477 case OMP_CLAUSE_USE_DEVICE_PTR:
10478 case OMP_CLAUSE_IS_DEVICE_PTR:
10479 ovar = OMP_CLAUSE_DECL (c);
10480 var = lookup_decl_in_outer_ctx (ovar, ctx);
10481 x = build_sender_ref (ovar, ctx);
10482 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
10483 tkind = GOMP_MAP_USE_DEVICE_PTR;
10484 else
10485 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
10486 type = TREE_TYPE (ovar);
10487 if (TREE_CODE (type) == ARRAY_TYPE)
10488 var = build_fold_addr_expr (var);
10489 else
10490 {
10491 if (omp_is_reference (ovar))
10492 {
10493 type = TREE_TYPE (type);
10494 if (TREE_CODE (type) != ARRAY_TYPE)
10495 var = build_simple_mem_ref (var);
10496 var = fold_convert (TREE_TYPE (x), var);
10497 }
10498 }
10499 gimplify_assign (x, var, &ilist);
10500 s = size_int (0);
10501 purpose = size_int (map_idx++);
10502 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
10503 gcc_checking_assert (tkind
10504 < (HOST_WIDE_INT_C (1U) << talign_shift));
10505 gcc_checking_assert (tkind
10506 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
10507 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
10508 build_int_cstu (tkind_type, tkind));
10509 break;
10510 }
3ec11c49 10511
4954efd4 10512 gcc_assert (map_idx == map_cnt);
2169f33b 10513
4954efd4 10514 DECL_INITIAL (TREE_VEC_ELT (t, 1))
10515 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
10516 DECL_INITIAL (TREE_VEC_ELT (t, 2))
10517 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
10518 for (int i = 1; i <= 2; i++)
10519 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
10520 {
10521 gimple_seq initlist = NULL;
10522 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
10523 TREE_VEC_ELT (t, i)),
10524 &initlist, true, NULL_TREE);
10525 gimple_seq_add_seq (&ilist, initlist);
2169f33b 10526
4954efd4 10527 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
10528 NULL);
10529 TREE_THIS_VOLATILE (clobber) = 1;
10530 gimple_seq_add_stmt (&olist,
10531 gimple_build_assign (TREE_VEC_ELT (t, i),
10532 clobber));
10533 }
3ec11c49 10534
4954efd4 10535 tree clobber = build_constructor (ctx->record_type, NULL);
10536 TREE_THIS_VOLATILE (clobber) = 1;
10537 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
10538 clobber));
10539 }
3ec11c49 10540
4954efd4 10541 /* Once all the expansions are done, sequence all the different
10542 fragments inside gimple_omp_body. */
3ec11c49 10543
4954efd4 10544 new_body = NULL;
3ec11c49 10545
4954efd4 10546 if (offloaded
10547 && ctx->record_type)
3ec11c49 10548 {
4954efd4 10549 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
10550 /* fixup_child_record_type might have changed receiver_decl's type. */
10551 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
10552 gimple_seq_add_stmt (&new_body,
10553 gimple_build_assign (ctx->receiver_decl, t));
3ec11c49 10554 }
4954efd4 10555 gimple_seq_add_seq (&new_body, fplist);
3ec11c49 10556
4954efd4 10557 if (offloaded || data_region)
7740abd8 10558 {
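      /* Emit the receiver-side setup at the start of the region body:
	 initialize firstprivate copies, private VLAs or references, and
	 use_device_ptr/is_device_ptr variables from the received
	 mapping record.  */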
4954efd4 10559 tree prev = NULL_TREE;
10560 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
10561 switch (OMP_CLAUSE_CODE (c))
7740abd8 10562 {
4954efd4 10563 tree var, x;
10564 default:
10565 break;
10566 case OMP_CLAUSE_FIRSTPRIVATE:
10567 if (is_gimple_omp_oacc (ctx->stmt))
10568 break;
10569 var = OMP_CLAUSE_DECL (c);
10570 if (omp_is_reference (var)
10571 || is_gimple_reg_type (TREE_TYPE (var)))
7740abd8 10572 {
4954efd4 10573 tree new_var = lookup_decl (var, ctx);
10574 tree type;
10575 type = TREE_TYPE (var);
10576 if (omp_is_reference (var))
10577 type = TREE_TYPE (type);
10578 if ((INTEGRAL_TYPE_P (type)
10579 && TYPE_PRECISION (type) <= POINTER_SIZE)
10580 || TREE_CODE (type) == POINTER_TYPE)
10581 {
10582 x = build_receiver_ref (var, false, ctx);
10583 if (TREE_CODE (type) != POINTER_TYPE)
10584 x = fold_convert (pointer_sized_int_node, x);
10585 x = fold_convert (type, x);
10586 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
10587 fb_rvalue);
10588 if (omp_is_reference (var))
10589 {
10590 tree v = create_tmp_var_raw (type, get_name (var));
10591 gimple_add_tmp_var (v);
10592 TREE_ADDRESSABLE (v) = 1;
10593 gimple_seq_add_stmt (&new_body,
10594 gimple_build_assign (v, x));
10595 x = build_fold_addr_expr (v);
10596 }
10597 gimple_seq_add_stmt (&new_body,
10598 gimple_build_assign (new_var, x));
10599 }
10600 else
10601 {
10602 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
10603 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
10604 fb_rvalue);
10605 gimple_seq_add_stmt (&new_body,
10606 gimple_build_assign (new_var, x));
10607 }
10608 }
10609 else if (is_variable_sized (var))
10610 {
10611 tree pvar = DECL_VALUE_EXPR (var);
10612 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10613 pvar = TREE_OPERAND (pvar, 0);
10614 gcc_assert (DECL_P (pvar));
10615 tree new_var = lookup_decl (pvar, ctx);
10616 x = build_receiver_ref (var, false, ctx);
10617 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10618 gimple_seq_add_stmt (&new_body,
10619 gimple_build_assign (new_var, x));
10620 }
10621 break;
10622 case OMP_CLAUSE_PRIVATE:
10623 if (is_gimple_omp_oacc (ctx->stmt))
10624 break;
10625 var = OMP_CLAUSE_DECL (c);
10626 if (omp_is_reference (var))
10627 {
10628 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10629 tree new_var = lookup_decl (var, ctx);
10630 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
10631 if (TREE_CONSTANT (x))
10632 {
10633 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
10634 get_name (var));
10635 gimple_add_tmp_var (x);
10636 TREE_ADDRESSABLE (x) = 1;
10637 x = build_fold_addr_expr_loc (clause_loc, x);
10638 }
10639 else
10640 break;
cbba99a0 10641
4954efd4 10642 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10643 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10644 gimple_seq_add_stmt (&new_body,
10645 gimple_build_assign (new_var, x));
10646 }
10647 break;
10648 case OMP_CLAUSE_USE_DEVICE_PTR:
10649 case OMP_CLAUSE_IS_DEVICE_PTR:
10650 var = OMP_CLAUSE_DECL (c);
10651 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
10652 x = build_sender_ref (var, ctx);
10653 else
10654 x = build_receiver_ref (var, false, ctx);
10655 if (is_variable_sized (var))
10656 {
10657 tree pvar = DECL_VALUE_EXPR (var);
10658 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10659 pvar = TREE_OPERAND (pvar, 0);
10660 gcc_assert (DECL_P (pvar));
10661 tree new_var = lookup_decl (pvar, ctx);
10662 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10663 gimple_seq_add_stmt (&new_body,
10664 gimple_build_assign (new_var, x));
10665 }
10666 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
10667 {
10668 tree new_var = lookup_decl (var, ctx);
10669 new_var = DECL_VALUE_EXPR (new_var);
10670 gcc_assert (TREE_CODE (new_var) == MEM_REF);
10671 new_var = TREE_OPERAND (new_var, 0);
10672 gcc_assert (DECL_P (new_var));
10673 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10674 gimple_seq_add_stmt (&new_body,
10675 gimple_build_assign (new_var, x));
10676 }
cbba99a0 10677 else
4954efd4 10678 {
10679 tree type = TREE_TYPE (var);
10680 tree new_var = lookup_decl (var, ctx);
10681 if (omp_is_reference (var))
10682 {
10683 type = TREE_TYPE (type);
10684 if (TREE_CODE (type) != ARRAY_TYPE)
10685 {
10686 tree v = create_tmp_var_raw (type, get_name (var));
10687 gimple_add_tmp_var (v);
10688 TREE_ADDRESSABLE (v) = 1;
10689 x = fold_convert (type, x);
10690 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
10691 fb_rvalue);
10692 gimple_seq_add_stmt (&new_body,
10693 gimple_build_assign (v, x));
10694 x = build_fold_addr_expr (v);
10695 }
10696 }
10697 new_var = DECL_VALUE_EXPR (new_var);
10698 x = fold_convert (TREE_TYPE (new_var), x);
10699 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10700 gimple_seq_add_stmt (&new_body,
10701 gimple_build_assign (new_var, x));
10702 }
10703 break;
cbba99a0 10704 }
4954efd4 10705	    /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
 10706	       so that firstprivate vars holding the OMP_CLAUSE_SIZE values, if
 10707	       needed, have already been handled.  Similarly OMP_CLAUSE_PRIVATE
 10708	       for VLAs or references to VLAs.  */
10709 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10710 switch (OMP_CLAUSE_CODE (c))
10711 {
10712 tree var;
10713 default:
10714 break;
10715 case OMP_CLAUSE_MAP:
10716 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10717 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
10718 {
10719 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
773078cb 10720 poly_int64 offset = 0;
4954efd4 10721 gcc_assert (prev);
10722 var = OMP_CLAUSE_DECL (c);
10723 if (DECL_P (var)
10724 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
10725 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
10726 ctx))
10727 && varpool_node::get_create (var)->offloadable)
10728 break;
10729 if (TREE_CODE (var) == INDIRECT_REF
10730 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
10731 var = TREE_OPERAND (var, 0);
10732 if (TREE_CODE (var) == COMPONENT_REF)
10733 {
10734 var = get_addr_base_and_unit_offset (var, &offset);
10735 gcc_assert (var != NULL_TREE && DECL_P (var));
10736 }
10737 else if (DECL_SIZE (var)
10738 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
10739 {
10740 tree var2 = DECL_VALUE_EXPR (var);
10741 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
10742 var2 = TREE_OPERAND (var2, 0);
10743 gcc_assert (DECL_P (var2));
10744 var = var2;
10745 }
10746 tree new_var = lookup_decl (var, ctx), x;
10747 tree type = TREE_TYPE (new_var);
10748 bool is_ref;
10749 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
10750 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
10751 == COMPONENT_REF))
10752 {
10753 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
10754 is_ref = true;
10755 new_var = build2 (MEM_REF, type,
10756 build_fold_addr_expr (new_var),
10757 build_int_cst (build_pointer_type (type),
10758 offset));
10759 }
10760 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
10761 {
10762 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
10763 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
10764 new_var = build2 (MEM_REF, type,
10765 build_fold_addr_expr (new_var),
10766 build_int_cst (build_pointer_type (type),
10767 offset));
10768 }
10769 else
10770 is_ref = omp_is_reference (var);
10771 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
10772 is_ref = false;
10773 bool ref_to_array = false;
10774 if (is_ref)
10775 {
10776 type = TREE_TYPE (type);
10777 if (TREE_CODE (type) == ARRAY_TYPE)
10778 {
10779 type = build_pointer_type (type);
10780 ref_to_array = true;
10781 }
10782 }
10783 else if (TREE_CODE (type) == ARRAY_TYPE)
10784 {
10785 tree decl2 = DECL_VALUE_EXPR (new_var);
10786 gcc_assert (TREE_CODE (decl2) == MEM_REF);
10787 decl2 = TREE_OPERAND (decl2, 0);
10788 gcc_assert (DECL_P (decl2));
10789 new_var = decl2;
10790 type = TREE_TYPE (new_var);
10791 }
10792 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
10793 x = fold_convert_loc (clause_loc, type, x);
10794 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
10795 {
10796 tree bias = OMP_CLAUSE_SIZE (c);
10797 if (DECL_P (bias))
10798 bias = lookup_decl (bias, ctx);
10799 bias = fold_convert_loc (clause_loc, sizetype, bias);
10800 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
10801 bias);
10802 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
10803 TREE_TYPE (x), x, bias);
10804 }
10805 if (ref_to_array)
10806 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10807 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10808 if (is_ref && !ref_to_array)
10809 {
10810 tree t = create_tmp_var_raw (type, get_name (var));
10811 gimple_add_tmp_var (t);
10812 TREE_ADDRESSABLE (t) = 1;
10813 gimple_seq_add_stmt (&new_body,
10814 gimple_build_assign (t, x));
10815 x = build_fold_addr_expr_loc (clause_loc, t);
10816 }
10817 gimple_seq_add_stmt (&new_body,
10818 gimple_build_assign (new_var, x));
10819 prev = NULL_TREE;
10820 }
10821 else if (OMP_CLAUSE_CHAIN (c)
10822 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
10823 == OMP_CLAUSE_MAP
10824 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10825 == GOMP_MAP_FIRSTPRIVATE_POINTER
10826 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10827 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
10828 prev = c;
10829 break;
10830 case OMP_CLAUSE_PRIVATE:
10831 var = OMP_CLAUSE_DECL (c);
10832 if (is_variable_sized (var))
10833 {
10834 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10835 tree new_var = lookup_decl (var, ctx);
10836 tree pvar = DECL_VALUE_EXPR (var);
10837 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10838 pvar = TREE_OPERAND (pvar, 0);
10839 gcc_assert (DECL_P (pvar));
10840 tree new_pvar = lookup_decl (pvar, ctx);
10841 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10842 tree al = size_int (DECL_ALIGN (var));
10843 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
10844 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10845 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
10846 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10847 gimple_seq_add_stmt (&new_body,
10848 gimple_build_assign (new_pvar, x));
10849 }
10850 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
10851 {
10852 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10853 tree new_var = lookup_decl (var, ctx);
10854 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
10855 if (TREE_CONSTANT (x))
10856 break;
10857 else
10858 {
10859 tree atmp
10860 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10861 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
10862 tree al = size_int (TYPE_ALIGN (rtype));
10863 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10864 }
cbba99a0 10865
4954efd4 10866 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10867 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10868 gimple_seq_add_stmt (&new_body,
10869 gimple_build_assign (new_var, x));
10870 }
10871 break;
10872 }
cbba99a0 10873
4954efd4 10874 gimple_seq fork_seq = NULL;
10875 gimple_seq join_seq = NULL;
cbba99a0 10876
4954efd4 10877 if (is_oacc_parallel (ctx))
cbba99a0 10878 {
4954efd4 10879 /* If there are reductions on the offloaded region itself, treat
10880 them as a dummy GANG loop. */
10881 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
cbba99a0 10882
4954efd4 10883 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
10884 false, NULL, NULL, &fork_seq, &join_seq, ctx);
cbba99a0 10885 }
cbba99a0 10886
4954efd4 10887 gimple_seq_add_seq (&new_body, fork_seq);
10888 gimple_seq_add_seq (&new_body, tgt_body);
10889 gimple_seq_add_seq (&new_body, join_seq);
cbba99a0 10890
4954efd4 10891 if (offloaded)
10892 new_body = maybe_catch_exception (new_body);
cbba99a0 10893
4954efd4 10894 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
10895 gimple_omp_set_body (stmt, new_body);
cbba99a0 10896 }
10897
4954efd4 10898 bind = gimple_build_bind (NULL, NULL,
10899 tgt_bind ? gimple_bind_block (tgt_bind)
10900 : NULL_TREE);
10901 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
10902 gimple_bind_add_seq (bind, ilist);
10903 gimple_bind_add_stmt (bind, stmt);
10904 gimple_bind_add_seq (bind, olist);
cbba99a0 10905
10906 pop_gimplify_context (NULL);
10907
4954efd4 10908 if (dep_bind)
948eee2f 10909 {
4954efd4 10910 gimple_bind_add_seq (dep_bind, dep_ilist);
10911 gimple_bind_add_stmt (dep_bind, bind);
10912 gimple_bind_add_seq (dep_bind, dep_olist);
10913 pop_gimplify_context (dep_bind);
948eee2f 10914 }
948eee2f 10915}
10916
4954efd4 10917 /* Lower code for an OpenMP teams directive. */
0bb0f256 10918
6f431819 10919static void
4954efd4 10920lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
0bb0f256 10921{
4954efd4 10922 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
10923 push_gimplify_context ();
0bb0f256 10924
4954efd4 10925 tree block = make_node (BLOCK);
10926 gbind *bind = gimple_build_bind (NULL, NULL, block);
10927 gsi_replace (gsi_p, bind, true);
10928 gimple_seq bind_body = NULL;
10929 gimple_seq dlist = NULL;
10930 gimple_seq olist = NULL;
0bb0f256 10931
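  /* Gimplify the NUM_TEAMS and THREAD_LIMIT clause arguments up front;
     when a clause is absent, 0 is passed to GOMP_teams, letting the
     runtime library choose a default.  */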
4954efd4 10932 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
10933 OMP_CLAUSE_NUM_TEAMS);
10934 if (num_teams == NULL_TREE)
10935 num_teams = build_int_cst (unsigned_type_node, 0);
10936 else
0bb0f256 10937 {
4954efd4 10938 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
10939 num_teams = fold_convert (unsigned_type_node, num_teams);
10940 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
0bb0f256 10941 }
4954efd4 10942 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
10943 OMP_CLAUSE_THREAD_LIMIT);
10944 if (thread_limit == NULL_TREE)
10945 thread_limit = build_int_cst (unsigned_type_node, 0);
10946 else
0bb0f256 10947 {
4954efd4 10948 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
10949 thread_limit = fold_convert (unsigned_type_node, thread_limit);
10950 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
10951 fb_rvalue);
0bb0f256 10952 }
cbba99a0 10953
4954efd4 10954 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
10955 &bind_body, &dlist, ctx, NULL);
10956 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
9a1d892b 10957 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
10958 NULL, ctx);
4954efd4 10959 if (!gimple_omp_teams_grid_phony (teams_stmt))
cbba99a0 10960 {
4954efd4 10961 gimple_seq_add_stmt (&bind_body, teams_stmt);
10962 location_t loc = gimple_location (teams_stmt);
10963 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
10964 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
10965 gimple_set_location (call, loc);
10966 gimple_seq_add_stmt (&bind_body, call);
cbba99a0 10967 }
10968
4954efd4 10969 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
10970 gimple_omp_set_body (teams_stmt, NULL);
10971 gimple_seq_add_seq (&bind_body, olist);
10972 gimple_seq_add_seq (&bind_body, dlist);
10973 if (!gimple_omp_teams_grid_phony (teams_stmt))
10974 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
10975 gimple_bind_set_body (bind, bind_body);
cbba99a0 10976
4954efd4 10977 pop_gimplify_context (bind);
cbba99a0 10978
4954efd4 10979 gimple_bind_append_vars (bind, ctx->block_vars);
10980 BLOCK_VARS (block) = ctx->block_vars;
10981 if (BLOCK_VARS (block))
10982 TREE_USED (block) = 1;
cbba99a0 10983}
10984
4954efd4 10985 /* Lower code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
cbba99a0 10986
4954efd4 10987static void
10988lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
cbba99a0 10989{
4954efd4 10990 gimple *stmt = gsi_stmt (*gsi_p);
10991 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10992 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
10993 gimple_build_omp_return (false));
cbba99a0 10994}
10995
cbba99a0 10996
4954efd4 10997/* Callback for lower_omp_1. Return non-NULL if *tp needs to be
10998 regimplified. If DATA is non-NULL, lower_omp_1 is outside
10999 of OMP context, but with task_shared_vars set. */
cbba99a0 11000
4954efd4 11001static tree
11002lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
11003 void *data)
cbba99a0 11004{
4954efd4 11005 tree t = *tp;
cbba99a0 11006
4954efd4 11007 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
11008 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
11009 return t;
cbba99a0 11010
4954efd4 11011 if (task_shared_vars
11012 && DECL_P (t)
11013 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
11014 return t;
cbba99a0 11015
4954efd4 11016 /* If a global variable has been privatized, TREE_CONSTANT on
11017 ADDR_EXPR might be wrong. */
11018 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
11019 recompute_tree_invariant_for_addr_expr (t);
cbba99a0 11020
4954efd4 11021 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
11022 return NULL_TREE;
cbba99a0 11023}
11024
4954efd4 11025/* Data to be communicated between lower_omp_regimplify_operands and
11026 lower_omp_regimplify_operands_p. */
cbba99a0 11027
4954efd4 11028struct lower_omp_regimplify_operands_data
cbba99a0 11029{
4954efd4 11030 omp_context *ctx;
11031 vec<tree> *decls;
11032};
cbba99a0 11033
4954efd4 11034/* Helper function for lower_omp_regimplify_operands. Find
 11035   omp_member_access_dummy_var vars and temporarily adjust their
11036 DECL_VALUE_EXPRs if needed. */
cbba99a0 11037
4954efd4 11038static tree
11039lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
11040 void *data)
cbba99a0 11041{
4954efd4 11042 tree t = omp_member_access_dummy_var (*tp);
11043 if (t)
cbba99a0 11044 {
4954efd4 11045 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
11046 lower_omp_regimplify_operands_data *ldata
11047 = (lower_omp_regimplify_operands_data *) wi->info;
11048 tree o = maybe_lookup_decl (t, ldata->ctx);
11049 if (o != t)
cbba99a0 11050 {
4954efd4 11051 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
11052 ldata->decls->safe_push (*tp);
11053 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
11054 SET_DECL_VALUE_EXPR (*tp, v);
cbba99a0 11055 }
cbba99a0 11056 }
4954efd4 11057 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
11058 return NULL_TREE;
cbba99a0 11059}
11060
4954efd4 11061/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
11062 of omp_member_access_dummy_var vars during regimplification. */
cbba99a0 11063
11064static void
4954efd4 11065lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
11066 gimple_stmt_iterator *gsi_p)
cbba99a0 11067{
4954efd4 11068 auto_vec<tree, 10> decls;
11069 if (ctx)
11070 {
11071 struct walk_stmt_info wi;
11072 memset (&wi, '\0', sizeof (wi));
11073 struct lower_omp_regimplify_operands_data data;
11074 data.ctx = ctx;
11075 data.decls = &decls;
11076 wi.info = &data;
11077 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
11078 }
11079 gimple_regimplify_operands (stmt, gsi_p);
11080 while (!decls.is_empty ())
11081 {
11082 tree t = decls.pop ();
11083 tree v = decls.pop ();
11084 SET_DECL_VALUE_EXPR (t, v);
11085 }
cbba99a0 11086}
11087
cbba99a0 11088static void
4954efd4 11089lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
cbba99a0 11090{
4954efd4 11091 gimple *stmt = gsi_stmt (*gsi_p);
11092 struct walk_stmt_info wi;
11093 gcall *call_stmt;
cbba99a0 11094
4954efd4 11095 if (gimple_has_location (stmt))
11096 input_location = gimple_location (stmt);
cbba99a0 11097
4954efd4 11098 if (task_shared_vars)
11099 memset (&wi, '\0', sizeof (wi));
cbba99a0 11100
4954efd4 11101 /* If we have issued syntax errors, avoid doing any heavy lifting.
11102 Just replace the OMP directives with a NOP to avoid
11103 confusing RTL expansion. */
11104 if (seen_error () && is_gimple_omp (stmt))
cbba99a0 11105 {
4954efd4 11106 gsi_replace (gsi_p, gimple_build_nop (), true);
11107 return;
11108 }
cbba99a0 11109
4954efd4 11110 switch (gimple_code (stmt))
11111 {
11112 case GIMPLE_COND:
11113 {
11114 gcond *cond_stmt = as_a <gcond *> (stmt);
11115 if ((ctx || task_shared_vars)
11116 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
11117 lower_omp_regimplify_p,
11118 ctx ? NULL : &wi, NULL)
11119 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
11120 lower_omp_regimplify_p,
11121 ctx ? NULL : &wi, NULL)))
11122 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
11123 }
11124 break;
11125 case GIMPLE_CATCH:
11126 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
11127 break;
11128 case GIMPLE_EH_FILTER:
11129 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
11130 break;
11131 case GIMPLE_TRY:
11132 lower_omp (gimple_try_eval_ptr (stmt), ctx);
11133 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
11134 break;
11135 case GIMPLE_TRANSACTION:
7c6746c9 11136 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
4954efd4 11137 ctx);
11138 break;
11139 case GIMPLE_BIND:
11140 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
2918f4e9 11141 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
4954efd4 11142 break;
11143 case GIMPLE_OMP_PARALLEL:
11144 case GIMPLE_OMP_TASK:
11145 ctx = maybe_lookup_ctx (stmt);
11146 gcc_assert (ctx);
11147 if (ctx->cancellable)
11148 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
11149 lower_omp_taskreg (gsi_p, ctx);
11150 break;
11151 case GIMPLE_OMP_FOR:
11152 ctx = maybe_lookup_ctx (stmt);
11153 gcc_assert (ctx);
11154 if (ctx->cancellable)
11155 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
11156 lower_omp_for (gsi_p, ctx);
11157 break;
11158 case GIMPLE_OMP_SECTIONS:
11159 ctx = maybe_lookup_ctx (stmt);
11160 gcc_assert (ctx);
11161 if (ctx->cancellable)
11162 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
11163 lower_omp_sections (gsi_p, ctx);
11164 break;
11165 case GIMPLE_OMP_SINGLE:
11166 ctx = maybe_lookup_ctx (stmt);
11167 gcc_assert (ctx);
11168 lower_omp_single (gsi_p, ctx);
11169 break;
11170 case GIMPLE_OMP_MASTER:
11171 ctx = maybe_lookup_ctx (stmt);
11172 gcc_assert (ctx);
11173 lower_omp_master (gsi_p, ctx);
11174 break;
11175 case GIMPLE_OMP_TASKGROUP:
11176 ctx = maybe_lookup_ctx (stmt);
11177 gcc_assert (ctx);
11178 lower_omp_taskgroup (gsi_p, ctx);
11179 break;
11180 case GIMPLE_OMP_ORDERED:
11181 ctx = maybe_lookup_ctx (stmt);
11182 gcc_assert (ctx);
11183 lower_omp_ordered (gsi_p, ctx);
11184 break;
70a6624c 11185 case GIMPLE_OMP_SCAN:
11186 ctx = maybe_lookup_ctx (stmt);
11187 gcc_assert (ctx);
da008d72 11188 lower_omp_scan (gsi_p, ctx);
70a6624c 11189 break;
4954efd4 11190 case GIMPLE_OMP_CRITICAL:
11191 ctx = maybe_lookup_ctx (stmt);
11192 gcc_assert (ctx);
11193 lower_omp_critical (gsi_p, ctx);
11194 break;
11195 case GIMPLE_OMP_ATOMIC_LOAD:
11196 if ((ctx || task_shared_vars)
11197 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
11198 as_a <gomp_atomic_load *> (stmt)),
11199 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
11200 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
11201 break;
11202 case GIMPLE_OMP_TARGET:
11203 ctx = maybe_lookup_ctx (stmt);
11204 gcc_assert (ctx);
11205 lower_omp_target (gsi_p, ctx);
11206 break;
11207 case GIMPLE_OMP_TEAMS:
11208 ctx = maybe_lookup_ctx (stmt);
11209 gcc_assert (ctx);
7e5a76c8 11210 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
11211 lower_omp_taskreg (gsi_p, ctx);
11212 else
11213 lower_omp_teams (gsi_p, ctx);
4954efd4 11214 break;
11215 case GIMPLE_OMP_GRID_BODY:
11216 ctx = maybe_lookup_ctx (stmt);
11217 gcc_assert (ctx);
11218 lower_omp_grid_body (gsi_p, ctx);
11219 break;
11220 case GIMPLE_CALL:
11221 tree fndecl;
11222 call_stmt = as_a <gcall *> (stmt);
11223 fndecl = gimple_call_fndecl (call_stmt);
11224 if (fndecl
a0e9bfbb 11225 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
4954efd4 11226 switch (DECL_FUNCTION_CODE (fndecl))
cbba99a0 11227 {
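	    /* Inside a cancellable construct, give GOMP_barrier,
	       GOMP_cancel and GOMP_cancellation_point calls an lhs
	       (rewriting plain barriers to GOMP_barrier_cancel) and
	       branch to the construct's cancel label when the call
	       returns nonzero; in a non-cancellable construct,
	       cancellation points are simply replaced by nops.  */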
4954efd4 11228 case BUILT_IN_GOMP_BARRIER:
11229 if (ctx == NULL)
11230 break;
11231 /* FALLTHRU */
11232 case BUILT_IN_GOMP_CANCEL:
11233 case BUILT_IN_GOMP_CANCELLATION_POINT:
11234 omp_context *cctx;
11235 cctx = ctx;
11236 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
11237 cctx = cctx->outer;
11238 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
11239 if (!cctx->cancellable)
11240 {
11241 if (DECL_FUNCTION_CODE (fndecl)
11242 == BUILT_IN_GOMP_CANCELLATION_POINT)
11243 {
11244 stmt = gimple_build_nop ();
11245 gsi_replace (gsi_p, stmt, false);
11246 }
11247 break;
11248 }
11249 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
11250 {
11251 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
11252 gimple_call_set_fndecl (call_stmt, fndecl);
11253 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
11254 }
11255 tree lhs;
11256 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
11257 gimple_call_set_lhs (call_stmt, lhs);
11258 tree fallthru_label;
11259 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
11260 gimple *g;
11261 g = gimple_build_label (fallthru_label);
11262 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
11263 g = gimple_build_cond (NE_EXPR, lhs,
11264 fold_convert (TREE_TYPE (lhs),
11265 boolean_false_node),
11266 cctx->cancel_label, fallthru_label);
11267 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
11268 break;
11269 default:
11270 break;
cbba99a0 11271 }
9a1d892b 11272 goto regimplify;
11273
11274 case GIMPLE_ASSIGN:
09cf268c 11275 for (omp_context *up = ctx; up; up = up->outer)
9a1d892b 11276 {
09cf268c 11277 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
11278 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
11279 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
48152aa2 11280 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
70a6624c 11281 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
48152aa2 11282 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
11283 && (gimple_omp_target_kind (up->stmt)
11284 == GF_OMP_TARGET_KIND_DATA)))
09cf268c 11285 continue;
11286 else if (!up->lastprivate_conditional_map)
11287 break;
9a1d892b 11288 tree lhs = get_base_address (gimple_assign_lhs (stmt));
8259fae1 11289 if (TREE_CODE (lhs) == MEM_REF
11290 && DECL_P (TREE_OPERAND (lhs, 0))
11291 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
11292 0))) == REFERENCE_TYPE)
11293 lhs = TREE_OPERAND (lhs, 0);
9a1d892b 11294 if (DECL_P (lhs))
09cf268c 11295 if (tree *v = up->lastprivate_conditional_map->get (lhs))
9a1d892b 11296 {
eb7a699d 11297 tree clauses;
384aea12 11298 if (up->combined_into_simd_safelen0)
11299 up = up->outer;
eb7a699d 11300 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
11301 clauses = gimple_omp_for_clauses (up->stmt);
11302 else
11303 clauses = gimple_omp_sections_clauses (up->stmt);
9a1d892b 11304 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
4f4b92d8 11305 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
11306 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
11307 OMP_CLAUSE__CONDTEMP_);
11308 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
9a1d892b 11309 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
11310 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
11311 }
11312 }
4954efd4 11313 /* FALLTHRU */
9a1d892b 11314
4954efd4 11315 default:
9a1d892b 11316 regimplify:
4954efd4 11317 if ((ctx || task_shared_vars)
11318 && walk_gimple_op (stmt, lower_omp_regimplify_p,
11319 ctx ? NULL : &wi))
cbba99a0 11320 {
4954efd4 11321	  /* Just remove clobbers; this should happen only if we have
 11322	     "privatized" local addressable variables in SIMD regions.  The
 11323	     clobber isn't needed in that case, and gimplifying the address
 11324	     of the ARRAY_REF into a pointer and creating a MEM_REF based
 11325	     clobber would create worse code than we get with the clobber
 11326	     dropped.  */
11327 if (gimple_clobber_p (stmt))
584fba4b 11328 {
4954efd4 11329 gsi_replace (gsi_p, gimple_build_nop (), true);
11330 break;
cbba99a0 11331 }
4954efd4 11332 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
cbba99a0 11333 }
4954efd4 11334 break;
cbba99a0 11335 }
cbba99a0 11336}
11337
cbba99a0 11338static void
4954efd4 11339lower_omp (gimple_seq *body, omp_context *ctx)
cbba99a0 11340{
4954efd4 11341 location_t saved_location = input_location;
11342 gimple_stmt_iterator gsi;
11343 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
11344 lower_omp_1 (&gsi, ctx);
 11345   /* During gimplification, we haven't folded statements inside offloading
11346 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
11347 if (target_nesting_level || taskreg_nesting_level)
11348 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
11349 fold_stmt (&gsi);
11350 input_location = saved_location;
cbba99a0 11351}
11352
4954efd4 11353/* Main entry point. */
cbba99a0 11354
4954efd4 11355static unsigned int
11356execute_lower_omp (void)
cbba99a0 11357{
4954efd4 11358 gimple_seq body;
11359 int i;
11360 omp_context *ctx;
cbba99a0 11361
4954efd4 11362 /* This pass always runs, to provide PROP_gimple_lomp.
11363 But often, there is nothing to do. */
efa02472 11364 if (flag_openacc == 0 && flag_openmp == 0
4954efd4 11365 && flag_openmp_simd == 0)
11366 return 0;
cbba99a0 11367
4954efd4 11368 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
11369 delete_omp_context);
cbba99a0 11370
4954efd4 11371 body = gimple_body (current_function_decl);
cbba99a0 11372
4954efd4 11373 if (hsa_gen_requested_p ())
11374 omp_grid_gridify_all_targets (&body);
11375
11376 scan_omp (&body, NULL);
11377 gcc_assert (taskreg_nesting_level == 0);
11378 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
11379 finish_taskreg_scan (ctx);
11380 taskreg_contexts.release ();
cbba99a0 11381
4954efd4 11382 if (all_contexts->root)
11383 {
11384 if (task_shared_vars)
11385 push_gimplify_context ();
11386 lower_omp (&body, NULL);
11387 if (task_shared_vars)
11388 pop_gimplify_context (NULL);
11389 }
11390
11391 if (all_contexts)
11392 {
11393 splay_tree_delete (all_contexts);
11394 all_contexts = NULL;
cbba99a0 11395 }
4954efd4 11396 BITMAP_FREE (task_shared_vars);
2918f4e9 11397
 11398  /* If the current function is a method, remove the artificial dummy VAR_DECLs
 11399     created for non-static data member privatization; they aren't needed for
 11400     debuginfo or anything else, have already been replaced everywhere in the
 11401     IL, and cause problems with LTO.  */
11402 if (DECL_ARGUMENTS (current_function_decl)
11403 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
11404 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
11405 == POINTER_TYPE))
11406 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
4954efd4 11407 return 0;
cbba99a0 11408}
11409
4954efd4 11410namespace {
cbba99a0 11411
4954efd4 11412const pass_data pass_data_lower_omp =
cbba99a0 11413{
4954efd4 11414 GIMPLE_PASS, /* type */
11415 "omplower", /* name */
f57c8178 11416 OPTGROUP_OMP, /* optinfo_flags */
4954efd4 11417 TV_NONE, /* tv_id */
11418 PROP_gimple_any, /* properties_required */
11419 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
11420 0, /* properties_destroyed */
11421 0, /* todo_flags_start */
11422 0, /* todo_flags_finish */
11423};
cbba99a0 11424
4954efd4 11425class pass_lower_omp : public gimple_opt_pass
11426{
11427public:
11428 pass_lower_omp (gcc::context *ctxt)
11429 : gimple_opt_pass (pass_data_lower_omp, ctxt)
11430 {}
cbba99a0 11431
4954efd4 11432 /* opt_pass methods: */
11433 virtual unsigned int execute (function *) { return execute_lower_omp (); }
cbba99a0 11434
4954efd4 11435}; // class pass_lower_omp
cbba99a0 11436
4954efd4 11437} // anon namespace
cbba99a0 11438
4954efd4 11439gimple_opt_pass *
11440make_pass_lower_omp (gcc::context *ctxt)
11441{
11442 return new pass_lower_omp (ctxt);
cbba99a0 11443}
4954efd4 11444\f
11445/* The following is a utility to diagnose structured block violations.
11446 It is not part of the "omplower" pass, as that's invoked too late. It
11447 should be invoked by the respective front ends after gimplification. */
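/* For example (hypothetical user code), a jump into the body of a construct:

     goto l;
   #pragma omp parallel
     { l:; }

   is diagnosed below as "invalid entry to OpenMP structured block".  */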
cbba99a0 11448
4954efd4 11449static splay_tree all_labels;
cbba99a0 11450
4954efd4 11451/* Check for mismatched contexts and generate an error if needed. Return
11452 true if an error is detected. */
cbba99a0 11453
4954efd4 11454static bool
11455diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
11456 gimple *branch_ctx, gimple *label_ctx)
11457{
11458 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
11459 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
cbba99a0 11460
4954efd4 11461 if (label_ctx == branch_ctx)
11462 return false;
cbba99a0 11463
4954efd4 11464 const char* kind = NULL;
cbba99a0 11465
4954efd4 11466 if (flag_openacc)
cbba99a0 11467 {
4954efd4 11468 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
11469 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
cbba99a0 11470 {
4954efd4 11471 gcc_checking_assert (kind == NULL);
11472 kind = "OpenACC";
cbba99a0 11473 }
11474 }
4954efd4 11475 if (kind == NULL)
28e869d0 11476 {
93c7cd0f 11477 gcc_checking_assert (flag_openmp || flag_openmp_simd);
4954efd4 11478 kind = "OpenMP";
28e869d0 11479 }
cbba99a0 11480
7c6746c9 11481 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
4954efd4 11482 so we could traverse it and issue a correct "exit" or "enter" error
11483 message upon a structured block violation.
f4f5b4b4 11484
4954efd4 11485    We built that context by chaining a list together with tree_cons, but there is
11486 no easy counterpart in gimple tuples. It seems like far too much work
11487 for issuing exit/enter error messages. If someone really misses the
7c6746c9 11488 distinct error message... patches welcome. */
f4f5b4b4 11489
4954efd4 11490#if 0
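  /* Note: this disabled fragment never declared EXIT_P; if it is ever
     resurrected, a declaration along these lines (defaulting to the
     "exit" wording, per the comment below) would be needed.  */
  bool exit_p = true;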
 11491  /* Try to avoid confusing the user by producing an error message
11492 with correct "exit" or "enter" verbiage. We prefer "exit"
11493 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
11494 if (branch_ctx == NULL)
11495 exit_p = false;
11496 else
28e869d0 11497 {
4954efd4 11498 while (label_ctx)
11499 {
11500 if (TREE_VALUE (label_ctx) == branch_ctx)
11501 {
11502 exit_p = false;
11503 break;
11504 }
11505 label_ctx = TREE_CHAIN (label_ctx);
11506 }
28e869d0 11507 }
11508
4954efd4 11509 if (exit_p)
11510 error ("invalid exit from %s structured block", kind);
11511 else
11512 error ("invalid entry to %s structured block", kind);
11513#endif
28e869d0 11514
4954efd4 11515 /* If it's obvious we have an invalid entry, be specific about the error. */
11516 if (branch_ctx == NULL)
11517 error ("invalid entry to %s structured block", kind);
11518 else
f4f5b4b4 11519 {
4954efd4 11520 /* Otherwise, be vague and lazy, but efficient. */
11521 error ("invalid branch to/from %s structured block", kind);
f4f5b4b4 11522 }
28e869d0 11523
4954efd4 11524 gsi_replace (gsi_p, gimple_build_nop (), false);
11525 return true;
f4f5b4b4 11526}
11527
4954efd4 11528/* Pass 1: Create a minimal tree of structured blocks, and record
11529 where each label is found. */
cbba99a0 11530
4954efd4 11531static tree
11532diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
11533 struct walk_stmt_info *wi)
cbba99a0 11534{
4954efd4 11535 gimple *context = (gimple *) wi->info;
11536 gimple *inner_context;
11537 gimple *stmt = gsi_stmt (*gsi_p);
cbba99a0 11538
4954efd4 11539 *handled_ops_p = true;
c25f1934 11540
4954efd4 11541 switch (gimple_code (stmt))
11542 {
11543 WALK_SUBSTMTS;
c25f1934 11544
4954efd4 11545 case GIMPLE_OMP_PARALLEL:
11546 case GIMPLE_OMP_TASK:
11547 case GIMPLE_OMP_SECTIONS:
11548 case GIMPLE_OMP_SINGLE:
11549 case GIMPLE_OMP_SECTION:
11550 case GIMPLE_OMP_MASTER:
11551 case GIMPLE_OMP_ORDERED:
70a6624c 11552 case GIMPLE_OMP_SCAN:
4954efd4 11553 case GIMPLE_OMP_CRITICAL:
11554 case GIMPLE_OMP_TARGET:
11555 case GIMPLE_OMP_TEAMS:
11556 case GIMPLE_OMP_TASKGROUP:
11557 /* The minimal context here is just the current OMP construct. */
11558 inner_context = stmt;
11559 wi->info = inner_context;
11560 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
11561 wi->info = context;
11562 break;
641a0fa1 11563
4954efd4 11564 case GIMPLE_OMP_FOR:
11565 inner_context = stmt;
11566 wi->info = inner_context;
11567 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
11568 walk them. */
11569 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
11570 diagnose_sb_1, NULL, wi);
11571 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
11572 wi->info = context;
11573 break;
641a0fa1 11574
4954efd4 11575 case GIMPLE_LABEL:
11576 splay_tree_insert (all_labels,
11577 (splay_tree_key) gimple_label_label (
11578 as_a <glabel *> (stmt)),
11579 (splay_tree_value) context);
11580 break;
641a0fa1 11581
4954efd4 11582 default:
11583 break;
641a0fa1 11584 }
11585
4954efd4 11586 return NULL_TREE;
641a0fa1 11587}
11588
4954efd4 11589/* Pass 2: Check each branch and see if its context differs from that of
 11590   the destination label.  */
0bb0f256 11591
4954efd4 11592static tree
11593diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
11594 struct walk_stmt_info *wi)
0bb0f256 11595{
4954efd4 11596 gimple *context = (gimple *) wi->info;
11597 splay_tree_node n;
11598 gimple *stmt = gsi_stmt (*gsi_p);
6f431819 11599
4954efd4 11600 *handled_ops_p = true;
6f431819 11601
4954efd4 11602 switch (gimple_code (stmt))
cbba99a0 11603 {
4954efd4 11604 WALK_SUBSTMTS;
cbba99a0 11605
4954efd4 11606 case GIMPLE_OMP_PARALLEL:
11607 case GIMPLE_OMP_TASK:
11608 case GIMPLE_OMP_SECTIONS:
11609 case GIMPLE_OMP_SINGLE:
11610 case GIMPLE_OMP_SECTION:
11611 case GIMPLE_OMP_MASTER:
11612 case GIMPLE_OMP_ORDERED:
70a6624c 11613 case GIMPLE_OMP_SCAN:
4954efd4 11614 case GIMPLE_OMP_CRITICAL:
11615 case GIMPLE_OMP_TARGET:
11616 case GIMPLE_OMP_TEAMS:
11617 case GIMPLE_OMP_TASKGROUP:
11618 wi->info = stmt;
11619 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
11620 wi->info = context;
11621 break;
641a0fa1 11622
4954efd4 11623 case GIMPLE_OMP_FOR:
11624 wi->info = stmt;
11625 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
11626 walk them. */
11627 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
11628 diagnose_sb_2, NULL, wi);
11629 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
11630 wi->info = context;
11631 break;
641a0fa1 11632
4954efd4 11633 case GIMPLE_COND:
11634 {
11635 gcond *cond_stmt = as_a <gcond *> (stmt);
11636 tree lab = gimple_cond_true_label (cond_stmt);
11637 if (lab)
cbba99a0 11638 {
4954efd4 11639 n = splay_tree_lookup (all_labels,
11640 (splay_tree_key) lab);
11641 diagnose_sb_0 (gsi_p, context,
11642 n ? (gimple *) n->value : NULL);
cbba99a0 11643 }
4954efd4 11644 lab = gimple_cond_false_label (cond_stmt);
11645 if (lab)
11646 {
11647 n = splay_tree_lookup (all_labels,
11648 (splay_tree_key) lab);
11649 diagnose_sb_0 (gsi_p, context,
11650 n ? (gimple *) n->value : NULL);
11651 }
11652 }
11653 break;
cbba99a0 11654
4954efd4 11655 case GIMPLE_GOTO:
11656 {
11657 tree lab = gimple_goto_dest (stmt);
11658 if (TREE_CODE (lab) != LABEL_DECL)
11659 break;
11660
11661 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
11662 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
11663 }
11664 break;
cbba99a0 11665
4954efd4 11666 case GIMPLE_SWITCH:
11667 {
11668 gswitch *switch_stmt = as_a <gswitch *> (stmt);
11669 unsigned int i;
11670 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
cbba99a0 11671 {
4954efd4 11672 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
11673 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
11674 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
11675 break;
cbba99a0 11676 }
cbba99a0 11677 }
4954efd4 11678 break;
cbba99a0 11679
4954efd4 11680 case GIMPLE_RETURN:
11681 diagnose_sb_0 (gsi_p, context, NULL);
11682 break;
0bb0f256 11683
4954efd4 11684 default:
11685 break;
0bb0f256 11686 }
11687
4954efd4 11688 return NULL_TREE;
e1037942 11689}
11690
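/* Run both passes over the current function's body: diagnose_sb_1 records
   the innermost construct containing each label, diagnose_sb_2 then checks
   every branch against its target label's construct and replaces invalid
   branches with nops.  */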
4954efd4 11691static unsigned int
11692diagnose_omp_structured_block_errors (void)
0bb0f256 11693{
4954efd4 11694 struct walk_stmt_info wi;
11695 gimple_seq body = gimple_body (current_function_decl);
7c727679 11696
4954efd4 11697 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
0bb0f256 11698
4954efd4 11699 memset (&wi, 0, sizeof (wi));
11700 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
0bb0f256 11701
4954efd4 11702 memset (&wi, 0, sizeof (wi));
11703 wi.want_locations = true;
11704 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
0bb0f256 11705
4954efd4 11706 gimple_set_body (current_function_decl, body);
bab6706a 11707
4954efd4 11708 splay_tree_delete (all_labels);
11709 all_labels = NULL;
bab6706a 11710
bab6706a 11711 return 0;
11712}
11713
11714namespace {
11715
4954efd4 11716const pass_data pass_data_diagnose_omp_blocks =
bab6706a 11717{
11718 GIMPLE_PASS, /* type */
4954efd4 11719 "*diagnose_omp_blocks", /* name */
f57c8178 11720 OPTGROUP_OMP, /* optinfo_flags */
bab6706a 11721 TV_NONE, /* tv_id */
4954efd4 11722 PROP_gimple_any, /* properties_required */
11723 0, /* properties_provided */
bab6706a 11724 0, /* properties_destroyed */
11725 0, /* todo_flags_start */
4954efd4 11726 0, /* todo_flags_finish */
bab6706a 11727};
11728
4954efd4 11729class pass_diagnose_omp_blocks : public gimple_opt_pass
bab6706a 11730{
11731public:
4954efd4 11732 pass_diagnose_omp_blocks (gcc::context *ctxt)
11733 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
bab6706a 11734 {}
11735
11736 /* opt_pass methods: */
4954efd4 11737 virtual bool gate (function *)
11738 {
efa02472 11739 return flag_openacc || flag_openmp || flag_openmp_simd;
4954efd4 11740 }
bab6706a 11741 virtual unsigned int execute (function *)
11742 {
4954efd4 11743 return diagnose_omp_structured_block_errors ();
c0998828 11744 }
11745
4954efd4 11746}; // class pass_diagnose_omp_blocks
c0998828 11747
11748} // anon namespace
11749
11750gimple_opt_pass *
4954efd4 11751make_pass_diagnose_omp_blocks (gcc::context *ctxt)
c0998828 11752{
4954efd4 11753 return new pass_diagnose_omp_blocks (ctxt);
c0998828 11754}
4954efd4 11755\f
c0998828 11756
1e8e9920 11757#include "gt-omp-low.h"