1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2021 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
63
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
70
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
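/* Schematically, a construct such as

       #pragma omp parallel shared (x)
         x++;

   is outlined into a child function

       static void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       { (*.omp_data_i->x)++; }

   and the construct is replaced by a libgomp call such as

       GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);

   where .omp_data_o is the sender record holding &x.  This is a sketch
   only; the record layout, the runtime entry point and whether x
   travels by value or by reference depend on the construct and its
   clauses.  */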
74
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
77
78 struct omp_context
79 {
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.c (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
84 copy_body_data cb;
85
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
88 gimple *stmt;
89
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
96
97 /* These are used just by task contexts, if the task firstprivate fn
98 is needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to the task firstprivate fn;
100 record_type is allocated by GOMP_task, initialized by the task
101 firstprivate fn and passed to the task body fn. */
102 splay_tree sfield_map;
103 tree srecord_type;
104
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
108
109 /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
110 barriers should jump during the omplower pass. */
111 tree cancel_label;
112
113 /* The sibling GIMPLE_OMP_FOR simd with a _simt_ clause, or NULL
114 otherwise. */
115 gimple *simt_stmt;
116
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
121
122 /* A hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
125
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
129
130 /* And a hash map from the allocate variables to their corresponding
131 allocators. */
132 hash_map<tree, tree> *allocate_map;
133
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses;
139
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses;
145
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
149 int depth;
150
151 /* True if this parallel directive is nested within another. */
152 bool is_nested;
153
154 /* True if this construct can be cancelled. */
155 bool cancellable;
156
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
158 context. */
159 bool combined_into_simd_safelen1;
160
161 /* True if there is a nested scan context with an inclusive clause. */
162 bool scan_inclusive;
163 
164 /* True if there is a nested scan context with an exclusive clause. */
165 bool scan_exclusive;
166
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase;
169
170 /* True if there is an order(concurrent) clause on the construct. */
171 bool order_concurrent;
172 
173 /* True if there is a bind clause on the construct (i.e. a loop construct). */
174 bool loop_p;
175
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
178 bool teams_nested_p;
179
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p;
183
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec<tree> oacc_privatization_candidates;
186 };
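/* E.g. for

       #pragma omp parallel              // omp_context A, depth 1
       {
         #pragma omp task                // omp_context B, outer == A, depth 2
           ...
       }

   scanning creates one omp_context per construct, chained through
   OUTER (a sketch; most fields are only filled in when needed).  */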
187
188 static splay_tree all_contexts;
189 static int taskreg_nesting_level;
190 static int target_nesting_level;
191 static bitmap task_shared_vars;
192 static bitmap global_nonaddressable_vars;
193 static vec<omp_context *> taskreg_contexts;
194
195 static void scan_omp (gimple_seq *, omp_context *);
196 static tree scan_omp_1_op (tree *, int *, void *);
197
198 #define WALK_SUBSTMTS \
199 case GIMPLE_BIND: \
200 case GIMPLE_TRY: \
201 case GIMPLE_CATCH: \
202 case GIMPLE_EH_FILTER: \
203 case GIMPLE_TRANSACTION: \
204 /* The sub-statements for these should be walked. */ \
205 *handled_ops_p = false; \
206 break;
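/* WALK_SUBSTMTS is meant to be expanded inside the switch of a
   gimple-walk callback, roughly:

       switch (gimple_code (stmt))
         {
         WALK_SUBSTMTS;
         case GIMPLE_OMP_PARALLEL:
           ...
         }

   so that container statements are descended into rather than being
   reported as handled.  */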
207
208 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
209 (This doesn't include OpenACC 'kernels' decomposed parts.) */
210
211 static bool
212 is_oacc_parallel_or_serial (omp_context *ctx)
213 {
214 enum gimple_code outer_type = gimple_code (ctx->stmt);
215 return ((outer_type == GIMPLE_OMP_TARGET)
216 && ((gimple_omp_target_kind (ctx->stmt)
217 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
218 || (gimple_omp_target_kind (ctx->stmt)
219 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
220 }
221
222 /* Return whether CTX represents an OpenACC 'kernels' construct.
223 (This doesn't include OpenACC 'kernels' decomposed parts.) */
224
225 static bool
226 is_oacc_kernels (omp_context *ctx)
227 {
228 enum gimple_code outer_type = gimple_code (ctx->stmt);
229 return ((outer_type == GIMPLE_OMP_TARGET)
230 && (gimple_omp_target_kind (ctx->stmt)
231 == GF_OMP_TARGET_KIND_OACC_KERNELS));
232 }
233
234 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
235
236 static bool
237 is_oacc_kernels_decomposed_part (omp_context *ctx)
238 {
239 enum gimple_code outer_type = gimple_code (ctx->stmt);
240 return ((outer_type == GIMPLE_OMP_TARGET)
241 && ((gimple_omp_target_kind (ctx->stmt)
242 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
243 || (gimple_omp_target_kind (ctx->stmt)
244 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
245 || (gimple_omp_target_kind (ctx->stmt)
246 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
247 }
248
249 /* Return true if STMT corresponds to an OpenMP target region. */
250 static bool
251 is_omp_target (gimple *stmt)
252 {
253 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
254 {
255 int kind = gimple_omp_target_kind (stmt);
256 return (kind == GF_OMP_TARGET_KIND_REGION
257 || kind == GF_OMP_TARGET_KIND_DATA
258 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
259 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
260 }
261 return false;
262 }
263
264 /* If DECL is the artificial dummy VAR_DECL created for non-static
265 data member privatization, return the underlying "this" parameter,
266 otherwise return NULL. */
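/* E.g. for a non-static data member N used in a clause inside a C++
   member function,

       void S::f ()
       {
       #pragma omp parallel private (n)
         ...
       }

   the front end creates an artificial VAR_DECL whose DECL_VALUE_EXPR
   is this->n; given that dummy decl, this function returns the THIS
   parameter.  (A sketch; the exact trees are front-end specific.)  */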
267
268 tree
269 omp_member_access_dummy_var (tree decl)
270 {
271 if (!VAR_P (decl)
272 || !DECL_ARTIFICIAL (decl)
273 || !DECL_IGNORED_P (decl)
274 || !DECL_HAS_VALUE_EXPR_P (decl)
275 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
276 return NULL_TREE;
277
278 tree v = DECL_VALUE_EXPR (decl);
279 if (TREE_CODE (v) != COMPONENT_REF)
280 return NULL_TREE;
281
282 while (1)
283 switch (TREE_CODE (v))
284 {
285 case COMPONENT_REF:
286 case MEM_REF:
287 case INDIRECT_REF:
288 CASE_CONVERT:
289 case POINTER_PLUS_EXPR:
290 v = TREE_OPERAND (v, 0);
291 continue;
292 case PARM_DECL:
293 if (DECL_CONTEXT (v) == current_function_decl
294 && DECL_ARTIFICIAL (v)
295 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
296 return v;
297 return NULL_TREE;
298 default:
299 return NULL_TREE;
300 }
301 }
302
303 /* Helper for unshare_and_remap, called through walk_tree. */
304
305 static tree
306 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
307 {
308 tree *pair = (tree *) data;
309 if (*tp == pair[0])
310 {
311 *tp = unshare_expr (pair[1]);
312 *walk_subtrees = 0;
313 }
314 else if (IS_TYPE_OR_DECL_P (*tp))
315 *walk_subtrees = 0;
316 return NULL_TREE;
317 }
318
319 /* Return unshare_expr (X) with all occurrences of FROM
320 replaced with TO. */
321
322 static tree
323 unshare_and_remap (tree x, tree from, tree to)
324 {
325 tree pair[2] = { from, to };
326 x = unshare_expr (x);
327 walk_tree (&x, unshare_and_remap_1, pair, NULL);
328 return x;
329 }
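/* E.g. with the dummy decl from omp_member_access_dummy_var whose
   DECL_VALUE_EXPR is this->n, unshare_and_remap (this->n, this, t)
   yields a fresh t->n tree (illustrative only).  */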
330
331 /* Convenience function for calling scan_omp_1_op on tree operands. */
332
333 static inline tree
334 scan_omp_op (tree *tp, omp_context *ctx)
335 {
336 struct walk_stmt_info wi;
337
338 memset (&wi, 0, sizeof (wi));
339 wi.info = ctx;
340 wi.want_locations = true;
341
342 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
343 }
344
345 static void lower_omp (gimple_seq *, omp_context *);
346 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
347 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
348
349 /* Return true if CTX is for an omp parallel. */
350
351 static inline bool
352 is_parallel_ctx (omp_context *ctx)
353 {
354 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
355 }
356
357
358 /* Return true if CTX is for an omp task. */
359
360 static inline bool
361 is_task_ctx (omp_context *ctx)
362 {
363 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
364 }
365
366
367 /* Return true if CTX is for an omp taskloop. */
368
369 static inline bool
370 is_taskloop_ctx (omp_context *ctx)
371 {
372 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
373 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
374 }
375
376
377 /* Return true if CTX is for a host omp teams. */
378
379 static inline bool
380 is_host_teams_ctx (omp_context *ctx)
381 {
382 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
383 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
384 }
385
386 /* Return true if CTX is for an omp parallel or omp task or host omp teams
387 (the last one is strictly not a task region in OpenMP speak, but we
388 need to treat it similarly). */
389
390 static inline bool
391 is_taskreg_ctx (omp_context *ctx)
392 {
393 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
394 }
395
396 /* Return true if EXPR is variable sized. */
397
398 static inline bool
399 is_variable_sized (const_tree expr)
400 {
401 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
402 }
403
404 /* Look up variables. The "maybe" form allows the variable to not
405 have been entered; otherwise we assert that it must have been
406 entered. */
407
408 static inline tree
409 lookup_decl (tree var, omp_context *ctx)
410 {
411 tree *n = ctx->cb.decl_map->get (var);
412 return *n;
413 }
414
415 static inline tree
416 maybe_lookup_decl (const_tree var, omp_context *ctx)
417 {
418 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
419 return n ? *n : NULL_TREE;
420 }
421
422 static inline tree
423 lookup_field (tree var, omp_context *ctx)
424 {
425 splay_tree_node n;
426 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
427 return (tree) n->value;
428 }
429
430 static inline tree
431 lookup_sfield (splay_tree_key key, omp_context *ctx)
432 {
433 splay_tree_node n;
434 n = splay_tree_lookup (ctx->sfield_map
435 ? ctx->sfield_map : ctx->field_map, key);
436 return (tree) n->value;
437 }
438
439 static inline tree
440 lookup_sfield (tree var, omp_context *ctx)
441 {
442 return lookup_sfield ((splay_tree_key) var, ctx);
443 }
444
445 static inline tree
446 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
447 {
448 splay_tree_node n;
449 n = splay_tree_lookup (ctx->field_map, key);
450 return n ? (tree) n->value : NULL_TREE;
451 }
452
453 static inline tree
454 maybe_lookup_field (tree var, omp_context *ctx)
455 {
456 return maybe_lookup_field ((splay_tree_key) var, ctx);
457 }
458
459 /* Return true if DECL should be copied by pointer. SHARED_CTX is
460 the parallel context if DECL is to be shared. */
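/* E.g. under

       int i;  struct S s;
       #pragma omp parallel shared (i, s)
         ...

   I, a non-addressable local scalar, can use copy-in/copy-out and so
   is copied by value, while S, an aggregate, is always sent as &s
   (illustrative; tasks and nested regions add further restrictions,
   handled below).  */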
461
462 static bool
463 use_pointer_for_field (tree decl, omp_context *shared_ctx)
464 {
465 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
466 || TYPE_ATOMIC (TREE_TYPE (decl)))
467 return true;
468
469 /* We can only use copy-in/copy-out semantics for shared variables
470 when we know the value is not accessible from an outer scope. */
471 if (shared_ctx)
472 {
473 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
474
475 /* ??? Trivially accessible from anywhere. But why would we even
476 be passing an address in this case? Should we simply assert
477 this to be false, or should we have a cleanup pass that removes
478 these from the list of mappings? */
479 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
480 return true;
481
482 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
483 without analyzing the expression whether or not its location
484 is accessible to anyone else. In the case of nested parallel
485 regions it certainly may be. */
486 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
487 return true;
488
489 /* Do not use copy-in/copy-out for variables that have their
490 address taken. */
491 if (is_global_var (decl))
492 {
493 /* For file scope vars, track whether we've seen them as
494 non-addressable initially and in that case, keep the same
495 answer for the duration of the pass, even when they are made
496 addressable later on e.g. through reduction expansion. Global
497 variables which weren't addressable before the pass will not
498 have their privatized copies address taken. See PR91216. */
499 if (!TREE_ADDRESSABLE (decl))
500 {
501 if (!global_nonaddressable_vars)
502 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
503 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
504 }
505 else if (!global_nonaddressable_vars
506 || !bitmap_bit_p (global_nonaddressable_vars,
507 DECL_UID (decl)))
508 return true;
509 }
510 else if (TREE_ADDRESSABLE (decl))
511 return true;
512
513 /* lower_send_shared_vars only uses copy-in, but not copy-out
514 for these. */
515 if (TREE_READONLY (decl)
516 || ((TREE_CODE (decl) == RESULT_DECL
517 || TREE_CODE (decl) == PARM_DECL)
518 && DECL_BY_REFERENCE (decl)))
519 return false;
520
521 /* Disallow copy-in/out in nested parallel if
522 decl is shared in outer parallel; otherwise
523 each thread could store the shared variable
524 in its own copy-in location, making the
525 variable no longer really shared. */
526 if (shared_ctx->is_nested)
527 {
528 omp_context *up;
529
530 for (up = shared_ctx->outer; up; up = up->outer)
531 if ((is_taskreg_ctx (up)
532 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
533 && is_gimple_omp_offloaded (up->stmt)))
534 && maybe_lookup_decl (decl, up))
535 break;
536
537 if (up)
538 {
539 tree c;
540
541 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
542 {
543 for (c = gimple_omp_target_clauses (up->stmt);
544 c; c = OMP_CLAUSE_CHAIN (c))
545 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
546 && OMP_CLAUSE_DECL (c) == decl)
547 break;
548 }
549 else
550 for (c = gimple_omp_taskreg_clauses (up->stmt);
551 c; c = OMP_CLAUSE_CHAIN (c))
552 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
553 && OMP_CLAUSE_DECL (c) == decl)
554 break;
555
556 if (c)
557 goto maybe_mark_addressable_and_ret;
558 }
559 }
560
561 /* For tasks avoid using copy-in/out. As tasks can be
562 deferred or executed in a different thread, the task
563 hasn't necessarily terminated when GOMP_task returns. */
564 if (is_task_ctx (shared_ctx))
565 {
566 tree outer;
567 maybe_mark_addressable_and_ret:
568 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
569 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
570 {
571 /* Taking address of OUTER in lower_send_shared_vars
572 might need regimplification of everything that uses the
573 variable. */
574 if (!task_shared_vars)
575 task_shared_vars = BITMAP_ALLOC (NULL);
576 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
577 TREE_ADDRESSABLE (outer) = 1;
578 }
579 return true;
580 }
581 }
582
583 return false;
584 }
585
586 /* Construct a new automatic decl similar to VAR. */
587
588 static tree
589 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
590 {
591 tree copy = copy_var_decl (var, name, type);
592
593 DECL_CONTEXT (copy) = current_function_decl;
594 DECL_CHAIN (copy) = ctx->block_vars;
595 /* If VAR is listed in task_shared_vars, it means it wasn't
596 originally addressable and was only made addressable because
597 a task needs to take its address. We don't need to take the
598 address of privatized copies of that var. */
599 if (TREE_ADDRESSABLE (var)
600 && ((task_shared_vars
601 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
602 || (global_nonaddressable_vars
603 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
604 TREE_ADDRESSABLE (copy) = 0;
605 ctx->block_vars = copy;
606
607 return copy;
608 }
609
610 static tree
611 omp_copy_decl_1 (tree var, omp_context *ctx)
612 {
613 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
614 }
615
616 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
617 as appropriate. */
618 static tree
619 omp_build_component_ref (tree obj, tree field)
620 {
621 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
622 if (TREE_THIS_VOLATILE (field))
623 TREE_THIS_VOLATILE (ret) |= 1;
624 if (TREE_READONLY (field))
625 TREE_READONLY (ret) |= 1;
626 return ret;
627 }
628
629 /* Build tree nodes to access the field for VAR on the receiver side. */
630
631 static tree
632 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
633 {
634 tree x, field = lookup_field (var, ctx);
635
636 /* If the receiver record type was remapped in the child function,
637 remap the field into the new record type. */
638 x = maybe_lookup_field (field, ctx);
639 if (x != NULL)
640 field = x;
641
642 x = build_simple_mem_ref (ctx->receiver_decl);
643 TREE_THIS_NOTRAP (x) = 1;
644 x = omp_build_component_ref (x, field);
645 if (by_ref)
646 {
647 x = build_simple_mem_ref (x);
648 TREE_THIS_NOTRAP (x) = 1;
649 }
650
651 return x;
652 }
653
654 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
655 of a parallel, this is a component reference; for workshare constructs
656 this is some variable. */
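/* E.g. in

       #pragma omp parallel shared (x)     // taskreg ctx P
         #pragma omp for lastprivate (x)   // workshare ctx W

   calling this for X in W returns P's replacement decl for X, whereas
   calling it for X in P itself builds a reference into P's receiver
   record.  (A sketch of the common cases handled below.)  */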
657
658 static tree
659 build_outer_var_ref (tree var, omp_context *ctx,
660 enum omp_clause_code code = OMP_CLAUSE_ERROR)
661 {
662 tree x;
663 omp_context *outer = ctx->outer;
664 while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
665 outer = outer->outer;
666
667 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
668 x = var;
669 else if (is_variable_sized (var))
670 {
671 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
672 x = build_outer_var_ref (x, ctx, code);
673 x = build_simple_mem_ref (x);
674 }
675 else if (is_taskreg_ctx (ctx))
676 {
677 bool by_ref = use_pointer_for_field (var, NULL);
678 x = build_receiver_ref (var, by_ref, ctx);
679 }
680 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
681 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
682 || ctx->loop_p
683 || (code == OMP_CLAUSE_PRIVATE
684 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
685 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
686 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
687 {
688 /* #pragma omp simd isn't a worksharing construct, and can reference
689 even private vars in its linear etc. clauses.
690 Similarly, OMP_CLAUSE_PRIVATE with an outer reference can refer
691 to private vars in all worksharing constructs. */
692 x = NULL_TREE;
693 if (outer && is_taskreg_ctx (outer))
694 x = lookup_decl (var, outer);
695 else if (outer)
696 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
697 if (x == NULL_TREE)
698 x = var;
699 }
700 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
701 {
702 gcc_assert (outer);
703 splay_tree_node n
704 = splay_tree_lookup (outer->field_map,
705 (splay_tree_key) &DECL_UID (var));
706 if (n == NULL)
707 {
708 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
709 x = var;
710 else
711 x = lookup_decl (var, outer);
712 }
713 else
714 {
715 tree field = (tree) n->value;
716 /* If the receiver record type was remapped in the child function,
717 remap the field into the new record type. */
718 x = maybe_lookup_field (field, outer);
719 if (x != NULL)
720 field = x;
721
722 x = build_simple_mem_ref (outer->receiver_decl);
723 x = omp_build_component_ref (x, field);
724 if (use_pointer_for_field (var, outer))
725 x = build_simple_mem_ref (x);
726 }
727 }
728 else if (outer)
729 x = lookup_decl (var, outer);
730 else if (omp_is_reference (var))
731 /* This can happen with orphaned constructs. If var is a reference,
732 it may be shared and as such valid. */
733 x = var;
734 else if (omp_member_access_dummy_var (var))
735 x = var;
736 else
737 gcc_unreachable ();
738
739 if (x == var)
740 {
741 tree t = omp_member_access_dummy_var (var);
742 if (t)
743 {
744 x = DECL_VALUE_EXPR (var);
745 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
746 if (o != t)
747 x = unshare_and_remap (x, t, o);
748 else
749 x = unshare_expr (x);
750 }
751 }
752
753 if (omp_is_reference (var))
754 x = build_simple_mem_ref (x);
755
756 return x;
757 }
758
759 /* Build tree nodes to access the field for VAR on the sender side. */
760
761 static tree
762 build_sender_ref (splay_tree_key key, omp_context *ctx)
763 {
764 tree field = lookup_sfield (key, ctx);
765 return omp_build_component_ref (ctx->sender_decl, field);
766 }
767
768 static tree
769 build_sender_ref (tree var, omp_context *ctx)
770 {
771 return build_sender_ref ((splay_tree_key) var, ctx);
772 }
773
774 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. BY_REF
775 means the field holds a pointer to VAR; the MASK bits are described below. */
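/* As inferred from the uses in this file, the MASK bits request:
     1  - install a field in CTX->RECORD_TYPE / CTX->FIELD_MAP;
     2  - install a field in CTX->SRECORD_TYPE / CTX->SFIELD_MAP
          (3 = both records);
     4  - VAR is an array whose field is a pointer to a pointer;
     8  - key the field map on &DECL_UID (VAR) rather than on VAR;
     16 - key it on &DECL_NAME (VAR) and give the field the Fortran
          array descriptor's data type;
     32 - VAR has an allocate clause, so its reference type is kept.  */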
776
777 static void
778 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
779 {
780 tree field, type, sfield = NULL_TREE;
781 splay_tree_key key = (splay_tree_key) var;
782
783 if ((mask & 16) != 0)
784 {
785 key = (splay_tree_key) &DECL_NAME (var);
786 gcc_checking_assert (key != (splay_tree_key) var);
787 }
788 if ((mask & 8) != 0)
789 {
790 key = (splay_tree_key) &DECL_UID (var);
791 gcc_checking_assert (key != (splay_tree_key) var);
792 }
793 gcc_assert ((mask & 1) == 0
794 || !splay_tree_lookup (ctx->field_map, key));
795 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
796 || !splay_tree_lookup (ctx->sfield_map, key));
797 gcc_assert ((mask & 3) == 3
798 || !is_gimple_omp_oacc (ctx->stmt));
799
800 type = TREE_TYPE (var);
801 if ((mask & 16) != 0)
802 type = lang_hooks.decls.omp_array_data (var, true);
803
804 /* Prevent redeclaring the var in the split-off function with a restrict
805 pointer type. Note that we only clear type itself, restrict qualifiers in
806 the pointed-to type will be ignored by points-to analysis. */
807 if (POINTER_TYPE_P (type)
808 && TYPE_RESTRICT (type))
809 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
810
811 if (mask & 4)
812 {
813 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
814 type = build_pointer_type (build_pointer_type (type));
815 }
816 else if (by_ref)
817 type = build_pointer_type (type);
818 else if ((mask & (32 | 3)) == 1 && omp_is_reference (var))
819 type = TREE_TYPE (type);
820
821 field = build_decl (DECL_SOURCE_LOCATION (var),
822 FIELD_DECL, DECL_NAME (var), type);
823
824 /* Remember what variable this field was created for. This does have a
825 side effect of making dwarf2out ignore this member, so for helpful
826 debugging we clear it later in delete_omp_context. */
827 DECL_ABSTRACT_ORIGIN (field) = var;
828 if ((mask & 16) == 0 && type == TREE_TYPE (var))
829 {
830 SET_DECL_ALIGN (field, DECL_ALIGN (var));
831 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
832 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
833 }
834 else
835 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
836
837 if ((mask & 3) == 3)
838 {
839 insert_field_into_struct (ctx->record_type, field);
840 if (ctx->srecord_type)
841 {
842 sfield = build_decl (DECL_SOURCE_LOCATION (var),
843 FIELD_DECL, DECL_NAME (var), type);
844 DECL_ABSTRACT_ORIGIN (sfield) = var;
845 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
846 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
847 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
848 insert_field_into_struct (ctx->srecord_type, sfield);
849 }
850 }
851 else
852 {
853 if (ctx->srecord_type == NULL_TREE)
854 {
855 tree t;
856
857 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
858 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
859 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
860 {
861 sfield = build_decl (DECL_SOURCE_LOCATION (t),
862 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
863 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
864 insert_field_into_struct (ctx->srecord_type, sfield);
865 splay_tree_insert (ctx->sfield_map,
866 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
867 (splay_tree_value) sfield);
868 }
869 }
870 sfield = field;
871 insert_field_into_struct ((mask & 1) ? ctx->record_type
872 : ctx->srecord_type, field);
873 }
874
875 if (mask & 1)
876 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
877 if ((mask & 2) && ctx->sfield_map)
878 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
879 }
880
881 static tree
882 install_var_local (tree var, omp_context *ctx)
883 {
884 tree new_var = omp_copy_decl_1 (var, ctx);
885 insert_decl_map (&ctx->cb, var, new_var);
886 return new_var;
887 }
888
889 /* Adjust the replacement for DECL in CTX for the new context. This means
890 copying the DECL_VALUE_EXPR, and fixing up the type. */
891
892 static void
893 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
894 {
895 tree new_decl, size;
896
897 new_decl = lookup_decl (decl, ctx);
898
899 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
900
901 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
902 && DECL_HAS_VALUE_EXPR_P (decl))
903 {
904 tree ve = DECL_VALUE_EXPR (decl);
905 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
906 SET_DECL_VALUE_EXPR (new_decl, ve);
907 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
908 }
909
910 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
911 {
912 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
913 if (size == error_mark_node)
914 size = TYPE_SIZE (TREE_TYPE (new_decl));
915 DECL_SIZE (new_decl) = size;
916
917 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
918 if (size == error_mark_node)
919 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
920 DECL_SIZE_UNIT (new_decl) = size;
921 }
922 }
923
924 /* The callback for remap_decl. Search all containing contexts for a
925 mapping of the variable; this avoids having to duplicate the splay
926 tree ahead of time. We know a mapping doesn't already exist in the
927 given context. Create new mappings to implement default semantics. */
928
929 static tree
930 omp_copy_decl (tree var, copy_body_data *cb)
931 {
932 omp_context *ctx = (omp_context *) cb;
933 tree new_var;
934
935 if (TREE_CODE (var) == LABEL_DECL)
936 {
937 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
938 return var;
939 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
940 DECL_CONTEXT (new_var) = current_function_decl;
941 insert_decl_map (&ctx->cb, var, new_var);
942 return new_var;
943 }
944
945 while (!is_taskreg_ctx (ctx))
946 {
947 ctx = ctx->outer;
948 if (ctx == NULL)
949 return var;
950 new_var = maybe_lookup_decl (var, ctx);
951 if (new_var)
952 return new_var;
953 }
954
955 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
956 return var;
957
958 return error_mark_node;
959 }
960
961 /* Create a new context, with OUTER_CTX being the surrounding context. */
962
963 static omp_context *
964 new_omp_context (gimple *stmt, omp_context *outer_ctx)
965 {
966 omp_context *ctx = XCNEW (omp_context);
967
968 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
969 (splay_tree_value) ctx);
970 ctx->stmt = stmt;
971
972 if (outer_ctx)
973 {
974 ctx->outer = outer_ctx;
975 ctx->cb = outer_ctx->cb;
976 ctx->cb.block = NULL;
977 ctx->depth = outer_ctx->depth + 1;
978 }
979 else
980 {
981 ctx->cb.src_fn = current_function_decl;
982 ctx->cb.dst_fn = current_function_decl;
983 ctx->cb.src_node = cgraph_node::get (current_function_decl);
984 gcc_checking_assert (ctx->cb.src_node);
985 ctx->cb.dst_node = ctx->cb.src_node;
986 ctx->cb.src_cfun = cfun;
987 ctx->cb.copy_decl = omp_copy_decl;
988 ctx->cb.eh_lp_nr = 0;
989 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
990 ctx->cb.adjust_array_error_bounds = true;
991 ctx->cb.dont_remap_vla_if_no_change = true;
992 ctx->depth = 1;
993 }
994
995 ctx->cb.decl_map = new hash_map<tree, tree>;
996
997 return ctx;
998 }
999
1000 static gimple_seq maybe_catch_exception (gimple_seq);
1001
1002 /* Finalize task copyfn. */
1003
1004 static void
1005 finalize_task_copyfn (gomp_task *task_stmt)
1006 {
1007 struct function *child_cfun;
1008 tree child_fn;
1009 gimple_seq seq = NULL, new_seq;
1010 gbind *bind;
1011
1012 child_fn = gimple_omp_task_copy_fn (task_stmt);
1013 if (child_fn == NULL_TREE)
1014 return;
1015
1016 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1017 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1018
1019 push_cfun (child_cfun);
1020 bind = gimplify_body (child_fn, false);
1021 gimple_seq_add_stmt (&seq, bind);
1022 new_seq = maybe_catch_exception (seq);
1023 if (new_seq != seq)
1024 {
1025 bind = gimple_build_bind (NULL, new_seq, NULL);
1026 seq = NULL;
1027 gimple_seq_add_stmt (&seq, bind);
1028 }
1029 gimple_set_body (child_fn, seq);
1030 pop_cfun ();
1031
1032 /* Inform the callgraph about the new function. */
1033 cgraph_node *node = cgraph_node::get_create (child_fn);
1034 node->parallelized_function = 1;
1035 cgraph_node::add_new_function (child_fn, false);
1036 }
1037
1038 /* Destroy an omp_context data structure. Called through the splay tree
1039 value delete callback. */
1040
1041 static void
1042 delete_omp_context (splay_tree_value value)
1043 {
1044 omp_context *ctx = (omp_context *) value;
1045
1046 delete ctx->cb.decl_map;
1047
1048 if (ctx->field_map)
1049 splay_tree_delete (ctx->field_map);
1050 if (ctx->sfield_map)
1051 splay_tree_delete (ctx->sfield_map);
1052
1053 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1054 it produces corrupt debug information. */
1055 if (ctx->record_type)
1056 {
1057 tree t;
1058 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1059 DECL_ABSTRACT_ORIGIN (t) = NULL;
1060 }
1061 if (ctx->srecord_type)
1062 {
1063 tree t;
1064 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1065 DECL_ABSTRACT_ORIGIN (t) = NULL;
1066 }
1067
1068 if (is_task_ctx (ctx))
1069 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1070
1071 if (ctx->task_reduction_map)
1072 {
1073 ctx->task_reductions.release ();
1074 delete ctx->task_reduction_map;
1075 }
1076
1077 delete ctx->lastprivate_conditional_map;
1078 delete ctx->allocate_map;
1079
1080 XDELETE (ctx);
1081 }
1082
1083 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1084 context. */
1085
1086 static void
1087 fixup_child_record_type (omp_context *ctx)
1088 {
1089 tree f, type = ctx->record_type;
1090
1091 if (!ctx->receiver_decl)
1092 return;
1093 /* ??? It isn't sufficient to just call remap_type here, because
1094 variably_modified_type_p doesn't work the way we expect for
1095 record types. Testing each field for whether it needs remapping
1096 and creating a new record by hand works, however. */
1097 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1098 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1099 break;
1100 if (f)
1101 {
1102 tree name, new_fields = NULL;
1103
1104 type = lang_hooks.types.make_type (RECORD_TYPE);
1105 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1106 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1107 TYPE_DECL, name, type);
1108 TYPE_NAME (type) = name;
1109
1110 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1111 {
1112 tree new_f = copy_node (f);
1113 DECL_CONTEXT (new_f) = type;
1114 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1115 DECL_CHAIN (new_f) = new_fields;
1116 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1117 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1118 &ctx->cb, NULL);
1119 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1120 &ctx->cb, NULL);
1121 new_fields = new_f;
1122
1123 /* Arrange to be able to look up the receiver field
1124 given the sender field. */
1125 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1126 (splay_tree_value) new_f);
1127 }
1128 TYPE_FIELDS (type) = nreverse (new_fields);
1129 layout_type (type);
1130 }
1131
1132 /* In a target region we never modify any of the pointers in *.omp_data_i,
1133 so attempt to help the optimizers. */
1134 if (is_gimple_omp_offloaded (ctx->stmt))
1135 type = build_qualified_type (type, TYPE_QUAL_CONST);
1136
1137 TREE_TYPE (ctx->receiver_decl)
1138 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1139 }
1140
1141 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1142 specified by CLAUSES. */
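/* E.g. for

       #pragma omp parallel firstprivate (a) shared (b)

   the first pass below installs record fields and local replacements
   for both A and B, B's field being a pointer when B cannot use
   copy-in/copy-out; the second pass then fixes up the remapped decls
   (an illustrative summary).  */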
1143
1144 static void
1145 scan_sharing_clauses (tree clauses, omp_context *ctx)
1146 {
1147 tree c, decl;
1148 bool scan_array_reductions = false;
1149
1150 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1151 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1152 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1153 /* omp_default_mem_alloc is 1 */
1154 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))))
1155 {
1156 if (ctx->allocate_map == NULL)
1157 ctx->allocate_map = new hash_map<tree, tree>;
1158 ctx->allocate_map->put (OMP_CLAUSE_DECL (c),
1159 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
1160 ? OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
1161 : integer_zero_node);
1162 }
1163
1164 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1165 {
1166 bool by_ref;
1167
1168 switch (OMP_CLAUSE_CODE (c))
1169 {
1170 case OMP_CLAUSE_PRIVATE:
1171 decl = OMP_CLAUSE_DECL (c);
1172 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1173 goto do_private;
1174 else if (!is_variable_sized (decl))
1175 install_var_local (decl, ctx);
1176 break;
1177
1178 case OMP_CLAUSE_SHARED:
1179 decl = OMP_CLAUSE_DECL (c);
1180 if (ctx->allocate_map && ctx->allocate_map->get (decl))
1181 ctx->allocate_map->remove (decl);
1182 /* Ignore shared directives in a teams construct inside of a
1183 target construct. */
1184 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1185 && !is_host_teams_ctx (ctx))
1186 {
1187 /* Global variables don't need to be copied,
1188 the receiver side will use them directly. */
1189 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1190 if (is_global_var (odecl))
1191 break;
1192 insert_decl_map (&ctx->cb, decl, odecl);
1193 break;
1194 }
1195 gcc_assert (is_taskreg_ctx (ctx));
1196 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1197 || !is_variable_sized (decl));
1198 /* Global variables don't need to be copied,
1199 the receiver side will use them directly. */
1200 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1201 break;
1202 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1203 {
1204 use_pointer_for_field (decl, ctx);
1205 break;
1206 }
1207 by_ref = use_pointer_for_field (decl, NULL);
1208 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1209 || TREE_ADDRESSABLE (decl)
1210 || by_ref
1211 || omp_is_reference (decl))
1212 {
1213 by_ref = use_pointer_for_field (decl, ctx);
1214 install_var_field (decl, by_ref, 3, ctx);
1215 install_var_local (decl, ctx);
1216 break;
1217 }
1218 /* We don't need to copy const scalar vars back. */
1219 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1220 goto do_private;
1221
1222 case OMP_CLAUSE_REDUCTION:
1223 /* Collect 'reduction' clauses on OpenACC compute construct. */
1224 if (is_gimple_omp_oacc (ctx->stmt)
1225 && is_gimple_omp_offloaded (ctx->stmt))
1226 {
1227 /* No 'reduction' clauses on OpenACC 'kernels'. */
1228 gcc_checking_assert (!is_oacc_kernels (ctx));
1229 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1230 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1231
1232 ctx->local_reduction_clauses
1233 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1234 }
1235 /* FALLTHRU */
1236
1237 case OMP_CLAUSE_IN_REDUCTION:
1238 decl = OMP_CLAUSE_DECL (c);
1239 if (ctx->allocate_map
1240 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1241 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1242 || OMP_CLAUSE_REDUCTION_TASK (c)))
1243 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1244 || is_task_ctx (ctx)))
1245 {
1246 /* For now. */
1247 if (ctx->allocate_map->get (decl))
1248 ctx->allocate_map->remove (decl);
1249 }
1250 if (TREE_CODE (decl) == MEM_REF)
1251 {
1252 tree t = TREE_OPERAND (decl, 0);
1253 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1254 t = TREE_OPERAND (t, 0);
1255 if (TREE_CODE (t) == INDIRECT_REF
1256 || TREE_CODE (t) == ADDR_EXPR)
1257 t = TREE_OPERAND (t, 0);
1258 if (is_omp_target (ctx->stmt))
1259 {
1260 if (is_variable_sized (t))
1261 {
1262 gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
1263 t = DECL_VALUE_EXPR (t);
1264 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
1265 t = TREE_OPERAND (t, 0);
1266 gcc_assert (DECL_P (t));
1267 }
1268 tree at = t;
1269 if (ctx->outer)
1270 scan_omp_op (&at, ctx->outer);
1271 tree nt = omp_copy_decl_1 (at, ctx);
1272 splay_tree_insert (ctx->field_map,
1273 (splay_tree_key) &DECL_CONTEXT (t),
1274 (splay_tree_value) nt);
1275 if (at != t)
1276 splay_tree_insert (ctx->field_map,
1277 (splay_tree_key) &DECL_CONTEXT (at),
1278 (splay_tree_value) nt);
1279 break;
1280 }
1281 install_var_local (t, ctx);
1282 if (is_taskreg_ctx (ctx)
1283 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1284 || (is_task_ctx (ctx)
1285 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1286 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1287 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1288 == POINTER_TYPE)))))
1289 && !is_variable_sized (t)
1290 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1291 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1292 && !is_task_ctx (ctx))))
1293 {
1294 by_ref = use_pointer_for_field (t, NULL);
1295 if (is_task_ctx (ctx)
1296 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1297 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1298 {
1299 install_var_field (t, false, 1, ctx);
1300 install_var_field (t, by_ref, 2, ctx);
1301 }
1302 else
1303 install_var_field (t, by_ref, 3, ctx);
1304 }
1305 break;
1306 }
1307 if (is_omp_target (ctx->stmt))
1308 {
1309 tree at = decl;
1310 if (ctx->outer)
1311 scan_omp_op (&at, ctx->outer);
1312 tree nt = omp_copy_decl_1 (at, ctx);
1313 splay_tree_insert (ctx->field_map,
1314 (splay_tree_key) &DECL_CONTEXT (decl),
1315 (splay_tree_value) nt);
1316 if (at != decl)
1317 splay_tree_insert (ctx->field_map,
1318 (splay_tree_key) &DECL_CONTEXT (at),
1319 (splay_tree_value) nt);
1320 break;
1321 }
1322 if (is_task_ctx (ctx)
1323 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1324 && OMP_CLAUSE_REDUCTION_TASK (c)
1325 && is_parallel_ctx (ctx)))
1326 {
1327 /* Global variables don't need to be copied,
1328 the receiver side will use them directly. */
1329 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1330 {
1331 by_ref = use_pointer_for_field (decl, ctx);
1332 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1333 install_var_field (decl, by_ref, 3, ctx);
1334 }
1335 install_var_local (decl, ctx);
1336 break;
1337 }
1338 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1339 && OMP_CLAUSE_REDUCTION_TASK (c))
1340 {
1341 install_var_local (decl, ctx);
1342 break;
1343 }
1344 goto do_private;
1345
1346 case OMP_CLAUSE_LASTPRIVATE:
1347 /* Let the corresponding firstprivate clause create
1348 the variable. */
1349 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1350 break;
1351 /* FALLTHRU */
1352
1353 case OMP_CLAUSE_FIRSTPRIVATE:
1354 case OMP_CLAUSE_LINEAR:
1355 decl = OMP_CLAUSE_DECL (c);
1356 do_private:
1357 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1358 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1359 && is_gimple_omp_offloaded (ctx->stmt))
1360 {
1361 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1362 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1363 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1364 install_var_field (decl, true, 3, ctx);
1365 else
1366 install_var_field (decl, false, 3, ctx);
1367 }
1368 if (is_variable_sized (decl))
1369 {
1370 if (is_task_ctx (ctx))
1371 {
1372 if (ctx->allocate_map
1373 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1374 {
1375 /* For now. */
1376 if (ctx->allocate_map->get (decl))
1377 ctx->allocate_map->remove (decl);
1378 }
1379 install_var_field (decl, false, 1, ctx);
1380 }
1381 break;
1382 }
1383 else if (is_taskreg_ctx (ctx))
1384 {
1385 bool global
1386 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1387 by_ref = use_pointer_for_field (decl, NULL);
1388
1389 if (is_task_ctx (ctx)
1390 && (global || by_ref || omp_is_reference (decl)))
1391 {
1392 if (ctx->allocate_map
1393 && ctx->allocate_map->get (decl))
1394 install_var_field (decl, by_ref, 32 | 1, ctx);
1395 else
1396 install_var_field (decl, false, 1, ctx);
1397 if (!global)
1398 install_var_field (decl, by_ref, 2, ctx);
1399 }
1400 else if (!global)
1401 install_var_field (decl, by_ref, 3, ctx);
1402 }
1403 install_var_local (decl, ctx);
1404 break;
1405
1406 case OMP_CLAUSE_USE_DEVICE_PTR:
1407 case OMP_CLAUSE_USE_DEVICE_ADDR:
1408 decl = OMP_CLAUSE_DECL (c);
1409
1410 /* Fortran array descriptors. */
1411 if (lang_hooks.decls.omp_array_data (decl, true))
1412 install_var_field (decl, false, 19, ctx);
1413 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1414 && !omp_is_reference (decl)
1415 && !omp_is_allocatable_or_ptr (decl))
1416 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1417 install_var_field (decl, true, 11, ctx);
1418 else
1419 install_var_field (decl, false, 11, ctx);
1420 if (DECL_SIZE (decl)
1421 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1422 {
1423 tree decl2 = DECL_VALUE_EXPR (decl);
1424 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1425 decl2 = TREE_OPERAND (decl2, 0);
1426 gcc_assert (DECL_P (decl2));
1427 install_var_local (decl2, ctx);
1428 }
1429 install_var_local (decl, ctx);
1430 break;
1431
1432 case OMP_CLAUSE_IS_DEVICE_PTR:
1433 decl = OMP_CLAUSE_DECL (c);
1434 goto do_private;
1435
1436 case OMP_CLAUSE__LOOPTEMP_:
1437 case OMP_CLAUSE__REDUCTEMP_:
1438 gcc_assert (is_taskreg_ctx (ctx));
1439 decl = OMP_CLAUSE_DECL (c);
1440 install_var_field (decl, false, 3, ctx);
1441 install_var_local (decl, ctx);
1442 break;
1443
1444 case OMP_CLAUSE_COPYPRIVATE:
1445 case OMP_CLAUSE_COPYIN:
1446 decl = OMP_CLAUSE_DECL (c);
1447 by_ref = use_pointer_for_field (decl, NULL);
1448 install_var_field (decl, by_ref, 3, ctx);
1449 break;
1450
1451 case OMP_CLAUSE_FINAL:
1452 case OMP_CLAUSE_IF:
1453 case OMP_CLAUSE_NUM_THREADS:
1454 case OMP_CLAUSE_NUM_TEAMS:
1455 case OMP_CLAUSE_THREAD_LIMIT:
1456 case OMP_CLAUSE_DEVICE:
1457 case OMP_CLAUSE_SCHEDULE:
1458 case OMP_CLAUSE_DIST_SCHEDULE:
1459 case OMP_CLAUSE_DEPEND:
1460 case OMP_CLAUSE_PRIORITY:
1461 case OMP_CLAUSE_GRAINSIZE:
1462 case OMP_CLAUSE_NUM_TASKS:
1463 case OMP_CLAUSE_NUM_GANGS:
1464 case OMP_CLAUSE_NUM_WORKERS:
1465 case OMP_CLAUSE_VECTOR_LENGTH:
1466 case OMP_CLAUSE_DETACH:
1467 if (ctx->outer)
1468 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1469 break;
1470
1471 case OMP_CLAUSE_TO:
1472 case OMP_CLAUSE_FROM:
1473 case OMP_CLAUSE_MAP:
1474 if (ctx->outer)
1475 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1476 decl = OMP_CLAUSE_DECL (c);
1477 /* Global variables with "omp declare target" attribute
1478 don't need to be copied; the receiver side will use them
1479 directly. However, global variables with "omp declare target link"
1480 attribute need to be copied, as do maps with the ALWAYS modifier. */
1481 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1482 && DECL_P (decl)
1483 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1484 && (OMP_CLAUSE_MAP_KIND (c)
1485 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1486 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1487 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1488 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1489 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1490 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1491 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1492 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1493 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1494 && varpool_node::get_create (decl)->offloadable
1495 && !lookup_attribute ("omp declare target link",
1496 DECL_ATTRIBUTES (decl)))
1497 break;
1498 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1499 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1500 {
1501 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1502 not offloaded; there is nothing to map for those. */
1503 if (!is_gimple_omp_offloaded (ctx->stmt)
1504 && !POINTER_TYPE_P (TREE_TYPE (decl))
1505 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1506 break;
1507 }
1508 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1509 && DECL_P (decl)
1510 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1511 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1512 && is_omp_target (ctx->stmt))
1513 {
1514 /* If this is an offloaded region, an attach operation should
1515 only exist when the pointer variable is mapped in a prior
1516 clause. */
1517 if (is_gimple_omp_offloaded (ctx->stmt))
1518 gcc_assert
1519 (maybe_lookup_decl (decl, ctx)
1520 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1521 && lookup_attribute ("omp declare target",
1522 DECL_ATTRIBUTES (decl))));
1523
1524 /* By itself, attach/detach is generated as part of pointer
1525 variable mapping and should not create new variables in the
1526 offloaded region; however, sender refs for it must be created
1527 for its address to be passed to the runtime. */
1528 tree field
1529 = build_decl (OMP_CLAUSE_LOCATION (c),
1530 FIELD_DECL, NULL_TREE, ptr_type_node);
1531 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1532 insert_field_into_struct (ctx->record_type, field);
1533 /* To not clash with a map of the pointer variable itself,
1534 attach/detach maps have their field looked up by the *clause*
1535 tree expression, not the decl. */
1536 gcc_assert (!splay_tree_lookup (ctx->field_map,
1537 (splay_tree_key) c));
1538 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1539 (splay_tree_value) field);
1540 break;
1541 }
1542 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1543 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1544 || (OMP_CLAUSE_MAP_KIND (c)
1545 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1546 {
1547 if (TREE_CODE (decl) == COMPONENT_REF
1548 || (TREE_CODE (decl) == INDIRECT_REF
1549 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1550 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1551 == REFERENCE_TYPE)))
1552 break;
1553 if (DECL_SIZE (decl)
1554 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1555 {
1556 tree decl2 = DECL_VALUE_EXPR (decl);
1557 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1558 decl2 = TREE_OPERAND (decl2, 0);
1559 gcc_assert (DECL_P (decl2));
1560 install_var_local (decl2, ctx);
1561 }
1562 install_var_local (decl, ctx);
1563 break;
1564 }
1565 if (DECL_P (decl))
1566 {
1567 if (DECL_SIZE (decl)
1568 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1569 {
1570 tree decl2 = DECL_VALUE_EXPR (decl);
1571 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1572 decl2 = TREE_OPERAND (decl2, 0);
1573 gcc_assert (DECL_P (decl2));
1574 install_var_field (decl2, true, 3, ctx);
1575 install_var_local (decl2, ctx);
1576 install_var_local (decl, ctx);
1577 }
1578 else
1579 {
1580 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1581 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1582 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1583 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1584 install_var_field (decl, true, 7, ctx);
1585 else
1586 install_var_field (decl, true, 3, ctx);
1587 if (is_gimple_omp_offloaded (ctx->stmt)
1588 && !(is_gimple_omp_oacc (ctx->stmt)
1589 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
1590 install_var_local (decl, ctx);
1591 }
1592 }
1593 else
1594 {
1595 tree base = get_base_address (decl);
1596 tree nc = OMP_CLAUSE_CHAIN (c);
1597 if (DECL_P (base)
1598 && nc != NULL_TREE
1599 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1600 && OMP_CLAUSE_DECL (nc) == base
1601 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1602 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1603 {
1604 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1605 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1606 }
1607 else
1608 {
1609 if (ctx->outer)
1610 {
1611 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1612 decl = OMP_CLAUSE_DECL (c);
1613 }
1614 gcc_assert (!splay_tree_lookup (ctx->field_map,
1615 (splay_tree_key) decl));
1616 tree field
1617 = build_decl (OMP_CLAUSE_LOCATION (c),
1618 FIELD_DECL, NULL_TREE, ptr_type_node);
1619 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1620 insert_field_into_struct (ctx->record_type, field);
1621 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1622 (splay_tree_value) field);
1623 }
1624 }
1625 break;
1626
1627 case OMP_CLAUSE_ORDER:
1628 ctx->order_concurrent = true;
1629 break;
1630
1631 case OMP_CLAUSE_BIND:
1632 ctx->loop_p = true;
1633 break;
1634
1635 case OMP_CLAUSE_NOWAIT:
1636 case OMP_CLAUSE_ORDERED:
1637 case OMP_CLAUSE_COLLAPSE:
1638 case OMP_CLAUSE_UNTIED:
1639 case OMP_CLAUSE_MERGEABLE:
1640 case OMP_CLAUSE_PROC_BIND:
1641 case OMP_CLAUSE_SAFELEN:
1642 case OMP_CLAUSE_SIMDLEN:
1643 case OMP_CLAUSE_THREADS:
1644 case OMP_CLAUSE_SIMD:
1645 case OMP_CLAUSE_NOGROUP:
1646 case OMP_CLAUSE_DEFAULTMAP:
1647 case OMP_CLAUSE_ASYNC:
1648 case OMP_CLAUSE_WAIT:
1649 case OMP_CLAUSE_GANG:
1650 case OMP_CLAUSE_WORKER:
1651 case OMP_CLAUSE_VECTOR:
1652 case OMP_CLAUSE_INDEPENDENT:
1653 case OMP_CLAUSE_AUTO:
1654 case OMP_CLAUSE_SEQ:
1655 case OMP_CLAUSE_TILE:
1656 case OMP_CLAUSE__SIMT_:
1657 case OMP_CLAUSE_DEFAULT:
1658 case OMP_CLAUSE_NONTEMPORAL:
1659 case OMP_CLAUSE_IF_PRESENT:
1660 case OMP_CLAUSE_FINALIZE:
1661 case OMP_CLAUSE_TASK_REDUCTION:
1662 case OMP_CLAUSE_ALLOCATE:
1663 break;
1664
1665 case OMP_CLAUSE_ALIGNED:
1666 decl = OMP_CLAUSE_DECL (c);
1667 if (is_global_var (decl)
1668 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1669 install_var_local (decl, ctx);
1670 break;
1671
1672 case OMP_CLAUSE__CONDTEMP_:
1673 decl = OMP_CLAUSE_DECL (c);
1674 if (is_parallel_ctx (ctx))
1675 {
1676 install_var_field (decl, false, 3, ctx);
1677 install_var_local (decl, ctx);
1678 }
1679 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1680 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1681 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1682 install_var_local (decl, ctx);
1683 break;
1684
1685 case OMP_CLAUSE__CACHE_:
1686 default:
1687 gcc_unreachable ();
1688 }
1689 }
1690
1691 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1692 {
1693 switch (OMP_CLAUSE_CODE (c))
1694 {
1695 case OMP_CLAUSE_LASTPRIVATE:
1696 /* Let the corresponding firstprivate clause create
1697 the variable. */
1698 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1699 scan_array_reductions = true;
1700 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1701 break;
1702 /* FALLTHRU */
1703
1704 case OMP_CLAUSE_FIRSTPRIVATE:
1705 case OMP_CLAUSE_PRIVATE:
1706 case OMP_CLAUSE_LINEAR:
1707 case OMP_CLAUSE_IS_DEVICE_PTR:
1708 decl = OMP_CLAUSE_DECL (c);
1709 if (is_variable_sized (decl))
1710 {
1711 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1712 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1713 && is_gimple_omp_offloaded (ctx->stmt))
1714 {
1715 tree decl2 = DECL_VALUE_EXPR (decl);
1716 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1717 decl2 = TREE_OPERAND (decl2, 0);
1718 gcc_assert (DECL_P (decl2));
1719 install_var_local (decl2, ctx);
1720 fixup_remapped_decl (decl2, ctx, false);
1721 }
1722 install_var_local (decl, ctx);
1723 }
1724 fixup_remapped_decl (decl, ctx,
1725 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1726 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1727 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1728 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1729 scan_array_reductions = true;
1730 break;
1731
1732 case OMP_CLAUSE_REDUCTION:
1733 case OMP_CLAUSE_IN_REDUCTION:
1734 decl = OMP_CLAUSE_DECL (c);
1735 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1736 {
1737 if (is_variable_sized (decl))
1738 install_var_local (decl, ctx);
1739 fixup_remapped_decl (decl, ctx, false);
1740 }
1741 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1742 scan_array_reductions = true;
1743 break;
1744
1745 case OMP_CLAUSE_TASK_REDUCTION:
1746 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1747 scan_array_reductions = true;
1748 break;
1749
1750 case OMP_CLAUSE_SHARED:
1751 /* Ignore shared directives in a teams construct inside of a
1752 target construct. */
1753 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1754 && !is_host_teams_ctx (ctx))
1755 break;
1756 decl = OMP_CLAUSE_DECL (c);
1757 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1758 break;
1759 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1760 {
1761 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1762 ctx->outer)))
1763 break;
1764 bool by_ref = use_pointer_for_field (decl, ctx);
1765 install_var_field (decl, by_ref, 11, ctx);
1766 break;
1767 }
1768 fixup_remapped_decl (decl, ctx, false);
1769 break;
1770
1771 case OMP_CLAUSE_MAP:
1772 if (!is_gimple_omp_offloaded (ctx->stmt))
1773 break;
1774 decl = OMP_CLAUSE_DECL (c);
1775 if (DECL_P (decl)
1776 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1777 && (OMP_CLAUSE_MAP_KIND (c)
1778 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1779 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1780 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1781 && varpool_node::get_create (decl)->offloadable)
1782 break;
1783 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1784 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1785 && is_omp_target (ctx->stmt)
1786 && !is_gimple_omp_offloaded (ctx->stmt))
1787 break;
1788 if (DECL_P (decl))
1789 {
1790 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1791 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1792 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1793 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1794 {
1795 tree new_decl = lookup_decl (decl, ctx);
1796 TREE_TYPE (new_decl)
1797 = remap_type (TREE_TYPE (decl), &ctx->cb);
1798 }
1799 else if (DECL_SIZE (decl)
1800 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1801 {
1802 tree decl2 = DECL_VALUE_EXPR (decl);
1803 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1804 decl2 = TREE_OPERAND (decl2, 0);
1805 gcc_assert (DECL_P (decl2));
1806 fixup_remapped_decl (decl2, ctx, false);
1807 fixup_remapped_decl (decl, ctx, true);
1808 }
1809 else
1810 fixup_remapped_decl (decl, ctx, false);
1811 }
1812 break;
1813
1814 case OMP_CLAUSE_COPYPRIVATE:
1815 case OMP_CLAUSE_COPYIN:
1816 case OMP_CLAUSE_DEFAULT:
1817 case OMP_CLAUSE_IF:
1818 case OMP_CLAUSE_NUM_THREADS:
1819 case OMP_CLAUSE_NUM_TEAMS:
1820 case OMP_CLAUSE_THREAD_LIMIT:
1821 case OMP_CLAUSE_DEVICE:
1822 case OMP_CLAUSE_SCHEDULE:
1823 case OMP_CLAUSE_DIST_SCHEDULE:
1824 case OMP_CLAUSE_NOWAIT:
1825 case OMP_CLAUSE_ORDERED:
1826 case OMP_CLAUSE_COLLAPSE:
1827 case OMP_CLAUSE_UNTIED:
1828 case OMP_CLAUSE_FINAL:
1829 case OMP_CLAUSE_MERGEABLE:
1830 case OMP_CLAUSE_PROC_BIND:
1831 case OMP_CLAUSE_SAFELEN:
1832 case OMP_CLAUSE_SIMDLEN:
1833 case OMP_CLAUSE_ALIGNED:
1834 case OMP_CLAUSE_DEPEND:
1835 case OMP_CLAUSE_DETACH:
1836 case OMP_CLAUSE_ALLOCATE:
1837 case OMP_CLAUSE__LOOPTEMP_:
1838 case OMP_CLAUSE__REDUCTEMP_:
1839 case OMP_CLAUSE_TO:
1840 case OMP_CLAUSE_FROM:
1841 case OMP_CLAUSE_PRIORITY:
1842 case OMP_CLAUSE_GRAINSIZE:
1843 case OMP_CLAUSE_NUM_TASKS:
1844 case OMP_CLAUSE_THREADS:
1845 case OMP_CLAUSE_SIMD:
1846 case OMP_CLAUSE_NOGROUP:
1847 case OMP_CLAUSE_DEFAULTMAP:
1848 case OMP_CLAUSE_ORDER:
1849 case OMP_CLAUSE_BIND:
1850 case OMP_CLAUSE_USE_DEVICE_PTR:
1851 case OMP_CLAUSE_USE_DEVICE_ADDR:
1852 case OMP_CLAUSE_NONTEMPORAL:
1853 case OMP_CLAUSE_ASYNC:
1854 case OMP_CLAUSE_WAIT:
1855 case OMP_CLAUSE_NUM_GANGS:
1856 case OMP_CLAUSE_NUM_WORKERS:
1857 case OMP_CLAUSE_VECTOR_LENGTH:
1858 case OMP_CLAUSE_GANG:
1859 case OMP_CLAUSE_WORKER:
1860 case OMP_CLAUSE_VECTOR:
1861 case OMP_CLAUSE_INDEPENDENT:
1862 case OMP_CLAUSE_AUTO:
1863 case OMP_CLAUSE_SEQ:
1864 case OMP_CLAUSE_TILE:
1865 case OMP_CLAUSE__SIMT_:
1866 case OMP_CLAUSE_IF_PRESENT:
1867 case OMP_CLAUSE_FINALIZE:
1868 case OMP_CLAUSE__CONDTEMP_:
1869 break;
1870
1871 case OMP_CLAUSE__CACHE_:
1872 default:
1873 gcc_unreachable ();
1874 }
1875 }
1876
1877 gcc_checking_assert (!scan_array_reductions
1878 || !is_gimple_omp_oacc (ctx->stmt));
1879 if (scan_array_reductions)
1880 {
1881 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1882 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1883 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1884 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1885 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1886 {
1887 omp_context *rctx = ctx;
1888 if (is_omp_target (ctx->stmt))
1889 rctx = ctx->outer;
1890 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1891 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1892 }
1893 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1894 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1895 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1896 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1897 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1898 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1899 }
1900 }
1901
1902 /* Create a new name for the omp child function. Returns an identifier. */
1903
1904 static tree
1905 create_omp_child_function_name (bool task_copy)
1906 {
1907 return clone_function_name_numbered (current_function_decl,
1908 task_copy ? "_omp_cpyfn" : "_omp_fn");
1909 }
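
/* For illustration (the names come from clone_function_name_numbered and are
   shown here only as a sketch): outlining the parallel regions of a function
   `foo' yields child functions named along the lines of

     foo._omp_fn.0, foo._omp_fn.1, ...

   and task copy functions such as foo._omp_cpyfn.2; the trailing number is
   taken from a shared counter, so the exact values depend on what else has
   been cloned in the translation unit.  */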
1910
1911 /* Return true if CTX may belong to offloaded code: either if the current
1912 function is offloaded, or if any enclosing context is a target region. */
1913
1914 static bool
1915 omp_maybe_offloaded_ctx (omp_context *ctx)
1916 {
1917 if (cgraph_node::get (current_function_decl)->offloadable)
1918 return true;
1919 for (; ctx; ctx = ctx->outer)
1920 if (is_gimple_omp_offloaded (ctx->stmt))
1921 return true;
1922 return false;
1923 }
1924
1925 /* Build a decl for the omp child function. It will not contain a body
1926 yet, just the bare decl. */
1927
1928 static void
1929 create_omp_child_function (omp_context *ctx, bool task_copy)
1930 {
1931 tree decl, type, name, t;
1932
1933 name = create_omp_child_function_name (task_copy);
1934 if (task_copy)
1935 type = build_function_type_list (void_type_node, ptr_type_node,
1936 ptr_type_node, NULL_TREE);
1937 else
1938 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1939
1940 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1941
1942 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1943 || !task_copy);
1944 if (!task_copy)
1945 ctx->cb.dst_fn = decl;
1946 else
1947 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1948
1949 TREE_STATIC (decl) = 1;
1950 TREE_USED (decl) = 1;
1951 DECL_ARTIFICIAL (decl) = 1;
1952 DECL_IGNORED_P (decl) = 0;
1953 TREE_PUBLIC (decl) = 0;
1954 DECL_UNINLINABLE (decl) = 1;
1955 DECL_EXTERNAL (decl) = 0;
1956 DECL_CONTEXT (decl) = NULL_TREE;
1957 DECL_INITIAL (decl) = make_node (BLOCK);
1958 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1959 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1960 /* Remove omp declare simd attribute from the new attributes. */
1961 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1962 {
1963 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1964 a = a2;
1965 a = TREE_CHAIN (a);
1966 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1967 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1968 *p = TREE_CHAIN (*p);
1969 else
1970 {
1971 tree chain = TREE_CHAIN (*p);
1972 *p = copy_node (*p);
1973 p = &TREE_CHAIN (*p);
1974 *p = chain;
1975 }
1976 }
1977 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1978 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1979 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1980 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1981 DECL_FUNCTION_VERSIONED (decl)
1982 = DECL_FUNCTION_VERSIONED (current_function_decl);
1983
1984 if (omp_maybe_offloaded_ctx (ctx))
1985 {
1986 cgraph_node::get_create (decl)->offloadable = 1;
1987 if (ENABLE_OFFLOADING)
1988 g->have_offload = true;
1989 }
1990
1991 if (cgraph_node::get_create (decl)->offloadable)
1992 {
1993 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1994 ? "omp target entrypoint"
1995 : "omp declare target");
1996 if (lookup_attribute ("omp declare target",
1997 DECL_ATTRIBUTES (current_function_decl)))
1998 {
1999 if (is_gimple_omp_offloaded (ctx->stmt))
2000 DECL_ATTRIBUTES (decl)
2001 = remove_attribute ("omp declare target",
2002 copy_list (DECL_ATTRIBUTES (decl)));
2003 else
2004 target_attr = NULL;
2005 }
2006 if (target_attr)
2007 DECL_ATTRIBUTES (decl)
2008 = tree_cons (get_identifier (target_attr),
2009 NULL_TREE, DECL_ATTRIBUTES (decl));
2010 }
2011
2012 t = build_decl (DECL_SOURCE_LOCATION (decl),
2013 RESULT_DECL, NULL_TREE, void_type_node);
2014 DECL_ARTIFICIAL (t) = 1;
2015 DECL_IGNORED_P (t) = 1;
2016 DECL_CONTEXT (t) = decl;
2017 DECL_RESULT (decl) = t;
2018
2019 tree data_name = get_identifier (".omp_data_i");
2020 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2021 ptr_type_node);
2022 DECL_ARTIFICIAL (t) = 1;
2023 DECL_NAMELESS (t) = 1;
2024 DECL_ARG_TYPE (t) = ptr_type_node;
2025 DECL_CONTEXT (t) = current_function_decl;
2026 TREE_USED (t) = 1;
2027 TREE_READONLY (t) = 1;
2028 DECL_ARGUMENTS (decl) = t;
2029 if (!task_copy)
2030 ctx->receiver_decl = t;
2031 else
2032 {
2033 t = build_decl (DECL_SOURCE_LOCATION (decl),
2034 PARM_DECL, get_identifier (".omp_data_o"),
2035 ptr_type_node);
2036 DECL_ARTIFICIAL (t) = 1;
2037 DECL_NAMELESS (t) = 1;
2038 DECL_ARG_TYPE (t) = ptr_type_node;
2039 DECL_CONTEXT (t) = current_function_decl;
2040 TREE_USED (t) = 1;
2041 TREE_ADDRESSABLE (t) = 1;
2042 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2043 DECL_ARGUMENTS (decl) = t;
2044 }
2045
2046 /* Allocate memory for the function structure. The call to
2047 allocate_struct_function clobbers CFUN, so we need to restore
2048 it afterward. */
2049 push_struct_function (decl);
2050 cfun->function_end_locus = gimple_location (ctx->stmt);
2051 init_tree_ssa (cfun);
2052 pop_cfun ();
2053 }
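
/* As a sketch, the decls built above correspond to prototypes like

     void foo._omp_fn.0 (void *.omp_data_i);
     void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   (illustrative names; `.omp_data_i' is the incoming data record built for
   the data-sharing clauses, and the task copy variant additionally takes
   the `.omp_data_o' record).  Only the bare FUNCTION_DECL, RESULT_DECL and
   PARM_DECLs exist at this point; the body is filled in later.  */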
2054
2055 /* Callback for walk_gimple_seq. Check whether a combined parallel
2056 contains a gimple_omp_for_combined_into_p OMP_FOR. */
2057
2058 tree
2059 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2060 bool *handled_ops_p,
2061 struct walk_stmt_info *wi)
2062 {
2063 gimple *stmt = gsi_stmt (*gsi_p);
2064
2065 *handled_ops_p = true;
2066 switch (gimple_code (stmt))
2067 {
2068 WALK_SUBSTMTS;
2069
2070 case GIMPLE_OMP_FOR:
2071 if (gimple_omp_for_combined_into_p (stmt)
2072 && gimple_omp_for_kind (stmt)
2073 == *(const enum gf_mask *) (wi->info))
2074 {
2075 wi->info = stmt;
2076 return integer_zero_node;
2077 }
2078 break;
2079 default:
2080 break;
2081 }
2082 return NULL;
2083 }
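
/* For example, a combined construct such as

     #pragma omp parallel for
     for (i = 0; i < n; i++)
       ...

   is represented as a GIMPLE_OMP_FOR nested inside a GIMPLE_OMP_PARALLEL
   with gimple_omp_for_combined_into_p set, which is what the walk above
   locates.  */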
2084
2085 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2086
2087 static void
2088 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2089 omp_context *outer_ctx)
2090 {
2091 struct walk_stmt_info wi;
2092
2093 memset (&wi, 0, sizeof (wi));
2094 wi.val_only = true;
2095 wi.info = (void *) &msk;
2096 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2097 if (wi.info != (void *) &msk)
2098 {
2099 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2100 struct omp_for_data fd;
2101 omp_extract_for_data (for_stmt, &fd, NULL);
2102 /* We need two temporaries with fd.loop.v type (istart/iend),
2103 and then (fd.collapse - 1) temporaries with the same type for
2104 the count2 ... countN-1 vars if the iteration count is not constant. */
2105 size_t count = 2, i;
2106 tree type = fd.iter_type;
2107 if (fd.collapse > 1
2108 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2109 {
2110 count += fd.collapse - 1;
2111 /* If there are lastprivate clauses on the inner
2112 GIMPLE_OMP_FOR, add one more temporary for the total number
2113 of iterations (the product of count1 ... countN-1). */
2114 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2115 OMP_CLAUSE_LASTPRIVATE)
2116 || (msk == GF_OMP_FOR_KIND_FOR
2117 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2118 OMP_CLAUSE_LASTPRIVATE)))
2119 {
2120 tree temp = create_tmp_var (type);
2121 tree c = build_omp_clause (UNKNOWN_LOCATION,
2122 OMP_CLAUSE__LOOPTEMP_);
2123 insert_decl_map (&outer_ctx->cb, temp, temp);
2124 OMP_CLAUSE_DECL (c) = temp;
2125 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2126 gimple_omp_taskreg_set_clauses (stmt, c);
2127 }
2128 if (fd.non_rect
2129 && fd.last_nonrect == fd.first_nonrect + 1)
2130 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2131 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2132 {
2133 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2134 tree type2 = TREE_TYPE (v);
2135 count++;
2136 for (i = 0; i < 3; i++)
2137 {
2138 tree temp = create_tmp_var (type2);
2139 tree c = build_omp_clause (UNKNOWN_LOCATION,
2140 OMP_CLAUSE__LOOPTEMP_);
2141 insert_decl_map (&outer_ctx->cb, temp, temp);
2142 OMP_CLAUSE_DECL (c) = temp;
2143 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2144 gimple_omp_taskreg_set_clauses (stmt, c);
2145 }
2146 }
2147 }
2148 for (i = 0; i < count; i++)
2149 {
2150 tree temp = create_tmp_var (type);
2151 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2152 insert_decl_map (&outer_ctx->cb, temp, temp);
2153 OMP_CLAUSE_DECL (c) = temp;
2154 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2155 gimple_omp_taskreg_set_clauses (stmt, c);
2156 }
2157 }
2158 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2159 && omp_find_clause (gimple_omp_task_clauses (stmt),
2160 OMP_CLAUSE_REDUCTION))
2161 {
2162 tree type = build_pointer_type (pointer_sized_int_node);
2163 tree temp = create_tmp_var (type);
2164 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2165 insert_decl_map (&outer_ctx->cb, temp, temp);
2166 OMP_CLAUSE_DECL (c) = temp;
2167 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2168 gimple_omp_task_set_clauses (stmt, c);
2169 }
2170 }
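
/* A sketch of when this triggers: for

     #pragma omp parallel for collapse(2) lastprivate(x)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
         ...

   with non-constant bounds, _looptemp_ temporaries are added for
   istart/iend, for count2, and (because of the lastprivate) one more for
   the total iteration count, as computed above.  */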
2171
2172 /* Scan an OpenMP parallel directive. */
2173
2174 static void
2175 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2176 {
2177 omp_context *ctx;
2178 tree name;
2179 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2180
2181 /* Ignore parallel directives with empty bodies, unless there
2182 are copyin clauses. */
2183 if (optimize > 0
2184 && empty_body_p (gimple_omp_body (stmt))
2185 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2186 OMP_CLAUSE_COPYIN) == NULL)
2187 {
2188 gsi_replace (gsi, gimple_build_nop (), false);
2189 return;
2190 }
2191
2192 if (gimple_omp_parallel_combined_p (stmt))
2193 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2194 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2195 OMP_CLAUSE_REDUCTION);
2196 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2197 if (OMP_CLAUSE_REDUCTION_TASK (c))
2198 {
2199 tree type = build_pointer_type (pointer_sized_int_node);
2200 tree temp = create_tmp_var (type);
2201 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2202 if (outer_ctx)
2203 insert_decl_map (&outer_ctx->cb, temp, temp);
2204 OMP_CLAUSE_DECL (c) = temp;
2205 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2206 gimple_omp_parallel_set_clauses (stmt, c);
2207 break;
2208 }
2209 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2210 break;
2211
2212 ctx = new_omp_context (stmt, outer_ctx);
2213 taskreg_contexts.safe_push (ctx);
2214 if (taskreg_nesting_level > 1)
2215 ctx->is_nested = true;
2216 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2217 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2218 name = create_tmp_var_name (".omp_data_s");
2219 name = build_decl (gimple_location (stmt),
2220 TYPE_DECL, name, ctx->record_type);
2221 DECL_ARTIFICIAL (name) = 1;
2222 DECL_NAMELESS (name) = 1;
2223 TYPE_NAME (ctx->record_type) = name;
2224 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2225 create_omp_child_function (ctx, false);
2226 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2227
2228 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2229 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2230
2231 if (TYPE_FIELDS (ctx->record_type) == NULL)
2232 ctx->record_type = ctx->receiver_decl = NULL;
2233 }
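
/* E.g. when optimizing, an effectively empty region such as

     #pragma omp parallel
     { }

   has no copyin clause and an empty body, so the whole directive is
   replaced by a GIMPLE_NOP above and no child function is created.  */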
2234
2235 /* Scan an OpenMP task directive. */
2236
2237 static void
2238 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2239 {
2240 omp_context *ctx;
2241 tree name, t;
2242 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2243
2244 /* Ignore task directives with empty bodies, unless they have a depend
2245 clause. */
2246 if (optimize > 0
2247 && gimple_omp_body (stmt)
2248 && empty_body_p (gimple_omp_body (stmt))
2249 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2250 {
2251 gsi_replace (gsi, gimple_build_nop (), false);
2252 return;
2253 }
2254
2255 if (gimple_omp_task_taskloop_p (stmt))
2256 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2257
2258 ctx = new_omp_context (stmt, outer_ctx);
2259
2260 if (gimple_omp_task_taskwait_p (stmt))
2261 {
2262 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2263 return;
2264 }
2265
2266 taskreg_contexts.safe_push (ctx);
2267 if (taskreg_nesting_level > 1)
2268 ctx->is_nested = true;
2269 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2270 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2271 name = create_tmp_var_name (".omp_data_s");
2272 name = build_decl (gimple_location (stmt),
2273 TYPE_DECL, name, ctx->record_type);
2274 DECL_ARTIFICIAL (name) = 1;
2275 DECL_NAMELESS (name) = 1;
2276 TYPE_NAME (ctx->record_type) = name;
2277 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2278 create_omp_child_function (ctx, false);
2279 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2280
2281 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2282
2283 if (ctx->srecord_type)
2284 {
2285 name = create_tmp_var_name (".omp_data_a");
2286 name = build_decl (gimple_location (stmt),
2287 TYPE_DECL, name, ctx->srecord_type);
2288 DECL_ARTIFICIAL (name) = 1;
2289 DECL_NAMELESS (name) = 1;
2290 TYPE_NAME (ctx->srecord_type) = name;
2291 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2292 create_omp_child_function (ctx, true);
2293 }
2294
2295 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2296
2297 if (TYPE_FIELDS (ctx->record_type) == NULL)
2298 {
2299 ctx->record_type = ctx->receiver_decl = NULL;
2300 t = build_int_cst (long_integer_type_node, 0);
2301 gimple_omp_task_set_arg_size (stmt, t);
2302 t = build_int_cst (long_integer_type_node, 1);
2303 gimple_omp_task_set_arg_align (stmt, t);
2304 }
2305 }
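
/* When the task body references no shared state, the record type is
   dropped above and the task's argument size and alignment are set to the
   constants 0 and 1, so the runtime does not need to allocate or copy a
   data block for it.  */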
2306
2307 /* Helper function for finish_taskreg_scan, called through walk_tree.
2308 If maybe_lookup_decl_in_outer_ctx returns a different tree for some
2309 variable, replace it in the expression (using its DECL_VALUE_EXPR if set). */
2310
2311 static tree
2312 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2313 {
2314 if (VAR_P (*tp))
2315 {
2316 omp_context *ctx = (omp_context *) data;
2317 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2318 if (t != *tp)
2319 {
2320 if (DECL_HAS_VALUE_EXPR_P (t))
2321 t = unshare_expr (DECL_VALUE_EXPR (t));
2322 *tp = t;
2323 }
2324 *walk_subtrees = 0;
2325 }
2326 else if (IS_TYPE_OR_DECL_P (*tp))
2327 *walk_subtrees = 0;
2328 return NULL_TREE;
2329 }
2330
2331 /* If any decls have been made addressable during scan_omp,
2332 adjust their fields if needed, and layout record types
2333 of parallel/task constructs. */
2334
2335 static void
2336 finish_taskreg_scan (omp_context *ctx)
2337 {
2338 if (ctx->record_type == NULL_TREE)
2339 return;
2340
2341 /* If any task_shared_vars were needed, check whether
2342 use_pointer_for_field has changed for any OMP_CLAUSE_SHARED clause
2343 on the GIMPLE_OMP_{PARALLEL,TASK,TEAMS} statement because of that;
2344 if it has, update the field types now. */
2345 if (task_shared_vars)
2346 {
2347 tree c;
2348
2349 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2350 c; c = OMP_CLAUSE_CHAIN (c))
2351 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2352 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2353 {
2354 tree decl = OMP_CLAUSE_DECL (c);
2355
2356 /* Global variables don't need to be copied,
2357 the receiver side will use them directly. */
2358 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2359 continue;
2360 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2361 || !use_pointer_for_field (decl, ctx))
2362 continue;
2363 tree field = lookup_field (decl, ctx);
2364 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2365 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2366 continue;
2367 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2368 TREE_THIS_VOLATILE (field) = 0;
2369 DECL_USER_ALIGN (field) = 0;
2370 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2371 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2372 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2373 if (ctx->srecord_type)
2374 {
2375 tree sfield = lookup_sfield (decl, ctx);
2376 TREE_TYPE (sfield) = TREE_TYPE (field);
2377 TREE_THIS_VOLATILE (sfield) = 0;
2378 DECL_USER_ALIGN (sfield) = 0;
2379 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2380 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2381 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2382 }
2383 }
2384 }
2385
2386 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2387 {
2388 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2389 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2390 if (c)
2391 {
2392 /* Move the field for the _reductemp_ clause first; GOMP_parallel_reductions
2393 expects to find it at the start of the data record. */
2394 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2395 tree *p = &TYPE_FIELDS (ctx->record_type);
2396 while (*p)
2397 if (*p == f)
2398 {
2399 *p = DECL_CHAIN (*p);
2400 break;
2401 }
2402 else
2403 p = &DECL_CHAIN (*p);
2404 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2405 TYPE_FIELDS (ctx->record_type) = f;
2406 }
2407 layout_type (ctx->record_type);
2408 fixup_child_record_type (ctx);
2409 }
2410 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2411 {
2412 layout_type (ctx->record_type);
2413 fixup_child_record_type (ctx);
2414 }
2415 else
2416 {
2417 location_t loc = gimple_location (ctx->stmt);
2418 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2419 tree detach_clause
2420 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2421 OMP_CLAUSE_DETACH);
2422 /* Move VLA fields to the end. */
2423 p = &TYPE_FIELDS (ctx->record_type);
2424 while (*p)
2425 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2426 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2427 {
2428 *q = *p;
2429 *p = TREE_CHAIN (*p);
2430 TREE_CHAIN (*q) = NULL_TREE;
2431 q = &TREE_CHAIN (*q);
2432 }
2433 else
2434 p = &DECL_CHAIN (*p);
2435 *p = vla_fields;
2436 if (gimple_omp_task_taskloop_p (ctx->stmt))
2437 {
2438 /* Move the fields corresponding to the first and second _looptemp_
2439 clauses first. These are filled by GOMP_taskloop
2440 and thus need to be in specific positions. */
2441 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2442 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2443 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2444 OMP_CLAUSE__LOOPTEMP_);
2445 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2446 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2447 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2448 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2449 p = &TYPE_FIELDS (ctx->record_type);
2450 while (*p)
2451 if (*p == f1 || *p == f2 || *p == f3)
2452 *p = DECL_CHAIN (*p);
2453 else
2454 p = &DECL_CHAIN (*p);
2455 DECL_CHAIN (f1) = f2;
2456 if (c3)
2457 {
2458 DECL_CHAIN (f2) = f3;
2459 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2460 }
2461 else
2462 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2463 TYPE_FIELDS (ctx->record_type) = f1;
2464 if (ctx->srecord_type)
2465 {
2466 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2467 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2468 if (c3)
2469 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2470 p = &TYPE_FIELDS (ctx->srecord_type);
2471 while (*p)
2472 if (*p == f1 || *p == f2 || *p == f3)
2473 *p = DECL_CHAIN (*p);
2474 else
2475 p = &DECL_CHAIN (*p);
2476 DECL_CHAIN (f1) = f2;
2477 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2478 if (c3)
2479 {
2480 DECL_CHAIN (f2) = f3;
2481 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2482 }
2483 else
2484 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2485 TYPE_FIELDS (ctx->srecord_type) = f1;
2486 }
2487 }
2488 if (detach_clause)
2489 {
2490 tree c, field;
2491
2492 /* Look for a firstprivate clause with the detach event handle. */
2493 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2494 c; c = OMP_CLAUSE_CHAIN (c))
2495 {
2496 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2497 continue;
2498 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2499 == OMP_CLAUSE_DECL (detach_clause))
2500 break;
2501 }
2502
2503 gcc_assert (c);
2504 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2505
2506 /* Move the field corresponding to the detach clause first.
2507 It is filled by GOMP_task and needs to be in a
2508 specific position. */
2509 p = &TYPE_FIELDS (ctx->record_type);
2510 while (*p)
2511 if (*p == field)
2512 *p = DECL_CHAIN (*p);
2513 else
2514 p = &DECL_CHAIN (*p);
2515 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2516 TYPE_FIELDS (ctx->record_type) = field;
2517 if (ctx->srecord_type)
2518 {
2519 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2520 p = &TYPE_FIELDS (ctx->srecord_type);
2521 while (*p)
2522 if (*p == field)
2523 *p = DECL_CHAIN (*p);
2524 else
2525 p = &DECL_CHAIN (*p);
2526 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2527 TYPE_FIELDS (ctx->srecord_type) = field;
2528 }
2529 }
2530 layout_type (ctx->record_type);
2531 fixup_child_record_type (ctx);
2532 if (ctx->srecord_type)
2533 layout_type (ctx->srecord_type);
2534 tree t = fold_convert_loc (loc, long_integer_type_node,
2535 TYPE_SIZE_UNIT (ctx->record_type));
2536 if (TREE_CODE (t) != INTEGER_CST)
2537 {
2538 t = unshare_expr (t);
2539 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2540 }
2541 gimple_omp_task_set_arg_size (ctx->stmt, t);
2542 t = build_int_cst (long_integer_type_node,
2543 TYPE_ALIGN_UNIT (ctx->record_type));
2544 gimple_omp_task_set_arg_align (ctx->stmt, t);
2545 }
2546 }
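
/* As a sketch of the reordering above, for a taskloop with a task
   reduction and a variable-sized firstprivate the record ends up laid out
   roughly as

     struct .omp_data_s {
       <looptemp1>; <looptemp2>;   start/end, filled by GOMP_taskloop
       <reductemp>;                present for task reductions
       <other fields ...>;
       <variable-sized fields last>;
     };

   (illustrative only; the exact fields depend on the clauses, and for a
   task with a detach clause the event-handle field comes first).  */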
2547
2548 /* Find the enclosing offload context. */
2549
2550 static omp_context *
2551 enclosing_target_ctx (omp_context *ctx)
2552 {
2553 for (; ctx; ctx = ctx->outer)
2554 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2555 break;
2556
2557 return ctx;
2558 }
2559
2560 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2561 construct.
2562 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2563
2564 static bool
2565 ctx_in_oacc_kernels_region (omp_context *ctx)
2566 {
2567 for (; ctx != NULL; ctx = ctx->outer)
2568 {
2569 gimple *stmt = ctx->stmt;
2570 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2571 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2572 return true;
2573 }
2574
2575 return false;
2576 }
2577
2578 /* Check the parallelism clauses inside an OpenACC 'kernels' region.
2579 (This doesn't include OpenACC 'kernels' decomposed parts.)
2580 Until kernels handling moves to use the same loop indirection
2581 scheme as parallel, we need to do this checking early. */
2582
2583 static unsigned
2584 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2585 {
2586 bool checking = true;
2587 unsigned outer_mask = 0;
2588 unsigned this_mask = 0;
2589 bool has_seq = false, has_auto = false;
2590
2591 if (ctx->outer)
2592 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2593 if (!stmt)
2594 {
2595 checking = false;
2596 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2597 return outer_mask;
2598 stmt = as_a <gomp_for *> (ctx->stmt);
2599 }
2600
2601 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2602 {
2603 switch (OMP_CLAUSE_CODE (c))
2604 {
2605 case OMP_CLAUSE_GANG:
2606 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2607 break;
2608 case OMP_CLAUSE_WORKER:
2609 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2610 break;
2611 case OMP_CLAUSE_VECTOR:
2612 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2613 break;
2614 case OMP_CLAUSE_SEQ:
2615 has_seq = true;
2616 break;
2617 case OMP_CLAUSE_AUTO:
2618 has_auto = true;
2619 break;
2620 default:
2621 break;
2622 }
2623 }
2624
2625 if (checking)
2626 {
2627 if (has_seq && (this_mask || has_auto))
2628 error_at (gimple_location (stmt), "%<seq%> overrides other"
2629 " OpenACC loop specifiers");
2630 else if (has_auto && this_mask)
2631 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2632 " OpenACC loop specifiers");
2633
2634 if (this_mask & outer_mask)
2635 error_at (gimple_location (stmt), "inner loop uses same"
2636 " OpenACC parallelism as containing loop");
2637 }
2638
2639 return outer_mask | this_mask;
2640 }
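
/* For example, inside an OpenACC 'kernels' region

     #pragma acc loop gang
     for (i = 0; i < n; i++)
       {
         #pragma acc loop gang
         for (j = 0; j < m; j++)
           ...
       }

   the inner loop is diagnosed above for using the same OpenACC
   parallelism (gang) as its containing loop.  */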
2641
2642 /* Scan a GIMPLE_OMP_FOR. */
2643
2644 static omp_context *
2645 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2646 {
2647 omp_context *ctx;
2648 size_t i;
2649 tree clauses = gimple_omp_for_clauses (stmt);
2650
2651 ctx = new_omp_context (stmt, outer_ctx);
2652
2653 if (is_gimple_omp_oacc (stmt))
2654 {
2655 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2656
2657 if (!(tgt && is_oacc_kernels (tgt)))
2658 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2659 {
2660 tree c_op0;
2661 switch (OMP_CLAUSE_CODE (c))
2662 {
2663 case OMP_CLAUSE_GANG:
2664 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2665 break;
2666
2667 case OMP_CLAUSE_WORKER:
2668 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2669 break;
2670
2671 case OMP_CLAUSE_VECTOR:
2672 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2673 break;
2674
2675 default:
2676 continue;
2677 }
2678
2679 if (c_op0)
2680 {
2681 /* By construction, this is impossible for OpenACC 'kernels'
2682 decomposed parts. */
2683 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2684
2685 error_at (OMP_CLAUSE_LOCATION (c),
2686 "argument not permitted on %qs clause",
2687 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2688 if (tgt)
2689 inform (gimple_location (tgt->stmt),
2690 "enclosing parent compute construct");
2691 else if (oacc_get_fn_attrib (current_function_decl))
2692 inform (DECL_SOURCE_LOCATION (current_function_decl),
2693 "enclosing routine");
2694 else
2695 gcc_unreachable ();
2696 }
2697 }
2698
2699 if (tgt && is_oacc_kernels (tgt))
2700 check_oacc_kernel_gwv (stmt, ctx);
2701
2702 /* Collect all variables named in reductions on this loop. Ensure
2703 that, if this loop has a reduction on some variable v, and there is
2704 a reduction on v somewhere in an outer context, then there is a
2705 reduction on v on all intervening loops as well. */
2706 tree local_reduction_clauses = NULL;
2707 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2708 {
2709 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2710 local_reduction_clauses
2711 = tree_cons (NULL, c, local_reduction_clauses);
2712 }
2713 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2714 ctx->outer_reduction_clauses
2715 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2716 ctx->outer->outer_reduction_clauses);
2717 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2718 tree local_iter = local_reduction_clauses;
2719 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2720 {
2721 tree local_clause = TREE_VALUE (local_iter);
2722 tree local_var = OMP_CLAUSE_DECL (local_clause);
2723 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2724 bool have_outer_reduction = false;
2725 tree ctx_iter = outer_reduction_clauses;
2726 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2727 {
2728 tree outer_clause = TREE_VALUE (ctx_iter);
2729 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2730 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2731 if (outer_var == local_var && outer_op != local_op)
2732 {
2733 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2734 "conflicting reduction operations for %qE",
2735 local_var);
2736 inform (OMP_CLAUSE_LOCATION (outer_clause),
2737 "location of the previous reduction for %qE",
2738 outer_var);
2739 }
2740 if (outer_var == local_var)
2741 {
2742 have_outer_reduction = true;
2743 break;
2744 }
2745 }
2746 if (have_outer_reduction)
2747 {
2748 /* There is a reduction on outer_var both on this loop and on
2749 some enclosing loop. Walk up the context tree until such a
2750 loop with a reduction on outer_var is found, and complain
2751 about all intervening loops that do not have such a
2752 reduction. */
2753 struct omp_context *curr_loop = ctx->outer;
2754 bool found = false;
2755 while (curr_loop != NULL)
2756 {
2757 tree curr_iter = curr_loop->local_reduction_clauses;
2758 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2759 {
2760 tree curr_clause = TREE_VALUE (curr_iter);
2761 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2762 if (curr_var == local_var)
2763 {
2764 found = true;
2765 break;
2766 }
2767 }
2768 if (!found)
2769 warning_at (gimple_location (curr_loop->stmt), 0,
2770 "nested loop in reduction needs "
2771 "reduction clause for %qE",
2772 local_var);
2773 else
2774 break;
2775 curr_loop = curr_loop->outer;
2776 }
2777 }
2778 }
2779 ctx->local_reduction_clauses = local_reduction_clauses;
2780 ctx->outer_reduction_clauses
2781 = chainon (unshare_expr (ctx->local_reduction_clauses),
2782 ctx->outer_reduction_clauses);
2783
2784 if (tgt && is_oacc_kernels (tgt))
2785 {
2786 /* Strip out reductions, as they are not handled yet. */
2787 tree *prev_ptr = &clauses;
2788
2789 while (tree probe = *prev_ptr)
2790 {
2791 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2792
2793 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2794 *prev_ptr = *next_ptr;
2795 else
2796 prev_ptr = next_ptr;
2797 }
2798
2799 gimple_omp_for_set_clauses (stmt, clauses);
2800 }
2801 }
2802
2803 scan_sharing_clauses (clauses, ctx);
2804
2805 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2806 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2807 {
2808 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2809 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2810 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2811 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2812 }
2813 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2814 return ctx;
2815 }
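
/* The OpenACC reduction checking above warns, for instance, about

     #pragma acc parallel loop reduction (+:s)
     for (i = 0; i < n; i++)
       #pragma acc loop
       for (j = 0; j < m; j++)
         {
           #pragma acc loop reduction (+:s)
           for (k = 0; k < l; k++)
             ...
         }

   where the middle loop lacks a reduction clause for 's' even though both
   the innermost loop and an enclosing loop reduce into it.  */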
2816
2817 /* Duplicate #pragma omp simd, creating one copy for SIMT and another for SIMD. */
2818
2819 static void
2820 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2821 omp_context *outer_ctx)
2822 {
2823 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2824 gsi_replace (gsi, bind, false);
2825 gimple_seq seq = NULL;
2826 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2827 tree cond = create_tmp_var_raw (integer_type_node);
2828 DECL_CONTEXT (cond) = current_function_decl;
2829 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2830 gimple_bind_set_vars (bind, cond);
2831 gimple_call_set_lhs (g, cond);
2832 gimple_seq_add_stmt (&seq, g);
2833 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2834 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2835 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2836 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2837 gimple_seq_add_stmt (&seq, g);
2838 g = gimple_build_label (lab1);
2839 gimple_seq_add_stmt (&seq, g);
2840 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2841 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2842 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2843 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2844 gimple_omp_for_set_clauses (new_stmt, clause);
2845 gimple_seq_add_stmt (&seq, new_stmt);
2846 g = gimple_build_goto (lab3);
2847 gimple_seq_add_stmt (&seq, g);
2848 g = gimple_build_label (lab2);
2849 gimple_seq_add_stmt (&seq, g);
2850 gimple_seq_add_stmt (&seq, stmt);
2851 g = gimple_build_label (lab3);
2852 gimple_seq_add_stmt (&seq, g);
2853 gimple_bind_set_body (bind, seq);
2854 update_stmt (bind);
2855 scan_omp_for (new_stmt, outer_ctx);
2856 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2857 }
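
/* The resulting shape is, roughly (a sketch of the sequence built above):

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the simd loop, marked with a _simt_ clause>
           goto lab3;
     lab2: <original simd loop>
     lab3:

   so that a later pass can keep whichever variant matches the target.  */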
2858
2859 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2860 struct walk_stmt_info *);
2861 static omp_context *maybe_lookup_ctx (gimple *);
2862
2863 /* Duplicate #pragma omp simd, one copy for the scan input phase loop and
2864 one for the scan phase loop. */
2865
2866 static void
2867 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2868 omp_context *outer_ctx)
2869 {
2870 /* The only change between inclusive and exclusive scan will be
2871 within the first simd loop, so just use inclusive in the
2872 worksharing loop. */
2873 outer_ctx->scan_inclusive = true;
2874 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2875 OMP_CLAUSE_DECL (c) = integer_zero_node;
2876
2877 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2878 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2879 gsi_replace (gsi, input_stmt, false);
2880 gimple_seq input_body = NULL;
2881 gimple_seq_add_stmt (&input_body, stmt);
2882 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2883
2884 gimple_stmt_iterator input1_gsi = gsi_none ();
2885 struct walk_stmt_info wi;
2886 memset (&wi, 0, sizeof (wi));
2887 wi.val_only = true;
2888 wi.info = (void *) &input1_gsi;
2889 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2890 gcc_assert (!gsi_end_p (input1_gsi));
2891
2892 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2893 gsi_next (&input1_gsi);
2894 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2895 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2896 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2897 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2898 std::swap (input_stmt1, scan_stmt1);
2899
2900 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2901 gimple_omp_set_body (input_stmt1, NULL);
2902
2903 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2904 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2905
2906 gimple_omp_set_body (input_stmt1, input_body1);
2907 gimple_omp_set_body (scan_stmt1, NULL);
2908
2909 gimple_stmt_iterator input2_gsi = gsi_none ();
2910 memset (&wi, 0, sizeof (wi));
2911 wi.val_only = true;
2912 wi.info = (void *) &input2_gsi;
2913 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2914 NULL, &wi);
2915 gcc_assert (!gsi_end_p (input2_gsi));
2916
2917 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2918 gsi_next (&input2_gsi);
2919 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2920 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2921 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2922 std::swap (input_stmt2, scan_stmt2);
2923
2924 gimple_omp_set_body (input_stmt2, NULL);
2925
2926 gimple_omp_set_body (input_stmt, input_body);
2927 gimple_omp_set_body (scan_stmt, scan_body);
2928
2929 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2930 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2931
2932 ctx = new_omp_context (scan_stmt, outer_ctx);
2933 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2934
2935 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2936 }
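
/* This handles simd inscan reductions, e.g.

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];
         #pragma omp scan inclusive (r)
         b[i] = r;
       }

   by wrapping the original loop in a GIMPLE_OMP_SCAN input phase and
   adding a second copy of the loop for the scan phase.  */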
2937
2938 /* Scan an OpenMP sections directive. */
2939
2940 static void
2941 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2942 {
2943 omp_context *ctx;
2944
2945 ctx = new_omp_context (stmt, outer_ctx);
2946 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2947 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2948 }
2949
2950 /* Scan an OpenMP single directive. */
2951
2952 static void
2953 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2954 {
2955 omp_context *ctx;
2956 tree name;
2957
2958 ctx = new_omp_context (stmt, outer_ctx);
2959 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2960 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2961 name = create_tmp_var_name (".omp_copy_s");
2962 name = build_decl (gimple_location (stmt),
2963 TYPE_DECL, name, ctx->record_type);
2964 TYPE_NAME (ctx->record_type) = name;
2965
2966 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2967 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2968
2969 if (TYPE_FIELDS (ctx->record_type) == NULL)
2970 ctx->record_type = NULL;
2971 else
2972 layout_type (ctx->record_type);
2973 }
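
/* The record matters for copyprivate, e.g.

     #pragma omp single copyprivate (x)
     x = compute ();

   where the executing thread broadcasts x to the rest of the team through
   the .omp_copy_s record laid out above; if no clause creates any field,
   the record is dropped instead.  */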
2974
2975 /* Scan a GIMPLE_OMP_TARGET. */
2976
2977 static void
2978 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2979 {
2980 omp_context *ctx;
2981 tree name;
2982 bool offloaded = is_gimple_omp_offloaded (stmt);
2983 tree clauses = gimple_omp_target_clauses (stmt);
2984
2985 ctx = new_omp_context (stmt, outer_ctx);
2986 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2987 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2988 name = create_tmp_var_name (".omp_data_t");
2989 name = build_decl (gimple_location (stmt),
2990 TYPE_DECL, name, ctx->record_type);
2991 DECL_ARTIFICIAL (name) = 1;
2992 DECL_NAMELESS (name) = 1;
2993 TYPE_NAME (ctx->record_type) = name;
2994 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2995
2996 if (offloaded)
2997 {
2998 create_omp_child_function (ctx, false);
2999 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3000 }
3001
3002 scan_sharing_clauses (clauses, ctx);
3003 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3004
3005 if (TYPE_FIELDS (ctx->record_type) == NULL)
3006 ctx->record_type = ctx->receiver_decl = NULL;
3007 else
3008 {
3009 TYPE_FIELDS (ctx->record_type)
3010 = nreverse (TYPE_FIELDS (ctx->record_type));
3011 if (flag_checking)
3012 {
3013 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3014 for (tree field = TYPE_FIELDS (ctx->record_type);
3015 field;
3016 field = DECL_CHAIN (field))
3017 gcc_assert (DECL_ALIGN (field) == align);
3018 }
3019 layout_type (ctx->record_type);
3020 if (offloaded)
3021 fixup_child_record_type (ctx);
3022 }
3023
3024 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3025 {
3026 error_at (gimple_location (stmt),
3027 "%<target%> construct with nested %<teams%> construct "
3028 "contains directives outside of the %<teams%> construct");
3029 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
3030 }
3031 }
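
/* For example,

     #pragma omp target
     {
       #pragma omp teams
       ...
       #pragma omp parallel
       ...
     }

   marks both teams_nested_p and nonteams_nested_p, so the error above
   fires and the target body is replaced by an empty bind.  */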
3032
3033 /* Scan an OpenMP teams directive. */
3034
3035 static void
3036 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3037 {
3038 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3039
3040 if (!gimple_omp_teams_host (stmt))
3041 {
3042 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3043 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3044 return;
3045 }
3046 taskreg_contexts.safe_push (ctx);
3047 gcc_assert (taskreg_nesting_level == 1);
3048 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3049 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3050 tree name = create_tmp_var_name (".omp_data_s");
3051 name = build_decl (gimple_location (stmt),
3052 TYPE_DECL, name, ctx->record_type);
3053 DECL_ARTIFICIAL (name) = 1;
3054 DECL_NAMELESS (name) = 1;
3055 TYPE_NAME (ctx->record_type) = name;
3056 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3057 create_omp_child_function (ctx, false);
3058 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3059
3060 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3061 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3062
3063 if (TYPE_FIELDS (ctx->record_type) == NULL)
3064 ctx->record_type = ctx->receiver_decl = NULL;
3065 }
3066
3067 /* Check nesting restrictions. */
3068 static bool
3069 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3070 {
3071 tree c;
3072
3073 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3074 inside an OpenACC CTX. */
3075 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3076 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3077 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3078 ;
3079 else if (!(is_gimple_omp (stmt)
3080 && is_gimple_omp_oacc (stmt)))
3081 {
3082 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3083 {
3084 error_at (gimple_location (stmt),
3085 "non-OpenACC construct inside of OpenACC routine");
3086 return false;
3087 }
3088 else
3089 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3090 if (is_gimple_omp (octx->stmt)
3091 && is_gimple_omp_oacc (octx->stmt))
3092 {
3093 error_at (gimple_location (stmt),
3094 "non-OpenACC construct inside of OpenACC region");
3095 return false;
3096 }
3097 }
3098
3099 if (ctx != NULL)
3100 {
3101 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3102 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3103 {
3104 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3105 ctx->teams_nested_p = true;
3106 else
3107 ctx->nonteams_nested_p = true;
3108 }
3109 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3110 && ctx->outer
3111 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3112 ctx = ctx->outer;
3113 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3114 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3115 && !ctx->loop_p)
3116 {
3117 c = NULL_TREE;
3118 if (ctx->order_concurrent
3119 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3120 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3121 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3122 {
3123 error_at (gimple_location (stmt),
3124 "OpenMP constructs other than %<parallel%>, %<loop%>"
3125 " or %<simd%> may not be nested inside a region with"
3126 " the %<order(concurrent)%> clause");
3127 return false;
3128 }
3129 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3130 {
3131 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3132 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3133 {
3134 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3135 && (ctx->outer == NULL
3136 || !gimple_omp_for_combined_into_p (ctx->stmt)
3137 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3138 || (gimple_omp_for_kind (ctx->outer->stmt)
3139 != GF_OMP_FOR_KIND_FOR)
3140 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3141 {
3142 error_at (gimple_location (stmt),
3143 "%<ordered simd threads%> must be closely "
3144 "nested inside of %<%s simd%> region",
3145 lang_GNU_Fortran () ? "do" : "for");
3146 return false;
3147 }
3148 return true;
3149 }
3150 }
3151 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3152 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3153 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3154 return true;
3155 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3156 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3157 return true;
3158 error_at (gimple_location (stmt),
3159 "OpenMP constructs other than "
3160 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3161 "not be nested inside %<simd%> region");
3162 return false;
3163 }
3164 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3165 {
3166 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3167 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3168 && omp_find_clause (gimple_omp_for_clauses (stmt),
3169 OMP_CLAUSE_BIND) == NULL_TREE))
3170 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3171 {
3172 error_at (gimple_location (stmt),
3173 "only %<distribute%>, %<parallel%> or %<loop%> "
3174 "regions are allowed to be strictly nested inside "
3175 "%<teams%> region");
3176 return false;
3177 }
3178 }
3179 else if (ctx->order_concurrent
3180 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3181 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3182 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3183 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3184 {
3185 if (ctx->loop_p)
3186 error_at (gimple_location (stmt),
3187 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3188 "%<simd%> may not be nested inside a %<loop%> region");
3189 else
3190 error_at (gimple_location (stmt),
3191 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3192 "%<simd%> may not be nested inside a region with "
3193 "the %<order(concurrent)%> clause");
3194 return false;
3195 }
3196 }
3197 switch (gimple_code (stmt))
3198 {
3199 case GIMPLE_OMP_FOR:
3200 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3201 return true;
3202 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3203 {
3204 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3205 {
3206 error_at (gimple_location (stmt),
3207 "%<distribute%> region must be strictly nested "
3208 "inside %<teams%> construct");
3209 return false;
3210 }
3211 return true;
3212 }
3213 /* We split a taskloop into a task with a nested taskloop in it. */
3214 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3215 return true;
3216 /* For now, loop bind(parallel) is accepted here; hopefully this will
3217 change so that it is not allowed in lots of contexts. */
3218 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3219 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3220 return true;
3221 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3222 {
3223 bool ok = false;
3224
3225 if (ctx)
3226 switch (gimple_code (ctx->stmt))
3227 {
3228 case GIMPLE_OMP_FOR:
3229 ok = (gimple_omp_for_kind (ctx->stmt)
3230 == GF_OMP_FOR_KIND_OACC_LOOP);
3231 break;
3232
3233 case GIMPLE_OMP_TARGET:
3234 switch (gimple_omp_target_kind (ctx->stmt))
3235 {
3236 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3237 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3238 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3239 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3240 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3241 ok = true;
3242 break;
3243
3244 default:
3245 break;
3246 }
3247
3248 default:
3249 break;
3250 }
3251 else if (oacc_get_fn_attrib (current_function_decl))
3252 ok = true;
3253 if (!ok)
3254 {
3255 error_at (gimple_location (stmt),
3256 "OpenACC loop directive must be associated with"
3257 " an OpenACC compute region");
3258 return false;
3259 }
3260 }
3261 /* FALLTHRU */
3262 case GIMPLE_CALL:
3263 if (is_gimple_call (stmt)
3264 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3265 == BUILT_IN_GOMP_CANCEL
3266 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3267 == BUILT_IN_GOMP_CANCELLATION_POINT))
3268 {
3269 const char *bad = NULL;
3270 const char *kind = NULL;
3271 const char *construct
3272 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3273 == BUILT_IN_GOMP_CANCEL)
3274 ? "cancel"
3275 : "cancellation point";
3276 if (ctx == NULL)
3277 {
3278 error_at (gimple_location (stmt), "orphaned %qs construct",
3279 construct);
3280 return false;
3281 }
3282 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3283 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3284 : 0)
3285 {
3286 case 1:
3287 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3288 bad = "parallel";
3289 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3290 == BUILT_IN_GOMP_CANCEL
3291 && !integer_zerop (gimple_call_arg (stmt, 1)))
3292 ctx->cancellable = true;
3293 kind = "parallel";
3294 break;
3295 case 2:
3296 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3297 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3298 bad = "for";
3299 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3300 == BUILT_IN_GOMP_CANCEL
3301 && !integer_zerop (gimple_call_arg (stmt, 1)))
3302 {
3303 ctx->cancellable = true;
3304 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3305 OMP_CLAUSE_NOWAIT))
3306 warning_at (gimple_location (stmt), 0,
3307 "%<cancel for%> inside "
3308 "%<nowait%> for construct");
3309 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3310 OMP_CLAUSE_ORDERED))
3311 warning_at (gimple_location (stmt), 0,
3312 "%<cancel for%> inside "
3313 "%<ordered%> for construct");
3314 }
3315 kind = "for";
3316 break;
3317 case 4:
3318 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3319 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3320 bad = "sections";
3321 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3322 == BUILT_IN_GOMP_CANCEL
3323 && !integer_zerop (gimple_call_arg (stmt, 1)))
3324 {
3325 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3326 {
3327 ctx->cancellable = true;
3328 if (omp_find_clause (gimple_omp_sections_clauses
3329 (ctx->stmt),
3330 OMP_CLAUSE_NOWAIT))
3331 warning_at (gimple_location (stmt), 0,
3332 "%<cancel sections%> inside "
3333 "%<nowait%> sections construct");
3334 }
3335 else
3336 {
3337 gcc_assert (ctx->outer
3338 && gimple_code (ctx->outer->stmt)
3339 == GIMPLE_OMP_SECTIONS);
3340 ctx->outer->cancellable = true;
3341 if (omp_find_clause (gimple_omp_sections_clauses
3342 (ctx->outer->stmt),
3343 OMP_CLAUSE_NOWAIT))
3344 warning_at (gimple_location (stmt), 0,
3345 "%<cancel sections%> inside "
3346 "%<nowait%> sections construct");
3347 }
3348 }
3349 kind = "sections";
3350 break;
3351 case 8:
3352 if (!is_task_ctx (ctx)
3353 && (!is_taskloop_ctx (ctx)
3354 || ctx->outer == NULL
3355 || !is_task_ctx (ctx->outer)))
3356 bad = "task";
3357 else
3358 {
3359 for (omp_context *octx = ctx->outer;
3360 octx; octx = octx->outer)
3361 {
3362 switch (gimple_code (octx->stmt))
3363 {
3364 case GIMPLE_OMP_TASKGROUP:
3365 break;
3366 case GIMPLE_OMP_TARGET:
3367 if (gimple_omp_target_kind (octx->stmt)
3368 != GF_OMP_TARGET_KIND_REGION)
3369 continue;
3370 /* FALLTHRU */
3371 case GIMPLE_OMP_PARALLEL:
3372 case GIMPLE_OMP_TEAMS:
3373 error_at (gimple_location (stmt),
3374 "%<%s taskgroup%> construct not closely "
3375 "nested inside of %<taskgroup%> region",
3376 construct);
3377 return false;
3378 case GIMPLE_OMP_TASK:
3379 if (gimple_omp_task_taskloop_p (octx->stmt)
3380 && octx->outer
3381 && is_taskloop_ctx (octx->outer))
3382 {
3383 tree clauses
3384 = gimple_omp_for_clauses (octx->outer->stmt);
3385 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3386 break;
3387 }
3388 continue;
3389 default:
3390 continue;
3391 }
3392 break;
3393 }
3394 ctx->cancellable = true;
3395 }
3396 kind = "taskgroup";
3397 break;
3398 default:
3399 error_at (gimple_location (stmt), "invalid arguments");
3400 return false;
3401 }
3402 if (bad)
3403 {
3404 error_at (gimple_location (stmt),
3405 "%<%s %s%> construct not closely nested inside of %qs",
3406 construct, kind, bad);
3407 return false;
3408 }
3409 }
3410 /* FALLTHRU */
3411 case GIMPLE_OMP_SECTIONS:
3412 case GIMPLE_OMP_SINGLE:
3413 for (; ctx != NULL; ctx = ctx->outer)
3414 switch (gimple_code (ctx->stmt))
3415 {
3416 case GIMPLE_OMP_FOR:
3417 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3418 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3419 break;
3420 /* FALLTHRU */
3421 case GIMPLE_OMP_SECTIONS:
3422 case GIMPLE_OMP_SINGLE:
3423 case GIMPLE_OMP_ORDERED:
3424 case GIMPLE_OMP_MASTER:
3425 case GIMPLE_OMP_TASK:
3426 case GIMPLE_OMP_CRITICAL:
3427 if (is_gimple_call (stmt))
3428 {
3429 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3430 != BUILT_IN_GOMP_BARRIER)
3431 return true;
3432 error_at (gimple_location (stmt),
3433 "barrier region may not be closely nested inside "
3434 "of work-sharing, %<loop%>, %<critical%>, "
3435 "%<ordered%>, %<master%>, explicit %<task%> or "
3436 "%<taskloop%> region");
3437 return false;
3438 }
3439 error_at (gimple_location (stmt),
3440 "work-sharing region may not be closely nested inside "
3441 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3442 "%<master%>, explicit %<task%> or %<taskloop%> region");
3443 return false;
3444 case GIMPLE_OMP_PARALLEL:
3445 case GIMPLE_OMP_TEAMS:
3446 return true;
3447 case GIMPLE_OMP_TARGET:
3448 if (gimple_omp_target_kind (ctx->stmt)
3449 == GF_OMP_TARGET_KIND_REGION)
3450 return true;
3451 break;
3452 default:
3453 break;
3454 }
3455 break;
3456 case GIMPLE_OMP_MASTER:
3457 for (; ctx != NULL; ctx = ctx->outer)
3458 switch (gimple_code (ctx->stmt))
3459 {
3460 case GIMPLE_OMP_FOR:
3461 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3462 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3463 break;
3464 /* FALLTHRU */
3465 case GIMPLE_OMP_SECTIONS:
3466 case GIMPLE_OMP_SINGLE:
3467 case GIMPLE_OMP_TASK:
3468 error_at (gimple_location (stmt),
3469 "%<master%> region may not be closely nested inside "
3470 "of work-sharing, %<loop%>, explicit %<task%> or "
3471 "%<taskloop%> region");
3472 return false;
3473 case GIMPLE_OMP_PARALLEL:
3474 case GIMPLE_OMP_TEAMS:
3475 return true;
3476 case GIMPLE_OMP_TARGET:
3477 if (gimple_omp_target_kind (ctx->stmt)
3478 == GF_OMP_TARGET_KIND_REGION)
3479 return true;
3480 break;
3481 default:
3482 break;
3483 }
3484 break;
3485 case GIMPLE_OMP_TASK:
3486 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3487 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3488 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3489 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3490 {
3491 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3492 error_at (OMP_CLAUSE_LOCATION (c),
3493 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3494 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3495 return false;
3496 }
3497 break;
3498 case GIMPLE_OMP_ORDERED:
3499 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3500 c; c = OMP_CLAUSE_CHAIN (c))
3501 {
3502 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3503 {
3504 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3505 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3506 continue;
3507 }
3508 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3509 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3510 || kind == OMP_CLAUSE_DEPEND_SINK)
3511 {
3512 tree oclause;
3513 /* Look for a containing ordered(N) loop. */
3514 if (ctx == NULL
3515 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3516 || (oclause
3517 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3518 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3519 {
3520 error_at (OMP_CLAUSE_LOCATION (c),
3521 "%<ordered%> construct with %<depend%> clause "
3522 "must be closely nested inside an %<ordered%> "
3523 "loop");
3524 return false;
3525 }
3526 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3527 {
3528 error_at (OMP_CLAUSE_LOCATION (c),
3529 "%<ordered%> construct with %<depend%> clause "
3530 "must be closely nested inside a loop with "
3531 "%<ordered%> clause with a parameter");
3532 return false;
3533 }
3534 }
3535 else
3536 {
3537 error_at (OMP_CLAUSE_LOCATION (c),
3538 "invalid depend kind in omp %<ordered%> %<depend%>");
3539 return false;
3540 }
3541 }
3542 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3543 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3544 {
3545 /* An ordered simd construct must be closely nested inside a simd
3546 region, and a simd region must not contain constructs other than
3547 ordered simd; therefore an ordered simd is either orphaned, or
3548 ctx->stmt must be a simd. The latter case has already been
3549 handled earlier. */
3550 if (ctx != NULL)
3551 {
3552 error_at (gimple_location (stmt),
3553 "%<ordered%> %<simd%> must be closely nested inside "
3554 "%<simd%> region");
3555 return false;
3556 }
3557 }
3558 for (; ctx != NULL; ctx = ctx->outer)
3559 switch (gimple_code (ctx->stmt))
3560 {
3561 case GIMPLE_OMP_CRITICAL:
3562 case GIMPLE_OMP_TASK:
3563 case GIMPLE_OMP_ORDERED:
3564 ordered_in_taskloop:
3565 error_at (gimple_location (stmt),
3566 "%<ordered%> region may not be closely nested inside "
3567 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3568 "%<taskloop%> region");
3569 return false;
3570 case GIMPLE_OMP_FOR:
3571 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3572 goto ordered_in_taskloop;
3573 tree o;
3574 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3575 OMP_CLAUSE_ORDERED);
3576 if (o == NULL)
3577 {
3578 error_at (gimple_location (stmt),
3579 "%<ordered%> region must be closely nested inside "
3580 "a loop region with an %<ordered%> clause");
3581 return false;
3582 }
3583 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3584 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3585 {
3586 error_at (gimple_location (stmt),
3587 "%<ordered%> region without %<depend%> clause may "
3588 "not be closely nested inside a loop region with "
3589 "an %<ordered%> clause with a parameter");
3590 return false;
3591 }
3592 return true;
3593 case GIMPLE_OMP_TARGET:
3594 if (gimple_omp_target_kind (ctx->stmt)
3595 != GF_OMP_TARGET_KIND_REGION)
3596 break;
3597 /* FALLTHRU */
3598 case GIMPLE_OMP_PARALLEL:
3599 case GIMPLE_OMP_TEAMS:
3600 error_at (gimple_location (stmt),
3601 "%<ordered%> region must be closely nested inside "
3602 "a loop region with an %<ordered%> clause");
3603 return false;
3604 default:
3605 break;
3606 }
3607 break;
3608 case GIMPLE_OMP_CRITICAL:
3609 {
3610 tree this_stmt_name
3611 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3612 for (; ctx != NULL; ctx = ctx->outer)
3613 if (gomp_critical *other_crit
3614 = dyn_cast <gomp_critical *> (ctx->stmt))
3615 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3616 {
3617 error_at (gimple_location (stmt),
3618 "%<critical%> region may not be nested inside "
3619 "a %<critical%> region with the same name");
3620 return false;
3621 }
3622 }
3623 break;
3624 case GIMPLE_OMP_TEAMS:
3625 if (ctx == NULL)
3626 break;
3627 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3628 || (gimple_omp_target_kind (ctx->stmt)
3629 != GF_OMP_TARGET_KIND_REGION))
3630 {
3631 /* A teams construct can appear either strictly nested inside of a
3632 target construct with no intervening stmts, or can be encountered
3633 only by an initial task (so it must not appear inside any OpenMP
3634 construct). */
3635 error_at (gimple_location (stmt),
3636 "%<teams%> construct must be closely nested inside of "
3637 "%<target%> construct or not nested in any OpenMP "
3638 "construct");
3639 return false;
3640 }
3641 break;
3642 case GIMPLE_OMP_TARGET:
3643 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3644 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3645 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3646 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3647 {
3648 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3649 error_at (OMP_CLAUSE_LOCATION (c),
3650 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3651 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3652 return false;
3653 }
3654 if (is_gimple_omp_offloaded (stmt)
3655 && oacc_get_fn_attrib (cfun->decl) != NULL)
3656 {
3657 error_at (gimple_location (stmt),
3658 "OpenACC region inside of OpenACC routine, nested "
3659 "parallelism not supported yet");
3660 return false;
3661 }
3662 for (; ctx != NULL; ctx = ctx->outer)
3663 {
3664 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3665 {
3666 if (is_gimple_omp (stmt)
3667 && is_gimple_omp_oacc (stmt)
3668 && is_gimple_omp (ctx->stmt))
3669 {
3670 error_at (gimple_location (stmt),
3671 "OpenACC construct inside of non-OpenACC region");
3672 return false;
3673 }
3674 continue;
3675 }
3676
3677 const char *stmt_name, *ctx_stmt_name;
3678 switch (gimple_omp_target_kind (stmt))
3679 {
3680 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3681 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3682 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3683 case GF_OMP_TARGET_KIND_ENTER_DATA:
3684 stmt_name = "target enter data"; break;
3685 case GF_OMP_TARGET_KIND_EXIT_DATA:
3686 stmt_name = "target exit data"; break;
3687 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3688 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3689 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3690 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3691 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3692 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3693 stmt_name = "enter data"; break;
3694 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3695 stmt_name = "exit data"; break;
3696 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3697 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3698 break;
3699 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3700 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3701 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3702 /* OpenACC 'kernels' decomposed parts. */
3703 stmt_name = "kernels"; break;
3704 default: gcc_unreachable ();
3705 }
3706 switch (gimple_omp_target_kind (ctx->stmt))
3707 {
3708 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3709 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3710 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3711 ctx_stmt_name = "parallel"; break;
3712 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3713 ctx_stmt_name = "kernels"; break;
3714 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3715 ctx_stmt_name = "serial"; break;
3716 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3717 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3718 ctx_stmt_name = "host_data"; break;
3719 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3720 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3721 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3722 /* OpenACC 'kernels' decomposed parts. */
3723 ctx_stmt_name = "kernels"; break;
3724 default: gcc_unreachable ();
3725 }
3726
3727 /* OpenACC/OpenMP mismatch? */
3728 if (is_gimple_omp_oacc (stmt)
3729 != is_gimple_omp_oacc (ctx->stmt))
3730 {
3731 error_at (gimple_location (stmt),
3732 "%s %qs construct inside of %s %qs region",
3733 (is_gimple_omp_oacc (stmt)
3734 ? "OpenACC" : "OpenMP"), stmt_name,
3735 (is_gimple_omp_oacc (ctx->stmt)
3736 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3737 return false;
3738 }
3739 if (is_gimple_omp_offloaded (ctx->stmt))
3740 {
3741 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3742 if (is_gimple_omp_oacc (ctx->stmt))
3743 {
3744 error_at (gimple_location (stmt),
3745 "%qs construct inside of %qs region",
3746 stmt_name, ctx_stmt_name);
3747 return false;
3748 }
3749 else
3750 {
3751 warning_at (gimple_location (stmt), 0,
3752 "%qs construct inside of %qs region",
3753 stmt_name, ctx_stmt_name);
3754 }
3755 }
3756 }
3757 break;
3758 default:
3759 break;
3760 }
3761 return true;
3762 }
3763
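/* Illustration (added commentary, not part of the original sources):
   among other things, the checks above reject a work-sharing region
   closely nested inside another work-sharing region, e.g.

     #pragma omp for
     for (i = 0; i < n; i++)
       {
         #pragma omp single   /* rejected: work-sharing region closely
                                 nested inside a work-sharing region  */
         f (i);
       }

   whereas wrapping the single in its own parallel region would be
   accepted, since the walk stops at GIMPLE_OMP_PARALLEL.  */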
3764
3765 /* Helper function for scan_omp.
3766
3767 Callback for walk_tree, and for operand walks in walk_gimple_stmt,
3768 used to scan for OMP directives in TP. */
3769
3770 static tree
3771 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3772 {
3773 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3774 omp_context *ctx = (omp_context *) wi->info;
3775 tree t = *tp;
3776
3777 switch (TREE_CODE (t))
3778 {
3779 case VAR_DECL:
3780 case PARM_DECL:
3781 case LABEL_DECL:
3782 case RESULT_DECL:
3783 if (ctx)
3784 {
3785 tree repl = remap_decl (t, &ctx->cb);
3786 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3787 *tp = repl;
3788 }
3789 break;
3790
3791 default:
3792 if (ctx && TYPE_P (t))
3793 *tp = remap_type (t, &ctx->cb);
3794 else if (!DECL_P (t))
3795 {
3796 *walk_subtrees = 1;
3797 if (ctx)
3798 {
3799 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3800 if (tem != TREE_TYPE (t))
3801 {
3802 if (TREE_CODE (t) == INTEGER_CST)
3803 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3804 else
3805 TREE_TYPE (t) = tem;
3806 }
3807 }
3808 }
3809 break;
3810 }
3811
3812 return NULL_TREE;
3813 }
3814
3815 /* Return true if FNDECL is a setjmp or a longjmp. */
3816
3817 static bool
3818 setjmp_or_longjmp_p (const_tree fndecl)
3819 {
3820 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3821 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3822 return true;
3823
3824 tree declname = DECL_NAME (fndecl);
3825 if (!declname
3826 || (DECL_CONTEXT (fndecl) != NULL_TREE
3827 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3828 || !TREE_PUBLIC (fndecl))
3829 return false;
3830
3831 const char *name = IDENTIFIER_POINTER (declname);
3832 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3833 }
3834
3835 /* Return true if FNDECL is an omp_* runtime API call. */
3836
3837 static bool
3838 omp_runtime_api_call (const_tree fndecl)
3839 {
3840 tree declname = DECL_NAME (fndecl);
3841 if (!declname
3842 || (DECL_CONTEXT (fndecl) != NULL_TREE
3843 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3844 || !TREE_PUBLIC (fndecl))
3845 return false;
3846
3847 const char *name = IDENTIFIER_POINTER (declname);
3848 if (!startswith (name, "omp_"))
3849 return false;
3850
3851 static const char *omp_runtime_apis[] =
3852 {
3853 /* This array has 3 sections. First, omp_* calls that don't
3854 have any suffixes. */
3855 "target_alloc",
3856 "target_associate_ptr",
3857 "target_disassociate_ptr",
3858 "target_free",
3859 "target_is_present",
3860 "target_memcpy",
3861 "target_memcpy_rect",
3862 NULL,
3863 /* Now omp_* calls that are available as omp_* and omp_*_. */
3864 "capture_affinity",
3865 "destroy_lock",
3866 "destroy_nest_lock",
3867 "display_affinity",
3868 "get_active_level",
3869 "get_affinity_format",
3870 "get_cancellation",
3871 "get_default_device",
3872 "get_dynamic",
3873 "get_initial_device",
3874 "get_level",
3875 "get_max_active_levels",
3876 "get_max_task_priority",
3877 "get_max_threads",
3878 "get_nested",
3879 "get_num_devices",
3880 "get_num_places",
3881 "get_num_procs",
3882 "get_num_teams",
3883 "get_num_threads",
3884 "get_partition_num_places",
3885 "get_place_num",
3886 "get_proc_bind",
3887 "get_team_num",
3888 "get_thread_limit",
3889 "get_thread_num",
3890 "get_wtick",
3891 "get_wtime",
3892 "in_final",
3893 "in_parallel",
3894 "init_lock",
3895 "init_nest_lock",
3896 "is_initial_device",
3897 "pause_resource",
3898 "pause_resource_all",
3899 "set_affinity_format",
3900 "set_lock",
3901 "set_nest_lock",
3902 "test_lock",
3903 "test_nest_lock",
3904 "unset_lock",
3905 "unset_nest_lock",
3906 NULL,
3907 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3908 "get_ancestor_thread_num",
3909 "get_partition_place_nums",
3910 "get_place_num_procs",
3911 "get_place_proc_ids",
3912 "get_schedule",
3913 "get_team_size",
3914 "set_default_device",
3915 "set_dynamic",
3916 "set_max_active_levels",
3917 "set_nested",
3918 "set_num_threads",
3919 "set_schedule"
3920 };
3921
3922 int mode = 0;
3923 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3924 {
3925 if (omp_runtime_apis[i] == NULL)
3926 {
3927 mode++;
3928 continue;
3929 }
3930 size_t len = strlen (omp_runtime_apis[i]);
3931 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3932 && (name[4 + len] == '\0'
3933 || (mode > 0
3934 && name[4 + len] == '_'
3935 && (name[4 + len + 1] == '\0'
3936 || (mode > 1
3937 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3938 return true;
3939 }
3940 return false;
3941 }
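
/* Illustration (added commentary, not part of the original sources):
   given the three suffix sections matched above,

     omp_target_alloc                  matches  (no-suffix section)
     omp_set_lock, omp_set_lock_       match    (omp_* and omp_*_ section)
     omp_set_schedule, omp_set_schedule_,
     omp_set_schedule_8_               match    (third section)

   whereas e.g. omp_target_alloc_ does not match, because names in the
   first section must be exact.  */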
3942
3943 /* Helper function for scan_omp.
3944
3945 Callback for walk_gimple_stmt used to scan for OMP directives in
3946 the current statement in GSI. */
3947
3948 static tree
3949 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3950 struct walk_stmt_info *wi)
3951 {
3952 gimple *stmt = gsi_stmt (*gsi);
3953 omp_context *ctx = (omp_context *) wi->info;
3954
3955 if (gimple_has_location (stmt))
3956 input_location = gimple_location (stmt);
3957
3958 /* Check the nesting restrictions. */
3959 bool remove = false;
3960 if (is_gimple_omp (stmt))
3961 remove = !check_omp_nesting_restrictions (stmt, ctx);
3962 else if (is_gimple_call (stmt))
3963 {
3964 tree fndecl = gimple_call_fndecl (stmt);
3965 if (fndecl)
3966 {
3967 if (ctx
3968 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3969 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3970 && setjmp_or_longjmp_p (fndecl)
3971 && !ctx->loop_p)
3972 {
3973 remove = true;
3974 error_at (gimple_location (stmt),
3975 "setjmp/longjmp inside %<simd%> construct");
3976 }
3977 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3978 switch (DECL_FUNCTION_CODE (fndecl))
3979 {
3980 case BUILT_IN_GOMP_BARRIER:
3981 case BUILT_IN_GOMP_CANCEL:
3982 case BUILT_IN_GOMP_CANCELLATION_POINT:
3983 case BUILT_IN_GOMP_TASKYIELD:
3984 case BUILT_IN_GOMP_TASKWAIT:
3985 case BUILT_IN_GOMP_TASKGROUP_START:
3986 case BUILT_IN_GOMP_TASKGROUP_END:
3987 remove = !check_omp_nesting_restrictions (stmt, ctx);
3988 break;
3989 default:
3990 break;
3991 }
3992 else if (ctx)
3993 {
3994 omp_context *octx = ctx;
3995 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3996 octx = ctx->outer;
3997 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3998 {
3999 remove = true;
4000 error_at (gimple_location (stmt),
4001 "OpenMP runtime API call %qD in a region with "
4002 "%<order(concurrent)%> clause", fndecl);
4003 }
4004 }
4005 }
4006 }
4007 if (remove)
4008 {
4009 stmt = gimple_build_nop ();
4010 gsi_replace (gsi, stmt, false);
4011 }
4012
4013 *handled_ops_p = true;
4014
4015 switch (gimple_code (stmt))
4016 {
4017 case GIMPLE_OMP_PARALLEL:
4018 taskreg_nesting_level++;
4019 scan_omp_parallel (gsi, ctx);
4020 taskreg_nesting_level--;
4021 break;
4022
4023 case GIMPLE_OMP_TASK:
4024 taskreg_nesting_level++;
4025 scan_omp_task (gsi, ctx);
4026 taskreg_nesting_level--;
4027 break;
4028
4029 case GIMPLE_OMP_FOR:
4030 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4031 == GF_OMP_FOR_KIND_SIMD)
4032 && gimple_omp_for_combined_into_p (stmt)
4033 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4034 {
4035 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4036 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4037 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4038 {
4039 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4040 break;
4041 }
4042 }
4043 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4044 == GF_OMP_FOR_KIND_SIMD)
4045 && omp_maybe_offloaded_ctx (ctx)
4046 && omp_max_simt_vf ()
4047 && gimple_omp_for_collapse (stmt) == 1)
4048 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4049 else
4050 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4051 break;
4052
4053 case GIMPLE_OMP_SECTIONS:
4054 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4055 break;
4056
4057 case GIMPLE_OMP_SINGLE:
4058 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4059 break;
4060
4061 case GIMPLE_OMP_SCAN:
4062 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4063 {
4064 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4065 ctx->scan_inclusive = true;
4066 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4067 ctx->scan_exclusive = true;
4068 }
4069 /* FALLTHRU */
4070 case GIMPLE_OMP_SECTION:
4071 case GIMPLE_OMP_MASTER:
4072 case GIMPLE_OMP_ORDERED:
4073 case GIMPLE_OMP_CRITICAL:
4074 ctx = new_omp_context (stmt, ctx);
4075 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4076 break;
4077
4078 case GIMPLE_OMP_TASKGROUP:
4079 ctx = new_omp_context (stmt, ctx);
4080 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4081 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4082 break;
4083
4084 case GIMPLE_OMP_TARGET:
4085 if (is_gimple_omp_offloaded (stmt))
4086 {
4087 taskreg_nesting_level++;
4088 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4089 taskreg_nesting_level--;
4090 }
4091 else
4092 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4093 break;
4094
4095 case GIMPLE_OMP_TEAMS:
4096 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4097 {
4098 taskreg_nesting_level++;
4099 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4100 taskreg_nesting_level--;
4101 }
4102 else
4103 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4104 break;
4105
4106 case GIMPLE_BIND:
4107 {
4108 tree var;
4109
4110 *handled_ops_p = false;
4111 if (ctx)
4112 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4113 var ;
4114 var = DECL_CHAIN (var))
4115 insert_decl_map (&ctx->cb, var, var);
4116 }
4117 break;
4118 default:
4119 *handled_ops_p = false;
4120 break;
4121 }
4122
4123 return NULL_TREE;
4124 }
4125
4126
4127 /* Scan all the statements starting at the current statement. CTX
4128 contains context information about the OMP directives and
4129 clauses found during the scan. */
4130
4131 static void
4132 scan_omp (gimple_seq *body_p, omp_context *ctx)
4133 {
4134 location_t saved_location;
4135 struct walk_stmt_info wi;
4136
4137 memset (&wi, 0, sizeof (wi));
4138 wi.info = ctx;
4139 wi.want_locations = true;
4140
4141 saved_location = input_location;
4142 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4143 input_location = saved_location;
4144 }
4145 \f
4146 /* Re-gimplification and code generation routines. */
4147
4148 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4149 of BIND if in a method. */
4150
4151 static void
4152 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4153 {
4154 if (DECL_ARGUMENTS (current_function_decl)
4155 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4156 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4157 == POINTER_TYPE))
4158 {
4159 tree vars = gimple_bind_vars (bind);
4160 for (tree *pvar = &vars; *pvar; )
4161 if (omp_member_access_dummy_var (*pvar))
4162 *pvar = DECL_CHAIN (*pvar);
4163 else
4164 pvar = &DECL_CHAIN (*pvar);
4165 gimple_bind_set_vars (bind, vars);
4166 }
4167 }
4168
4169 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4170 block and its subblocks. */
4171
4172 static void
4173 remove_member_access_dummy_vars (tree block)
4174 {
4175 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4176 if (omp_member_access_dummy_var (*pvar))
4177 *pvar = DECL_CHAIN (*pvar);
4178 else
4179 pvar = &DECL_CHAIN (*pvar);
4180
4181 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4182 remove_member_access_dummy_vars (block);
4183 }
4184
4185 /* If a context was created for STMT when it was scanned, return it. */
4186
4187 static omp_context *
4188 maybe_lookup_ctx (gimple *stmt)
4189 {
4190 splay_tree_node n;
4191 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4192 return n ? (omp_context *) n->value : NULL;
4193 }
4194
4195
4196 /* Find the mapping for DECL in CTX or the immediately enclosing
4197 context that has a mapping for DECL.
4198
4199 If CTX is a nested parallel directive, we may have to use the decl
4200 mappings created in CTX's parent context. Suppose that we have the
4201 following parallel nesting (variable UIDs showed for clarity):
4202
4203 iD.1562 = 0;
4204 #omp parallel shared(iD.1562) -> outer parallel
4205 iD.1562 = iD.1562 + 1;
4206
4207 #omp parallel shared (iD.1562) -> inner parallel
4208 iD.1562 = iD.1562 - 1;
4209
4210 Each parallel structure will create a distinct .omp_data_s structure
4211 for copying iD.1562 in/out of the directive:
4212
4213 outer parallel .omp_data_s.1.i -> iD.1562
4214 inner parallel .omp_data_s.2.i -> iD.1562
4215
4216 A shared variable mapping will produce a copy-out operation before
4217 the parallel directive and a copy-in operation after it. So, in
4218 this case we would have:
4219
4220 iD.1562 = 0;
4221 .omp_data_o.1.i = iD.1562;
4222 #omp parallel shared(iD.1562) -> outer parallel
4223 .omp_data_i.1 = &.omp_data_o.1
4224 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4225
4226 .omp_data_o.2.i = iD.1562; -> **
4227 #omp parallel shared(iD.1562) -> inner parallel
4228 .omp_data_i.2 = &.omp_data_o.2
4229 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4230
4231
4232 ** This is a problem. The symbol iD.1562 cannot be referenced
4233 inside the body of the outer parallel region. But since we are
4234 emitting this copy operation while expanding the inner parallel
4235 directive, we need to access the CTX structure of the outer
4236 parallel directive to get the correct mapping:
4237
4238 .omp_data_o.2.i = .omp_data_i.1->i
4239
4240 Since there may be other workshare or parallel directives enclosing
4241 the parallel directive, it may be necessary to walk up the context
4242 parent chain. This is not a problem in general because nested
4243 parallelism happens only rarely. */
4244
4245 static tree
4246 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4247 {
4248 tree t;
4249 omp_context *up;
4250
4251 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4252 t = maybe_lookup_decl (decl, up);
4253
4254 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4255
4256 return t ? t : decl;
4257 }
4258
4259
4260 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4261 in outer contexts. */
4262
4263 static tree
4264 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4265 {
4266 tree t = NULL;
4267 omp_context *up;
4268
4269 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4270 t = maybe_lookup_decl (decl, up);
4271
4272 return t ? t : decl;
4273 }
4274
4275
4276 /* Construct the initialization value for reduction operation OP. */
4277
4278 tree
4279 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4280 {
4281 switch (op)
4282 {
4283 case PLUS_EXPR:
4284 case MINUS_EXPR:
4285 case BIT_IOR_EXPR:
4286 case BIT_XOR_EXPR:
4287 case TRUTH_OR_EXPR:
4288 case TRUTH_ORIF_EXPR:
4289 case TRUTH_XOR_EXPR:
4290 case NE_EXPR:
4291 return build_zero_cst (type);
4292
4293 case MULT_EXPR:
4294 case TRUTH_AND_EXPR:
4295 case TRUTH_ANDIF_EXPR:
4296 case EQ_EXPR:
4297 return fold_convert_loc (loc, type, integer_one_node);
4298
4299 case BIT_AND_EXPR:
4300 return fold_convert_loc (loc, type, integer_minus_one_node);
4301
4302 case MAX_EXPR:
4303 if (SCALAR_FLOAT_TYPE_P (type))
4304 {
4305 REAL_VALUE_TYPE max, min;
4306 if (HONOR_INFINITIES (type))
4307 {
4308 real_inf (&max);
4309 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4310 }
4311 else
4312 real_maxval (&min, 1, TYPE_MODE (type));
4313 return build_real (type, min);
4314 }
4315 else if (POINTER_TYPE_P (type))
4316 {
4317 wide_int min
4318 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4319 return wide_int_to_tree (type, min);
4320 }
4321 else
4322 {
4323 gcc_assert (INTEGRAL_TYPE_P (type));
4324 return TYPE_MIN_VALUE (type);
4325 }
4326
4327 case MIN_EXPR:
4328 if (SCALAR_FLOAT_TYPE_P (type))
4329 {
4330 REAL_VALUE_TYPE max;
4331 if (HONOR_INFINITIES (type))
4332 real_inf (&max);
4333 else
4334 real_maxval (&max, 0, TYPE_MODE (type));
4335 return build_real (type, max);
4336 }
4337 else if (POINTER_TYPE_P (type))
4338 {
4339 wide_int max
4340 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4341 return wide_int_to_tree (type, max);
4342 }
4343 else
4344 {
4345 gcc_assert (INTEGRAL_TYPE_P (type));
4346 return TYPE_MAX_VALUE (type);
4347 }
4348
4349 default:
4350 gcc_unreachable ();
4351 }
4352 }
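
/* Illustration (added commentary): for an int reduction the identity
   values produced above are 0 for +, -, |, ^ and !=; 1 for *, && and
   ==; ~0 (all ones) for &; INT_MIN for max; and INT_MAX for min.  */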
4353
4354 /* Construct the initialization value for reduction CLAUSE. */
4355
4356 tree
4357 omp_reduction_init (tree clause, tree type)
4358 {
4359 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4360 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4361 }
4362
4363 /* Return alignment to be assumed for var in CLAUSE, which should be
4364 OMP_CLAUSE_ALIGNED. */
4365
4366 static tree
4367 omp_clause_aligned_alignment (tree clause)
4368 {
4369 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4370 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4371
4372 /* Otherwise return the implementation-defined alignment. */
4373 unsigned int al = 1;
4374 opt_scalar_mode mode_iter;
4375 auto_vector_modes modes;
4376 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4377 static enum mode_class classes[]
4378 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4379 for (int i = 0; i < 4; i += 2)
4380 /* The for loop above dictates that we only walk through scalar classes. */
4381 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4382 {
4383 scalar_mode mode = mode_iter.require ();
4384 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4385 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4386 continue;
4387 machine_mode alt_vmode;
4388 for (unsigned int j = 0; j < modes.length (); ++j)
4389 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4390 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4391 vmode = alt_vmode;
4392
4393 tree type = lang_hooks.types.type_for_mode (mode, 1);
4394 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4395 continue;
4396 type = build_vector_type_for_mode (type, vmode);
4397 if (TYPE_MODE (type) != vmode)
4398 continue;
4399 if (TYPE_ALIGN_UNIT (type) > al)
4400 al = TYPE_ALIGN_UNIT (type);
4401 }
4402 return build_int_cst (integer_type_node, al);
4403 }
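
/* Illustration (added commentary, target-dependent): if a target's
   preferred SImode vector mode is a 32-byte mode such as V8SImode,
   the loop above raises AL to 32 and the clause assumes 32-byte
   alignment; on a target with no usable vector modes AL stays 1.  */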
4404
4405
4406 /* This structure is part of the interface between lower_rec_simd_input_clauses
4407 and lower_rec_input_clauses. */
4408
4409 class omplow_simd_context {
4410 public:
4411 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4412 tree idx;
4413 tree lane;
4414 tree lastlane;
4415 vec<tree, va_heap> simt_eargs;
4416 gimple_seq simt_dlist;
4417 poly_uint64_pod max_vf;
4418 bool is_simt;
4419 };
4420
4421 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4422 privatization. */
4423
4424 static bool
4425 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4426 omplow_simd_context *sctx, tree &ivar,
4427 tree &lvar, tree *rvar = NULL,
4428 tree *rvar2 = NULL)
4429 {
4430 if (known_eq (sctx->max_vf, 0U))
4431 {
4432 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4433 if (maybe_gt (sctx->max_vf, 1U))
4434 {
4435 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4436 OMP_CLAUSE_SAFELEN);
4437 if (c)
4438 {
4439 poly_uint64 safe_len;
4440 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4441 || maybe_lt (safe_len, 1U))
4442 sctx->max_vf = 1;
4443 else
4444 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4445 }
4446 }
4447 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4448 {
4449 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4450 c = OMP_CLAUSE_CHAIN (c))
4451 {
4452 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4453 continue;
4454
4455 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4456 {
4457 /* UDR reductions are not supported yet for SIMT; disable
4458 SIMT. */
4459 sctx->max_vf = 1;
4460 break;
4461 }
4462
4463 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4464 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4465 {
4466 /* Doing boolean operations on non-integral types is
4467 for conformance only; it's not worth supporting this
4468 for SIMT. */
4469 sctx->max_vf = 1;
4470 break;
4471 }
4472 }
4473 }
4474 if (maybe_gt (sctx->max_vf, 1U))
4475 {
4476 sctx->idx = create_tmp_var (unsigned_type_node);
4477 sctx->lane = create_tmp_var (unsigned_type_node);
4478 }
4479 }
4480 if (known_eq (sctx->max_vf, 1U))
4481 return false;
4482
4483 if (sctx->is_simt)
4484 {
4485 if (is_gimple_reg (new_var))
4486 {
4487 ivar = lvar = new_var;
4488 return true;
4489 }
4490 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4491 ivar = lvar = create_tmp_var (type);
4492 TREE_ADDRESSABLE (ivar) = 1;
4493 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4494 NULL, DECL_ATTRIBUTES (ivar));
4495 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4496 tree clobber = build_clobber (type);
4497 gimple *g = gimple_build_assign (ivar, clobber);
4498 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4499 }
4500 else
4501 {
4502 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4503 tree avar = create_tmp_var_raw (atype);
4504 if (TREE_ADDRESSABLE (new_var))
4505 TREE_ADDRESSABLE (avar) = 1;
4506 DECL_ATTRIBUTES (avar)
4507 = tree_cons (get_identifier ("omp simd array"), NULL,
4508 DECL_ATTRIBUTES (avar));
4509 gimple_add_tmp_var (avar);
4510 tree iavar = avar;
4511 if (rvar && !ctx->for_simd_scan_phase)
4512 {
4513 /* For inscan reductions, create another array temporary,
4514 which will hold the reduced value. */
4515 iavar = create_tmp_var_raw (atype);
4516 if (TREE_ADDRESSABLE (new_var))
4517 TREE_ADDRESSABLE (iavar) = 1;
4518 DECL_ATTRIBUTES (iavar)
4519 = tree_cons (get_identifier ("omp simd array"), NULL,
4520 tree_cons (get_identifier ("omp simd inscan"), NULL,
4521 DECL_ATTRIBUTES (iavar)));
4522 gimple_add_tmp_var (iavar);
4523 ctx->cb.decl_map->put (avar, iavar);
4524 if (sctx->lastlane == NULL_TREE)
4525 sctx->lastlane = create_tmp_var (unsigned_type_node);
4526 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4527 sctx->lastlane, NULL_TREE, NULL_TREE);
4528 TREE_THIS_NOTRAP (*rvar) = 1;
4529
4530 if (ctx->scan_exclusive)
4531 {
4532 /* And for exclusive scan yet another one, which will
4533 hold the value during the scan phase. */
4534 tree savar = create_tmp_var_raw (atype);
4535 if (TREE_ADDRESSABLE (new_var))
4536 TREE_ADDRESSABLE (savar) = 1;
4537 DECL_ATTRIBUTES (savar)
4538 = tree_cons (get_identifier ("omp simd array"), NULL,
4539 tree_cons (get_identifier ("omp simd inscan "
4540 "exclusive"), NULL,
4541 DECL_ATTRIBUTES (savar)));
4542 gimple_add_tmp_var (savar);
4543 ctx->cb.decl_map->put (iavar, savar);
4544 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4545 sctx->idx, NULL_TREE, NULL_TREE);
4546 TREE_THIS_NOTRAP (*rvar2) = 1;
4547 }
4548 }
4549 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4550 NULL_TREE, NULL_TREE);
4551 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4552 NULL_TREE, NULL_TREE);
4553 TREE_THIS_NOTRAP (ivar) = 1;
4554 TREE_THIS_NOTRAP (lvar) = 1;
4555 }
4556 if (DECL_P (new_var))
4557 {
4558 SET_DECL_VALUE_EXPR (new_var, lvar);
4559 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4560 }
4561 return true;
4562 }
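
/* Illustration (added commentary): for a privatized scalar X in
   #pragma omp simd, the non-SIMT path above creates a backing array

     T D.arr[max_vf];   // carries the "omp simd array" attribute

   and sets DECL_VALUE_EXPR (X) to D.arr[lane] (LVAR), while the
   per-iteration privatization code uses D.arr[idx] (IVAR).  */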
4563
4564 /* Helper function of lower_rec_input_clauses. For a reference
4565 in simd reduction, add an underlying variable it will reference. */
4566
4567 static void
4568 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4569 {
4570 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4571 if (TREE_CONSTANT (z))
4572 {
4573 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4574 get_name (new_vard));
4575 gimple_add_tmp_var (z);
4576 TREE_ADDRESSABLE (z) = 1;
4577 z = build_fold_addr_expr_loc (loc, z);
4578 gimplify_assign (new_vard, z, ilist);
4579 }
4580 }
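
/* Illustration (added commentary): for a simd reduction over a C++
   reference such as double &r, the helper above emits roughly

     double D.tmp;
     new_vard = &D.tmp;

   so the privatized reference has constant-size storage to point at;
   cases with non-constant TYPE_SIZE_UNIT are left for the caller.  */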
4581
4582 /* Helper function for lower_rec_input_clauses. Emit code into the
4583 ILIST sequence to compute (type) (tskred_temp[idx]). */
4584
4585 static tree
4586 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4587 unsigned idx)
4588 {
4589 unsigned HOST_WIDE_INT sz
4590 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4591 tree r = build2 (MEM_REF, pointer_sized_int_node,
4592 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4593 idx * sz));
4594 tree v = create_tmp_var (pointer_sized_int_node);
4595 gimple *g = gimple_build_assign (v, r);
4596 gimple_seq_add_stmt (ilist, g);
4597 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4598 {
4599 v = create_tmp_var (type);
4600 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4601 gimple_seq_add_stmt (ilist, g);
4602 }
4603 return v;
4604 }
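
/* Illustration (added commentary): on a 64-bit target,
   task_reduction_read (ilist, tskred_temp, sizetype, 1) appends
   roughly

     D.1 = MEM <ptr-sized int> [tskred_temp + 8];
     D.2 = (sizetype) D.1;

   and returns D.2; the conversion is omitted when TYPE is already
   compatible with the pointer-sized integer type.  */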
4605
4606 /* Lower early initialization of privatized variable NEW_VAR
4607 if it needs an allocator (i.e. has an allocate clause). */
4608
4609 static bool
4610 lower_private_allocate (tree var, tree new_var, tree &allocator,
4611 tree &allocate_ptr, gimple_seq *ilist,
4612 omp_context *ctx, bool is_ref, tree size)
4613 {
4614 if (allocator)
4615 return false;
4616 gcc_assert (allocate_ptr == NULL_TREE);
4617 if (ctx->allocate_map
4618 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4619 if (tree *allocatorp = ctx->allocate_map->get (var))
4620 allocator = *allocatorp;
4621 if (allocator == NULL_TREE)
4622 return false;
4623 if (!is_ref && omp_is_reference (var))
4624 {
4625 allocator = NULL_TREE;
4626 return false;
4627 }
4628
4629 if (TREE_CODE (allocator) != INTEGER_CST)
4630 allocator = build_outer_var_ref (allocator, ctx);
4631 allocator = fold_convert (pointer_sized_int_node, allocator);
4632 if (TREE_CODE (allocator) != INTEGER_CST)
4633 {
4634 tree var = create_tmp_var (TREE_TYPE (allocator));
4635 gimplify_assign (var, allocator, ilist);
4636 allocator = var;
4637 }
4638
4639 tree ptr_type, align, sz = size;
4640 if (TYPE_P (new_var))
4641 {
4642 ptr_type = build_pointer_type (new_var);
4643 align = build_int_cst (size_type_node, TYPE_ALIGN_UNIT (new_var));
4644 }
4645 else if (is_ref)
4646 {
4647 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4648 align = build_int_cst (size_type_node,
4649 TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4650 }
4651 else
4652 {
4653 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4654 align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (new_var));
4655 if (sz == NULL_TREE)
4656 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4657 }
4658 if (TREE_CODE (sz) != INTEGER_CST)
4659 {
4660 tree szvar = create_tmp_var (size_type_node);
4661 gimplify_assign (szvar, sz, ilist);
4662 sz = szvar;
4663 }
4664 allocate_ptr = create_tmp_var (ptr_type);
4665 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4666 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4667 gimple_call_set_lhs (g, allocate_ptr);
4668 gimple_seq_add_stmt (ilist, g);
4669 if (!is_ref)
4670 {
4671 tree x = build_simple_mem_ref (allocate_ptr);
4672 TREE_THIS_NOTRAP (x) = 1;
4673 SET_DECL_VALUE_EXPR (new_var, x);
4674 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4675 }
4676 return true;
4677 }
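
/* Illustration (added commentary, temporaries' names made up): for a
   private int V with allocate (A : V), the code above emits roughly

     allocator.0 = (pointer-sized int) A;
     ptr.1 = __builtin_GOMP_alloc (4, 4, allocator.0);

   i.e. GOMP_alloc (align, size, allocator), and V's DECL_VALUE_EXPR
   becomes *ptr.1 so later references use the allocated storage.  */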
4678
4679 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4680 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4681 private variables. Initialization statements go in ILIST, while calls
4682 to destructors go in DLIST. */
4683
4684 static void
4685 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4686 omp_context *ctx, struct omp_for_data *fd)
4687 {
4688 tree c, copyin_seq, x, ptr;
4689 bool copyin_by_ref = false;
4690 bool lastprivate_firstprivate = false;
4691 bool reduction_omp_orig_ref = false;
4692 int pass;
4693 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4694 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4695 omplow_simd_context sctx = omplow_simd_context ();
4696 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4697 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4698 gimple_seq llist[4] = { };
4699 tree nonconst_simd_if = NULL_TREE;
4700
4701 copyin_seq = NULL;
4702 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4703
4704 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4705 with data sharing clauses referencing variable sized vars. That
4706 is unnecessarily hard to support and very unlikely to result in
4707 vectorized code anyway. */
4708 if (is_simd)
4709 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4710 switch (OMP_CLAUSE_CODE (c))
4711 {
4712 case OMP_CLAUSE_LINEAR:
4713 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4714 sctx.max_vf = 1;
4715 /* FALLTHRU */
4716 case OMP_CLAUSE_PRIVATE:
4717 case OMP_CLAUSE_FIRSTPRIVATE:
4718 case OMP_CLAUSE_LASTPRIVATE:
4719 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4720 sctx.max_vf = 1;
4721 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4722 {
4723 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4724 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4725 sctx.max_vf = 1;
4726 }
4727 break;
4728 case OMP_CLAUSE_REDUCTION:
4729 case OMP_CLAUSE_IN_REDUCTION:
4730 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4731 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4732 sctx.max_vf = 1;
4733 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4734 {
4735 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4736 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4737 sctx.max_vf = 1;
4738 }
4739 break;
4740 case OMP_CLAUSE_IF:
4741 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4742 sctx.max_vf = 1;
4743 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4744 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4745 break;
4746 case OMP_CLAUSE_SIMDLEN:
4747 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4748 sctx.max_vf = 1;
4749 break;
4750 case OMP_CLAUSE__CONDTEMP_:
4751 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4752 if (sctx.is_simt)
4753 sctx.max_vf = 1;
4754 break;
4755 default:
4756 continue;
4757 }
4758
4759 /* Add a placeholder for simduid. */
4760 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4761 sctx.simt_eargs.safe_push (NULL_TREE);
4762
4763 unsigned task_reduction_cnt = 0;
4764 unsigned task_reduction_cntorig = 0;
4765 unsigned task_reduction_cnt_full = 0;
4766 unsigned task_reduction_cntorig_full = 0;
4767 unsigned task_reduction_other_cnt = 0;
4768 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4769 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4770 /* Do all the fixed sized types in the first pass, and the variable sized
4771 types in the second pass. This makes sure that the scalar arguments to
4772 the variable sized types are processed before we use them in the
4773 variable sized operations. For task reductions we use 4 passes: in the
4774 first two we ignore them, in the third we gather arguments for the
4775 GOMP_task_reduction_remap call, and in the last pass we actually handle
4776 the task reductions. */
4777 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4778 ? 4 : 2); ++pass)
4779 {
4780 if (pass == 2 && task_reduction_cnt)
4781 {
4782 tskred_atype
4783 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4784 + task_reduction_cntorig);
4785 tskred_avar = create_tmp_var_raw (tskred_atype);
4786 gimple_add_tmp_var (tskred_avar);
4787 TREE_ADDRESSABLE (tskred_avar) = 1;
4788 task_reduction_cnt_full = task_reduction_cnt;
4789 task_reduction_cntorig_full = task_reduction_cntorig;
4790 }
4791 else if (pass == 3 && task_reduction_cnt)
4792 {
4793 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4794 gimple *g
4795 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4796 size_int (task_reduction_cntorig),
4797 build_fold_addr_expr (tskred_avar));
4798 gimple_seq_add_stmt (ilist, g);
4799 }
4800 if (pass == 3 && task_reduction_other_cnt)
4801 {
4802 /* For reduction clauses, build
4803 tskred_base = (void *) tskred_temp[2]
4804 + omp_get_thread_num () * tskred_temp[1]
4805 or if tskred_temp[1] is known to be constant, that constant
4806 directly. This is the start of the private reduction copy block
4807 for the current thread. */
4808 tree v = create_tmp_var (integer_type_node);
4809 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4810 gimple *g = gimple_build_call (x, 0);
4811 gimple_call_set_lhs (g, v);
4812 gimple_seq_add_stmt (ilist, g);
4813 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4814 tskred_temp = OMP_CLAUSE_DECL (c);
4815 if (is_taskreg_ctx (ctx))
4816 tskred_temp = lookup_decl (tskred_temp, ctx);
4817 tree v2 = create_tmp_var (sizetype);
4818 g = gimple_build_assign (v2, NOP_EXPR, v);
4819 gimple_seq_add_stmt (ilist, g);
4820 if (ctx->task_reductions[0])
4821 v = fold_convert (sizetype, ctx->task_reductions[0]);
4822 else
4823 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4824 tree v3 = create_tmp_var (sizetype);
4825 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4826 gimple_seq_add_stmt (ilist, g);
4827 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4828 tskred_base = create_tmp_var (ptr_type_node);
4829 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4830 gimple_seq_add_stmt (ilist, g);
4831 }
4832 task_reduction_cnt = 0;
4833 task_reduction_cntorig = 0;
4834 task_reduction_other_cnt = 0;
4835 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4836 {
4837 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4838 tree var, new_var;
4839 bool by_ref;
4840 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4841 bool task_reduction_p = false;
4842 bool task_reduction_needs_orig_p = false;
4843 tree cond = NULL_TREE;
4844 tree allocator, allocate_ptr;
4845
4846 switch (c_kind)
4847 {
4848 case OMP_CLAUSE_PRIVATE:
4849 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4850 continue;
4851 break;
4852 case OMP_CLAUSE_SHARED:
4853 /* Ignore shared directives in teams construct inside
4854 of target construct. */
4855 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4856 && !is_host_teams_ctx (ctx))
4857 continue;
4858 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4859 {
4860 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4861 || is_global_var (OMP_CLAUSE_DECL (c)));
4862 continue;
4863 }
/* FALLTHRU */
4864 case OMP_CLAUSE_FIRSTPRIVATE:
4865 case OMP_CLAUSE_COPYIN:
4866 break;
4867 case OMP_CLAUSE_LINEAR:
4868 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4869 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4870 lastprivate_firstprivate = true;
4871 break;
4872 case OMP_CLAUSE_REDUCTION:
4873 case OMP_CLAUSE_IN_REDUCTION:
4874 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
4875 || is_task_ctx (ctx)
4876 || OMP_CLAUSE_REDUCTION_TASK (c))
4877 {
4878 task_reduction_p = true;
4879 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4880 {
4881 task_reduction_other_cnt++;
4882 if (pass == 2)
4883 continue;
4884 }
4885 else
4886 task_reduction_cnt++;
4887 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4888 {
4889 var = OMP_CLAUSE_DECL (c);
4890 /* If var is a global variable that isn't privatized
4891 in outer contexts, we don't need to look up the
4892 original address, it is always the address of the
4893 global variable itself. */
4894 if (!DECL_P (var)
4895 || omp_is_reference (var)
4896 || !is_global_var
4897 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4898 {
4899 task_reduction_needs_orig_p = true;
4900 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4901 task_reduction_cntorig++;
4902 }
4903 }
4904 }
4905 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4906 reduction_omp_orig_ref = true;
4907 break;
4908 case OMP_CLAUSE__REDUCTEMP_:
4909 if (!is_taskreg_ctx (ctx))
4910 continue;
4911 /* FALLTHRU */
4912 case OMP_CLAUSE__LOOPTEMP_:
4913 /* Handle _looptemp_/_reductemp_ clauses only on
4914 parallel/task. */
4915 if (fd)
4916 continue;
4917 break;
4918 case OMP_CLAUSE_LASTPRIVATE:
4919 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4920 {
4921 lastprivate_firstprivate = true;
4922 if (pass != 0 || is_taskloop_ctx (ctx))
4923 continue;
4924 }
4925 /* Even without a corresponding firstprivate, if the
4926 decl is a Fortran allocatable, it needs an outer var
4927 reference. */
4928 else if (pass == 0
4929 && lang_hooks.decls.omp_private_outer_ref
4930 (OMP_CLAUSE_DECL (c)))
4931 lastprivate_firstprivate = true;
4932 break;
4933 case OMP_CLAUSE_ALIGNED:
4934 if (pass != 1)
4935 continue;
4936 var = OMP_CLAUSE_DECL (c);
4937 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4938 && !is_global_var (var))
4939 {
4940 new_var = maybe_lookup_decl (var, ctx);
4941 if (new_var == NULL_TREE)
4942 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4943 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4944 tree alarg = omp_clause_aligned_alignment (c);
4945 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4946 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4947 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4948 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4949 gimplify_and_add (x, ilist);
4950 }
4951 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4952 && is_global_var (var))
4953 {
4954 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4955 new_var = lookup_decl (var, ctx);
4956 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4957 t = build_fold_addr_expr_loc (clause_loc, t);
4958 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4959 tree alarg = omp_clause_aligned_alignment (c);
4960 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4961 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4962 t = fold_convert_loc (clause_loc, ptype, t);
4963 x = create_tmp_var (ptype);
4964 t = build2 (MODIFY_EXPR, ptype, x, t);
4965 gimplify_and_add (t, ilist);
4966 t = build_simple_mem_ref_loc (clause_loc, x);
4967 SET_DECL_VALUE_EXPR (new_var, t);
4968 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4969 }
4970 continue;
4971 case OMP_CLAUSE__CONDTEMP_:
4972 if (is_parallel_ctx (ctx)
4973 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4974 break;
4975 continue;
4976 default:
4977 continue;
4978 }
4979
4980 if (task_reduction_p != (pass >= 2))
4981 continue;
4982
4983 allocator = NULL_TREE;
4984 allocate_ptr = NULL_TREE;
4985 new_var = var = OMP_CLAUSE_DECL (c);
4986 if ((c_kind == OMP_CLAUSE_REDUCTION
4987 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4988 && TREE_CODE (var) == MEM_REF)
4989 {
4990 var = TREE_OPERAND (var, 0);
4991 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4992 var = TREE_OPERAND (var, 0);
4993 if (TREE_CODE (var) == INDIRECT_REF
4994 || TREE_CODE (var) == ADDR_EXPR)
4995 var = TREE_OPERAND (var, 0);
4996 if (is_variable_sized (var))
4997 {
4998 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4999 var = DECL_VALUE_EXPR (var);
5000 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5001 var = TREE_OPERAND (var, 0);
5002 gcc_assert (DECL_P (var));
5003 }
5004 new_var = var;
5005 }
5006 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5007 {
5008 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5009 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5010 }
5011 else if (c_kind != OMP_CLAUSE_COPYIN)
5012 new_var = lookup_decl (var, ctx);
5013
5014 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5015 {
5016 if (pass != 0)
5017 continue;
5018 }
5019 /* C/C++ array section reductions. */
5020 else if ((c_kind == OMP_CLAUSE_REDUCTION
5021 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5022 && var != OMP_CLAUSE_DECL (c))
5023 {
5024 if (pass == 0)
5025 continue;
5026
5027 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5028 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5029
5030 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5031 {
5032 tree b = TREE_OPERAND (orig_var, 1);
5033 if (is_omp_target (ctx->stmt))
5034 b = NULL_TREE;
5035 else
5036 b = maybe_lookup_decl (b, ctx);
5037 if (b == NULL)
5038 {
5039 b = TREE_OPERAND (orig_var, 1);
5040 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5041 }
5042 if (integer_zerop (bias))
5043 bias = b;
5044 else
5045 {
5046 bias = fold_convert_loc (clause_loc,
5047 TREE_TYPE (b), bias);
5048 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5049 TREE_TYPE (b), b, bias);
5050 }
5051 orig_var = TREE_OPERAND (orig_var, 0);
5052 }
5053 if (pass == 2)
5054 {
5055 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5056 if (is_global_var (out)
5057 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5058 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5059 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5060 != POINTER_TYPE)))
5061 x = var;
5062 else if (is_omp_target (ctx->stmt))
5063 x = out;
5064 else
5065 {
5066 bool by_ref = use_pointer_for_field (var, NULL);
5067 x = build_receiver_ref (var, by_ref, ctx);
5068 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5069 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5070 == POINTER_TYPE))
5071 x = build_fold_addr_expr (x);
5072 }
5073 if (TREE_CODE (orig_var) == INDIRECT_REF)
5074 x = build_simple_mem_ref (x);
5075 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5076 {
5077 if (var == TREE_OPERAND (orig_var, 0))
5078 x = build_fold_addr_expr (x);
5079 }
5080 bias = fold_convert (sizetype, bias);
5081 x = fold_convert (ptr_type_node, x);
5082 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5083 TREE_TYPE (x), x, bias);
5084 unsigned cnt = task_reduction_cnt - 1;
5085 if (!task_reduction_needs_orig_p)
5086 cnt += (task_reduction_cntorig_full
5087 - task_reduction_cntorig);
5088 else
5089 cnt = task_reduction_cntorig - 1;
5090 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5091 size_int (cnt), NULL_TREE, NULL_TREE);
5092 gimplify_assign (r, x, ilist);
5093 continue;
5094 }
5095
5096 if (TREE_CODE (orig_var) == INDIRECT_REF
5097 || TREE_CODE (orig_var) == ADDR_EXPR)
5098 orig_var = TREE_OPERAND (orig_var, 0);
5099 tree d = OMP_CLAUSE_DECL (c);
5100 tree type = TREE_TYPE (d);
5101 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5102 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5103 tree sz = v;
5104 const char *name = get_name (orig_var);
5105 if (pass != 3 && !TREE_CONSTANT (v))
5106 {
5107 tree t;
5108 if (is_omp_target (ctx->stmt))
5109 t = NULL_TREE;
5110 else
5111 t = maybe_lookup_decl (v, ctx);
5112 if (t)
5113 v = t;
5114 else
5115 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5116 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5117 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5118 TREE_TYPE (v), v,
5119 build_int_cst (TREE_TYPE (v), 1));
5120 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5121 TREE_TYPE (v), t,
5122 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5123 }
5124 if (pass == 3)
5125 {
5126 tree xv = create_tmp_var (ptr_type_node);
5127 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5128 {
5129 unsigned cnt = task_reduction_cnt - 1;
5130 if (!task_reduction_needs_orig_p)
5131 cnt += (task_reduction_cntorig_full
5132 - task_reduction_cntorig);
5133 else
5134 cnt = task_reduction_cntorig - 1;
5135 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5136 size_int (cnt), NULL_TREE, NULL_TREE);
5137
5138 gimple *g = gimple_build_assign (xv, x);
5139 gimple_seq_add_stmt (ilist, g);
5140 }
5141 else
5142 {
5143 unsigned int idx = *ctx->task_reduction_map->get (c);
5144 tree off;
5145 if (ctx->task_reductions[1 + idx])
5146 off = fold_convert (sizetype,
5147 ctx->task_reductions[1 + idx]);
5148 else
5149 off = task_reduction_read (ilist, tskred_temp, sizetype,
5150 7 + 3 * idx + 1);
5151 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5152 tskred_base, off);
5153 gimple_seq_add_stmt (ilist, g);
5154 }
5155 x = fold_convert (build_pointer_type (boolean_type_node),
5156 xv);
5157 if (TREE_CONSTANT (v))
5158 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5159 TYPE_SIZE_UNIT (type));
5160 else
5161 {
5162 tree t;
5163 if (is_omp_target (ctx->stmt))
5164 t = NULL_TREE;
5165 else
5166 t = maybe_lookup_decl (v, ctx);
5167 if (t)
5168 v = t;
5169 else
5170 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5171 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5172 fb_rvalue);
5173 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5174 TREE_TYPE (v), v,
5175 build_int_cst (TREE_TYPE (v), 1));
5176 t = fold_build2_loc (clause_loc, MULT_EXPR,
5177 TREE_TYPE (v), t,
5178 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5179 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5180 }
5181 cond = create_tmp_var (TREE_TYPE (x));
5182 gimplify_assign (cond, x, ilist);
5183 x = xv;
5184 }
5185 else if (lower_private_allocate (var, type, allocator,
5186 allocate_ptr, ilist, ctx,
5187 true,
5188 TREE_CONSTANT (v)
5189 ? TYPE_SIZE_UNIT (type)
5190 : sz))
5191 x = allocate_ptr;
5192 else if (TREE_CONSTANT (v))
5193 {
5194 x = create_tmp_var_raw (type, name);
5195 gimple_add_tmp_var (x);
5196 TREE_ADDRESSABLE (x) = 1;
5197 x = build_fold_addr_expr_loc (clause_loc, x);
5198 }
5199 else
5200 {
5201 tree atmp
5202 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5203 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5204 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5205 }
5206
5207 tree ptype = build_pointer_type (TREE_TYPE (type));
5208 x = fold_convert_loc (clause_loc, ptype, x);
5209 tree y = create_tmp_var (ptype, name);
5210 gimplify_assign (y, x, ilist);
5211 x = y;
5212 tree yb = y;
5213
5214 if (!integer_zerop (bias))
5215 {
5216 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5217 bias);
5218 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5219 x);
5220 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5221 pointer_sized_int_node, yb, bias);
5222 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5223 yb = create_tmp_var (ptype, name);
5224 gimplify_assign (yb, x, ilist);
5225 x = yb;
5226 }
5227
5228 d = TREE_OPERAND (d, 0);
5229 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5230 d = TREE_OPERAND (d, 0);
5231 if (TREE_CODE (d) == ADDR_EXPR)
5232 {
5233 if (orig_var != var)
5234 {
5235 gcc_assert (is_variable_sized (orig_var));
5236 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5237 x);
5238 gimplify_assign (new_var, x, ilist);
5239 tree new_orig_var = lookup_decl (orig_var, ctx);
5240 tree t = build_fold_indirect_ref (new_var);
5241 DECL_IGNORED_P (new_var) = 0;
5242 TREE_THIS_NOTRAP (t) = 1;
5243 SET_DECL_VALUE_EXPR (new_orig_var, t);
5244 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5245 }
5246 else
5247 {
5248 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5249 build_int_cst (ptype, 0));
5250 SET_DECL_VALUE_EXPR (new_var, x);
5251 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5252 }
5253 }
5254 else
5255 {
5256 gcc_assert (orig_var == var);
5257 if (TREE_CODE (d) == INDIRECT_REF)
5258 {
5259 x = create_tmp_var (ptype, name);
5260 TREE_ADDRESSABLE (x) = 1;
5261 gimplify_assign (x, yb, ilist);
5262 x = build_fold_addr_expr_loc (clause_loc, x);
5263 }
5264 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5265 gimplify_assign (new_var, x, ilist);
5266 }
5267 /* GOMP_taskgroup_reduction_register memsets the whole
5268 array to zero. If the initializer is zero, we don't
5269 need to initialize it again, just mark it as ever
5270 used unconditionally, i.e. cond = true. */
5271 if (cond
5272 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5273 && initializer_zerop (omp_reduction_init (c,
5274 TREE_TYPE (type))))
5275 {
5276 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5277 boolean_true_node);
5278 gimple_seq_add_stmt (ilist, g);
5279 continue;
5280 }
5281 tree end = create_artificial_label (UNKNOWN_LOCATION);
5282 if (cond)
5283 {
5284 gimple *g;
5285 if (!is_parallel_ctx (ctx))
5286 {
5287 tree condv = create_tmp_var (boolean_type_node);
5288 g = gimple_build_assign (condv,
5289 build_simple_mem_ref (cond));
5290 gimple_seq_add_stmt (ilist, g);
5291 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5292 g = gimple_build_cond (NE_EXPR, condv,
5293 boolean_false_node, end, lab1);
5294 gimple_seq_add_stmt (ilist, g);
5295 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5296 }
5297 g = gimple_build_assign (build_simple_mem_ref (cond),
5298 boolean_true_node);
5299 gimple_seq_add_stmt (ilist, g);
5300 }
5301
5302 tree y1 = create_tmp_var (ptype);
5303 gimplify_assign (y1, y, ilist);
5304 tree i2 = NULL_TREE, y2 = NULL_TREE;
5305 tree body2 = NULL_TREE, end2 = NULL_TREE;
5306 tree y3 = NULL_TREE, y4 = NULL_TREE;
5307 if (task_reduction_needs_orig_p)
5308 {
5309 y3 = create_tmp_var (ptype);
5310 tree ref;
5311 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5312 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5313 size_int (task_reduction_cnt_full
5314 + task_reduction_cntorig - 1),
5315 NULL_TREE, NULL_TREE);
5316 else
5317 {
5318 unsigned int idx = *ctx->task_reduction_map->get (c);
5319 ref = task_reduction_read (ilist, tskred_temp, ptype,
5320 7 + 3 * idx);
5321 }
5322 gimplify_assign (y3, ref, ilist);
5323 }
5324 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5325 {
5326 if (pass != 3)
5327 {
5328 y2 = create_tmp_var (ptype);
5329 gimplify_assign (y2, y, ilist);
5330 }
5331 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5332 {
5333 tree ref = build_outer_var_ref (var, ctx);
5334 /* For references, build_outer_var_ref already performs this. */
5335 if (TREE_CODE (d) == INDIRECT_REF)
5336 gcc_assert (omp_is_reference (var));
5337 else if (TREE_CODE (d) == ADDR_EXPR)
5338 ref = build_fold_addr_expr (ref);
5339 else if (omp_is_reference (var))
5340 ref = build_fold_addr_expr (ref);
5341 ref = fold_convert_loc (clause_loc, ptype, ref);
5342 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5343 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5344 {
5345 y3 = create_tmp_var (ptype);
5346 gimplify_assign (y3, unshare_expr (ref), ilist);
5347 }
5348 if (is_simd)
5349 {
5350 y4 = create_tmp_var (ptype);
5351 gimplify_assign (y4, ref, dlist);
5352 }
5353 }
5354 }
5355 tree i = create_tmp_var (TREE_TYPE (v));
5356 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5357 tree body = create_artificial_label (UNKNOWN_LOCATION);
5358 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5359 if (y2)
5360 {
5361 i2 = create_tmp_var (TREE_TYPE (v));
5362 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5363 body2 = create_artificial_label (UNKNOWN_LOCATION);
5364 end2 = create_artificial_label (UNKNOWN_LOCATION);
5365 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5366 }
5367 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5368 {
5369 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5370 tree decl_placeholder
5371 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5372 SET_DECL_VALUE_EXPR (decl_placeholder,
5373 build_simple_mem_ref (y1));
5374 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5375 SET_DECL_VALUE_EXPR (placeholder,
5376 y3 ? build_simple_mem_ref (y3)
5377 : error_mark_node);
5378 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5379 x = lang_hooks.decls.omp_clause_default_ctor
5380 (c, build_simple_mem_ref (y1),
5381 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5382 if (x)
5383 gimplify_and_add (x, ilist);
5384 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5385 {
5386 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5387 lower_omp (&tseq, ctx);
5388 gimple_seq_add_seq (ilist, tseq);
5389 }
5390 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5391 if (is_simd)
5392 {
5393 SET_DECL_VALUE_EXPR (decl_placeholder,
5394 build_simple_mem_ref (y2));
5395 SET_DECL_VALUE_EXPR (placeholder,
5396 build_simple_mem_ref (y4));
5397 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5398 lower_omp (&tseq, ctx);
5399 gimple_seq_add_seq (dlist, tseq);
5400 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5401 }
5402 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5403 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5404 if (y2)
5405 {
5406 x = lang_hooks.decls.omp_clause_dtor
5407 (c, build_simple_mem_ref (y2));
5408 if (x)
5409 gimplify_and_add (x, dlist);
5410 }
5411 }
5412 else
5413 {
5414 x = omp_reduction_init (c, TREE_TYPE (type));
5415 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5416
5417 /* reduction(-:var) sums up the partial results, so it
5418 acts identically to reduction(+:var). */
5419 if (code == MINUS_EXPR)
5420 code = PLUS_EXPR;
5421
5422 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5423 if (is_simd)
5424 {
5425 x = build2 (code, TREE_TYPE (type),
5426 build_simple_mem_ref (y4),
5427 build_simple_mem_ref (y2));
5428 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5429 }
5430 }
5431 gimple *g
5432 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5433 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5434 gimple_seq_add_stmt (ilist, g);
5435 if (y3)
5436 {
5437 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5438 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5439 gimple_seq_add_stmt (ilist, g);
5440 }
5441 g = gimple_build_assign (i, PLUS_EXPR, i,
5442 build_int_cst (TREE_TYPE (i), 1));
5443 gimple_seq_add_stmt (ilist, g);
5444 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5445 gimple_seq_add_stmt (ilist, g);
5446 gimple_seq_add_stmt (ilist, gimple_build_label (end));
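/* The statements queued on ilist above amount to an element-wise
   initialization loop over the privatized array; a sketch, not
   literal GIMPLE:
     i = 0;
   body:
     *y1 = <init>;            // or the placeholder init sequence
     y1 += sizeof (<elt>);    // and y3 likewise when present
     i = i + 1;
     if (i <= v) goto body;
   end:  */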
5447 if (y2)
5448 {
5449 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5450 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5451 gimple_seq_add_stmt (dlist, g);
5452 if (y4)
5453 {
5454 g = gimple_build_assign
5455 (y4, POINTER_PLUS_EXPR, y4,
5456 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5457 gimple_seq_add_stmt (dlist, g);
5458 }
5459 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5460 build_int_cst (TREE_TYPE (i2), 1));
5461 gimple_seq_add_stmt (dlist, g);
5462 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5463 gimple_seq_add_stmt (dlist, g);
5464 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5465 }
5466 if (allocator)
5467 {
5468 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5469 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5470 gimple_seq_add_stmt (dlist, g);
5471 }
5472 continue;
5473 }
5474 else if (pass == 2)
5475 {
5476 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5477 if (is_global_var (out))
5478 x = var;
5479 else if (is_omp_target (ctx->stmt))
5480 x = out;
5481 else
5482 {
5483 bool by_ref = use_pointer_for_field (var, ctx);
5484 x = build_receiver_ref (var, by_ref, ctx);
5485 }
5486 if (!omp_is_reference (var))
5487 x = build_fold_addr_expr (x);
5488 x = fold_convert (ptr_type_node, x);
5489 unsigned cnt = task_reduction_cnt - 1;
5490 if (!task_reduction_needs_orig_p)
5491 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5492 else
5493 cnt = task_reduction_cntorig - 1;
5494 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5495 size_int (cnt), NULL_TREE, NULL_TREE);
5496 gimplify_assign (r, x, ilist);
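/* Pass 2 therefore only records the address of the original
   variable in the tskred_avar array consumed by the task
   reduction runtime; the privatized copies themselves are wired
   up in pass 3 below.  */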
5497 continue;
5498 }
5499 else if (pass == 3)
5500 {
5501 tree type = TREE_TYPE (new_var);
5502 if (!omp_is_reference (var))
5503 type = build_pointer_type (type);
5504 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5505 {
5506 unsigned cnt = task_reduction_cnt - 1;
5507 if (!task_reduction_needs_orig_p)
5508 cnt += (task_reduction_cntorig_full
5509 - task_reduction_cntorig);
5510 else
5511 cnt = task_reduction_cntorig - 1;
5512 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5513 size_int (cnt), NULL_TREE, NULL_TREE);
5514 }
5515 else
5516 {
5517 unsigned int idx = *ctx->task_reduction_map->get (c);
5518 tree off;
5519 if (ctx->task_reductions[1 + idx])
5520 off = fold_convert (sizetype,
5521 ctx->task_reductions[1 + idx]);
5522 else
5523 off = task_reduction_read (ilist, tskred_temp, sizetype,
5524 7 + 3 * idx + 1);
5525 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5526 tskred_base, off);
5527 }
5528 x = fold_convert (type, x);
5529 tree t;
5530 if (omp_is_reference (var))
5531 {
5532 gimplify_assign (new_var, x, ilist);
5533 t = new_var;
5534 new_var = build_simple_mem_ref (new_var);
5535 }
5536 else
5537 {
5538 t = create_tmp_var (type);
5539 gimplify_assign (t, x, ilist);
5540 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5541 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5542 }
5543 t = fold_convert (build_pointer_type (boolean_type_node), t);
5544 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5545 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5546 cond = create_tmp_var (TREE_TYPE (t));
5547 gimplify_assign (cond, t, ilist);
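/* COND now points one object past this thread's privatized copy,
   which is where the task reduction runtime is assumed to keep
   the per-copy "ever used" flag (cf. the
   GOMP_taskgroup_reduction_register comments above).  */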
5548 }
5549 else if (is_variable_sized (var))
5550 {
5551 /* For variable sized types, we need to allocate the
5552 actual storage here. Call alloca and store the
5553 result in the pointer decl that we created elsewhere. */
5554 if (pass == 0)
5555 continue;
5556
5557 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5558 {
5559 tree tmp;
5560
5561 ptr = DECL_VALUE_EXPR (new_var);
5562 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5563 ptr = TREE_OPERAND (ptr, 0);
5564 gcc_assert (DECL_P (ptr));
5565 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5566
5567 if (lower_private_allocate (var, new_var, allocator,
5568 allocate_ptr, ilist, ctx,
5569 false, x))
5570 tmp = allocate_ptr;
5571 else
5572 {
5573 /* void *tmp = __builtin_alloca_with_align (x, DECL_ALIGN (var)); */
5574 tree atmp
5575 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5576 gcall *stmt
5577 = gimple_build_call (atmp, 2, x,
5578 size_int (DECL_ALIGN (var)));
5579 cfun->calls_alloca = 1;
5580 tmp = create_tmp_var_raw (ptr_type_node);
5581 gimple_add_tmp_var (tmp);
5582 gimple_call_set_lhs (stmt, tmp);
5583
5584 gimple_seq_add_stmt (ilist, stmt);
5585 }
5586
5587 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5588 gimplify_assign (ptr, x, ilist);
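/* I.e. unless an allocate clause supplied the storage through
   lower_private_allocate, this emitted roughly:
     tmp = __builtin_alloca_with_align (size, DECL_ALIGN (var));
     ptr = (TYPE *) tmp;  */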
5589 }
5590 }
5591 else if (omp_is_reference (var)
5592 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5593 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5594 {
5595 /* For references that are being privatized for Fortran,
5596 allocate new backing storage for the new pointer
5597 variable. This allows us to avoid changing all the
5598 code that expects a pointer to something that expects
5599 a direct variable. */
5600 if (pass == 0)
5601 continue;
5602
5603 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5604 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5605 {
5606 x = build_receiver_ref (var, false, ctx);
5607 if (ctx->allocate_map)
5608 if (tree *allocatep = ctx->allocate_map->get (var))
5609 {
5610 allocator = *allocatep;
5611 if (TREE_CODE (allocator) != INTEGER_CST)
5612 allocator = build_outer_var_ref (allocator, ctx);
5613 allocator = fold_convert (pointer_sized_int_node,
5614 allocator);
5615 allocate_ptr = unshare_expr (x);
5616 }
5617 if (allocator == NULL_TREE)
5618 x = build_fold_addr_expr_loc (clause_loc, x);
5619 }
5620 else if (lower_private_allocate (var, new_var, allocator,
5621 allocate_ptr,
5622 ilist, ctx, true, x))
5623 x = allocate_ptr;
5624 else if (TREE_CONSTANT (x))
5625 {
5626 /* For a reduction in a SIMD loop, defer adding the
5627 initialization of the reference: if we decide to use a
5628 SIMD array for it, the initialization could cause an
5629 expansion ICE. Ditto for other privatization clauses. */
5630 if (is_simd)
5631 x = NULL_TREE;
5632 else
5633 {
5634 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5635 get_name (var));
5636 gimple_add_tmp_var (x);
5637 TREE_ADDRESSABLE (x) = 1;
5638 x = build_fold_addr_expr_loc (clause_loc, x);
5639 }
5640 }
5641 else
5642 {
5643 tree atmp
5644 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5645 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5646 tree al = size_int (TYPE_ALIGN (rtype));
5647 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5648 }
5649
5650 if (x)
5651 {
5652 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5653 gimplify_assign (new_var, x, ilist);
5654 }
5655
5656 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5657 }
5658 else if ((c_kind == OMP_CLAUSE_REDUCTION
5659 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5660 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5661 {
5662 if (pass == 0)
5663 continue;
5664 }
5665 else if (pass != 0)
5666 continue;
5667
5668 switch (OMP_CLAUSE_CODE (c))
5669 {
5670 case OMP_CLAUSE_SHARED:
5671 /* Ignore shared directives in teams construct inside
5672 target construct. */
5673 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5674 && !is_host_teams_ctx (ctx))
5675 continue;
5676 /* Shared global vars are just accessed directly. */
5677 if (is_global_var (new_var))
5678 break;
5679 /* For taskloop firstprivate/lastprivate, represented
5680 as firstprivate and shared clause on the task, new_var
5681 is the firstprivate var. */
5682 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5683 break;
5684 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5685 needs to be delayed until after fixup_child_record_type so
5686 that we get the correct type during the dereference. */
5687 by_ref = use_pointer_for_field (var, ctx);
5688 x = build_receiver_ref (var, by_ref, ctx);
5689 SET_DECL_VALUE_EXPR (new_var, x);
5690 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5691
5692 /* ??? If VAR is not passed by reference, and the variable
5693 hasn't been initialized yet, then we'll get a warning for
5694 the store into the omp_data_s structure. Ideally, we'd be
5695 able to notice this and not store anything at all, but
5696 we're generating code too early. Suppress the warning. */
5697 if (!by_ref)
5698 suppress_warning (var, OPT_Wuninitialized);
5699 break;
5700
5701 case OMP_CLAUSE__CONDTEMP_:
5702 if (is_parallel_ctx (ctx))
5703 {
5704 x = build_receiver_ref (var, false, ctx);
5705 SET_DECL_VALUE_EXPR (new_var, x);
5706 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5707 }
5708 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5709 {
5710 x = build_zero_cst (TREE_TYPE (var));
5711 goto do_private;
5712 }
5713 break;
5714
5715 case OMP_CLAUSE_LASTPRIVATE:
5716 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5717 break;
5718 /* FALLTHRU */
5719
5720 case OMP_CLAUSE_PRIVATE:
5721 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5722 x = build_outer_var_ref (var, ctx);
5723 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5724 {
5725 if (is_task_ctx (ctx))
5726 x = build_receiver_ref (var, false, ctx);
5727 else
5728 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5729 }
5730 else
5731 x = NULL;
5732 do_private:
5733 tree nx;
5734 bool copy_ctor;
5735 copy_ctor = false;
5736 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5737 ilist, ctx, false, NULL_TREE);
5738 nx = unshare_expr (new_var);
5739 if (is_simd
5740 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5741 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5742 copy_ctor = true;
5743 if (copy_ctor)
5744 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5745 else
5746 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5747 if (is_simd)
5748 {
5749 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5750 if ((TREE_ADDRESSABLE (new_var) || nx || y
5751 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5752 && (gimple_omp_for_collapse (ctx->stmt) != 1
5753 || (gimple_omp_for_index (ctx->stmt, 0)
5754 != new_var)))
5755 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5756 || omp_is_reference (var))
5757 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5758 ivar, lvar))
5759 {
5760 if (omp_is_reference (var))
5761 {
5762 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5763 tree new_vard = TREE_OPERAND (new_var, 0);
5764 gcc_assert (DECL_P (new_vard));
5765 SET_DECL_VALUE_EXPR (new_vard,
5766 build_fold_addr_expr (lvar));
5767 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5768 }
5769
5770 if (nx)
5771 {
5772 tree iv = unshare_expr (ivar);
5773 if (copy_ctor)
5774 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5775 x);
5776 else
5777 x = lang_hooks.decls.omp_clause_default_ctor (c,
5778 iv,
5779 x);
5780 }
5781 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5782 {
5783 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5784 unshare_expr (ivar), x);
5785 nx = x;
5786 }
5787 if (nx && x)
5788 gimplify_and_add (x, &llist[0]);
5789 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5790 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5791 {
5792 tree v = new_var;
5793 if (!DECL_P (v))
5794 {
5795 gcc_assert (TREE_CODE (v) == MEM_REF);
5796 v = TREE_OPERAND (v, 0);
5797 gcc_assert (DECL_P (v));
5798 }
5799 v = *ctx->lastprivate_conditional_map->get (v);
5800 tree t = create_tmp_var (TREE_TYPE (v));
5801 tree z = build_zero_cst (TREE_TYPE (v));
5802 tree orig_v
5803 = build_outer_var_ref (var, ctx,
5804 OMP_CLAUSE_LASTPRIVATE);
5805 gimple_seq_add_stmt (dlist,
5806 gimple_build_assign (t, z));
5807 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5808 tree civar = DECL_VALUE_EXPR (v);
5809 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5810 civar = unshare_expr (civar);
5811 TREE_OPERAND (civar, 1) = sctx.idx;
5812 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5813 unshare_expr (civar));
5814 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5815 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5816 orig_v, unshare_expr (ivar)));
5817 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5818 civar);
5819 x = build3 (COND_EXPR, void_type_node, cond, x,
5820 void_node);
5821 gimple_seq tseq = NULL;
5822 gimplify_and_add (x, &tseq);
5823 if (ctx->outer)
5824 lower_omp (&tseq, ctx->outer);
5825 gimple_seq_add_seq (&llist[1], tseq);
5826 }
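/* The sequence just appended to llist[1] runs once per SIMD lane
   IDX and, sketched as source, keeps the store from the lane with
   the highest recorded iteration number:
     if (t < condtemp[IDX])
       {
         t = condtemp[IDX];
         orig_v = ivar[IDX];
       }  */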
5827 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5828 && ctx->for_simd_scan_phase)
5829 {
5830 x = unshare_expr (ivar);
5831 tree orig_v
5832 = build_outer_var_ref (var, ctx,
5833 OMP_CLAUSE_LASTPRIVATE);
5834 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5835 orig_v);
5836 gimplify_and_add (x, &llist[0]);
5837 }
5838 if (y)
5839 {
5840 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5841 if (y)
5842 gimplify_and_add (y, &llist[1]);
5843 }
5844 break;
5845 }
5846 if (omp_is_reference (var))
5847 {
5848 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5849 tree new_vard = TREE_OPERAND (new_var, 0);
5850 gcc_assert (DECL_P (new_vard));
5851 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5852 x = TYPE_SIZE_UNIT (type);
5853 if (TREE_CONSTANT (x))
5854 {
5855 x = create_tmp_var_raw (type, get_name (var));
5856 gimple_add_tmp_var (x);
5857 TREE_ADDRESSABLE (x) = 1;
5858 x = build_fold_addr_expr_loc (clause_loc, x);
5859 x = fold_convert_loc (clause_loc,
5860 TREE_TYPE (new_vard), x);
5861 gimplify_assign (new_vard, x, ilist);
5862 }
5863 }
5864 }
5865 if (nx)
5866 gimplify_and_add (nx, ilist);
5867 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5868 && is_simd
5869 && ctx->for_simd_scan_phase)
5870 {
5871 tree orig_v = build_outer_var_ref (var, ctx,
5872 OMP_CLAUSE_LASTPRIVATE);
5873 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5874 orig_v);
5875 gimplify_and_add (x, ilist);
5876 }
5877 /* FALLTHRU */
5878
5879 do_dtor:
5880 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5881 if (x)
5882 gimplify_and_add (x, dlist);
5883 if (allocator)
5884 {
5885 if (!is_gimple_val (allocator))
5886 {
5887 tree avar = create_tmp_var (TREE_TYPE (allocator));
5888 gimplify_assign (avar, allocator, dlist);
5889 allocator = avar;
5890 }
5891 if (!is_gimple_val (allocate_ptr))
5892 {
5893 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
5894 gimplify_assign (apvar, allocate_ptr, dlist);
5895 allocate_ptr = apvar;
5896 }
5897 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5898 gimple *g
5899 = gimple_build_call (f, 2, allocate_ptr, allocator);
5900 gimple_seq_add_stmt (dlist, g);
5901 }
5902 break;
5903
5904 case OMP_CLAUSE_LINEAR:
5905 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5906 goto do_firstprivate;
5907 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5908 x = NULL;
5909 else
5910 x = build_outer_var_ref (var, ctx);
5911 goto do_private;
5912
5913 case OMP_CLAUSE_FIRSTPRIVATE:
5914 if (is_task_ctx (ctx))
5915 {
5916 if ((omp_is_reference (var)
5917 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5918 || is_variable_sized (var))
5919 goto do_dtor;
5920 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5921 ctx))
5922 || use_pointer_for_field (var, NULL))
5923 {
5924 x = build_receiver_ref (var, false, ctx);
5925 if (ctx->allocate_map)
5926 if (tree *allocatep = ctx->allocate_map->get (var))
5927 {
5928 allocator = *allocatep;
5929 if (TREE_CODE (allocator) != INTEGER_CST)
5930 allocator = build_outer_var_ref (allocator, ctx);
5931 allocator = fold_convert (pointer_sized_int_node,
5932 allocator);
5933 allocate_ptr = unshare_expr (x);
5934 x = build_simple_mem_ref (x);
5935 TREE_THIS_NOTRAP (x) = 1;
5936 }
5937 SET_DECL_VALUE_EXPR (new_var, x);
5938 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5939 goto do_dtor;
5940 }
5941 }
5942 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5943 && omp_is_reference (var))
5944 {
5945 x = build_outer_var_ref (var, ctx);
5946 gcc_assert (TREE_CODE (x) == MEM_REF
5947 && integer_zerop (TREE_OPERAND (x, 1)));
5948 x = TREE_OPERAND (x, 0);
5949 x = lang_hooks.decls.omp_clause_copy_ctor
5950 (c, unshare_expr (new_var), x);
5951 gimplify_and_add (x, ilist);
5952 goto do_dtor;
5953 }
5954 do_firstprivate:
5955 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5956 ilist, ctx, false, NULL_TREE);
5957 x = build_outer_var_ref (var, ctx);
5958 if (is_simd)
5959 {
5960 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5961 && gimple_omp_for_combined_into_p (ctx->stmt))
5962 {
5963 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5964 tree stept = TREE_TYPE (t);
5965 tree ct = omp_find_clause (clauses,
5966 OMP_CLAUSE__LOOPTEMP_);
5967 gcc_assert (ct);
5968 tree l = OMP_CLAUSE_DECL (ct);
5969 tree n1 = fd->loop.n1;
5970 tree step = fd->loop.step;
5971 tree itype = TREE_TYPE (l);
5972 if (POINTER_TYPE_P (itype))
5973 itype = signed_type_for (itype);
5974 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5975 if (TYPE_UNSIGNED (itype)
5976 && fd->loop.cond_code == GT_EXPR)
5977 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5978 fold_build1 (NEGATE_EXPR, itype, l),
5979 fold_build1 (NEGATE_EXPR,
5980 itype, step));
5981 else
5982 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5983 t = fold_build2 (MULT_EXPR, stept,
5984 fold_convert (stept, l), t);
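/* T now holds this thread's linear offset from the original
   value: the iteration count executed before this chunk,
   (l - n1) / step, multiplied by the clause's linear step (a
   paraphrase of the folds above).  */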
5985
5986 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5987 {
5988 if (omp_is_reference (var))
5989 {
5990 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5991 tree new_vard = TREE_OPERAND (new_var, 0);
5992 gcc_assert (DECL_P (new_vard));
5993 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5994 nx = TYPE_SIZE_UNIT (type);
5995 if (TREE_CONSTANT (nx))
5996 {
5997 nx = create_tmp_var_raw (type,
5998 get_name (var));
5999 gimple_add_tmp_var (nx);
6000 TREE_ADDRESSABLE (nx) = 1;
6001 nx = build_fold_addr_expr_loc (clause_loc,
6002 nx);
6003 nx = fold_convert_loc (clause_loc,
6004 TREE_TYPE (new_vard),
6005 nx);
6006 gimplify_assign (new_vard, nx, ilist);
6007 }
6008 }
6009
6010 x = lang_hooks.decls.omp_clause_linear_ctor
6011 (c, new_var, x, t);
6012 gimplify_and_add (x, ilist);
6013 goto do_dtor;
6014 }
6015
6016 if (POINTER_TYPE_P (TREE_TYPE (x)))
6017 x = fold_build2 (POINTER_PLUS_EXPR,
6018 TREE_TYPE (x), x, t);
6019 else
6020 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
6021 }
6022
6023 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6024 || TREE_ADDRESSABLE (new_var)
6025 || omp_is_reference (var))
6026 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6027 ivar, lvar))
6028 {
6029 if (omp_is_reference (var))
6030 {
6031 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6032 tree new_vard = TREE_OPERAND (new_var, 0);
6033 gcc_assert (DECL_P (new_vard));
6034 SET_DECL_VALUE_EXPR (new_vard,
6035 build_fold_addr_expr (lvar));
6036 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6037 }
6038 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6039 {
6040 tree iv = create_tmp_var (TREE_TYPE (new_var));
6041 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6042 gimplify_and_add (x, ilist);
6043 gimple_stmt_iterator gsi
6044 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6045 gassign *g
6046 = gimple_build_assign (unshare_expr (lvar), iv);
6047 gsi_insert_before_without_update (&gsi, g,
6048 GSI_SAME_STMT);
6049 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6050 enum tree_code code = PLUS_EXPR;
6051 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6052 code = POINTER_PLUS_EXPR;
6053 g = gimple_build_assign (iv, code, iv, t);
6054 gsi_insert_before_without_update (&gsi, g,
6055 GSI_SAME_STMT);
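/* Sketched as source: IV is copy-constructed from the outer
   value in ilist, and the loop body now begins with
     lvar = iv;
     iv = iv + step;
   advancing the privatized linear variable by STEP on every
   iteration.  */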
6056 break;
6057 }
6058 x = lang_hooks.decls.omp_clause_copy_ctor
6059 (c, unshare_expr (ivar), x);
6060 gimplify_and_add (x, &llist[0]);
6061 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6062 if (x)
6063 gimplify_and_add (x, &llist[1]);
6064 break;
6065 }
6066 if (omp_is_reference (var))
6067 {
6068 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6069 tree new_vard = TREE_OPERAND (new_var, 0);
6070 gcc_assert (DECL_P (new_vard));
6071 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6072 nx = TYPE_SIZE_UNIT (type);
6073 if (TREE_CONSTANT (nx))
6074 {
6075 nx = create_tmp_var_raw (type, get_name (var));
6076 gimple_add_tmp_var (nx);
6077 TREE_ADDRESSABLE (nx) = 1;
6078 nx = build_fold_addr_expr_loc (clause_loc, nx);
6079 nx = fold_convert_loc (clause_loc,
6080 TREE_TYPE (new_vard), nx);
6081 gimplify_assign (new_vard, nx, ilist);
6082 }
6083 }
6084 }
6085 x = lang_hooks.decls.omp_clause_copy_ctor
6086 (c, unshare_expr (new_var), x);
6087 gimplify_and_add (x, ilist);
6088 goto do_dtor;
6089
6090 case OMP_CLAUSE__LOOPTEMP_:
6091 case OMP_CLAUSE__REDUCTEMP_:
6092 gcc_assert (is_taskreg_ctx (ctx));
6093 x = build_outer_var_ref (var, ctx);
6094 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6095 gimplify_and_add (x, ilist);
6096 break;
6097
6098 case OMP_CLAUSE_COPYIN:
6099 by_ref = use_pointer_for_field (var, NULL);
6100 x = build_receiver_ref (var, by_ref, ctx);
6101 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6102 append_to_statement_list (x, &copyin_seq);
6103 copyin_by_ref |= by_ref;
6104 break;
6105
6106 case OMP_CLAUSE_REDUCTION:
6107 case OMP_CLAUSE_IN_REDUCTION:
6108 /* OpenACC reductions are initialized using the
6109 GOACC_REDUCTION internal function. */
6110 if (is_gimple_omp_oacc (ctx->stmt))
6111 break;
6112 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6113 {
6114 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6115 gimple *tseq;
6116 tree ptype = TREE_TYPE (placeholder);
6117 if (cond)
6118 {
6119 x = error_mark_node;
6120 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6121 && !task_reduction_needs_orig_p)
6122 x = var;
6123 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6124 {
6125 tree pptype = build_pointer_type (ptype);
6126 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6127 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6128 size_int (task_reduction_cnt_full
6129 + task_reduction_cntorig - 1),
6130 NULL_TREE, NULL_TREE);
6131 else
6132 {
6133 unsigned int idx
6134 = *ctx->task_reduction_map->get (c);
6135 x = task_reduction_read (ilist, tskred_temp,
6136 pptype, 7 + 3 * idx);
6137 }
6138 x = fold_convert (pptype, x);
6139 x = build_simple_mem_ref (x);
6140 }
6141 }
6142 else
6143 {
6144 lower_private_allocate (var, new_var, allocator,
6145 allocate_ptr, ilist, ctx, false,
6146 NULL_TREE);
6147 x = build_outer_var_ref (var, ctx);
6148
6149 if (omp_is_reference (var)
6150 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6151 x = build_fold_addr_expr_loc (clause_loc, x);
6152 }
6153 SET_DECL_VALUE_EXPR (placeholder, x);
6154 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6155 tree new_vard = new_var;
6156 if (omp_is_reference (var))
6157 {
6158 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6159 new_vard = TREE_OPERAND (new_var, 0);
6160 gcc_assert (DECL_P (new_vard));
6161 }
6162 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6163 if (is_simd
6164 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6165 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6166 rvarp = &rvar;
6167 if (is_simd
6168 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6169 ivar, lvar, rvarp,
6170 &rvar2))
6171 {
6172 if (new_vard == new_var)
6173 {
6174 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6175 SET_DECL_VALUE_EXPR (new_var, ivar);
6176 }
6177 else
6178 {
6179 SET_DECL_VALUE_EXPR (new_vard,
6180 build_fold_addr_expr (ivar));
6181 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6182 }
6183 x = lang_hooks.decls.omp_clause_default_ctor
6184 (c, unshare_expr (ivar),
6185 build_outer_var_ref (var, ctx));
6186 if (rvarp && ctx->for_simd_scan_phase)
6187 {
6188 if (x)
6189 gimplify_and_add (x, &llist[0]);
6190 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6191 if (x)
6192 gimplify_and_add (x, &llist[1]);
6193 break;
6194 }
6195 else if (rvarp)
6196 {
6197 if (x)
6198 {
6199 gimplify_and_add (x, &llist[0]);
6200
6201 tree ivar2 = unshare_expr (lvar);
6202 TREE_OPERAND (ivar2, 1) = sctx.idx;
6203 x = lang_hooks.decls.omp_clause_default_ctor
6204 (c, ivar2, build_outer_var_ref (var, ctx));
6205 gimplify_and_add (x, &llist[0]);
6206
6207 if (rvar2)
6208 {
6209 x = lang_hooks.decls.omp_clause_default_ctor
6210 (c, unshare_expr (rvar2),
6211 build_outer_var_ref (var, ctx));
6212 gimplify_and_add (x, &llist[0]);
6213 }
6214
6215 /* For types that need construction, add another
6216 private var which will be default constructed
6217 and optionally initialized with
6218 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the loop
6219 we then want to assign this value instead of
6220 constructing and destructing it in each
6221 iteration. */
6222 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6223 gimple_add_tmp_var (nv);
6224 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6225 ? rvar2
6226 : ivar, 0),
6227 nv);
6228 x = lang_hooks.decls.omp_clause_default_ctor
6229 (c, nv, build_outer_var_ref (var, ctx));
6230 gimplify_and_add (x, ilist);
6231
6232 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6233 {
6234 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6235 x = DECL_VALUE_EXPR (new_vard);
6236 tree vexpr = nv;
6237 if (new_vard != new_var)
6238 vexpr = build_fold_addr_expr (nv);
6239 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6240 lower_omp (&tseq, ctx);
6241 SET_DECL_VALUE_EXPR (new_vard, x);
6242 gimple_seq_add_seq (ilist, tseq);
6243 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6244 }
6245
6246 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6247 if (x)
6248 gimplify_and_add (x, dlist);
6249 }
6250
6251 tree ref = build_outer_var_ref (var, ctx);
6252 x = unshare_expr (ivar);
6253 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6254 ref);
6255 gimplify_and_add (x, &llist[0]);
6256
6257 ref = build_outer_var_ref (var, ctx);
6258 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6259 rvar);
6260 gimplify_and_add (x, &llist[3]);
6261
6262 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6263 if (new_vard == new_var)
6264 SET_DECL_VALUE_EXPR (new_var, lvar);
6265 else
6266 SET_DECL_VALUE_EXPR (new_vard,
6267 build_fold_addr_expr (lvar));
6268
6269 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6270 if (x)
6271 gimplify_and_add (x, &llist[1]);
6272
6273 tree ivar2 = unshare_expr (lvar);
6274 TREE_OPERAND (ivar2, 1) = sctx.idx;
6275 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6276 if (x)
6277 gimplify_and_add (x, &llist[1]);
6278
6279 if (rvar2)
6280 {
6281 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6282 if (x)
6283 gimplify_and_add (x, &llist[1]);
6284 }
6285 break;
6286 }
6287 if (x)
6288 gimplify_and_add (x, &llist[0]);
6289 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6290 {
6291 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6292 lower_omp (&tseq, ctx);
6293 gimple_seq_add_seq (&llist[0], tseq);
6294 }
6295 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6296 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6297 lower_omp (&tseq, ctx);
6298 gimple_seq_add_seq (&llist[1], tseq);
6299 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6300 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6301 if (new_vard == new_var)
6302 SET_DECL_VALUE_EXPR (new_var, lvar);
6303 else
6304 SET_DECL_VALUE_EXPR (new_vard,
6305 build_fold_addr_expr (lvar));
6306 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6307 if (x)
6308 gimplify_and_add (x, &llist[1]);
6309 break;
6310 }
6311 /* If this is a reference to a constant-size reduction var
6312 with a placeholder, we haven't emitted the initializer
6313 for it yet, because that is undesirable if SIMD arrays
6314 are used. But if they aren't used, we need to emit the
6315 deferred initialization now. */
6316 else if (omp_is_reference (var) && is_simd)
6317 handle_simd_reference (clause_loc, new_vard, ilist);
6318
6319 tree lab2 = NULL_TREE;
6320 if (cond)
6321 {
6322 gimple *g;
6323 if (!is_parallel_ctx (ctx))
6324 {
6325 tree condv = create_tmp_var (boolean_type_node);
6326 tree m = build_simple_mem_ref (cond);
6327 g = gimple_build_assign (condv, m);
6328 gimple_seq_add_stmt (ilist, g);
6329 tree lab1
6330 = create_artificial_label (UNKNOWN_LOCATION);
6331 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6332 g = gimple_build_cond (NE_EXPR, condv,
6333 boolean_false_node,
6334 lab2, lab1);
6335 gimple_seq_add_stmt (ilist, g);
6336 gimple_seq_add_stmt (ilist,
6337 gimple_build_label (lab1));
6338 }
6339 g = gimple_build_assign (build_simple_mem_ref (cond),
6340 boolean_true_node);
6341 gimple_seq_add_stmt (ilist, g);
6342 }
6343 x = lang_hooks.decls.omp_clause_default_ctor
6344 (c, unshare_expr (new_var),
6345 cond ? NULL_TREE
6346 : build_outer_var_ref (var, ctx));
6347 if (x)
6348 gimplify_and_add (x, ilist);
6349
6350 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6351 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6352 {
6353 if (ctx->for_simd_scan_phase)
6354 goto do_dtor;
6355 if (x || (!is_simd
6356 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6357 {
6358 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6359 gimple_add_tmp_var (nv);
6360 ctx->cb.decl_map->put (new_vard, nv);
6361 x = lang_hooks.decls.omp_clause_default_ctor
6362 (c, nv, build_outer_var_ref (var, ctx));
6363 if (x)
6364 gimplify_and_add (x, ilist);
6365 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6366 {
6367 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6368 tree vexpr = nv;
6369 if (new_vard != new_var)
6370 vexpr = build_fold_addr_expr (nv);
6371 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6372 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6373 lower_omp (&tseq, ctx);
6374 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6375 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6376 gimple_seq_add_seq (ilist, tseq);
6377 }
6378 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6379 if (is_simd && ctx->scan_exclusive)
6380 {
6381 tree nv2
6382 = create_tmp_var_raw (TREE_TYPE (new_var));
6383 gimple_add_tmp_var (nv2);
6384 ctx->cb.decl_map->put (nv, nv2);
6385 x = lang_hooks.decls.omp_clause_default_ctor
6386 (c, nv2, build_outer_var_ref (var, ctx));
6387 gimplify_and_add (x, ilist);
6388 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6389 if (x)
6390 gimplify_and_add (x, dlist);
6391 }
6392 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6393 if (x)
6394 gimplify_and_add (x, dlist);
6395 }
6396 else if (is_simd
6397 && ctx->scan_exclusive
6398 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6399 {
6400 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6401 gimple_add_tmp_var (nv2);
6402 ctx->cb.decl_map->put (new_vard, nv2);
6403 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6404 if (x)
6405 gimplify_and_add (x, dlist);
6406 }
6407 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6408 goto do_dtor;
6409 }
6410
6411 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6412 {
6413 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6414 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6415 && is_omp_target (ctx->stmt))
6416 {
6417 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6418 tree oldv = NULL_TREE;
6419 gcc_assert (d);
6420 if (DECL_HAS_VALUE_EXPR_P (d))
6421 oldv = DECL_VALUE_EXPR (d);
6422 SET_DECL_VALUE_EXPR (d, new_vard);
6423 DECL_HAS_VALUE_EXPR_P (d) = 1;
6424 lower_omp (&tseq, ctx);
6425 if (oldv)
6426 SET_DECL_VALUE_EXPR (d, oldv);
6427 else
6428 {
6429 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6430 DECL_HAS_VALUE_EXPR_P (d) = 0;
6431 }
6432 }
6433 else
6434 lower_omp (&tseq, ctx);
6435 gimple_seq_add_seq (ilist, tseq);
6436 }
6437 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6438 if (is_simd)
6439 {
6440 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6441 lower_omp (&tseq, ctx);
6442 gimple_seq_add_seq (dlist, tseq);
6443 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6444 }
6445 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6446 if (cond)
6447 {
6448 if (lab2)
6449 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6450 break;
6451 }
6452 goto do_dtor;
6453 }
6454 else
6455 {
6456 x = omp_reduction_init (c, TREE_TYPE (new_var));
6457 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6458 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6459
6460 if (cond)
6461 {
6462 gimple *g;
6463 tree lab2 = NULL_TREE;
6464 /* GOMP_taskgroup_reduction_register memsets the whole
6465 array to zero. If the initializer is zero, we don't
6466 need to initialize it again, just mark it as ever
6467 used unconditionally, i.e. cond = true. */
6468 if (initializer_zerop (x))
6469 {
6470 g = gimple_build_assign (build_simple_mem_ref (cond),
6471 boolean_true_node);
6472 gimple_seq_add_stmt (ilist, g);
6473 break;
6474 }
6475
6476 /* Otherwise, emit
6477 if (!cond) { cond = true; new_var = x; } */
6478 if (!is_parallel_ctx (ctx))
6479 {
6480 tree condv = create_tmp_var (boolean_type_node);
6481 tree m = build_simple_mem_ref (cond);
6482 g = gimple_build_assign (condv, m);
6483 gimple_seq_add_stmt (ilist, g);
6484 tree lab1
6485 = create_artificial_label (UNKNOWN_LOCATION);
6486 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6487 g = gimple_build_cond (NE_EXPR, condv,
6488 boolean_false_node,
6489 lab2, lab1);
6490 gimple_seq_add_stmt (ilist, g);
6491 gimple_seq_add_stmt (ilist,
6492 gimple_build_label (lab1));
6493 }
6494 g = gimple_build_assign (build_simple_mem_ref (cond),
6495 boolean_true_node);
6496 gimple_seq_add_stmt (ilist, g);
6497 gimplify_assign (new_var, x, ilist);
6498 if (lab2)
6499 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6500 break;
6501 }
6502
6503 /* reduction(-:var) sums up the partial results, so it
6504 acts identically to reduction(+:var). */
6505 if (code == MINUS_EXPR)
6506 code = PLUS_EXPR;
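/* E.g. for
     #pragma omp parallel for reduction (-:sum)
   each thread accumulates its partial result and the partial
   results are combined with +, exactly as for
   reduction (+:sum).  */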
6507
6508 /* C/C++ permits FP/complex with || and &&. */
6509 bool is_fp_and_or
6510 = ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6511 && (FLOAT_TYPE_P (TREE_TYPE (new_var))
6512 || TREE_CODE (TREE_TYPE (new_var)) == COMPLEX_TYPE));
6513 tree new_vard = new_var;
6514 if (is_simd && omp_is_reference (var))
6515 {
6516 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6517 new_vard = TREE_OPERAND (new_var, 0);
6518 gcc_assert (DECL_P (new_vard));
6519 }
6520 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6521 if (is_simd
6522 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6523 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6524 rvarp = &rvar;
6525 if (is_simd
6526 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6527 ivar, lvar, rvarp,
6528 &rvar2))
6529 {
6530 if (new_vard != new_var)
6531 {
6532 SET_DECL_VALUE_EXPR (new_vard,
6533 build_fold_addr_expr (lvar));
6534 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6535 }
6536
6537 tree ref = build_outer_var_ref (var, ctx);
6538
6539 if (rvarp)
6540 {
6541 if (ctx->for_simd_scan_phase)
6542 break;
6543 gimplify_assign (ivar, ref, &llist[0]);
6544 ref = build_outer_var_ref (var, ctx);
6545 gimplify_assign (ref, rvar, &llist[3]);
6546 break;
6547 }
6548
6549 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6550
6551 if (sctx.is_simt)
6552 {
6553 if (!simt_lane)
6554 simt_lane = create_tmp_var (unsigned_type_node);
6555 x = build_call_expr_internal_loc
6556 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6557 TREE_TYPE (ivar), 2, ivar, simt_lane);
6558 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6559 gimplify_assign (ivar, x, &llist[2]);
6560 }
6561 tree ivar2 = ivar;
6562 tree ref2 = ref;
6563 if (is_fp_and_or)
6564 {
6565 tree zero = build_zero_cst (TREE_TYPE (ivar));
6566 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6567 integer_type_node, ivar,
6568 zero);
6569 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6570 integer_type_node, ref, zero);
6571 }
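/* TRUTH_ANDIF/TRUTH_ORIF are not defined on FP or complex
   operands, so both sides are first compared against zero and
   the 0/1 result is converted back to the reduction variable's
   type below.  */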
6572 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6573 if (is_fp_and_or)
6574 x = fold_convert (TREE_TYPE (ref), x);
6575 ref = build_outer_var_ref (var, ctx);
6576 gimplify_assign (ref, x, &llist[1]);
6577
6578 }
6579 else
6580 {
6581 lower_private_allocate (var, new_var, allocator,
6582 allocate_ptr, ilist, ctx,
6583 false, NULL_TREE);
6584 if (omp_is_reference (var) && is_simd)
6585 handle_simd_reference (clause_loc, new_vard, ilist);
6586 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6587 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6588 break;
6589 gimplify_assign (new_var, x, ilist);
6590 if (is_simd)
6591 {
6592 tree ref = build_outer_var_ref (var, ctx);
6593 tree new_var2 = new_var;
6594 tree ref2 = ref;
6595 if (is_fp_and_or)
6596 {
6597 tree zero = build_zero_cst (TREE_TYPE (new_var));
6598 new_var2
6599 = fold_build2_loc (clause_loc, NE_EXPR,
6600 integer_type_node, new_var,
6601 zero);
6602 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6603 integer_type_node, ref,
6604 zero);
6605 }
6606 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6607 if (is_fp_and_or)
6608 x = fold_convert (TREE_TYPE (new_var), x);
6609 ref = build_outer_var_ref (var, ctx);
6610 gimplify_assign (ref, x, dlist);
6611 }
6612 if (allocator)
6613 goto do_dtor;
6614 }
6615 }
6616 break;
6617
6618 default:
6619 gcc_unreachable ();
6620 }
6621 }
6622 }
6623 if (tskred_avar)
6624 {
6625 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6626 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6627 }
6628
6629 if (known_eq (sctx.max_vf, 1U))
6630 {
6631 sctx.is_simt = false;
6632 if (ctx->lastprivate_conditional_map)
6633 {
6634 if (gimple_omp_for_combined_into_p (ctx->stmt))
6635 {
6636 /* Signal to lower_omp_1 that it should use the parent context. */
6637 ctx->combined_into_simd_safelen1 = true;
6638 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6639 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6640 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6641 {
6642 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6643 omp_context *outer = ctx->outer;
6644 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6645 outer = outer->outer;
6646 tree *v = ctx->lastprivate_conditional_map->get (o);
6647 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6648 tree *pv = outer->lastprivate_conditional_map->get (po);
6649 *v = *pv;
6650 }
6651 }
6652 else
6653 {
6654 /* When not vectorized, treat lastprivate(conditional:) like
6655 normal lastprivate, as there will be just one simd lane
6656 writing the privatized variable. */
6657 delete ctx->lastprivate_conditional_map;
6658 ctx->lastprivate_conditional_map = NULL;
6659 }
6660 }
6661 }
6662
6663 if (nonconst_simd_if)
6664 {
6665 if (sctx.lane == NULL_TREE)
6666 {
6667 sctx.idx = create_tmp_var (unsigned_type_node);
6668 sctx.lane = create_tmp_var (unsigned_type_node);
6669 }
6670 /* FIXME: For now. */
6671 sctx.is_simt = false;
6672 }
6673
6674 if (sctx.lane || sctx.is_simt)
6675 {
6676 uid = create_tmp_var (ptr_type_node, "simduid");
6677 /* Don't warn about simduid being uninitialized: it always is, since
6678 we use it only for its DECL_UID, never for its value. */
6679 suppress_warning (uid, OPT_Wuninitialized);
6680 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6681 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6682 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6683 gimple_omp_for_set_clauses (ctx->stmt, c);
6684 }
6685 /* Emit calls denoting privatized variables and initializing a pointer
6686 to the structure that holds them as fields after the ompdevlow pass. */
6687 if (sctx.is_simt)
6688 {
6689 sctx.simt_eargs[0] = uid;
6690 gimple *g
6691 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6692 gimple_call_set_lhs (g, uid);
6693 gimple_seq_add_stmt (ilist, g);
6694 sctx.simt_eargs.release ();
6695
6696 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6697 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6698 gimple_call_set_lhs (g, simtrec);
6699 gimple_seq_add_stmt (ilist, g);
6700 }
6701 if (sctx.lane)
6702 {
6703 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6704 2 + (nonconst_simd_if != NULL),
6705 uid, integer_zero_node,
6706 nonconst_simd_if);
6707 gimple_call_set_lhs (g, sctx.lane);
6708 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6709 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6710 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6711 build_int_cst (unsigned_type_node, 0));
6712 gimple_seq_add_stmt (ilist, g);
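/* A sketch of the result: the loop body now begins with
     lane = GOMP_SIMD_LANE (simduid);
   while LANE is reset to 0 before the loop, so the privatization
   statements emitted into ilist address lane 0 of the "omp simd
   array" variables.  */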
6713 if (sctx.lastlane)
6714 {
6715 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6716 2, uid, sctx.lane);
6717 gimple_call_set_lhs (g, sctx.lastlane);
6718 gimple_seq_add_stmt (dlist, g);
6719 gimple_seq_add_seq (dlist, llist[3]);
6720 }
6721 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6722 if (llist[2])
6723 {
6724 tree simt_vf = create_tmp_var (unsigned_type_node);
6725 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6726 gimple_call_set_lhs (g, simt_vf);
6727 gimple_seq_add_stmt (dlist, g);
6728
6729 tree t = build_int_cst (unsigned_type_node, 1);
6730 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6731 gimple_seq_add_stmt (dlist, g);
6732
6733 t = build_int_cst (unsigned_type_node, 0);
6734 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6735 gimple_seq_add_stmt (dlist, g);
6736
6737 tree body = create_artificial_label (UNKNOWN_LOCATION);
6738 tree header = create_artificial_label (UNKNOWN_LOCATION);
6739 tree end = create_artificial_label (UNKNOWN_LOCATION);
6740 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6741 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6742
6743 gimple_seq_add_seq (dlist, llist[2]);
6744
6745 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6746 gimple_seq_add_stmt (dlist, g);
6747
6748 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6749 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6750 gimple_seq_add_stmt (dlist, g);
6751
6752 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6753 }
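/* A sketch of the SIMT reduction loop built above:
     simt_vf = GOMP_SIMT_VF ();
     simt_lane = 1;
     goto header;
   body:
     <llist[2]: x = x OP GOMP_SIMT_XCHG_BFLY (x, simt_lane)>
     simt_lane <<= 1;
   header:
     if (simt_lane < simt_vf) goto body;
   end:  */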
6754 for (int i = 0; i < 2; i++)
6755 if (llist[i])
6756 {
6757 tree vf = create_tmp_var (unsigned_type_node);
6758 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6759 gimple_call_set_lhs (g, vf);
6760 gimple_seq *seq = i == 0 ? ilist : dlist;
6761 gimple_seq_add_stmt (seq, g);
6762 tree t = build_int_cst (unsigned_type_node, 0);
6763 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6764 gimple_seq_add_stmt (seq, g);
6765 tree body = create_artificial_label (UNKNOWN_LOCATION);
6766 tree header = create_artificial_label (UNKNOWN_LOCATION);
6767 tree end = create_artificial_label (UNKNOWN_LOCATION);
6768 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6769 gimple_seq_add_stmt (seq, gimple_build_label (body));
6770 gimple_seq_add_seq (seq, llist[i]);
6771 t = build_int_cst (unsigned_type_node, 1);
6772 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6773 gimple_seq_add_stmt (seq, g);
6774 gimple_seq_add_stmt (seq, gimple_build_label (header));
6775 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6776 gimple_seq_add_stmt (seq, g);
6777 gimple_seq_add_stmt (seq, gimple_build_label (end));
6778 }
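/* Each such loop walks sctx.idx from 0 to the vectorization
   factor returned by GOMP_SIMD_VF, running the per-lane
   constructors (llist[0], emitted into ilist) or destructors
   (llist[1], emitted into dlist) of the privatized SIMD array
   elements.  */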
6779 }
6780 if (sctx.is_simt)
6781 {
6782 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6783 gimple *g
6784 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6785 gimple_seq_add_stmt (dlist, g);
6786 }
6787
6788 /* The copyin sequence is not to be executed by the main thread, since
6789 that would result in self-copies. Perhaps harmless for scalars, but
6790 certainly not for C++ operator=. */
6791 if (copyin_seq)
6792 {
6793 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6794 0);
6795 x = build2 (NE_EXPR, boolean_type_node, x,
6796 build_int_cst (TREE_TYPE (x), 0));
6797 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6798 gimplify_and_add (x, ilist);
6799 }
6800
6801 /* If any copyin variable is passed by reference, we must ensure the
6802 master thread doesn't modify it before it is copied over in all
6803 threads. Similarly, for variables in both firstprivate and
6804 lastprivate clauses, we need to ensure that the lastprivate
6805 copying happens after the firstprivate copying in all threads.
6806 And similarly for UDRs whose initializer refers to omp_orig. */
6807 if (copyin_by_ref || lastprivate_firstprivate
6808 || (reduction_omp_orig_ref
6809 && !ctx->scan_inclusive
6810 && !ctx->scan_exclusive))
6811 {
6812 /* Don't add any barrier for #pragma omp simd or
6813 #pragma omp distribute. */
6814 if (!is_task_ctx (ctx)
6815 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6816 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6817 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6818 }
6819
6820 /* If max_vf is non-zero, then we can use only a vectorization factor
6821 up to the max_vf we chose. So stick it into the safelen clause. */
6822 if (maybe_ne (sctx.max_vf, 0U))
6823 {
6824 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6825 OMP_CLAUSE_SAFELEN);
6826 poly_uint64 safe_len;
6827 if (c == NULL_TREE
6828 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6829 && maybe_gt (safe_len, sctx.max_vf)))
6830 {
6831 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6832 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6833 sctx.max_vf);
6834 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6835 gimple_omp_for_set_clauses (ctx->stmt, c);
6836 }
6837 }
6838 }
6839
6840 /* Create temporary variables for lastprivate(conditional:) implementation
6841 in context CTX with CLAUSES. */
6842
6843 static void
6844 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6845 {
6846 tree iter_type = NULL_TREE;
6847 tree cond_ptr = NULL_TREE;
6848 tree iter_var = NULL_TREE;
6849 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6850 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6851 tree next = *clauses;
6852 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6853 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6854 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6855 {
6856 if (is_simd)
6857 {
6858 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6859 gcc_assert (cc);
6860 if (iter_type == NULL_TREE)
6861 {
6862 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6863 iter_var = create_tmp_var_raw (iter_type);
6864 DECL_CONTEXT (iter_var) = current_function_decl;
6865 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6866 DECL_CHAIN (iter_var) = ctx->block_vars;
6867 ctx->block_vars = iter_var;
6868 tree c3
6869 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6870 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6871 OMP_CLAUSE_DECL (c3) = iter_var;
6872 OMP_CLAUSE_CHAIN (c3) = *clauses;
6873 *clauses = c3;
6874 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6875 }
6876 next = OMP_CLAUSE_CHAIN (cc);
6877 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6878 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6879 ctx->lastprivate_conditional_map->put (o, v);
6880 continue;
6881 }
6882 if (iter_type == NULL)
6883 {
6884 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6885 {
6886 struct omp_for_data fd;
6887 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6888 NULL);
6889 iter_type = unsigned_type_for (fd.iter_type);
6890 }
6891 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6892 iter_type = unsigned_type_node;
6893 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6894 if (c2)
6895 {
6896 cond_ptr
6897 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6898 OMP_CLAUSE_DECL (c2) = cond_ptr;
6899 }
6900 else
6901 {
6902 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6903 DECL_CONTEXT (cond_ptr) = current_function_decl;
6904 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6905 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6906 ctx->block_vars = cond_ptr;
6907 c2 = build_omp_clause (UNKNOWN_LOCATION,
6908 OMP_CLAUSE__CONDTEMP_);
6909 OMP_CLAUSE_DECL (c2) = cond_ptr;
6910 OMP_CLAUSE_CHAIN (c2) = *clauses;
6911 *clauses = c2;
6912 }
6913 iter_var = create_tmp_var_raw (iter_type);
6914 DECL_CONTEXT (iter_var) = current_function_decl;
6915 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6916 DECL_CHAIN (iter_var) = ctx->block_vars;
6917 ctx->block_vars = iter_var;
6918 tree c3
6919 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6920 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6921 OMP_CLAUSE_DECL (c3) = iter_var;
6922 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6923 OMP_CLAUSE_CHAIN (c2) = c3;
6924 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6925 }
6926 tree v = create_tmp_var_raw (iter_type);
6927 DECL_CONTEXT (v) = current_function_decl;
6928 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6929 DECL_CHAIN (v) = ctx->block_vars;
6930 ctx->block_vars = v;
6931 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6932 ctx->lastprivate_conditional_map->put (o, v);
6933 }
6934 }
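/* E.g. for
     #pragma omp for lastprivate (conditional: x)
   the above arranges (as a high-level sketch) for a _condtemp_
   iteration counter plus one counter temporary per conditional
   lastprivate variable; every store to the variable later records
   the current iteration in that temporary, so the highest value
   identifies the winning store.  */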
6935
6936
6937 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6938 both parallel and workshare constructs. PREDICATE may be NULL if it's
6939 always true. BODY_P is the sequence in which to insert any early
6940 initialization if needed, STMT_LIST is where the non-conditional
6941 lastprivate handling goes, and CSTMT_LIST is a sequence that needs to
6942 be run in a critical section. */
6943
6944 static void
6945 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6946 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6947 omp_context *ctx)
6948 {
6949 tree x, c, label = NULL, orig_clauses = clauses;
6950 bool par_clauses = false;
6951 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6952 unsigned HOST_WIDE_INT conditional_off = 0;
6953 gimple_seq post_stmt_list = NULL;
6954
6955 /* Early exit if there are no lastprivate or linear clauses. */
6956 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6957 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6958 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6959 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6960 break;
6961 if (clauses == NULL)
6962 {
6963 /* If this was a workshare clause, see if it had been combined
6964 with its parallel. In that case, look for the clauses on the
6965 parallel statement itself. */
6966 if (is_parallel_ctx (ctx))
6967 return;
6968
6969 ctx = ctx->outer;
6970 if (ctx == NULL || !is_parallel_ctx (ctx))
6971 return;
6972
6973 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6974 OMP_CLAUSE_LASTPRIVATE);
6975 if (clauses == NULL)
6976 return;
6977 par_clauses = true;
6978 }
6979
6980 bool maybe_simt = false;
6981 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6982 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6983 {
6984 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6985 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6986 if (simduid)
6987 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6988 }
6989
6990 if (predicate)
6991 {
6992 gcond *stmt;
6993 tree label_true, arm1, arm2;
6994 enum tree_code pred_code = TREE_CODE (predicate);
6995
6996 label = create_artificial_label (UNKNOWN_LOCATION);
6997 label_true = create_artificial_label (UNKNOWN_LOCATION);
6998 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6999 {
7000 arm1 = TREE_OPERAND (predicate, 0);
7001 arm2 = TREE_OPERAND (predicate, 1);
7002 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7003 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7004 }
7005 else
7006 {
7007 arm1 = predicate;
7008 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7009 arm2 = boolean_false_node;
7010 pred_code = NE_EXPR;
7011 }
7012 if (maybe_simt)
7013 {
7014 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7015 c = fold_convert (integer_type_node, c);
7016 simtcond = create_tmp_var (integer_type_node);
7017 gimplify_assign (simtcond, c, stmt_list);
7018 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7019 1, simtcond);
7020 c = create_tmp_var (integer_type_node);
7021 gimple_call_set_lhs (g, c);
7022 gimple_seq_add_stmt (stmt_list, g);
7023 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7024 label_true, label);
7025 }
7026 else
7027 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7028 gimple_seq_add_stmt (stmt_list, stmt);
7029 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7030 }
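/* I.e. the lastprivate stores are guarded roughly as
     if (!(PREDICATE)) goto label;
   label_true:
     <stores>
   label:
   and with SIMT a GOMP_SIMT_VOTE_ANY on the predicate makes all
   lanes of the warp agree on the branch.  */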
7031
7032 tree cond_ptr = NULL_TREE;
7033 for (c = clauses; c ;)
7034 {
7035 tree var, new_var;
7036 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7037 gimple_seq *this_stmt_list = stmt_list;
7038 tree lab2 = NULL_TREE;
7039
7040 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7041 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7042 && ctx->lastprivate_conditional_map
7043 && !ctx->combined_into_simd_safelen1)
7044 {
7045 gcc_assert (body_p);
7046 if (simduid)
7047 goto next;
7048 if (cond_ptr == NULL_TREE)
7049 {
7050 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7051 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7052 }
7053 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7054 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7055 tree v = *ctx->lastprivate_conditional_map->get (o);
7056 gimplify_assign (v, build_zero_cst (type), body_p);
7057 this_stmt_list = cstmt_list;
7058 tree mem;
7059 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7060 {
7061 mem = build2 (MEM_REF, type, cond_ptr,
7062 build_int_cst (TREE_TYPE (cond_ptr),
7063 conditional_off));
7064 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7065 }
7066 else
7067 mem = build4 (ARRAY_REF, type, cond_ptr,
7068 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7069 tree mem2 = copy_node (mem);
7070 gimple_seq seq = NULL;
7071 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7072 gimple_seq_add_seq (this_stmt_list, seq);
7073 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7074 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7075 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7076 gimple_seq_add_stmt (this_stmt_list, g);
7077 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7078 gimplify_assign (mem2, v, this_stmt_list);
7079 }
7080 else if (predicate
7081 && ctx->combined_into_simd_safelen1
7082 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7083 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7084 && ctx->lastprivate_conditional_map)
7085 this_stmt_list = &post_stmt_list;
7086
7087 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7088 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7089 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7090 {
7091 var = OMP_CLAUSE_DECL (c);
7092 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7093 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7094 && is_taskloop_ctx (ctx))
7095 {
7096 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7097 new_var = lookup_decl (var, ctx->outer);
7098 }
7099 else
7100 {
7101 new_var = lookup_decl (var, ctx);
7102 /* Avoid uninitialized warnings for lastprivate and
7103 for linear iterators. */
7104 if (predicate
7105 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7106 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7107 suppress_warning (new_var, OPT_Wuninitialized);
7108 }
7109
7110 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7111 {
7112 tree val = DECL_VALUE_EXPR (new_var);
7113 if (TREE_CODE (val) == ARRAY_REF
7114 && VAR_P (TREE_OPERAND (val, 0))
7115 && lookup_attribute ("omp simd array",
7116 DECL_ATTRIBUTES (TREE_OPERAND (val,
7117 0))))
7118 {
7119 if (lastlane == NULL)
7120 {
7121 lastlane = create_tmp_var (unsigned_type_node);
7122 gcall *g
7123 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7124 2, simduid,
7125 TREE_OPERAND (val, 1));
7126 gimple_call_set_lhs (g, lastlane);
7127 gimple_seq_add_stmt (this_stmt_list, g);
7128 }
7129 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7130 TREE_OPERAND (val, 0), lastlane,
7131 NULL_TREE, NULL_TREE);
7132 TREE_THIS_NOTRAP (new_var) = 1;
7133 }
7134 }
7135 else if (maybe_simt)
7136 {
7137 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7138 ? DECL_VALUE_EXPR (new_var)
7139 : new_var);
7140 if (simtlast == NULL)
7141 {
7142 simtlast = create_tmp_var (unsigned_type_node);
7143 gcall *g = gimple_build_call_internal
7144 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7145 gimple_call_set_lhs (g, simtlast);
7146 gimple_seq_add_stmt (this_stmt_list, g);
7147 }
7148 x = build_call_expr_internal_loc
7149 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7150 TREE_TYPE (val), 2, val, simtlast);
7151 new_var = unshare_expr (new_var);
7152 gimplify_assign (new_var, x, this_stmt_list);
7153 new_var = unshare_expr (new_var);
7154 }
7155
7156 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7157 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7158 {
7159 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7160 gimple_seq_add_seq (this_stmt_list,
7161 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7162 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7163 }
7164 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7165 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7166 {
7167 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7168 gimple_seq_add_seq (this_stmt_list,
7169 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7170 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7171 }
7172
7173 x = NULL_TREE;
7174 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7175 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7176 && is_taskloop_ctx (ctx))
7177 {
7178 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7179 ctx->outer->outer);
7180 if (is_global_var (ovar))
7181 x = ovar;
7182 }
7183 if (!x)
7184 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7185 if (omp_is_reference (var))
7186 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7187 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7188 gimplify_and_add (x, this_stmt_list);
7189
7190 if (lab2)
7191 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7192 }
7193
7194 next:
7195 c = OMP_CLAUSE_CHAIN (c);
7196 if (c == NULL && !par_clauses)
7197 {
7198 /* If this was a workshare clause, see if it had been combined
7199 with its parallel. In that case, continue looking for the
7200 clauses also on the parallel statement itself. */
7201 if (is_parallel_ctx (ctx))
7202 break;
7203
7204 ctx = ctx->outer;
7205 if (ctx == NULL || !is_parallel_ctx (ctx))
7206 break;
7207
7208 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7209 OMP_CLAUSE_LASTPRIVATE);
7210 par_clauses = true;
7211 }
7212 }
7213
7214 if (label)
7215 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7216 gimple_seq_add_seq (stmt_list, post_stmt_list);
7217 }
7218
7219 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7220 (which might be a placeholder). INNER is true if this is an inner
7221 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7222 join markers. Generate the before-loop forking sequence in
7223 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
7224 general form of these sequences is
7225
7226 GOACC_REDUCTION_SETUP
7227 GOACC_FORK
7228 GOACC_REDUCTION_INIT
7229 ...
7230 GOACC_REDUCTION_FINI
7231 GOACC_JOIN
7232 GOACC_REDUCTION_TEARDOWN. */
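
/* For illustration (a sketch only; the exact GIMPLE depends on the type
   and the reduction operation), 'reduction (+:sum)' on a gang loop
   becomes roughly:

     sum.1 = GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, off);
     GOACC_FORK
     sum.2 = GOACC_REDUCTION (INIT, ref_to_res, sum.1, level, +, off);
     ... loop body ...
     sum.3 = GOACC_REDUCTION (FINI, ref_to_res, sum.2, level, +, off);
     GOACC_JOIN
     sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, sum.3, level, +, off);  */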
7233
7234 static void
7235 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7236 gcall *fork, gcall *private_marker, gcall *join,
7237 gimple_seq *fork_seq, gimple_seq *join_seq,
7238 omp_context *ctx)
7239 {
7240 gimple_seq before_fork = NULL;
7241 gimple_seq after_fork = NULL;
7242 gimple_seq before_join = NULL;
7243 gimple_seq after_join = NULL;
7244 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7245 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7246 unsigned offset = 0;
7247
7248 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7249 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7250 {
7251 /* No 'reduction' clauses on OpenACC 'kernels'. */
7252 gcc_checking_assert (!is_oacc_kernels (ctx));
7253 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7254 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7255
7256 tree orig = OMP_CLAUSE_DECL (c);
7257 tree var = maybe_lookup_decl (orig, ctx);
7258 tree ref_to_res = NULL_TREE;
7259 tree incoming, outgoing, v1, v2, v3;
7260 bool is_private = false;
7261
7262 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7263 if (rcode == MINUS_EXPR)
7264 rcode = PLUS_EXPR;
7265 else if (rcode == TRUTH_ANDIF_EXPR)
7266 rcode = BIT_AND_EXPR;
7267 else if (rcode == TRUTH_ORIF_EXPR)
7268 rcode = BIT_IOR_EXPR;
7269 tree op = build_int_cst (unsigned_type_node, rcode);
7270
7271 if (!var)
7272 var = orig;
7273
7274 incoming = outgoing = var;
7275
7276 if (!inner)
7277 {
7278 /* See if an outer construct also reduces this variable. */
7279 omp_context *outer = ctx;
7280
7281 while (omp_context *probe = outer->outer)
7282 {
7283 enum gimple_code type = gimple_code (probe->stmt);
7284 tree cls;
7285
7286 switch (type)
7287 {
7288 case GIMPLE_OMP_FOR:
7289 cls = gimple_omp_for_clauses (probe->stmt);
7290 break;
7291
7292 case GIMPLE_OMP_TARGET:
7293 /* No 'reduction' clauses inside OpenACC 'kernels'
7294 regions. */
7295 gcc_checking_assert (!is_oacc_kernels (probe));
7296
7297 if (!is_gimple_omp_offloaded (probe->stmt))
7298 goto do_lookup;
7299
7300 cls = gimple_omp_target_clauses (probe->stmt);
7301 break;
7302
7303 default:
7304 goto do_lookup;
7305 }
7306
7307 outer = probe;
7308 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7309 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7310 && orig == OMP_CLAUSE_DECL (cls))
7311 {
7312 incoming = outgoing = lookup_decl (orig, probe);
7313 goto has_outer_reduction;
7314 }
7315 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7316 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7317 && orig == OMP_CLAUSE_DECL (cls))
7318 {
7319 is_private = true;
7320 goto do_lookup;
7321 }
7322 }
7323
7324 do_lookup:
7325 /* This is the outermost construct with this reduction;
7326 see if there's a mapping for it. */
7327 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7328 && maybe_lookup_field (orig, outer) && !is_private)
7329 {
7330 ref_to_res = build_receiver_ref (orig, false, outer);
7331 if (omp_is_reference (orig))
7332 ref_to_res = build_simple_mem_ref (ref_to_res);
7333
7334 tree type = TREE_TYPE (var);
7335 if (POINTER_TYPE_P (type))
7336 type = TREE_TYPE (type);
7337
7338 outgoing = var;
7339 incoming = omp_reduction_init_op (loc, rcode, type);
7340 }
7341 else
7342 {
7343 /* Try to find the reduction var in enclosing contexts;
7344 use the original if no mapping is found. */
7345 tree t = NULL_TREE;
7346 omp_context *c = ctx->outer;
7347 while (c && !t)
7348 {
7349 t = maybe_lookup_decl (orig, c);
7350 c = c->outer;
7351 }
7352 incoming = outgoing = (t ? t : orig);
7353 }
7354
7355 has_outer_reduction:;
7356 }
7357
7358 if (!ref_to_res)
7359 ref_to_res = integer_zero_node;
7360
7361 if (omp_is_reference (orig))
7362 {
7363 tree type = TREE_TYPE (var);
7364 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7365
7366 if (!inner)
7367 {
7368 tree x = create_tmp_var (TREE_TYPE (type), id);
7369 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7370 }
7371
7372 v1 = create_tmp_var (type, id);
7373 v2 = create_tmp_var (type, id);
7374 v3 = create_tmp_var (type, id);
7375
7376 gimplify_assign (v1, var, fork_seq);
7377 gimplify_assign (v2, var, fork_seq);
7378 gimplify_assign (v3, var, fork_seq);
7379
7380 var = build_simple_mem_ref (var);
7381 v1 = build_simple_mem_ref (v1);
7382 v2 = build_simple_mem_ref (v2);
7383 v3 = build_simple_mem_ref (v3);
7384 outgoing = build_simple_mem_ref (outgoing);
7385
7386 if (!TREE_CONSTANT (incoming))
7387 incoming = build_simple_mem_ref (incoming);
7388 }
7389 else
7390 v1 = v2 = v3 = var;
7391
7392 /* Determine position in reduction buffer, which may be used
7393 by target. The parser has ensured that this is not a
7394 variable-sized type. */
7395 fixed_size_mode mode
7396 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7397 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7398 offset = (offset + align - 1) & ~(align - 1);
7399 tree off = build_int_cst (sizetype, offset);
7400 offset += GET_MODE_SIZE (mode);
7401
7402 if (!init_code)
7403 {
7404 init_code = build_int_cst (integer_type_node,
7405 IFN_GOACC_REDUCTION_INIT);
7406 fini_code = build_int_cst (integer_type_node,
7407 IFN_GOACC_REDUCTION_FINI);
7408 setup_code = build_int_cst (integer_type_node,
7409 IFN_GOACC_REDUCTION_SETUP);
7410 teardown_code = build_int_cst (integer_type_node,
7411 IFN_GOACC_REDUCTION_TEARDOWN);
7412 }
7413
7414 tree setup_call
7415 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7416 TREE_TYPE (var), 6, setup_code,
7417 unshare_expr (ref_to_res),
7418 incoming, level, op, off);
7419 tree init_call
7420 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7421 TREE_TYPE (var), 6, init_code,
7422 unshare_expr (ref_to_res),
7423 v1, level, op, off);
7424 tree fini_call
7425 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7426 TREE_TYPE (var), 6, fini_code,
7427 unshare_expr (ref_to_res),
7428 v2, level, op, off);
7429 tree teardown_call
7430 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7431 TREE_TYPE (var), 6, teardown_code,
7432 ref_to_res, v3, level, op, off);
7433
7434 gimplify_assign (v1, setup_call, &before_fork);
7435 gimplify_assign (v2, init_call, &after_fork);
7436 gimplify_assign (v3, fini_call, &before_join);
7437 gimplify_assign (outgoing, teardown_call, &after_join);
7438 }
7439
7440 /* Now stitch things together. */
7441 gimple_seq_add_seq (fork_seq, before_fork);
7442 if (private_marker)
7443 gimple_seq_add_stmt (fork_seq, private_marker);
7444 if (fork)
7445 gimple_seq_add_stmt (fork_seq, fork);
7446 gimple_seq_add_seq (fork_seq, after_fork);
7447
7448 gimple_seq_add_seq (join_seq, before_join);
7449 if (join)
7450 gimple_seq_add_stmt (join_seq, join);
7451 gimple_seq_add_seq (join_seq, after_join);
7452 }
7453
7454 /* Generate code to implement the REDUCTION clauses, append it
7455 to STMT_SEQP. CLIST, if non-NULL, points to a sequence that
7456 should also be emitted inside the critical section; in that
7457 case clear *CLIST afterwards, otherwise leave it as is and
7458 let the caller emit it itself. */
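
/* For example (a sketch, not verbatim output), a single scalar
   'reduction (+:sum)' is implemented as an atomic update:

     #pragma omp atomic relaxed
     sum_outer = sum_outer + sum_private;

   whereas multiple reductions (or array/UDR reductions) are merged
   under a single lock:

     GOMP_atomic_start ();
     sum_outer = sum_outer + sum_private;
     prod_outer = prod_outer * prod_private;
     GOMP_atomic_end ();  */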
7459
7460 static void
7461 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7462 gimple_seq *clist, omp_context *ctx)
7463 {
7464 gimple_seq sub_seq = NULL;
7465 gimple *stmt;
7466 tree x, c;
7467 int count = 0;
7468
7469 /* OpenACC loop reductions are handled elsewhere. */
7470 if (is_gimple_omp_oacc (ctx->stmt))
7471 return;
7472
7473 /* SIMD reductions are handled in lower_rec_input_clauses. */
7474 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7475 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7476 return;
7477
7478 /* inscan reductions are handled elsewhere. */
7479 if (ctx->scan_inclusive || ctx->scan_exclusive)
7480 return;
7481
7482 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7483 update in that case, otherwise use a lock. */
7484 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7485 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7486 && !OMP_CLAUSE_REDUCTION_TASK (c))
7487 {
7488 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7489 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7490 {
7491 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7492 count = -1;
7493 break;
7494 }
7495 count++;
7496 }
7497
7498 if (count == 0)
7499 return;
7500
7501 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7502 {
7503 tree var, ref, new_var, orig_var;
7504 enum tree_code code;
7505 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7506
7507 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7508 || OMP_CLAUSE_REDUCTION_TASK (c))
7509 continue;
7510
7511 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7512 orig_var = var = OMP_CLAUSE_DECL (c);
7513 if (TREE_CODE (var) == MEM_REF)
7514 {
7515 var = TREE_OPERAND (var, 0);
7516 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7517 var = TREE_OPERAND (var, 0);
7518 if (TREE_CODE (var) == ADDR_EXPR)
7519 var = TREE_OPERAND (var, 0);
7520 else
7521 {
7522 /* If this is a pointer- or reference-based array
7523 section, the var could be private in the outer
7524 context, e.g. on an orphaned loop construct. Pretend this
7525 is the private variable's outer reference. */
7526 ccode = OMP_CLAUSE_PRIVATE;
7527 if (TREE_CODE (var) == INDIRECT_REF)
7528 var = TREE_OPERAND (var, 0);
7529 }
7530 orig_var = var;
7531 if (is_variable_sized (var))
7532 {
7533 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7534 var = DECL_VALUE_EXPR (var);
7535 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7536 var = TREE_OPERAND (var, 0);
7537 gcc_assert (DECL_P (var));
7538 }
7539 }
7540 new_var = lookup_decl (var, ctx);
7541 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
7542 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7543 ref = build_outer_var_ref (var, ctx, ccode);
7544 code = OMP_CLAUSE_REDUCTION_CODE (c);
7545
7546 /* reduction(-:var) sums up the partial results, so it acts
7547 identically to reduction(+:var). */
7548 if (code == MINUS_EXPR)
7549 code = PLUS_EXPR;
7550
7551 /* C/C++ permits FP/complex with || and &&. */
7552 bool is_fp_and_or = ((code == TRUTH_ANDIF_EXPR
7553 || code == TRUTH_ORIF_EXPR)
7554 && (FLOAT_TYPE_P (TREE_TYPE (new_var))
7555 || (TREE_CODE (TREE_TYPE (new_var))
7556 == COMPLEX_TYPE)));
7557 if (count == 1)
7558 {
7559 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7560
7561 addr = save_expr (addr);
7562 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7563 tree new_var2 = new_var;
7564 tree ref2 = ref;
7565 if (is_fp_and_or)
7566 {
7567 tree zero = build_zero_cst (TREE_TYPE (new_var));
7568 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7569 integer_type_node, new_var, zero);
7570 ref2 = fold_build2_loc (clause_loc, NE_EXPR, integer_type_node,
7571 ref, zero);
7572 }
7573 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7574 new_var2);
7575 if (is_fp_and_or)
7576 x = fold_convert (TREE_TYPE (new_var), x);
7577 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7578 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7579 gimplify_and_add (x, stmt_seqp);
7580 return;
7581 }
7582 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7583 {
7584 tree d = OMP_CLAUSE_DECL (c);
7585 tree type = TREE_TYPE (d);
7586 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7587 tree i = create_tmp_var (TREE_TYPE (v));
7588 tree ptype = build_pointer_type (TREE_TYPE (type));
7589 tree bias = TREE_OPERAND (d, 1);
7590 d = TREE_OPERAND (d, 0);
7591 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7592 {
7593 tree b = TREE_OPERAND (d, 1);
7594 b = maybe_lookup_decl (b, ctx);
7595 if (b == NULL)
7596 {
7597 b = TREE_OPERAND (d, 1);
7598 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7599 }
7600 if (integer_zerop (bias))
7601 bias = b;
7602 else
7603 {
7604 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7605 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7606 TREE_TYPE (b), b, bias);
7607 }
7608 d = TREE_OPERAND (d, 0);
7609 }
7610 /* For ref, build_outer_var_ref already performs the
7611 dereference, so only new_var needs one. */
7612 if (TREE_CODE (d) == INDIRECT_REF)
7613 {
7614 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7615 gcc_assert (omp_is_reference (var) && var == orig_var);
7616 }
7617 else if (TREE_CODE (d) == ADDR_EXPR)
7618 {
7619 if (orig_var == var)
7620 {
7621 new_var = build_fold_addr_expr (new_var);
7622 ref = build_fold_addr_expr (ref);
7623 }
7624 }
7625 else
7626 {
7627 gcc_assert (orig_var == var);
7628 if (omp_is_reference (var))
7629 ref = build_fold_addr_expr (ref);
7630 }
7631 if (DECL_P (v))
7632 {
7633 tree t = maybe_lookup_decl (v, ctx);
7634 if (t)
7635 v = t;
7636 else
7637 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7638 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7639 }
7640 if (!integer_zerop (bias))
7641 {
7642 bias = fold_convert_loc (clause_loc, sizetype, bias);
7643 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7644 TREE_TYPE (new_var), new_var,
7645 unshare_expr (bias));
7646 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7647 TREE_TYPE (ref), ref, bias);
7648 }
7649 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7650 ref = fold_convert_loc (clause_loc, ptype, ref);
7651 tree m = create_tmp_var (ptype);
7652 gimplify_assign (m, new_var, stmt_seqp);
7653 new_var = m;
7654 m = create_tmp_var (ptype);
7655 gimplify_assign (m, ref, stmt_seqp);
7656 ref = m;
7657 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7658 tree body = create_artificial_label (UNKNOWN_LOCATION);
7659 tree end = create_artificial_label (UNKNOWN_LOCATION);
7660 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7661 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7662 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7663 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7664 {
7665 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7666 tree decl_placeholder
7667 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7668 SET_DECL_VALUE_EXPR (placeholder, out);
7669 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7670 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7671 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7672 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7673 gimple_seq_add_seq (&sub_seq,
7674 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7675 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7676 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7677 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7678 }
7679 else
7680 {
7681 tree out2 = out;
7682 tree priv2 = priv;
7683 if (is_fp_and_or)
7684 {
7685 tree zero = build_zero_cst (TREE_TYPE (out));
7686 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7687 integer_type_node, out, zero);
7688 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7689 integer_type_node, priv, zero);
7690 }
7691 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7692 if (is_fp_and_or)
7693 x = fold_convert (TREE_TYPE (out), x);
7694 out = unshare_expr (out);
7695 gimplify_assign (out, x, &sub_seq);
7696 }
7697 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7698 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7699 gimple_seq_add_stmt (&sub_seq, g);
7700 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7701 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7702 gimple_seq_add_stmt (&sub_seq, g);
7703 g = gimple_build_assign (i, PLUS_EXPR, i,
7704 build_int_cst (TREE_TYPE (i), 1));
7705 gimple_seq_add_stmt (&sub_seq, g);
7706 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7707 gimple_seq_add_stmt (&sub_seq, g);
7708 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7709 }
7710 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7711 {
7712 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7713
7714 if (omp_is_reference (var)
7715 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7716 TREE_TYPE (ref)))
7717 ref = build_fold_addr_expr_loc (clause_loc, ref);
7718 SET_DECL_VALUE_EXPR (placeholder, ref);
7719 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7720 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7721 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7722 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7723 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7724 }
7725 else
7726 {
7727 tree new_var2 = new_var;
7728 tree ref2 = ref;
7729 if (is_fp_and_or)
7730 {
7731 tree zero = build_zero_cst (TREE_TYPE (new_var));
7732 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7733 integer_type_node, new_var, zero);
7734 ref2 = fold_build2_loc (clause_loc, NE_EXPR, integer_type_node,
7735 ref, zero);
7736 }
7737 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7738 if (is_fp_and_or)
7739 x = fold_convert (TREE_TYPE (new_var), x);
7740 ref = build_outer_var_ref (var, ctx);
7741 gimplify_assign (ref, x, &sub_seq);
7742 }
7743 }
7744
7745 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7746 0);
7747 gimple_seq_add_stmt (stmt_seqp, stmt);
7748
7749 gimple_seq_add_seq (stmt_seqp, sub_seq);
7750
7751 if (clist)
7752 {
7753 gimple_seq_add_seq (stmt_seqp, *clist);
7754 *clist = NULL;
7755 }
7756
7757 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7758 0);
7759 gimple_seq_add_stmt (stmt_seqp, stmt);
7760 }
7761
7762
7763 /* Generate code to implement the COPYPRIVATE clauses. */
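
/* E.g. for 'single copyprivate (x)' (a sketch; record and field names
   are illustrative), the sender side (SLIST) stores x - or &x, when
   passed by reference - into the communication record, and the
   receiver side (RLIST) copies it back out:

     .omp_copy_o.x = x;
     x = (*.omp_copy_i).x;  */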
7764
7765 static void
7766 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7767 omp_context *ctx)
7768 {
7769 tree c;
7770
7771 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7772 {
7773 tree var, new_var, ref, x;
7774 bool by_ref;
7775 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7776
7777 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7778 continue;
7779
7780 var = OMP_CLAUSE_DECL (c);
7781 by_ref = use_pointer_for_field (var, NULL);
7782
7783 ref = build_sender_ref (var, ctx);
7784 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7785 if (by_ref)
7786 {
7787 x = build_fold_addr_expr_loc (clause_loc, new_var);
7788 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7789 }
7790 gimplify_assign (ref, x, slist);
7791
7792 ref = build_receiver_ref (var, false, ctx);
7793 if (by_ref)
7794 {
7795 ref = fold_convert_loc (clause_loc,
7796 build_pointer_type (TREE_TYPE (new_var)),
7797 ref);
7798 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7799 }
7800 if (omp_is_reference (var))
7801 {
7802 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7803 ref = build_simple_mem_ref_loc (clause_loc, ref);
7804 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7805 }
7806 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7807 gimplify_and_add (x, rlist);
7808 }
7809 }
7810
7811
7812 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7813 and REDUCTION from the sender (aka parent) side. */
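
/* E.g. on '#pragma omp parallel firstprivate (a) lastprivate (b)'
   (a sketch; record and field names are illustrative), the parent
   stores the incoming value before spawning and reads the result
   back afterwards:

     .omp_data_o.a = a;     <- ILIST, before GIMPLE_OMP_PARALLEL
     b = .omp_data_o.b;     <- OLIST, after GIMPLE_OMP_PARALLEL  */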
7814
7815 static void
7816 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7817 omp_context *ctx)
7818 {
7819 tree c, t;
7820 int ignored_looptemp = 0;
7821 bool is_taskloop = false;
7822
7823 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7824 by GOMP_taskloop. */
7825 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7826 {
7827 ignored_looptemp = 2;
7828 is_taskloop = true;
7829 }
7830
7831 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7832 {
7833 tree val, ref, x, var;
7834 bool by_ref, do_in = false, do_out = false;
7835 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7836
7837 switch (OMP_CLAUSE_CODE (c))
7838 {
7839 case OMP_CLAUSE_PRIVATE:
7840 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7841 break;
7842 continue;
7843 case OMP_CLAUSE_FIRSTPRIVATE:
7844 case OMP_CLAUSE_COPYIN:
7845 case OMP_CLAUSE_LASTPRIVATE:
7846 case OMP_CLAUSE_IN_REDUCTION:
7847 case OMP_CLAUSE__REDUCTEMP_:
7848 break;
7849 case OMP_CLAUSE_REDUCTION:
7850 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7851 continue;
7852 break;
7853 case OMP_CLAUSE_SHARED:
7854 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7855 break;
7856 continue;
7857 case OMP_CLAUSE__LOOPTEMP_:
7858 if (ignored_looptemp)
7859 {
7860 ignored_looptemp--;
7861 continue;
7862 }
7863 break;
7864 default:
7865 continue;
7866 }
7867
7868 val = OMP_CLAUSE_DECL (c);
7869 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7870 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7871 && TREE_CODE (val) == MEM_REF)
7872 {
7873 val = TREE_OPERAND (val, 0);
7874 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7875 val = TREE_OPERAND (val, 0);
7876 if (TREE_CODE (val) == INDIRECT_REF
7877 || TREE_CODE (val) == ADDR_EXPR)
7878 val = TREE_OPERAND (val, 0);
7879 if (is_variable_sized (val))
7880 continue;
7881 }
7882
7883 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7884 outer taskloop region. */
7885 omp_context *ctx_for_o = ctx;
7886 if (is_taskloop
7887 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7888 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7889 ctx_for_o = ctx->outer;
7890
7891 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7892
7893 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7894 && is_global_var (var)
7895 && (val == OMP_CLAUSE_DECL (c)
7896 || !is_task_ctx (ctx)
7897 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7898 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7899 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7900 != POINTER_TYPE)))))
7901 continue;
7902
7903 t = omp_member_access_dummy_var (var);
7904 if (t)
7905 {
7906 var = DECL_VALUE_EXPR (var);
7907 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7908 if (o != t)
7909 var = unshare_and_remap (var, t, o);
7910 else
7911 var = unshare_expr (var);
7912 }
7913
7914 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7915 {
7916 /* Handle taskloop firstprivate/lastprivate, where the
7917 lastprivate on GIMPLE_OMP_TASK is represented as
7918 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7919 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7920 x = omp_build_component_ref (ctx->sender_decl, f);
7921 if (use_pointer_for_field (val, ctx))
7922 var = build_fold_addr_expr (var);
7923 gimplify_assign (x, var, ilist);
7924 DECL_ABSTRACT_ORIGIN (f) = NULL;
7925 continue;
7926 }
7927
7928 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7929 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7930 || val == OMP_CLAUSE_DECL (c))
7931 && is_variable_sized (val))
7932 continue;
7933 by_ref = use_pointer_for_field (val, NULL);
7934
7935 switch (OMP_CLAUSE_CODE (c))
7936 {
7937 case OMP_CLAUSE_FIRSTPRIVATE:
7938 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7939 && !by_ref
7940 && is_task_ctx (ctx))
7941 suppress_warning (var);
7942 do_in = true;
7943 break;
7944
7945 case OMP_CLAUSE_PRIVATE:
7946 case OMP_CLAUSE_COPYIN:
7947 case OMP_CLAUSE__LOOPTEMP_:
7948 case OMP_CLAUSE__REDUCTEMP_:
7949 do_in = true;
7950 break;
7951
7952 case OMP_CLAUSE_LASTPRIVATE:
7953 if (by_ref || omp_is_reference (val))
7954 {
7955 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7956 continue;
7957 do_in = true;
7958 }
7959 else
7960 {
7961 do_out = true;
7962 if (lang_hooks.decls.omp_private_outer_ref (val))
7963 do_in = true;
7964 }
7965 break;
7966
7967 case OMP_CLAUSE_REDUCTION:
7968 case OMP_CLAUSE_IN_REDUCTION:
7969 do_in = true;
7970 if (val == OMP_CLAUSE_DECL (c))
7971 {
7972 if (is_task_ctx (ctx))
7973 by_ref = use_pointer_for_field (val, ctx);
7974 else
7975 do_out = !(by_ref || omp_is_reference (val));
7976 }
7977 else
7978 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7979 break;
7980
7981 default:
7982 gcc_unreachable ();
7983 }
7984
7985 if (do_in)
7986 {
7987 ref = build_sender_ref (val, ctx);
7988 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7989 gimplify_assign (ref, x, ilist);
7990 if (is_task_ctx (ctx))
7991 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7992 }
7993
7994 if (do_out)
7995 {
7996 ref = build_sender_ref (val, ctx);
7997 gimplify_assign (var, ref, olist);
7998 }
7999 }
8000 }
8001
8002 /* Generate code to implement SHARED from the sender (aka parent)
8003 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8004 list things that got automatically shared. */
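
/* E.g. (a sketch) a shared scalar that fits in the record is copied in
   by value and back out again, while an addressable variable is passed
   by pointer and needs no copy-out:

     .omp_data_o.i = i;     <- ILIST
     i = .omp_data_o.i;     <- OLIST
     .omp_data_o.p = &p;    <- ILIST only  */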
8005
8006 static void
8007 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8008 {
8009 tree var, ovar, nvar, t, f, x, record_type;
8010
8011 if (ctx->record_type == NULL)
8012 return;
8013
8014 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8015 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8016 {
8017 ovar = DECL_ABSTRACT_ORIGIN (f);
8018 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8019 continue;
8020
8021 nvar = maybe_lookup_decl (ovar, ctx);
8022 if (!nvar
8023 || !DECL_HAS_VALUE_EXPR_P (nvar)
8024 || (ctx->allocate_map
8025 && ctx->allocate_map->get (ovar)))
8026 continue;
8027
8028 /* If CTX is a nested parallel directive, find the immediately
8029 enclosing parallel or workshare construct that contains a
8030 mapping for OVAR. */
8031 var = lookup_decl_in_outer_ctx (ovar, ctx);
8032
8033 t = omp_member_access_dummy_var (var);
8034 if (t)
8035 {
8036 var = DECL_VALUE_EXPR (var);
8037 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8038 if (o != t)
8039 var = unshare_and_remap (var, t, o);
8040 else
8041 var = unshare_expr (var);
8042 }
8043
8044 if (use_pointer_for_field (ovar, ctx))
8045 {
8046 x = build_sender_ref (ovar, ctx);
8047 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8048 && TREE_TYPE (f) == TREE_TYPE (ovar))
8049 {
8050 gcc_assert (is_parallel_ctx (ctx)
8051 && DECL_ARTIFICIAL (ovar));
8052 /* _condtemp_ clause. */
8053 var = build_constructor (TREE_TYPE (x), NULL);
8054 }
8055 else
8056 var = build_fold_addr_expr (var);
8057 gimplify_assign (x, var, ilist);
8058 }
8059 else
8060 {
8061 x = build_sender_ref (ovar, ctx);
8062 gimplify_assign (x, var, ilist);
8063
8064 if (!TREE_READONLY (var)
8065 /* We don't need to receive a new reference to a result
8066 or parm decl. In fact we may not store to it as we will
8067 invalidate any pending RSO and generate wrong gimple
8068 during inlining. */
8069 && !((TREE_CODE (var) == RESULT_DECL
8070 || TREE_CODE (var) == PARM_DECL)
8071 && DECL_BY_REFERENCE (var)))
8072 {
8073 x = build_sender_ref (ovar, ctx);
8074 gimplify_assign (var, x, olist);
8075 }
8076 }
8077 }
8078 }
8079
8080 /* Emit an OpenACC head marker call, encapsulating the partitioning and
8081 other information that must be processed by the target compiler.
8082 Return the maximum number of dimensions the associated loop might
8083 be partitioned over. */
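
/* E.g. (a sketch) '#pragma acc loop gang vector' yields a marker like

     .data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, 2, tag);

   where 2 is the number of partitioning levels and the tag encodes
   OLF_DIM_GANG | OLF_DIM_VECTOR (plus OLF_INDEPENDENT inside a
   parallel region).  */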
8084
8085 static unsigned
8086 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8087 gimple_seq *seq, omp_context *ctx)
8088 {
8089 unsigned levels = 0;
8090 unsigned tag = 0;
8091 tree gang_static = NULL_TREE;
8092 auto_vec<tree, 5> args;
8093
8094 args.quick_push (build_int_cst
8095 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8096 args.quick_push (ddvar);
8097 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8098 {
8099 switch (OMP_CLAUSE_CODE (c))
8100 {
8101 case OMP_CLAUSE_GANG:
8102 tag |= OLF_DIM_GANG;
8103 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8104 /* static:* is represented by -1, and we can ignore it, as
8105 scheduling is always static. */
8106 if (gang_static && integer_minus_onep (gang_static))
8107 gang_static = NULL_TREE;
8108 levels++;
8109 break;
8110
8111 case OMP_CLAUSE_WORKER:
8112 tag |= OLF_DIM_WORKER;
8113 levels++;
8114 break;
8115
8116 case OMP_CLAUSE_VECTOR:
8117 tag |= OLF_DIM_VECTOR;
8118 levels++;
8119 break;
8120
8121 case OMP_CLAUSE_SEQ:
8122 tag |= OLF_SEQ;
8123 break;
8124
8125 case OMP_CLAUSE_AUTO:
8126 tag |= OLF_AUTO;
8127 break;
8128
8129 case OMP_CLAUSE_INDEPENDENT:
8130 tag |= OLF_INDEPENDENT;
8131 break;
8132
8133 case OMP_CLAUSE_TILE:
8134 tag |= OLF_TILE;
8135 break;
8136
8137 default:
8138 continue;
8139 }
8140 }
8141
8142 if (gang_static)
8143 {
8144 if (DECL_P (gang_static))
8145 gang_static = build_outer_var_ref (gang_static, ctx);
8146 tag |= OLF_GANG_STATIC;
8147 }
8148
8149 omp_context *tgt = enclosing_target_ctx (ctx);
8150 if (!tgt || is_oacc_parallel_or_serial (tgt))
8151 ;
8152 else if (is_oacc_kernels (tgt))
8153 /* This loop handling is not used inside OpenACC 'kernels' regions. */
8154 gcc_unreachable ();
8155 else if (is_oacc_kernels_decomposed_part (tgt))
8156 ;
8157 else
8158 gcc_unreachable ();
8159
8160 /* In a parallel region, loops are implicitly INDEPENDENT. */
8161 if (!tgt || is_oacc_parallel_or_serial (tgt))
8162 tag |= OLF_INDEPENDENT;
8163
8164 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8165 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8166 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8167 {
8168 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8169 gcc_assert (!(tag & OLF_AUTO));
8170 }
8171
8172 if (tag & OLF_TILE)
8173 /* Tiling could use all 3 levels. */
8174 levels = 3;
8175 else
8176 {
8177 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8178 Ensure at least one level, or 2 for possible auto
8179 partitioning. */
8180 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8181 << OLF_DIM_BASE) | OLF_SEQ));
8182
8183 if (levels < 1u + maybe_auto)
8184 levels = 1u + maybe_auto;
8185 }
8186
8187 args.quick_push (build_int_cst (integer_type_node, levels));
8188 args.quick_push (build_int_cst (integer_type_node, tag));
8189 if (gang_static)
8190 args.quick_push (gang_static);
8191
8192 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8193 gimple_set_location (call, loc);
8194 gimple_set_lhs (call, ddvar);
8195 gimple_seq_add_stmt (seq, call);
8196
8197 return levels;
8198 }
8199
8200 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
8201 non-NULL, is the partitioning level of the enclosed region. */
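
/* E.g. (a sketch) a head marker for level 2 looks like

     .data_dep = IFN_UNIQUE (OACC_HEAD_MARK, .data_dep, 2);  */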
8202
8203 static void
8204 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8205 tree tofollow, gimple_seq *seq)
8206 {
8207 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8208 : IFN_UNIQUE_OACC_TAIL_MARK);
8209 tree marker = build_int_cst (integer_type_node, marker_kind);
8210 int nargs = 2 + (tofollow != NULL_TREE);
8211 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8212 marker, ddvar, tofollow);
8213 gimple_set_location (call, loc);
8214 gimple_set_lhs (call, ddvar);
8215 gimple_seq_add_stmt (seq, call);
8216 }
8217
8218 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8219 the loop clauses, from which we extract reductions. Initialize
8220 HEAD and TAIL. */
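
/* E.g. (a sketch) with two partitioning levels the sequences nest as

     HEAD:  level-1 reductions + fork, then level-2 reductions + fork
     ... loop body ...
     TAIL:  level-2 join + reductions, then level-1 join + reductions

   each level getting its own setup/init (in HEAD) and fini/teardown
   (in TAIL) from lower_oacc_reductions.  */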
8221
8222 static void
8223 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8224 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8225 {
8226 bool inner = false;
8227 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8228 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8229
8230 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8231
8232 if (private_marker)
8233 {
8234 gimple_set_location (private_marker, loc);
8235 gimple_call_set_lhs (private_marker, ddvar);
8236 gimple_call_set_arg (private_marker, 1, ddvar);
8237 }
8238
8239 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8240 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8241
8242 gcc_assert (count);
8243 for (unsigned done = 1; count; count--, done++)
8244 {
8245 gimple_seq fork_seq = NULL;
8246 gimple_seq join_seq = NULL;
8247
8248 tree place = build_int_cst (integer_type_node, -1);
8249 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8250 fork_kind, ddvar, place);
8251 gimple_set_location (fork, loc);
8252 gimple_set_lhs (fork, ddvar);
8253
8254 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8255 join_kind, ddvar, place);
8256 gimple_set_location (join, loc);
8257 gimple_set_lhs (join, ddvar);
8258
8259 /* Mark the beginning of this level sequence. */
8260 if (inner)
8261 lower_oacc_loop_marker (loc, ddvar, true,
8262 build_int_cst (integer_type_node, count),
8263 &fork_seq);
8264 lower_oacc_loop_marker (loc, ddvar, false,
8265 build_int_cst (integer_type_node, done),
8266 &join_seq);
8267
8268 lower_oacc_reductions (loc, clauses, place, inner,
8269 fork, (count == 1) ? private_marker : NULL,
8270 join, &fork_seq, &join_seq, ctx);
8271
8272 /* Append this level to head. */
8273 gimple_seq_add_seq (head, fork_seq);
8274 /* Prepend it to tail. */
8275 gimple_seq_add_seq (&join_seq, *tail);
8276 *tail = join_seq;
8277
8278 inner = true;
8279 }
8280
8281 /* Mark the end of the sequence. */
8282 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8283 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8284 }
8285
8286 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8287 catch handler and return it. This prevents programs from violating the
8288 structured block semantics with throws. */
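
/* E.g. (a sketch) the wrapped body is equivalent to

     try { BODY } catch (...) { MUST_NOT_THROW; }

   where the must-not-throw handler invokes the language's cleanup
   action (typically std::terminate for C++) or falls back to
   __builtin_trap.  */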
8289
8290 static gimple_seq
8291 maybe_catch_exception (gimple_seq body)
8292 {
8293 gimple *g;
8294 tree decl;
8295
8296 if (!flag_exceptions)
8297 return body;
8298
8299 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8300 decl = lang_hooks.eh_protect_cleanup_actions ();
8301 else
8302 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8303
8304 g = gimple_build_eh_must_not_throw (decl);
8305 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8306 GIMPLE_TRY_CATCH);
8307
8308 return gimple_seq_alloc_with_stmt (g);
8309 }
8310
8311 \f
8312 /* Routines to lower OMP directives into OMP-GIMPLE. */
8313
8314 /* If ctx is a worksharing context inside of a cancellable parallel
8315 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8316 and conditional branch to parallel's cancel_label to handle
8317 cancellation in the implicit barrier. */
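
/* E.g. (a sketch; the barrier call itself is emitted later by
   pass_expand_omp) the GIMPLE_OMP_RETURN gains an lhs so the implicit
   barrier can report cancellation, roughly:

     lhs = GOMP_barrier_cancel ();
     if (lhs != 0) goto <parallel's cancel_label>; else goto fallthru;  */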
8318
8319 static void
8320 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8321 gimple_seq *body)
8322 {
8323 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8324 if (gimple_omp_return_nowait_p (omp_return))
8325 return;
8326 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8327 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8328 && outer->cancellable)
8329 {
8330 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8331 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8332 tree lhs = create_tmp_var (c_bool_type);
8333 gimple_omp_return_set_lhs (omp_return, lhs);
8334 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8335 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8336 fold_convert (c_bool_type,
8337 boolean_false_node),
8338 outer->cancel_label, fallthru_label);
8339 gimple_seq_add_stmt (body, g);
8340 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8341 }
8342 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8343 return;
8344 }
8345
8346 /* Find the first task_reduction or reduction clause or return NULL
8347 if there are none. */
8348
8349 static inline tree
8350 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8351 enum omp_clause_code ccode)
8352 {
8353 while (1)
8354 {
8355 clauses = omp_find_clause (clauses, ccode);
8356 if (clauses == NULL_TREE)
8357 return NULL_TREE;
8358 if (ccode != OMP_CLAUSE_REDUCTION
8359 || code == OMP_TASKLOOP
8360 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8361 return clauses;
8362 clauses = OMP_CLAUSE_CHAIN (clauses);
8363 }
8364 }
8365
8366 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8367 gimple_seq *, gimple_seq *);
8368
8369 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8370 CTX is the enclosing OMP context for the current statement. */
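
/* E.g. (a sketch) a two-section '#pragma omp sections' is rebuilt as

     <ILIST: privatization/reduction setup>
     GIMPLE_OMP_SECTIONS (control = .section.N)
     GIMPLE_OMP_SECTIONS_SWITCH
     { <section 1 body> <section 2 body + lastprivate> }
     GIMPLE_OMP_CONTINUE (.section.N, .section.N)
     <OLIST: reduction merge> <DLIST: destructors>
     GIMPLE_OMP_RETURN  */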
8371
8372 static void
8373 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8374 {
8375 tree block, control;
8376 gimple_stmt_iterator tgsi;
8377 gomp_sections *stmt;
8378 gimple *t;
8379 gbind *new_stmt, *bind;
8380 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8381
8382 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8383
8384 push_gimplify_context ();
8385
8386 dlist = NULL;
8387 ilist = NULL;
8388
8389 tree rclauses
8390 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8391 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8392 tree rtmp = NULL_TREE;
8393 if (rclauses)
8394 {
8395 tree type = build_pointer_type (pointer_sized_int_node);
8396 tree temp = create_tmp_var (type);
8397 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8398 OMP_CLAUSE_DECL (c) = temp;
8399 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8400 gimple_omp_sections_set_clauses (stmt, c);
8401 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8402 gimple_omp_sections_clauses (stmt),
8403 &ilist, &tred_dlist);
8404 rclauses = c;
8405 rtmp = make_ssa_name (type);
8406 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8407 }
8408
8409 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8410 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8411
8412 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8413 &ilist, &dlist, ctx, NULL);
8414
8415 control = create_tmp_var (unsigned_type_node, ".section");
8416 gimple_omp_sections_set_control (stmt, control);
8417
8418 new_body = gimple_omp_body (stmt);
8419 gimple_omp_set_body (stmt, NULL);
8420 tgsi = gsi_start (new_body);
8421 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8422 {
8423 omp_context *sctx;
8424 gimple *sec_start;
8425
8426 sec_start = gsi_stmt (tgsi);
8427 sctx = maybe_lookup_ctx (sec_start);
8428 gcc_assert (sctx);
8429
8430 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8431 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8432 GSI_CONTINUE_LINKING);
8433 gimple_omp_set_body (sec_start, NULL);
8434
8435 if (gsi_one_before_end_p (tgsi))
8436 {
8437 gimple_seq l = NULL;
8438 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8439 &ilist, &l, &clist, ctx);
8440 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8441 gimple_omp_section_set_last (sec_start);
8442 }
8443
8444 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8445 GSI_CONTINUE_LINKING);
8446 }
8447
8448 block = make_node (BLOCK);
8449 bind = gimple_build_bind (NULL, new_body, block);
8450
8451 olist = NULL;
8452 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8453 &clist, ctx);
8454 if (clist)
8455 {
8456 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8457 gcall *g = gimple_build_call (fndecl, 0);
8458 gimple_seq_add_stmt (&olist, g);
8459 gimple_seq_add_seq (&olist, clist);
8460 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8461 g = gimple_build_call (fndecl, 0);
8462 gimple_seq_add_stmt (&olist, g);
8463 }
8464
8465 block = make_node (BLOCK);
8466 new_stmt = gimple_build_bind (NULL, NULL, block);
8467 gsi_replace (gsi_p, new_stmt, true);
8468
8469 pop_gimplify_context (new_stmt);
8470 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8471 BLOCK_VARS (block) = gimple_bind_vars (bind);
8472 if (BLOCK_VARS (block))
8473 TREE_USED (block) = 1;
8474
8475 new_body = NULL;
8476 gimple_seq_add_seq (&new_body, ilist);
8477 gimple_seq_add_stmt (&new_body, stmt);
8478 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8479 gimple_seq_add_stmt (&new_body, bind);
8480
8481 t = gimple_build_omp_continue (control, control);
8482 gimple_seq_add_stmt (&new_body, t);
8483
8484 gimple_seq_add_seq (&new_body, olist);
8485 if (ctx->cancellable)
8486 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8487 gimple_seq_add_seq (&new_body, dlist);
8488
8489 new_body = maybe_catch_exception (new_body);
8490
8491 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8492 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8493 t = gimple_build_omp_return (nowait);
8494 gimple_seq_add_stmt (&new_body, t);
8495 gimple_seq_add_seq (&new_body, tred_dlist);
8496 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8497
8498 if (rclauses)
8499 OMP_CLAUSE_DECL (rclauses) = rtmp;
8500
8501 gimple_bind_set_body (new_stmt, new_body);
8502 }
8503
8504
8505 /* A subroutine of lower_omp_single. Expand the simple form of
8506 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8507
8508 if (GOMP_single_start ())
8509 BODY;
8510 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8511
8512 FIXME. It may be better to delay expanding the logic of this until
8513 pass_expand_omp. The expanded logic may make the job more difficult
8514 for a synchronization analysis pass. */
8515
8516 static void
8517 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8518 {
8519 location_t loc = gimple_location (single_stmt);
8520 tree tlabel = create_artificial_label (loc);
8521 tree flabel = create_artificial_label (loc);
8522 gimple *call, *cond;
8523 tree lhs, decl;
8524
8525 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8526 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8527 call = gimple_build_call (decl, 0);
8528 gimple_call_set_lhs (call, lhs);
8529 gimple_seq_add_stmt (pre_p, call);
8530
8531 cond = gimple_build_cond (EQ_EXPR, lhs,
8532 fold_convert_loc (loc, TREE_TYPE (lhs),
8533 boolean_true_node),
8534 tlabel, flabel);
8535 gimple_seq_add_stmt (pre_p, cond);
8536 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8537 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8538 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8539 }
8540
8541
8542 /* A subroutine of lower_omp_single. Expand the simple form of
8543 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8544
8545 #pragma omp single copyprivate (a, b, c)
8546
8547 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8548
8549 {
8550 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8551 {
8552 BODY;
8553 copyout.a = a;
8554 copyout.b = b;
8555 copyout.c = c;
8556 GOMP_single_copy_end (&copyout);
8557 }
8558 else
8559 {
8560 a = copyout_p->a;
8561 b = copyout_p->b;
8562 c = copyout_p->c;
8563 }
8564 GOMP_barrier ();
8565 }
8566
8567 FIXME. It may be better to delay expanding the logic of this until
8568 pass_expand_omp. The expanded logic may make the job more difficult
8569 for a synchronization analysis pass. */
8570
8571 static void
8572 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8573 omp_context *ctx)
8574 {
8575 tree ptr_type, t, l0, l1, l2, bfn_decl;
8576 gimple_seq copyin_seq;
8577 location_t loc = gimple_location (single_stmt);
8578
8579 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8580
8581 ptr_type = build_pointer_type (ctx->record_type);
8582 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8583
8584 l0 = create_artificial_label (loc);
8585 l1 = create_artificial_label (loc);
8586 l2 = create_artificial_label (loc);
8587
8588 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8589 t = build_call_expr_loc (loc, bfn_decl, 0);
8590 t = fold_convert_loc (loc, ptr_type, t);
8591 gimplify_assign (ctx->receiver_decl, t, pre_p);
8592
8593 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8594 build_int_cst (ptr_type, 0));
8595 t = build3 (COND_EXPR, void_type_node, t,
8596 build_and_jump (&l0), build_and_jump (&l1));
8597 gimplify_and_add (t, pre_p);
8598
8599 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8600
8601 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8602
8603 copyin_seq = NULL;
8604 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8605 &copyin_seq, ctx);
8606
8607 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8608 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8609 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8610 gimplify_and_add (t, pre_p);
8611
8612 t = build_and_jump (&l2);
8613 gimplify_and_add (t, pre_p);
8614
8615 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8616
8617 gimple_seq_add_seq (pre_p, copyin_seq);
8618
8619 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8620 }
8621
8622
8623 /* Expand code for an OpenMP single directive. */
8624
8625 static void
8626 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8627 {
8628 tree block;
8629 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8630 gbind *bind;
8631 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8632
8633 push_gimplify_context ();
8634
8635 block = make_node (BLOCK);
8636 bind = gimple_build_bind (NULL, NULL, block);
8637 gsi_replace (gsi_p, bind, true);
8638 bind_body = NULL;
8639 dlist = NULL;
8640 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8641 &bind_body, &dlist, ctx, NULL);
8642 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8643
8644 gimple_seq_add_stmt (&bind_body, single_stmt);
8645
8646 if (ctx->record_type)
8647 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8648 else
8649 lower_omp_single_simple (single_stmt, &bind_body);
8650
8651 gimple_omp_set_body (single_stmt, NULL);
8652
8653 gimple_seq_add_seq (&bind_body, dlist);
8654
8655 bind_body = maybe_catch_exception (bind_body);
8656
8657 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8658 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8659 gimple *g = gimple_build_omp_return (nowait);
8660 gimple_seq_add_stmt (&bind_body_tail, g);
8661 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8662 if (ctx->record_type)
8663 {
8664 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8665 tree clobber = build_clobber (ctx->record_type);
8666 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8667 clobber), GSI_SAME_STMT);
8668 }
8669 gimple_seq_add_seq (&bind_body, bind_body_tail);
8670 gimple_bind_set_body (bind, bind_body);
8671
8672 pop_gimplify_context (bind);
8673
8674 gimple_bind_append_vars (bind, ctx->block_vars);
8675 BLOCK_VARS (block) = ctx->block_vars;
8676 if (BLOCK_VARS (block))
8677 TREE_USED (block) = 1;
8678 }
8679
8680
8681 /* Expand code for an OpenMP master directive. */
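
/* E.g. (a sketch) the master body is guarded by a thread-number check:

     if (omp_get_thread_num () == 0)
       BODY;
     lab:
     GIMPLE_OMP_RETURN (nowait);  */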
8682
8683 static void
8684 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8685 {
8686 tree block, lab = NULL, x, bfn_decl;
8687 gimple *stmt = gsi_stmt (*gsi_p);
8688 gbind *bind;
8689 location_t loc = gimple_location (stmt);
8690 gimple_seq tseq;
8691
8692 push_gimplify_context ();
8693
8694 block = make_node (BLOCK);
8695 bind = gimple_build_bind (NULL, NULL, block);
8696 gsi_replace (gsi_p, bind, true);
8697 gimple_bind_add_stmt (bind, stmt);
8698
8699 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8700 x = build_call_expr_loc (loc, bfn_decl, 0);
8701 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8702 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8703 tseq = NULL;
8704 gimplify_and_add (x, &tseq);
8705 gimple_bind_add_seq (bind, tseq);
8706
8707 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8708 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8709 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8710 gimple_omp_set_body (stmt, NULL);
8711
8712 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8713
8714 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8715
8716 pop_gimplify_context (bind);
8717
8718 gimple_bind_append_vars (bind, ctx->block_vars);
8719 BLOCK_VARS (block) = ctx->block_vars;
8720 }
8721
8722 /* Helper function for lower_omp_task_reductions. For a specific PASS
8723 find the next clause that should be processed, or return false
8724 if all have been processed already. */
8725
8726 static inline bool
8727 omp_task_reduction_iterate (int pass, enum tree_code code,
8728 enum omp_clause_code ccode, tree *c, tree *decl,
8729 tree *type, tree *next)
8730 {
8731 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8732 {
8733 if (ccode == OMP_CLAUSE_REDUCTION
8734 && code != OMP_TASKLOOP
8735 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8736 continue;
8737 *decl = OMP_CLAUSE_DECL (*c);
8738 *type = TREE_TYPE (*decl);
8739 if (TREE_CODE (*decl) == MEM_REF)
8740 {
8741 if (pass != 1)
8742 continue;
8743 }
8744 else
8745 {
8746 if (omp_is_reference (*decl))
8747 *type = TREE_TYPE (*type);
8748 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8749 continue;
8750 }
8751 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8752 return true;
8753 }
8754 *decl = NULL_TREE;
8755 *type = NULL_TREE;
8756 *next = NULL_TREE;
8757 return false;
8758 }
8759
8760 /* Lower task_reduction and reduction clauses (the latter, unless CODE is
8761 OMP_TASKLOOP, only with the task modifier). Register the mapping of those
8762 in the START sequence, and reduce and unregister them in the END sequence. */
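
/* E.g. (a sketch; the exact slot layout is documented in
   libgomp/task.c) the START sequence fills a registration array and
   hands it to the runtime:

     _arr[0] = <number of reductions>;
     _arr[1] = <cache-line-aligned per-thread size>;
     _arr[2] = <alignment>;  _arr[3] = -1;  _arr[4] = 0;
     <per-reduction entries>
     GOMP_taskgroup_reduction_register (&_arr);   <- taskgroup case  */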
8763
8764 static void
8765 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8766 gimple_seq *start, gimple_seq *end)
8767 {
8768 enum omp_clause_code ccode
8769 = (code == OMP_TASKGROUP
8770 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8771 tree cancellable = NULL_TREE;
8772 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8773 if (clauses == NULL_TREE)
8774 return;
8775 if (code == OMP_FOR || code == OMP_SECTIONS)
8776 {
8777 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8778 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8779 && outer->cancellable)
8780 {
8781 cancellable = error_mark_node;
8782 break;
8783 }
8784 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8785 break;
8786 }
8787 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8788 tree *last = &TYPE_FIELDS (record_type);
8789 unsigned cnt = 0;
8790 if (cancellable)
8791 {
8792 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8793 ptr_type_node);
8794 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8795 integer_type_node);
8796 *last = field;
8797 DECL_CHAIN (field) = ifield;
8798 last = &DECL_CHAIN (ifield);
8799 DECL_CONTEXT (field) = record_type;
8800 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8801 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8802 DECL_CONTEXT (ifield) = record_type;
8803 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8804 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8805 }
8806 for (int pass = 0; pass < 2; pass++)
8807 {
8808 tree decl, type, next;
8809 for (tree c = clauses;
8810 omp_task_reduction_iterate (pass, code, ccode,
8811 &c, &decl, &type, &next); c = next)
8812 {
8813 ++cnt;
8814 tree new_type = type;
8815 if (ctx->outer)
8816 new_type = remap_type (type, &ctx->outer->cb);
8817 tree field
8818 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8819 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8820 new_type);
8821 if (DECL_P (decl) && type == TREE_TYPE (decl))
8822 {
8823 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8824 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8825 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8826 }
8827 else
8828 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8829 DECL_CONTEXT (field) = record_type;
8830 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8831 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8832 *last = field;
8833 last = &DECL_CHAIN (field);
8834 tree bfield
8835 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8836 boolean_type_node);
8837 DECL_CONTEXT (bfield) = record_type;
8838 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8839 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8840 *last = bfield;
8841 last = &DECL_CHAIN (bfield);
8842 }
8843 }
8844 *last = NULL_TREE;
8845 layout_type (record_type);
8846
8847 /* Build up an array used to register all the reductions with the runtime
8848 and to deregister them at the end. Format documented in libgomp/task.c. */
8849 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8850 tree avar = create_tmp_var_raw (atype);
8851 gimple_add_tmp_var (avar);
8852 TREE_ADDRESSABLE (avar) = 1;
8853 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8854 NULL_TREE, NULL_TREE);
8855 tree t = build_int_cst (pointer_sized_int_node, cnt);
8856 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8857 gimple_seq seq = NULL;
8858 tree sz = fold_convert (pointer_sized_int_node,
8859 TYPE_SIZE_UNIT (record_type));
8860 int cachesz = 64;
8861 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8862 build_int_cst (pointer_sized_int_node, cachesz - 1));
8863 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8864 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8865 ctx->task_reductions.create (1 + cnt);
8866 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8867 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8868 ? sz : NULL_TREE);
8869 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8870 gimple_seq_add_seq (start, seq);
8871 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8872 NULL_TREE, NULL_TREE);
8873 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8874 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8875 NULL_TREE, NULL_TREE);
8876 t = build_int_cst (pointer_sized_int_node,
8877 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8878 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8879 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8880 NULL_TREE, NULL_TREE);
8881 t = build_int_cst (pointer_sized_int_node, -1);
8882 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8883 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8884 NULL_TREE, NULL_TREE);
8885 t = build_int_cst (pointer_sized_int_node, 0);
8886 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8887
8888 /* In the END sequence, build a loop that iterates from 0 to
8889 omp_get_num_threads () - 1 and for each task reduction checks a bool right
8890 after the private variable within that thread's chunk; if the bool is clear,
8891 the variable hasn't been initialized and thus isn't going to be reduced nor
8892 destructed, otherwise reduce and destruct it. */
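     /* Schematically (a sketch only; the actual sequence is built statement
	by statement below):

	  for (idx = 0; idx < num_thr_sz; idx++)
	    {
	      ptr = (struct record *) data;   // data read back from the
					      // descriptor, presumably set
					      // by the runtime to the base
					      // of the per-thread chunks
	      for each reduction R:
		if (ptr->R_initialized)
		  { merge ptr->R into the original variable; destruct it; }
	      data += sz;
	    }  */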
8893 tree idx = create_tmp_var (size_type_node);
8894 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8895 tree num_thr_sz = create_tmp_var (size_type_node);
8896 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8897 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8898 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
8899 gimple *g;
8900 if (code == OMP_FOR || code == OMP_SECTIONS)
8901 {
8902 /* For worksharing constructs, only perform the reduction merging in the
8903 master thread, with the exception of cancelled implicit barriers - then
8904 only handle the current thread. */
8905 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8906 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8907 tree thr_num = create_tmp_var (integer_type_node);
8908 g = gimple_build_call (t, 0);
8909 gimple_call_set_lhs (g, thr_num);
8910 gimple_seq_add_stmt (end, g);
8911 if (cancellable)
8912 {
8913 tree c;
8914 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8915 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8916 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8917 if (code == OMP_FOR)
8918 c = gimple_omp_for_clauses (ctx->stmt);
8919 else /* if (code == OMP_SECTIONS) */
8920 c = gimple_omp_sections_clauses (ctx->stmt);
8921 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8922 cancellable = c;
8923 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8924 lab5, lab6);
8925 gimple_seq_add_stmt (end, g);
8926 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8927 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8928 gimple_seq_add_stmt (end, g);
8929 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8930 build_one_cst (TREE_TYPE (idx)));
8931 gimple_seq_add_stmt (end, g);
8932 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8933 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8934 }
8935 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8936 gimple_seq_add_stmt (end, g);
8937 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8938 }
8939 if (code != OMP_PARALLEL)
8940 {
8941 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8942 tree num_thr = create_tmp_var (integer_type_node);
8943 g = gimple_build_call (t, 0);
8944 gimple_call_set_lhs (g, num_thr);
8945 gimple_seq_add_stmt (end, g);
8946 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8947 gimple_seq_add_stmt (end, g);
8948 if (cancellable)
8949 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8950 }
8951 else
8952 {
8953 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8954 OMP_CLAUSE__REDUCTEMP_);
8955 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8956 t = fold_convert (size_type_node, t);
8957 gimplify_assign (num_thr_sz, t, end);
8958 }
8959 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8960 NULL_TREE, NULL_TREE);
8961 tree data = create_tmp_var (pointer_sized_int_node);
8962 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8963 if (code == OMP_TASKLOOP)
8964 {
8965 lab7 = create_artificial_label (UNKNOWN_LOCATION);
8966 g = gimple_build_cond (NE_EXPR, data,
8967 build_zero_cst (pointer_sized_int_node),
8968 lab1, lab7);
8969 gimple_seq_add_stmt (end, g);
8970 }
8971 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8972 tree ptr;
8973 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8974 ptr = create_tmp_var (build_pointer_type (record_type));
8975 else
8976 ptr = create_tmp_var (ptr_type_node);
8977 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8978
8979 tree field = TYPE_FIELDS (record_type);
8980 cnt = 0;
8981 if (cancellable)
8982 field = DECL_CHAIN (DECL_CHAIN (field));
8983 for (int pass = 0; pass < 2; pass++)
8984 {
8985 tree decl, type, next;
8986 for (tree c = clauses;
8987 omp_task_reduction_iterate (pass, code, ccode,
8988 &c, &decl, &type, &next); c = next)
8989 {
8990 tree var = decl, ref;
8991 if (TREE_CODE (decl) == MEM_REF)
8992 {
8993 var = TREE_OPERAND (var, 0);
8994 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8995 var = TREE_OPERAND (var, 0);
8996 tree v = var;
8997 if (TREE_CODE (var) == ADDR_EXPR)
8998 var = TREE_OPERAND (var, 0);
8999 else if (TREE_CODE (var) == INDIRECT_REF)
9000 var = TREE_OPERAND (var, 0);
9001 tree orig_var = var;
9002 if (is_variable_sized (var))
9003 {
9004 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9005 var = DECL_VALUE_EXPR (var);
9006 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9007 var = TREE_OPERAND (var, 0);
9008 gcc_assert (DECL_P (var));
9009 }
9010 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9011 if (orig_var != var)
9012 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9013 else if (TREE_CODE (v) == ADDR_EXPR)
9014 t = build_fold_addr_expr (t);
9015 else if (TREE_CODE (v) == INDIRECT_REF)
9016 t = build_fold_indirect_ref (t);
9017 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9018 {
9019 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9020 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9021 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9022 }
9023 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9024 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9025 fold_convert (size_type_node,
9026 TREE_OPERAND (decl, 1)));
9027 }
9028 else
9029 {
9030 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9031 if (!omp_is_reference (decl))
9032 t = build_fold_addr_expr (t);
9033 }
9034 t = fold_convert (pointer_sized_int_node, t);
9035 seq = NULL;
9036 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9037 gimple_seq_add_seq (start, seq);
9038 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9039 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9040 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9041 t = unshare_expr (byte_position (field));
9042 t = fold_convert (pointer_sized_int_node, t);
9043 ctx->task_reduction_map->put (c, cnt);
9044 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9045 ? t : NULL_TREE);
9046 seq = NULL;
9047 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9048 gimple_seq_add_seq (start, seq);
9049 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9050 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9051 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9052
9053 tree bfield = DECL_CHAIN (field);
9054 tree cond;
9055 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
9056 /* In parallel or worksharing all threads unconditionally
9057 initialize all their task reduction private variables. */
9058 cond = boolean_true_node;
9059 else if (TREE_TYPE (ptr) == ptr_type_node)
9060 {
9061 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9062 unshare_expr (byte_position (bfield)));
9063 seq = NULL;
9064 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9065 gimple_seq_add_seq (end, seq);
9066 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9067 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9068 build_int_cst (pbool, 0));
9069 }
9070 else
9071 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9072 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9073 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9074 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9075 tree condv = create_tmp_var (boolean_type_node);
9076 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9077 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9078 lab3, lab4);
9079 gimple_seq_add_stmt (end, g);
9080 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9081 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9082 {
9083 /* If this reduction doesn't need destruction and parallel
9084 has been cancelled, there is nothing to do for this
9085 reduction, so jump around the merge operation. */
9086 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9087 g = gimple_build_cond (NE_EXPR, cancellable,
9088 build_zero_cst (TREE_TYPE (cancellable)),
9089 lab4, lab5);
9090 gimple_seq_add_stmt (end, g);
9091 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9092 }
9093
9094 tree new_var;
9095 if (TREE_TYPE (ptr) == ptr_type_node)
9096 {
9097 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9098 unshare_expr (byte_position (field)));
9099 seq = NULL;
9100 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9101 gimple_seq_add_seq (end, seq);
9102 tree pbool = build_pointer_type (TREE_TYPE (field));
9103 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9104 build_int_cst (pbool, 0));
9105 }
9106 else
9107 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9108 build_simple_mem_ref (ptr), field, NULL_TREE);
9109
9110 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9111 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
9112 ref = build_simple_mem_ref (ref);
9113 /* reduction(-:var) sums up the partial results, so it acts
9114 identically to reduction(+:var). */
9115 if (rcode == MINUS_EXPR)
9116 rcode = PLUS_EXPR;
9117 if (TREE_CODE (decl) == MEM_REF)
9118 {
9119 tree type = TREE_TYPE (new_var);
9120 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9121 tree i = create_tmp_var (TREE_TYPE (v));
9122 tree ptype = build_pointer_type (TREE_TYPE (type));
9123 if (DECL_P (v))
9124 {
9125 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9126 tree vv = create_tmp_var (TREE_TYPE (v));
9127 gimplify_assign (vv, v, start);
9128 v = vv;
9129 }
9130 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9131 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9132 new_var = build_fold_addr_expr (new_var);
9133 new_var = fold_convert (ptype, new_var);
9134 ref = fold_convert (ptype, ref);
9135 tree m = create_tmp_var (ptype);
9136 gimplify_assign (m, new_var, end);
9137 new_var = m;
9138 m = create_tmp_var (ptype);
9139 gimplify_assign (m, ref, end);
9140 ref = m;
9141 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9142 tree body = create_artificial_label (UNKNOWN_LOCATION);
9143 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9144 gimple_seq_add_stmt (end, gimple_build_label (body));
9145 tree priv = build_simple_mem_ref (new_var);
9146 tree out = build_simple_mem_ref (ref);
9147 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9148 {
9149 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9150 tree decl_placeholder
9151 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9152 tree lab6 = NULL_TREE;
9153 if (cancellable)
9154 {
9155 /* If this reduction needs destruction and parallel
9156 has been cancelled, jump around the merge operation
9157 to the destruction. */
9158 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9159 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9160 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9161 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9162 lab6, lab5);
9163 gimple_seq_add_stmt (end, g);
9164 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9165 }
9166 SET_DECL_VALUE_EXPR (placeholder, out);
9167 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9168 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9169 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9170 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9171 gimple_seq_add_seq (end,
9172 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9173 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9174 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9175 {
9176 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9177 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9178 }
9179 if (cancellable)
9180 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9181 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9182 if (x)
9183 {
9184 gimple_seq tseq = NULL;
9185 gimplify_stmt (&x, &tseq);
9186 gimple_seq_add_seq (end, tseq);
9187 }
9188 }
9189 else
9190 {
9191 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9192 out = unshare_expr (out);
9193 gimplify_assign (out, x, end);
9194 }
9195 gimple *g
9196 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9197 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9198 gimple_seq_add_stmt (end, g);
9199 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9200 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9201 gimple_seq_add_stmt (end, g);
9202 g = gimple_build_assign (i, PLUS_EXPR, i,
9203 build_int_cst (TREE_TYPE (i), 1));
9204 gimple_seq_add_stmt (end, g);
9205 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9206 gimple_seq_add_stmt (end, g);
9207 gimple_seq_add_stmt (end, gimple_build_label (endl));
9208 }
9209 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9210 {
9211 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9212 tree oldv = NULL_TREE;
9213 tree lab6 = NULL_TREE;
9214 if (cancellable)
9215 {
9216 /* If this reduction needs destruction and parallel
9217 has been cancelled, jump around the merge operation
9218 to the destruction. */
9219 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9220 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9221 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9222 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9223 lab6, lab5);
9224 gimple_seq_add_stmt (end, g);
9225 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9226 }
9227 if (omp_is_reference (decl)
9228 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9229 TREE_TYPE (ref)))
9230 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9231 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9232 tree refv = create_tmp_var (TREE_TYPE (ref));
9233 gimplify_assign (refv, ref, end);
9234 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9235 SET_DECL_VALUE_EXPR (placeholder, ref);
9236 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9237 tree d = maybe_lookup_decl (decl, ctx);
9238 gcc_assert (d);
9239 if (DECL_HAS_VALUE_EXPR_P (d))
9240 oldv = DECL_VALUE_EXPR (d);
9241 if (omp_is_reference (var))
9242 {
9243 tree v = fold_convert (TREE_TYPE (d),
9244 build_fold_addr_expr (new_var));
9245 SET_DECL_VALUE_EXPR (d, v);
9246 }
9247 else
9248 SET_DECL_VALUE_EXPR (d, new_var);
9249 DECL_HAS_VALUE_EXPR_P (d) = 1;
9250 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9251 if (oldv)
9252 SET_DECL_VALUE_EXPR (d, oldv);
9253 else
9254 {
9255 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9256 DECL_HAS_VALUE_EXPR_P (d) = 0;
9257 }
9258 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9259 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9260 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9261 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9262 if (cancellable)
9263 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9264 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9265 if (x)
9266 {
9267 gimple_seq tseq = NULL;
9268 gimplify_stmt (&x, &tseq);
9269 gimple_seq_add_seq (end, tseq);
9270 }
9271 }
9272 else
9273 {
9274 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9275 ref = unshare_expr (ref);
9276 gimplify_assign (ref, x, end);
9277 }
9278 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9279 ++cnt;
9280 field = DECL_CHAIN (bfield);
9281 }
9282 }
9283
9284 if (code == OMP_TASKGROUP)
9285 {
9286 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9287 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9288 gimple_seq_add_stmt (start, g);
9289 }
9290 else
9291 {
9292 tree c;
9293 if (code == OMP_FOR)
9294 c = gimple_omp_for_clauses (ctx->stmt);
9295 else if (code == OMP_SECTIONS)
9296 c = gimple_omp_sections_clauses (ctx->stmt);
9297 else
9298 c = gimple_omp_taskreg_clauses (ctx->stmt);
9299 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9300 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9301 build_fold_addr_expr (avar));
9302 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9303 }
9304
9305 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9306 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9307 size_one_node));
9308 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9309 gimple_seq_add_stmt (end, g);
9310 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9311 if (code == OMP_FOR || code == OMP_SECTIONS)
9312 {
9313 enum built_in_function bfn
9314 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9315 t = builtin_decl_explicit (bfn);
9316 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9317 tree arg;
9318 if (cancellable)
9319 {
9320 arg = create_tmp_var (c_bool_type);
9321 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9322 cancellable));
9323 }
9324 else
9325 arg = build_int_cst (c_bool_type, 0);
9326 g = gimple_build_call (t, 1, arg);
9327 }
9328 else
9329 {
9330 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9331 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9332 }
9333 gimple_seq_add_stmt (end, g);
9334 if (lab7)
9335 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9336 t = build_constructor (atype, NULL);
9337 TREE_THIS_VOLATILE (t) = 1;
9338 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9339 }
9340
9341 /* Expand code for an OpenMP taskgroup directive. */
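/* A sketch of the shape produced here (reduction-free case):

     GOMP_taskgroup_start ();
     ... lowered taskgroup body ...
     GIMPLE_OMP_RETURN

   where the GIMPLE_OMP_RETURN marks the region exit for pass_expand_omp,
   which eventually pairs the start call with the matching
   GOMP_taskgroup_end ().  With task_reduction clauses,
   lower_omp_task_reductions places the register sequence after the start
   call and the reduce/unregister sequence (DSEQ) after the region exit.  */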
9342
9343 static void
9344 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9345 {
9346 gimple *stmt = gsi_stmt (*gsi_p);
9347 gcall *x;
9348 gbind *bind;
9349 gimple_seq dseq = NULL;
9350 tree block = make_node (BLOCK);
9351
9352 bind = gimple_build_bind (NULL, NULL, block);
9353 gsi_replace (gsi_p, bind, true);
9354 gimple_bind_add_stmt (bind, stmt);
9355
9356 push_gimplify_context ();
9357
9358 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9359 0);
9360 gimple_bind_add_stmt (bind, x);
9361
9362 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9363 gimple_omp_taskgroup_clauses (stmt),
9364 gimple_bind_body_ptr (bind), &dseq);
9365
9366 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9367 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9368 gimple_omp_set_body (stmt, NULL);
9369
9370 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9371 gimple_bind_add_seq (bind, dseq);
9372
9373 pop_gimplify_context (bind);
9374
9375 gimple_bind_append_vars (bind, ctx->block_vars);
9376 BLOCK_VARS (block) = ctx->block_vars;
9377 }
9378
9379
9380 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9381
9382 static void
9383 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9384 omp_context *ctx)
9385 {
9386 struct omp_for_data fd;
9387 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9388 return;
9389
9390 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9391 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9392 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9393 if (!fd.ordered)
9394 return;
9395
9396 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9397 tree c = gimple_omp_ordered_clauses (ord_stmt);
9398 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9399 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9400 {
9401 /* Merge depend clauses from multiple adjacent
9402 #pragma omp ordered depend(sink:...) constructs
9403 into one #pragma omp ordered depend(sink:...), so that
9404 we can optimize them together. */
9405 gimple_stmt_iterator gsi = *gsi_p;
9406 gsi_next (&gsi);
9407 while (!gsi_end_p (gsi))
9408 {
9409 gimple *stmt = gsi_stmt (gsi);
9410 if (is_gimple_debug (stmt)
9411 || gimple_code (stmt) == GIMPLE_NOP)
9412 {
9413 gsi_next (&gsi);
9414 continue;
9415 }
9416 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9417 break;
9418 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9419 c = gimple_omp_ordered_clauses (ord_stmt2);
9420 if (c == NULL_TREE
9421 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
9422 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9423 break;
9424 while (*list_p)
9425 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9426 *list_p = c;
9427 gsi_remove (&gsi, true);
9428 }
9429 }
9430
9431 /* Canonicalize sink dependence clauses into one folded clause if
9432 possible.
9433
9434 The basic algorithm is to create a sink vector whose first
9435 element is the GCD of all the first elements, and whose remaining
9436 elements are the minimum of the subsequent columns.
9437
9438 We ignore dependence vectors whose first element is zero because
9439 such dependencies are known to be executed by the same thread.
9440
9441 We take into account the direction of the loop, so a minimum
9442 becomes a maximum if the loop is iterating forwards. We also
9443 ignore sink clauses where the loop direction is unknown, or where
9444 the offsets are clearly invalid because they are not a multiple
9445 of the loop increment.
9446
9447 For example:
9448
9449 #pragma omp for ordered(2)
9450 for (i=0; i < N; ++i)
9451 for (j=0; j < M; ++j)
9452 {
9453 #pragma omp ordered \
9454 depend(sink:i-8,j-2) \
9455 depend(sink:i,j-1) \ // Completely ignored because i+0.
9456 depend(sink:i-4,j-3) \
9457 depend(sink:i-6,j-4)
9458 #pragma omp ordered depend(source)
9459 }
9460
9461 Folded clause is:
9462
9463 depend(sink:-gcd(8,4,6),-min(2,3,4))
9464 -or-
9465 depend(sink:-2,-2)
9466 */
9467
9468 /* FIXME: Computing GCDs where the first element is zero is
9469 non-trivial in the presence of collapsed loops. Do this later. */
9470 if (fd.collapse > 1)
9471 return;
9472
9473 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9474
9475 /* wide_int is not a POD so it must be default-constructed. */
9476 for (unsigned i = 0; i != 2 * len - 1; ++i)
9477 new (static_cast<void*>(folded_deps + i)) wide_int ();
9478
9479 tree folded_dep = NULL_TREE;
9480 /* TRUE if the first dimension's offset is negative. */
9481 bool neg_offset_p = false;
9482
9483 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9484 unsigned int i;
9485 while ((c = *list_p) != NULL)
9486 {
9487 bool remove = false;
9488
9489 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9490 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9491 goto next_ordered_clause;
9492
9493 tree vec;
9494 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9495 vec && TREE_CODE (vec) == TREE_LIST;
9496 vec = TREE_CHAIN (vec), ++i)
9497 {
9498 gcc_assert (i < len);
9499
9500 /* omp_extract_for_data has canonicalized the condition. */
9501 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9502 || fd.loops[i].cond_code == GT_EXPR);
9503 bool forward = fd.loops[i].cond_code == LT_EXPR;
9504 bool maybe_lexically_later = true;
9505
9506 /* While the committee makes up its mind, bail if we have any
9507 non-constant steps. */
9508 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9509 goto lower_omp_ordered_ret;
9510
9511 tree itype = TREE_TYPE (TREE_VALUE (vec));
9512 if (POINTER_TYPE_P (itype))
9513 itype = sizetype;
9514 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9515 TYPE_PRECISION (itype),
9516 TYPE_SIGN (itype));
9517
9518 /* Ignore invalid offsets that are not multiples of the step. */
9519 if (!wi::multiple_of_p (wi::abs (offset),
9520 wi::abs (wi::to_wide (fd.loops[i].step)),
9521 UNSIGNED))
9522 {
9523 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9524 "ignoring sink clause with offset that is not "
9525 "a multiple of the loop step");
9526 remove = true;
9527 goto next_ordered_clause;
9528 }
9529
9530 /* Calculate the first dimension. The first dimension of
9531 the folded dependency vector is the GCD of the first
9532 elements, while ignoring any first elements whose offset
9533 is 0. */
9534 if (i == 0)
9535 {
9536 /* Ignore dependence vectors whose first dimension is 0. */
9537 if (offset == 0)
9538 {
9539 remove = true;
9540 goto next_ordered_clause;
9541 }
9542 else
9543 {
9544 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9545 {
9546 error_at (OMP_CLAUSE_LOCATION (c),
9547 "first offset must be in opposite direction "
9548 "of loop iterations");
9549 goto lower_omp_ordered_ret;
9550 }
9551 if (forward)
9552 offset = -offset;
9553 neg_offset_p = forward;
9554 /* Initialize the first time around. */
9555 if (folded_dep == NULL_TREE)
9556 {
9557 folded_dep = c;
9558 folded_deps[0] = offset;
9559 }
9560 else
9561 folded_deps[0] = wi::gcd (folded_deps[0],
9562 offset, UNSIGNED);
9563 }
9564 }
9565 /* Calculate minimum for the remaining dimensions. */
9566 else
9567 {
9568 folded_deps[len + i - 1] = offset;
9569 if (folded_dep == c)
9570 folded_deps[i] = offset;
9571 else if (maybe_lexically_later
9572 && !wi::eq_p (folded_deps[i], offset))
9573 {
9574 if (forward ^ wi::gts_p (folded_deps[i], offset))
9575 {
9576 unsigned int j;
9577 folded_dep = c;
9578 for (j = 1; j <= i; j++)
9579 folded_deps[j] = folded_deps[len + j - 1];
9580 }
9581 else
9582 maybe_lexically_later = false;
9583 }
9584 }
9585 }
9586 gcc_assert (i == len);
9587
9588 remove = true;
9589
9590 next_ordered_clause:
9591 if (remove)
9592 *list_p = OMP_CLAUSE_CHAIN (c);
9593 else
9594 list_p = &OMP_CLAUSE_CHAIN (c);
9595 }
9596
9597 if (folded_dep)
9598 {
9599 if (neg_offset_p)
9600 folded_deps[0] = -folded_deps[0];
9601
9602 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9603 if (POINTER_TYPE_P (itype))
9604 itype = sizetype;
9605
9606 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9607 = wide_int_to_tree (itype, folded_deps[0]);
9608 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9609 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9610 }
9611
9612 lower_omp_ordered_ret:
9613
9614 /* Ordered without clauses is #pragma omp ordered threads, while we want
9615 a nop instead if we remove all clauses. */
9616 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9617 gsi_replace (gsi_p, gimple_build_nop (), true);
9618 }
9619
9620
9621 /* Expand code for an OpenMP ordered directive. */
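/* A sketch of the shape produced for the plain (non-simd) case:

     GOMP_ordered_start ();
     ... lowered body ...
     GOMP_ordered_end ();

   For simd the internal functions .GOMP_SIMD_ORDERED_START/END are used
   instead, and when SIMT execution is possible an extra per-lane loop is
   wrapped around the body so that lanes take their turn one at a time.  */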
9622
9623 static void
9624 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9625 {
9626 tree block;
9627 gimple *stmt = gsi_stmt (*gsi_p), *g;
9628 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9629 gcall *x;
9630 gbind *bind;
9631 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9632 OMP_CLAUSE_SIMD);
9633 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9634 loop. */
9635 bool maybe_simt
9636 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9637 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9638 OMP_CLAUSE_THREADS);
9639
9640 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9641 OMP_CLAUSE_DEPEND))
9642 {
9643 /* FIXME: This needs to be moved to the expansion to verify various
9644 conditions only testable on a cfg with dominators computed, and also
9645 because all the depend clauses to be merged might still need to be
9646 available for the runtime checks. */
9647 if (0)
9648 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9649 return;
9650 }
9651
9652 push_gimplify_context ();
9653
9654 block = make_node (BLOCK);
9655 bind = gimple_build_bind (NULL, NULL, block);
9656 gsi_replace (gsi_p, bind, true);
9657 gimple_bind_add_stmt (bind, stmt);
9658
9659 if (simd)
9660 {
9661 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9662 build_int_cst (NULL_TREE, threads));
9663 cfun->has_simduid_loops = true;
9664 }
9665 else
9666 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9667 0);
9668 gimple_bind_add_stmt (bind, x);
9669
9670 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9671 if (maybe_simt)
9672 {
9673 counter = create_tmp_var (integer_type_node);
9674 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9675 gimple_call_set_lhs (g, counter);
9676 gimple_bind_add_stmt (bind, g);
9677
9678 body = create_artificial_label (UNKNOWN_LOCATION);
9679 test = create_artificial_label (UNKNOWN_LOCATION);
9680 gimple_bind_add_stmt (bind, gimple_build_label (body));
9681
9682 tree simt_pred = create_tmp_var (integer_type_node);
9683 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9684 gimple_call_set_lhs (g, simt_pred);
9685 gimple_bind_add_stmt (bind, g);
9686
9687 tree t = create_artificial_label (UNKNOWN_LOCATION);
9688 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9689 gimple_bind_add_stmt (bind, g);
9690
9691 gimple_bind_add_stmt (bind, gimple_build_label (t));
9692 }
9693 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9694 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9695 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9696 gimple_omp_set_body (stmt, NULL);
9697
9698 if (maybe_simt)
9699 {
9700 gimple_bind_add_stmt (bind, gimple_build_label (test));
9701 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9702 gimple_bind_add_stmt (bind, g);
9703
9704 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9705 tree nonneg = create_tmp_var (integer_type_node);
9706 gimple_seq tseq = NULL;
9707 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9708 gimple_bind_add_seq (bind, tseq);
9709
9710 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9711 gimple_call_set_lhs (g, nonneg);
9712 gimple_bind_add_stmt (bind, g);
9713
9714 tree end = create_artificial_label (UNKNOWN_LOCATION);
9715 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9716 gimple_bind_add_stmt (bind, g);
9717
9718 gimple_bind_add_stmt (bind, gimple_build_label (end));
9719 }
9720 if (simd)
9721 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9722 build_int_cst (NULL_TREE, threads));
9723 else
9724 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9725 0);
9726 gimple_bind_add_stmt (bind, x);
9727
9728 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9729
9730 pop_gimplify_context (bind);
9731
9732 gimple_bind_append_vars (bind, ctx->block_vars);
9733 BLOCK_VARS (block) = gimple_bind_vars (bind);
9734 }
9735
9736
9737 /* Expand code for an OpenMP scan directive and the structured block
9738 before the scan directive. */
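/* An illustrative source form of what arrives here (inclusive variant;
   a sketch, with A, B and R standing in for user variables):

     #pragma omp simd reduction (inscan, +: r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];			// input phase
	 #pragma omp scan inclusive (r)
	 b[i] = r;			// scan phase
       }

   The structured block before the scan directive is the input phase and
   the one after it the scan phase; for the exclusive variant the two
   roughly trade places in the source.  */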
9739
9740 static void
9741 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9742 {
9743 gimple *stmt = gsi_stmt (*gsi_p);
9744 bool has_clauses
9745 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9746 tree lane = NULL_TREE;
9747 gimple_seq before = NULL;
9748 omp_context *octx = ctx->outer;
9749 gcc_assert (octx);
9750 if (octx->scan_exclusive && !has_clauses)
9751 {
9752 gimple_stmt_iterator gsi2 = *gsi_p;
9753 gsi_next (&gsi2);
9754 gimple *stmt2 = gsi_stmt (gsi2);
9755 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9756 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9757 the one with exclusive clause(s), comes first. */
9758 if (stmt2
9759 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9760 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9761 {
9762 gsi_remove (gsi_p, false);
9763 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9764 ctx = maybe_lookup_ctx (stmt2);
9765 gcc_assert (ctx);
9766 lower_omp_scan (gsi_p, ctx);
9767 return;
9768 }
9769 }
9770
9771 bool input_phase = has_clauses ^ octx->scan_inclusive;
9772 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9773 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9774 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9775 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9776 && !gimple_omp_for_combined_p (octx->stmt));
9777 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9778 if (is_for_simd && octx->for_simd_scan_phase)
9779 is_simd = false;
9780 if (is_simd)
9781 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9782 OMP_CLAUSE__SIMDUID_))
9783 {
9784 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9785 lane = create_tmp_var (unsigned_type_node);
9786 tree t = build_int_cst (integer_type_node,
9787 input_phase ? 1
9788 : octx->scan_inclusive ? 2 : 3);
9789 gimple *g
9790 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9791 gimple_call_set_lhs (g, lane);
9792 gimple_seq_add_stmt (&before, g);
9793 }
9794
9795 if (is_simd || is_for)
9796 {
9797 for (tree c = gimple_omp_for_clauses (octx->stmt);
9798 c; c = OMP_CLAUSE_CHAIN (c))
9799 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9800 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9801 {
9802 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9803 tree var = OMP_CLAUSE_DECL (c);
9804 tree new_var = lookup_decl (var, octx);
9805 tree val = new_var;
9806 tree var2 = NULL_TREE;
9807 tree var3 = NULL_TREE;
9808 tree var4 = NULL_TREE;
9809 tree lane0 = NULL_TREE;
9810 tree new_vard = new_var;
9811 if (omp_is_reference (var))
9812 {
9813 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9814 val = new_var;
9815 }
9816 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9817 {
9818 val = DECL_VALUE_EXPR (new_vard);
9819 if (new_vard != new_var)
9820 {
9821 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9822 val = TREE_OPERAND (val, 0);
9823 }
9824 if (TREE_CODE (val) == ARRAY_REF
9825 && VAR_P (TREE_OPERAND (val, 0)))
9826 {
9827 tree v = TREE_OPERAND (val, 0);
9828 if (lookup_attribute ("omp simd array",
9829 DECL_ATTRIBUTES (v)))
9830 {
9831 val = unshare_expr (val);
9832 lane0 = TREE_OPERAND (val, 1);
9833 TREE_OPERAND (val, 1) = lane;
9834 var2 = lookup_decl (v, octx);
9835 if (octx->scan_exclusive)
9836 var4 = lookup_decl (var2, octx);
9837 if (input_phase
9838 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9839 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9840 if (!input_phase)
9841 {
9842 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9843 var2, lane, NULL_TREE, NULL_TREE);
9844 TREE_THIS_NOTRAP (var2) = 1;
9845 if (octx->scan_exclusive)
9846 {
9847 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9848 var4, lane, NULL_TREE,
9849 NULL_TREE);
9850 TREE_THIS_NOTRAP (var4) = 1;
9851 }
9852 }
9853 else
9854 var2 = val;
9855 }
9856 }
9857 gcc_assert (var2);
9858 }
9859 else
9860 {
9861 var2 = build_outer_var_ref (var, octx);
9862 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9863 {
9864 var3 = maybe_lookup_decl (new_vard, octx);
9865 if (var3 == new_vard || var3 == NULL_TREE)
9866 var3 = NULL_TREE;
9867 else if (is_simd && octx->scan_exclusive && !input_phase)
9868 {
9869 var4 = maybe_lookup_decl (var3, octx);
9870 if (var4 == var3 || var4 == NULL_TREE)
9871 {
9872 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9873 {
9874 var4 = var3;
9875 var3 = NULL_TREE;
9876 }
9877 else
9878 var4 = NULL_TREE;
9879 }
9880 }
9881 }
9882 if (is_simd
9883 && octx->scan_exclusive
9884 && !input_phase
9885 && var4 == NULL_TREE)
9886 var4 = create_tmp_var (TREE_TYPE (val));
9887 }
9888 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9889 {
9890 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9891 if (input_phase)
9892 {
9893 if (var3)
9894 {
9895 /* If we've added a separate identity element
9896 variable, copy it over into val. */
9897 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9898 var3);
9899 gimplify_and_add (x, &before);
9900 }
9901 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9902 {
9903 /* Otherwise, assign to it the identity element. */
9904 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9905 if (is_for)
9906 tseq = copy_gimple_seq_and_replace_locals (tseq);
9907 tree ref = build_outer_var_ref (var, octx);
9908 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9909 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9910 if (x)
9911 {
9912 if (new_vard != new_var)
9913 val = build_fold_addr_expr_loc (clause_loc, val);
9914 SET_DECL_VALUE_EXPR (new_vard, val);
9915 }
9916 SET_DECL_VALUE_EXPR (placeholder, ref);
9917 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9918 lower_omp (&tseq, octx);
9919 if (x)
9920 SET_DECL_VALUE_EXPR (new_vard, x);
9921 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9922 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9923 gimple_seq_add_seq (&before, tseq);
9924 if (is_simd)
9925 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9926 }
9927 }
9928 else if (is_simd)
9929 {
9930 tree x;
9931 if (octx->scan_exclusive)
9932 {
9933 tree v4 = unshare_expr (var4);
9934 tree v2 = unshare_expr (var2);
9935 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9936 gimplify_and_add (x, &before);
9937 }
9938 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9939 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9940 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9941 tree vexpr = val;
9942 if (x && new_vard != new_var)
9943 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9944 if (x)
9945 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9946 SET_DECL_VALUE_EXPR (placeholder, var2);
9947 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9948 lower_omp (&tseq, octx);
9949 gimple_seq_add_seq (&before, tseq);
9950 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9951 if (x)
9952 SET_DECL_VALUE_EXPR (new_vard, x);
9953 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9954 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9955 if (octx->scan_inclusive)
9956 {
9957 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9958 var2);
9959 gimplify_and_add (x, &before);
9960 }
9961 else if (lane0 == NULL_TREE)
9962 {
9963 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9964 var4);
9965 gimplify_and_add (x, &before);
9966 }
9967 }
9968 }
9969 else
9970 {
9971 if (input_phase)
9972 {
9973 /* Input phase. Set val to the initializer before
9974 the body. */
9975 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9976 gimplify_assign (val, x, &before);
9977 }
9978 else if (is_simd)
9979 {
9980 /* Scan phase. */
9981 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9982 if (code == MINUS_EXPR)
9983 code = PLUS_EXPR;
9984
9985 tree x = build2 (code, TREE_TYPE (var2),
9986 unshare_expr (var2), unshare_expr (val));
9987 if (octx->scan_inclusive)
9988 {
9989 gimplify_assign (unshare_expr (var2), x, &before);
9990 gimplify_assign (val, var2, &before);
9991 }
9992 else
9993 {
9994 gimplify_assign (unshare_expr (var4),
9995 unshare_expr (var2), &before);
9996 gimplify_assign (var2, x, &before);
9997 if (lane0 == NULL_TREE)
9998 gimplify_assign (val, var4, &before);
9999 }
10000 }
10001 }
10002 if (octx->scan_exclusive && !input_phase && lane0)
10003 {
10004 tree vexpr = unshare_expr (var4);
10005 TREE_OPERAND (vexpr, 1) = lane0;
10006 if (new_vard != new_var)
10007 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10008 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10009 }
10010 }
10011 }
10012 if (is_simd && !is_for_simd)
10013 {
10014 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10015 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10016 gsi_replace (gsi_p, gimple_build_nop (), true);
10017 return;
10018 }
10019 lower_omp (gimple_omp_body_ptr (stmt), octx);
10020 if (before)
10021 {
10022 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
10023 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10024 }
10025 }
10026
10027
10028 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10029 substitution of a couple of function calls. But in the NAMED case, it
10030 requires that languages coordinate a symbol name. It is therefore
10031 best put here in common code. */
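/* E.g. (a sketch; the exact temporaries differ):

     #pragma omp critical (lock1)
     x++;

   becomes roughly

     GOMP_critical_name_start (&.gomp_critical_user_lock1);
     x++;
     GOMP_critical_name_end (&.gomp_critical_user_lock1);

   while the unnamed form uses GOMP_critical_start/GOMP_critical_end.  */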
10032
10033 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10034
10035 static void
10036 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10037 {
10038 tree block;
10039 tree name, lock, unlock;
10040 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10041 gbind *bind;
10042 location_t loc = gimple_location (stmt);
10043 gimple_seq tbody;
10044
10045 name = gimple_omp_critical_name (stmt);
10046 if (name)
10047 {
10048 tree decl;
10049
10050 if (!critical_name_mutexes)
10051 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10052
10053 tree *n = critical_name_mutexes->get (name);
10054 if (n == NULL)
10055 {
10056 char *new_str;
10057
10058 decl = create_tmp_var_raw (ptr_type_node);
10059
10060 new_str = ACONCAT ((".gomp_critical_user_",
10061 IDENTIFIER_POINTER (name), NULL));
10062 DECL_NAME (decl) = get_identifier (new_str);
10063 TREE_PUBLIC (decl) = 1;
10064 TREE_STATIC (decl) = 1;
10065 DECL_COMMON (decl) = 1;
10066 DECL_ARTIFICIAL (decl) = 1;
10067 DECL_IGNORED_P (decl) = 1;
10068
10069 varpool_node::finalize_decl (decl);
10070
10071 critical_name_mutexes->put (name, decl);
10072 }
10073 else
10074 decl = *n;
10075
10076 /* If '#pragma omp critical' is inside offloaded region or
10077 inside function marked as offloadable, the symbol must be
10078 marked as offloadable too. */
10079 omp_context *octx;
10080 if (cgraph_node::get (current_function_decl)->offloadable)
10081 varpool_node::get_create (decl)->offloadable = 1;
10082 else
10083 for (octx = ctx->outer; octx; octx = octx->outer)
10084 if (is_gimple_omp_offloaded (octx->stmt))
10085 {
10086 varpool_node::get_create (decl)->offloadable = 1;
10087 break;
10088 }
10089
10090 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10091 lock = build_call_expr_loc (loc, lock, 1,
10092 build_fold_addr_expr_loc (loc, decl));
10093
10094 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10095 unlock = build_call_expr_loc (loc, unlock, 1,
10096 build_fold_addr_expr_loc (loc, decl));
10097 }
10098 else
10099 {
10100 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10101 lock = build_call_expr_loc (loc, lock, 0);
10102
10103 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10104 unlock = build_call_expr_loc (loc, unlock, 0);
10105 }
10106
10107 push_gimplify_context ();
10108
10109 block = make_node (BLOCK);
10110 bind = gimple_build_bind (NULL, NULL, block);
10111 gsi_replace (gsi_p, bind, true);
10112 gimple_bind_add_stmt (bind, stmt);
10113
10114 tbody = gimple_bind_body (bind);
10115 gimplify_and_add (lock, &tbody);
10116 gimple_bind_set_body (bind, tbody);
10117
10118 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10119 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10120 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10121 gimple_omp_set_body (stmt, NULL);
10122
10123 tbody = gimple_bind_body (bind);
10124 gimplify_and_add (unlock, &tbody);
10125 gimple_bind_set_body (bind, tbody);
10126
10127 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10128
10129 pop_gimplify_context (bind);
10130 gimple_bind_append_vars (bind, ctx->block_vars);
10131 BLOCK_VARS (block) = gimple_bind_vars (bind);
10132 }
10133
10134 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10135 for a lastprivate clause. Given a loop control predicate of (V
10136 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10137 is appended to *DLIST, iterator initialization is appended to
10138 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10139 to be emitted in a critical section. */
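/* E.g. for (a sketch)

     #pragma omp for lastprivate (x)
     for (i = 0; i < n; i++) ...

   the control predicate is (i < n), so each thread's lastprivate copy-out
   is gated on !(i < n), i.e. i >= n: only the thread whose chunk covered
   the final iteration writes its private X back (with unit step the code
   below strengthens this to i == n).  */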
10140
10141 static void
10142 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10143 gimple_seq *dlist, gimple_seq *clist,
10144 struct omp_context *ctx)
10145 {
10146 tree clauses, cond, vinit;
10147 enum tree_code cond_code;
10148 gimple_seq stmts;
10149
10150 cond_code = fd->loop.cond_code;
10151 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10152
10153 /* When possible, use a strict equality expression. This can let VRP-style
10154 optimizations deduce the value and remove a copy. */
10155 if (tree_fits_shwi_p (fd->loop.step))
10156 {
10157 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10158 if (step == 1 || step == -1)
10159 cond_code = EQ_EXPR;
10160 }
10161
10162 tree n2 = fd->loop.n2;
10163 if (fd->collapse > 1
10164 && TREE_CODE (n2) != INTEGER_CST
10165 && gimple_omp_for_combined_into_p (fd->for_stmt))
10166 {
10167 struct omp_context *taskreg_ctx = NULL;
10168 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10169 {
10170 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10171 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10172 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10173 {
10174 if (gimple_omp_for_combined_into_p (gfor))
10175 {
10176 gcc_assert (ctx->outer->outer
10177 && is_parallel_ctx (ctx->outer->outer));
10178 taskreg_ctx = ctx->outer->outer;
10179 }
10180 else
10181 {
10182 struct omp_for_data outer_fd;
10183 omp_extract_for_data (gfor, &outer_fd, NULL);
10184 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10185 }
10186 }
10187 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10188 taskreg_ctx = ctx->outer->outer;
10189 }
10190 else if (is_taskreg_ctx (ctx->outer))
10191 taskreg_ctx = ctx->outer;
10192 if (taskreg_ctx)
10193 {
10194 int i;
10195 tree taskreg_clauses
10196 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10197 tree innerc = omp_find_clause (taskreg_clauses,
10198 OMP_CLAUSE__LOOPTEMP_);
10199 gcc_assert (innerc);
10200 int count = fd->collapse;
10201 if (fd->non_rect
10202 && fd->last_nonrect == fd->first_nonrect + 1)
10203 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10204 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10205 count += 4;
10206 for (i = 0; i < count; i++)
10207 {
10208 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10209 OMP_CLAUSE__LOOPTEMP_);
10210 gcc_assert (innerc);
10211 }
10212 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10213 OMP_CLAUSE__LOOPTEMP_);
10214 if (innerc)
10215 n2 = fold_convert (TREE_TYPE (n2),
10216 lookup_decl (OMP_CLAUSE_DECL (innerc),
10217 taskreg_ctx));
10218 }
10219 }
10220 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10221
10222 clauses = gimple_omp_for_clauses (fd->for_stmt);
10223 stmts = NULL;
10224 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10225 if (!gimple_seq_empty_p (stmts))
10226 {
10227 gimple_seq_add_seq (&stmts, *dlist);
10228 *dlist = stmts;
10229
10230 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10231 vinit = fd->loop.n1;
10232 if (cond_code == EQ_EXPR
10233 && tree_fits_shwi_p (fd->loop.n2)
10234 && ! integer_zerop (fd->loop.n2))
10235 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10236 else
10237 vinit = unshare_expr (vinit);
10238
10239 /* Initialize the iterator variable, so that threads that don't execute
10240 any iterations don't execute the lastprivate clauses by accident. */
10241 gimplify_assign (fd->loop.v, vinit, body_p);
10242 }
10243 }
10244
10245 /* OpenACC privatization.
10246
10247 Or, in other words, *sharing* at the respective OpenACC level of
10248 parallelism.
10249
10250 From a correctness perspective, a non-addressable variable can't be accessed
10251 outside the current thread, so it can go in a (faster than shared memory)
10252 register -- though that register may need to be broadcast in some
10253 circumstances. A variable can only meaningfully be "shared" across workers
10254 or vector lanes if its address is taken, e.g. by a call to an atomic
10255 builtin.
10256
10257 From an optimisation perspective, the answer might be fuzzier: maybe
10258 sometimes, using shared memory directly would be faster than
10259 broadcasting. */
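/* For instance (illustrative only):

     #pragma acc parallel loop
     for (i = 0; i < n; i++)
       { int t = f (i); a[i] = t; }

   Here T is not addressable, so each gang/worker/vector lane can keep its
   own copy in a register; only if T's address were taken (say, passed to
   an atomic builtin) would genuine sharing be required.  */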
10260
10261 static void
10262 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10263 const location_t loc, const tree c,
10264 const tree decl)
10265 {
10266 const dump_user_location_t d_u_loc
10267 = dump_user_location_t::from_location_t (loc);
10268 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10269 #if __GNUC__ >= 10
10270 # pragma GCC diagnostic push
10271 # pragma GCC diagnostic ignored "-Wformat"
10272 #endif
10273 dump_printf_loc (l_dump_flags, d_u_loc,
10274 "variable %<%T%> ", decl);
10275 #if __GNUC__ >= 10
10276 # pragma GCC diagnostic pop
10277 #endif
10278 if (c)
10279 dump_printf (l_dump_flags,
10280 "in %qs clause ",
10281 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10282 else
10283 dump_printf (l_dump_flags,
10284 "declared in block ");
10285 }
10286
10287 static bool
10288 oacc_privatization_candidate_p (const location_t loc, const tree c,
10289 const tree decl)
10290 {
10291 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10292
10293 /* The checks differ slightly depending on whether the variable comes from a block or from a clause. */
10294 bool block = !c;
10295
10296 bool res = true;
10297
10298 if (res && !VAR_P (decl))
10299 {
10300 res = false;
10301
10302 if (dump_enabled_p ())
10303 {
10304 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10305 dump_printf (l_dump_flags,
10306 "potentially has improper OpenACC privatization level: %qs\n",
10307 get_tree_code_name (TREE_CODE (decl)));
10308 }
10309 }
10310
10311 if (res && block && TREE_STATIC (decl))
10312 {
10313 res = false;
10314
10315 if (dump_enabled_p ())
10316 {
10317 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10318 dump_printf (l_dump_flags,
10319 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10320 "static");
10321 }
10322 }
10323
10324 if (res && block && DECL_EXTERNAL (decl))
10325 {
10326 res = false;
10327
10328 if (dump_enabled_p ())
10329 {
10330 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10331 dump_printf (l_dump_flags,
10332 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10333 "external");
10334 }
10335 }
10336
10337 if (res && !TREE_ADDRESSABLE (decl))
10338 {
10339 res = false;
10340
10341 if (dump_enabled_p ())
10342 {
10343 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10344 dump_printf (l_dump_flags,
10345 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10346 "not addressable");
10347 }
10348 }
10349
10350 if (res)
10351 {
10352 if (dump_enabled_p ())
10353 {
10354 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10355 dump_printf (l_dump_flags,
10356 "is candidate for adjusting OpenACC privatization level\n");
10357 }
10358 }
10359
10360 if (dump_file && (dump_flags & TDF_DETAILS))
10361 {
10362 print_generic_decl (dump_file, decl, dump_flags);
10363 fprintf (dump_file, "\n");
10364 }
10365
10366 return res;
10367 }
10368
10369 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10370 CTX. */
10371
10372 static void
10373 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10374 {
10375 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10376 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10377 {
10378 tree decl = OMP_CLAUSE_DECL (c);
10379
10380 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c, decl))
10381 continue;
10382
10383 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10384 ctx->oacc_privatization_candidates.safe_push (decl);
10385 }
10386 }
10387
10388 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10389 CTX. */
10390
10391 static void
10392 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10393 {
10394 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10395 {
10396 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL, decl))
10397 continue;
10398
10399 gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10400 ctx->oacc_privatization_candidates.safe_push (decl);
10401 }
10402 }
10403
10404 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10405
10406 static tree
10407 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10408 struct walk_stmt_info *wi)
10409 {
10410 gimple *stmt = gsi_stmt (*gsi_p);
10411
10412 *handled_ops_p = true;
10413 switch (gimple_code (stmt))
10414 {
10415 WALK_SUBSTMTS;
10416
10417 case GIMPLE_OMP_FOR:
10418 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10419 && gimple_omp_for_combined_into_p (stmt))
10420 *handled_ops_p = false;
10421 break;
10422
10423 case GIMPLE_OMP_SCAN:
10424 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10425 return integer_zero_node;
10426 default:
10427 break;
10428 }
10429 return NULL;
10430 }
10431
10432 /* Helper function for lower_omp_for; add transformations for a worksharing
10433 loop with scan directives inside it.
10434 For a worksharing loop not combined with simd, transform:
10435 #pragma omp for reduction(inscan,+:r) private(i)
10436 for (i = 0; i < n; i = i + 1)
10437 {
10438 {
10439 update (r);
10440 }
10441 #pragma omp scan inclusive(r)
10442 {
10443 use (r);
10444 }
10445 }
10446
10447 into two worksharing loops + code to merge results:
10448
10449 num_threads = omp_get_num_threads ();
10450 thread_num = omp_get_thread_num ();
10451 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10452 <D.2099>:
10453 var2 = r;
10454 goto <D.2101>;
10455 <D.2100>:
10456 // For UDRs this is UDR init, or if ctors are needed, copy from
10457 // var3 that has been constructed to contain the neutral element.
10458 var2 = 0;
10459 <D.2101>:
10460 ivar = 0;
10461 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10462 // a shared array with num_threads elements and rprivb to a local array
10463 // number of elements equal to the number of (contiguous) iterations the
10464 // current thread will perform. controlb and controlp variables are
10465 // temporaries to handle deallocation of rprivb at the end of second
10466 // GOMP_FOR.
10467 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10468 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10469 for (i = 0; i < n; i = i + 1)
10470 {
10471 {
10472 // For UDRs this is UDR init or copy from var3.
10473 r = 0;
10474 // This is the input phase from user code.
10475 update (r);
10476 }
10477 {
10478 // For UDRs this is UDR merge.
10479 var2 = var2 + r;
10480 // Rather than handing it over to the user, save to local thread's
10481 // array.
10482 rprivb[ivar] = var2;
10483 // For exclusive scan, the above two statements are swapped.
10484 ivar = ivar + 1;
10485 }
10486 }
10487 // And remember this thread's final value in the shared
10488 // rpriva array.
10489 rpriva[(sizetype) thread_num] = var2;
10490 // If more than one thread, compute using Work-Efficient prefix sum
10491 // the inclusive parallel scan of the rpriva array.
10492 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10493 <D.2102>:
10494 GOMP_barrier ();
10495 down = 0;
10496 k = 1;
10497 num_threadsu = (unsigned int) num_threads;
10498 thread_numup1 = (unsigned int) thread_num + 1;
10499 <D.2108>:
10500 twok = k << 1;
10501 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10502 <D.2110>:
10503 down = 4294967295;
10504 k = k >> 1;
10505 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10506 <D.2112>:
10507 k = k >> 1;
10508 <D.2111>:
10509 twok = k << 1;
10510 cplx = .MUL_OVERFLOW (thread_numup1, twok);
10511 mul = REALPART_EXPR <cplx>;
10512 ovf = IMAGPART_EXPR <cplx>;
10513 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10514 <D.2116>:
10515 andv = k & down;
10516 andvm1 = andv + 4294967295;
10517 l = mul + andvm1;
10518 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10519 <D.2120>:
10520      // For UDRs this is UDR merge, performed using the var2 variable as a temporary,
10521 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10522 rpriva[l] = rpriva[l - k] + rpriva[l];
10523 <D.2117>:
10524 if (down == 0) goto <D.2121>; else goto <D.2122>;
10525 <D.2121>:
10526 k = k << 1;
10527 goto <D.2123>;
10528 <D.2122>:
10529 k = k >> 1;
10530 <D.2123>:
10531 GOMP_barrier ();
10532 if (k != 0) goto <D.2108>; else goto <D.2103>;
10533 <D.2103>:
10534 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10535 <D.2124>:
10536 // For UDRs this is UDR init or copy from var3.
10537 var2 = 0;
10538 goto <D.2126>;
10539 <D.2125>:
10540 var2 = rpriva[thread_num - 1];
10541 <D.2126>:
10542 ivar = 0;
10543 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10544 reduction(inscan,+:r) private(i)
10545 for (i = 0; i < n; i = i + 1)
10546 {
10547 {
10548 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10549 r = var2 + rprivb[ivar];
10550 }
10551 {
10552 // This is the scan phase from user code.
10553 use (r);
10554 // Plus a bump of the iterator.
10555 ivar = ivar + 1;
10556 }
10557 } */
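
/* For reference, a minimal user-level example that this function lowers
   might look as follows (an illustrative sketch, not taken from any
   particular testcase; a, b and n are assumed to exist):

     int r = 0;
     #pragma omp parallel
     #pragma omp for reduction (inscan, + : r)
     for (int i = 0; i < n; i++)
       {
	 r += a[i];			// input phase: update (r)
	 #pragma omp scan inclusive (r)
	 b[i] = r;			// scan phase: use (r)
       }
*/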
10558
10559 static void
10560 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10561 struct omp_for_data *fd, omp_context *ctx)
10562 {
10563 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10564 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10565
10566 gimple_seq body = gimple_omp_body (stmt);
10567 gimple_stmt_iterator input1_gsi = gsi_none ();
10568 struct walk_stmt_info wi;
10569 memset (&wi, 0, sizeof (wi));
10570 wi.val_only = true;
10571 wi.info = (void *) &input1_gsi;
10572 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10573 gcc_assert (!gsi_end_p (input1_gsi));
10574
10575 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10576 gimple_stmt_iterator gsi = input1_gsi;
10577 gsi_next (&gsi);
10578 gimple_stmt_iterator scan1_gsi = gsi;
10579 gimple *scan_stmt1 = gsi_stmt (gsi);
10580 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10581
10582 gimple_seq input_body = gimple_omp_body (input_stmt1);
10583 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10584 gimple_omp_set_body (input_stmt1, NULL);
10585 gimple_omp_set_body (scan_stmt1, NULL);
10586 gimple_omp_set_body (stmt, NULL);
10587
10588 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10589 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10590 gimple_omp_set_body (stmt, body);
10591 gimple_omp_set_body (input_stmt1, input_body);
10592
10593 gimple_stmt_iterator input2_gsi = gsi_none ();
10594 memset (&wi, 0, sizeof (wi));
10595 wi.val_only = true;
10596 wi.info = (void *) &input2_gsi;
10597 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10598 gcc_assert (!gsi_end_p (input2_gsi));
10599
10600 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10601 gsi = input2_gsi;
10602 gsi_next (&gsi);
10603 gimple_stmt_iterator scan2_gsi = gsi;
10604 gimple *scan_stmt2 = gsi_stmt (gsi);
10605 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10606 gimple_omp_set_body (scan_stmt2, scan_body);
10607
10608 gimple_stmt_iterator input3_gsi = gsi_none ();
10609 gimple_stmt_iterator scan3_gsi = gsi_none ();
10610 gimple_stmt_iterator input4_gsi = gsi_none ();
10611 gimple_stmt_iterator scan4_gsi = gsi_none ();
10612 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10613 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10614 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10615 if (is_for_simd)
10616 {
10617 memset (&wi, 0, sizeof (wi));
10618 wi.val_only = true;
10619 wi.info = (void *) &input3_gsi;
10620 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10621 gcc_assert (!gsi_end_p (input3_gsi));
10622
10623 input_stmt3 = gsi_stmt (input3_gsi);
10624 gsi = input3_gsi;
10625 gsi_next (&gsi);
10626 scan3_gsi = gsi;
10627 scan_stmt3 = gsi_stmt (gsi);
10628 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10629
10630 memset (&wi, 0, sizeof (wi));
10631 wi.val_only = true;
10632 wi.info = (void *) &input4_gsi;
10633 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10634 gcc_assert (!gsi_end_p (input4_gsi));
10635
10636 input_stmt4 = gsi_stmt (input4_gsi);
10637 gsi = input4_gsi;
10638 gsi_next (&gsi);
10639 scan4_gsi = gsi;
10640 scan_stmt4 = gsi_stmt (gsi);
10641 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10642
10643 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10644 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10645 }
10646
10647 tree num_threads = create_tmp_var (integer_type_node);
10648 tree thread_num = create_tmp_var (integer_type_node);
10649 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10650 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10651 gimple *g = gimple_build_call (nthreads_decl, 0);
10652 gimple_call_set_lhs (g, num_threads);
10653 gimple_seq_add_stmt (body_p, g);
10654 g = gimple_build_call (threadnum_decl, 0);
10655 gimple_call_set_lhs (g, thread_num);
10656 gimple_seq_add_stmt (body_p, g);
10657
10658 tree ivar = create_tmp_var (sizetype);
10659 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10660 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10661 tree k = create_tmp_var (unsigned_type_node);
10662 tree l = create_tmp_var (unsigned_type_node);
10663
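  /* Statement lists filled in by the clause loop below; as a descriptive
     summary, they correspond to pieces of the pseudocode in the comment
     above: clist and dlist hold constructor and destructor calls for var2,
     mdlist the stores of each thread's final value into rpriva,
     thr01_list/thrn1_list the initialization of the private accumulator on
     thread 0 / on the other threads before the first loop,
     thr02_list/thrn2_list likewise before the second loop, scan1_list the
     replacement scan phase of the first loop, input2_list the replacement
     input phase of the second loop, last_list the final copy-out from
     rpriva to the original list item, and reduc_list the merge step of the
     prefix-sum loop.  */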
10664 gimple_seq clist = NULL, mdlist = NULL;
10665 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10666 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10667 gimple_seq scan1_list = NULL, input2_list = NULL;
10668 gimple_seq last_list = NULL, reduc_list = NULL;
10669 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10670 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10671 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10672 {
10673 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10674 tree var = OMP_CLAUSE_DECL (c);
10675 tree new_var = lookup_decl (var, ctx);
10676 tree var3 = NULL_TREE;
10677 tree new_vard = new_var;
10678 if (omp_is_reference (var))
10679 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10680 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10681 {
10682 var3 = maybe_lookup_decl (new_vard, ctx);
10683 if (var3 == new_vard)
10684 var3 = NULL_TREE;
10685 }
10686
10687 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10688 tree rpriva = create_tmp_var (ptype);
10689 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10690 OMP_CLAUSE_DECL (nc) = rpriva;
10691 *cp1 = nc;
10692 cp1 = &OMP_CLAUSE_CHAIN (nc);
10693
10694 tree rprivb = create_tmp_var (ptype);
10695 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10696 OMP_CLAUSE_DECL (nc) = rprivb;
10697 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10698 *cp1 = nc;
10699 cp1 = &OMP_CLAUSE_CHAIN (nc);
10700
10701 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10702 if (new_vard != new_var)
10703 TREE_ADDRESSABLE (var2) = 1;
10704 gimple_add_tmp_var (var2);
10705
10706 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10707 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10708 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10709 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10710 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10711
10712 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10713 thread_num, integer_minus_one_node);
10714 x = fold_convert_loc (clause_loc, sizetype, x);
10715 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10716 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10717 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10718 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10719
10720 x = fold_convert_loc (clause_loc, sizetype, l);
10721 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10722 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10723 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10724 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10725
10726 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10727 x = fold_convert_loc (clause_loc, sizetype, x);
10728 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10729 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10730 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10731 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10732
10733 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10734 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10735 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10736 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10737
10738 tree var4 = is_for_simd ? new_var : var2;
10739 tree var5 = NULL_TREE, var6 = NULL_TREE;
10740 if (is_for_simd)
10741 {
10742 var5 = lookup_decl (var, input_simd_ctx);
10743 var6 = lookup_decl (var, scan_simd_ctx);
10744 if (new_vard != new_var)
10745 {
10746 var5 = build_simple_mem_ref_loc (clause_loc, var5);
10747 var6 = build_simple_mem_ref_loc (clause_loc, var6);
10748 }
10749 }
10750 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10751 {
10752 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10753 tree val = var2;
10754
10755 x = lang_hooks.decls.omp_clause_default_ctor
10756 (c, var2, build_outer_var_ref (var, ctx));
10757 if (x)
10758 gimplify_and_add (x, &clist);
10759
10760 x = build_outer_var_ref (var, ctx);
10761 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10762 x);
10763 gimplify_and_add (x, &thr01_list);
10764
10765 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
10766 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10767 if (var3)
10768 {
10769 x = unshare_expr (var4);
10770 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10771 gimplify_and_add (x, &thrn1_list);
10772 x = unshare_expr (var4);
10773 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10774 gimplify_and_add (x, &thr02_list);
10775 }
10776 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10777 {
10778 /* Otherwise, assign to it the identity element. */
10779 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10780 tseq = copy_gimple_seq_and_replace_locals (tseq);
10781 if (!is_for_simd)
10782 {
10783 if (new_vard != new_var)
10784 val = build_fold_addr_expr_loc (clause_loc, val);
10785 SET_DECL_VALUE_EXPR (new_vard, val);
10786 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10787 }
10788 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
10789 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10790 lower_omp (&tseq, ctx);
10791 gimple_seq_add_seq (&thrn1_list, tseq);
10792 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10793 lower_omp (&tseq, ctx);
10794 gimple_seq_add_seq (&thr02_list, tseq);
10795 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10796 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10797 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10798 if (y)
10799 SET_DECL_VALUE_EXPR (new_vard, y);
10800 else
10801 {
10802 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10803 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10804 }
10805 }
10806
10807 x = unshare_expr (var4);
10808 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
10809 gimplify_and_add (x, &thrn2_list);
10810
10811 if (is_for_simd)
10812 {
10813 x = unshare_expr (rprivb_ref);
10814 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
10815 gimplify_and_add (x, &scan1_list);
10816 }
10817 else
10818 {
10819 if (ctx->scan_exclusive)
10820 {
10821 x = unshare_expr (rprivb_ref);
10822 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10823 gimplify_and_add (x, &scan1_list);
10824 }
10825
10826 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10827 tseq = copy_gimple_seq_and_replace_locals (tseq);
10828 SET_DECL_VALUE_EXPR (placeholder, var2);
10829 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10830 lower_omp (&tseq, ctx);
10831 gimple_seq_add_seq (&scan1_list, tseq);
10832
10833 if (ctx->scan_inclusive)
10834 {
10835 x = unshare_expr (rprivb_ref);
10836 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10837 gimplify_and_add (x, &scan1_list);
10838 }
10839 }
10840
10841 x = unshare_expr (rpriva_ref);
10842 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10843 unshare_expr (var4));
10844 gimplify_and_add (x, &mdlist);
10845
10846 x = unshare_expr (is_for_simd ? var6 : new_var);
10847 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10848 gimplify_and_add (x, &input2_list);
10849
10850 val = rprivb_ref;
10851 if (new_vard != new_var)
10852 val = build_fold_addr_expr_loc (clause_loc, val);
10853
10854 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10855 tseq = copy_gimple_seq_and_replace_locals (tseq);
10856 SET_DECL_VALUE_EXPR (new_vard, val);
10857 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10858 if (is_for_simd)
10859 {
10860 SET_DECL_VALUE_EXPR (placeholder, var6);
10861 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10862 }
10863 else
10864 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10865 lower_omp (&tseq, ctx);
10866 if (y)
10867 SET_DECL_VALUE_EXPR (new_vard, y);
10868 else
10869 {
10870 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10871 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10872 }
10873 if (!is_for_simd)
10874 {
10875 SET_DECL_VALUE_EXPR (placeholder, new_var);
10876 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10877 lower_omp (&tseq, ctx);
10878 }
10879 gimple_seq_add_seq (&input2_list, tseq);
10880
10881 x = build_outer_var_ref (var, ctx);
10882 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10883 gimplify_and_add (x, &last_list);
10884
10885 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10886 gimplify_and_add (x, &reduc_list);
10887 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10888 tseq = copy_gimple_seq_and_replace_locals (tseq);
10889 val = rprival_ref;
10890 if (new_vard != new_var)
10891 val = build_fold_addr_expr_loc (clause_loc, val);
10892 SET_DECL_VALUE_EXPR (new_vard, val);
10893 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10894 SET_DECL_VALUE_EXPR (placeholder, var2);
10895 lower_omp (&tseq, ctx);
10896 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10897 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10898 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10899 if (y)
10900 SET_DECL_VALUE_EXPR (new_vard, y);
10901 else
10902 {
10903 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10904 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10905 }
10906 gimple_seq_add_seq (&reduc_list, tseq);
10907 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10908 gimplify_and_add (x, &reduc_list);
10909
10910 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10911 if (x)
10912 gimplify_and_add (x, dlist);
10913 }
10914 else
10915 {
10916 x = build_outer_var_ref (var, ctx);
10917 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10918
10919 x = omp_reduction_init (c, TREE_TYPE (new_var));
10920 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10921 &thrn1_list);
10922 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10923
10924 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10925
10926 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10927 if (code == MINUS_EXPR)
10928 code = PLUS_EXPR;
10929
10930 if (is_for_simd)
10931 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10932 else
10933 {
10934 if (ctx->scan_exclusive)
10935 gimplify_assign (unshare_expr (rprivb_ref), var2,
10936 &scan1_list);
10937 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10938 gimplify_assign (var2, x, &scan1_list);
10939 if (ctx->scan_inclusive)
10940 gimplify_assign (unshare_expr (rprivb_ref), var2,
10941 &scan1_list);
10942 }
10943
10944 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10945 &mdlist);
10946
10947 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10948 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10949
10950 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10951 &last_list);
10952
10953 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10954 unshare_expr (rprival_ref));
10955 gimplify_assign (rprival_ref, x, &reduc_list);
10956 }
10957 }
10958
10959 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10960 gimple_seq_add_stmt (&scan1_list, g);
10961 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10962 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10963 ? scan_stmt4 : scan_stmt2), g);
10964
10965 tree controlb = create_tmp_var (boolean_type_node);
10966 tree controlp = create_tmp_var (ptr_type_node);
10967 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10968 OMP_CLAUSE_DECL (nc) = controlb;
10969 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10970 *cp1 = nc;
10971 cp1 = &OMP_CLAUSE_CHAIN (nc);
10972 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10973 OMP_CLAUSE_DECL (nc) = controlp;
10974 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10975 *cp1 = nc;
10976 cp1 = &OMP_CLAUSE_CHAIN (nc);
10977 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10978 OMP_CLAUSE_DECL (nc) = controlb;
10979 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10980 *cp2 = nc;
10981 cp2 = &OMP_CLAUSE_CHAIN (nc);
10982 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10983 OMP_CLAUSE_DECL (nc) = controlp;
10984 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10985 *cp2 = nc;
10986 cp2 = &OMP_CLAUSE_CHAIN (nc);
10987
10988 *cp1 = gimple_omp_for_clauses (stmt);
10989 gimple_omp_for_set_clauses (stmt, new_clauses1);
10990 *cp2 = gimple_omp_for_clauses (new_stmt);
10991 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10992
10993 if (is_for_simd)
10994 {
10995 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10996 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10997
10998 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10999 GSI_SAME_STMT);
11000 gsi_remove (&input3_gsi, true);
11001 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11002 GSI_SAME_STMT);
11003 gsi_remove (&scan3_gsi, true);
11004 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11005 GSI_SAME_STMT);
11006 gsi_remove (&input4_gsi, true);
11007 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11008 GSI_SAME_STMT);
11009 gsi_remove (&scan4_gsi, true);
11010 }
11011 else
11012 {
11013 gimple_omp_set_body (scan_stmt1, scan1_list);
11014 gimple_omp_set_body (input_stmt2, input2_list);
11015 }
11016
11017 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11018 GSI_SAME_STMT);
11019 gsi_remove (&input1_gsi, true);
11020 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11021 GSI_SAME_STMT);
11022 gsi_remove (&scan1_gsi, true);
11023 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11024 GSI_SAME_STMT);
11025 gsi_remove (&input2_gsi, true);
11026 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11027 GSI_SAME_STMT);
11028 gsi_remove (&scan2_gsi, true);
11029
11030 gimple_seq_add_seq (body_p, clist);
11031
11032 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11033 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11034 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11035 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11036 gimple_seq_add_stmt (body_p, g);
11037 g = gimple_build_label (lab1);
11038 gimple_seq_add_stmt (body_p, g);
11039 gimple_seq_add_seq (body_p, thr01_list);
11040 g = gimple_build_goto (lab3);
11041 gimple_seq_add_stmt (body_p, g);
11042 g = gimple_build_label (lab2);
11043 gimple_seq_add_stmt (body_p, g);
11044 gimple_seq_add_seq (body_p, thrn1_list);
11045 g = gimple_build_label (lab3);
11046 gimple_seq_add_stmt (body_p, g);
11047
11048 g = gimple_build_assign (ivar, size_zero_node);
11049 gimple_seq_add_stmt (body_p, g);
11050
11051 gimple_seq_add_stmt (body_p, stmt);
11052 gimple_seq_add_seq (body_p, body);
11053 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11054 fd->loop.v));
11055
11056 g = gimple_build_omp_return (true);
11057 gimple_seq_add_stmt (body_p, g);
11058 gimple_seq_add_seq (body_p, mdlist);
11059
11060 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11061 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11062 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11063 gimple_seq_add_stmt (body_p, g);
11064 g = gimple_build_label (lab1);
11065 gimple_seq_add_stmt (body_p, g);
11066
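  /* The GIMPLE built below implements the work-efficient parallel prefix
     sum over rpriva shown as <D.2108>..<D.2123> in the comment above;
     roughly the following C, executed by every thread in the team (an
     approximation for exposition only):

       GOMP_barrier ();
       unsigned down = 0, k = 1;
       do
	 {
	   if ((k << 1) > num_threadsu)	// flip from up-sweep to down-sweep
	     {
	       down = ~0U;
	       k >>= 1;
	       if (k == num_threadsu)
		 k >>= 1;
	     }
	   unsigned l = thread_nump1 * (k << 1) + ((k & down) - 1);
	   // the merge is skipped when the multiplication above overflowed
	   if (l < num_threadsu)
	     rpriva[l] = rpriva[l - k] + rpriva[l];	// or the UDR merge
	   k = down == 0 ? k << 1 : k >> 1;
	   GOMP_barrier ();
	 }
       while (k != 0);
  */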
11067 g = omp_build_barrier (NULL);
11068 gimple_seq_add_stmt (body_p, g);
11069
11070 tree down = create_tmp_var (unsigned_type_node);
11071 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11072 gimple_seq_add_stmt (body_p, g);
11073
11074 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11075 gimple_seq_add_stmt (body_p, g);
11076
11077 tree num_threadsu = create_tmp_var (unsigned_type_node);
11078 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11079 gimple_seq_add_stmt (body_p, g);
11080
11081 tree thread_numu = create_tmp_var (unsigned_type_node);
11082 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11083 gimple_seq_add_stmt (body_p, g);
11084
11085 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11086 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11087 build_int_cst (unsigned_type_node, 1));
11088 gimple_seq_add_stmt (body_p, g);
11089
11090 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11091 g = gimple_build_label (lab3);
11092 gimple_seq_add_stmt (body_p, g);
11093
11094 tree twok = create_tmp_var (unsigned_type_node);
11095 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11096 gimple_seq_add_stmt (body_p, g);
11097
11098 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11099 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11100 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11101 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11102 gimple_seq_add_stmt (body_p, g);
11103 g = gimple_build_label (lab4);
11104 gimple_seq_add_stmt (body_p, g);
11105 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11106 gimple_seq_add_stmt (body_p, g);
11107 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11108 gimple_seq_add_stmt (body_p, g);
11109
11110 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11111 gimple_seq_add_stmt (body_p, g);
11112 g = gimple_build_label (lab6);
11113 gimple_seq_add_stmt (body_p, g);
11114
11115 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11116 gimple_seq_add_stmt (body_p, g);
11117
11118 g = gimple_build_label (lab5);
11119 gimple_seq_add_stmt (body_p, g);
11120
11121 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11122 gimple_seq_add_stmt (body_p, g);
11123
11124 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11125 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11126 gimple_call_set_lhs (g, cplx);
11127 gimple_seq_add_stmt (body_p, g);
11128 tree mul = create_tmp_var (unsigned_type_node);
11129 g = gimple_build_assign (mul, REALPART_EXPR,
11130 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11131 gimple_seq_add_stmt (body_p, g);
11132 tree ovf = create_tmp_var (unsigned_type_node);
11133 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11134 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11135 gimple_seq_add_stmt (body_p, g);
11136
11137 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11138 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11139 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11140 lab7, lab8);
11141 gimple_seq_add_stmt (body_p, g);
11142 g = gimple_build_label (lab7);
11143 gimple_seq_add_stmt (body_p, g);
11144
11145 tree andv = create_tmp_var (unsigned_type_node);
11146 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11147 gimple_seq_add_stmt (body_p, g);
11148 tree andvm1 = create_tmp_var (unsigned_type_node);
11149 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11150 build_minus_one_cst (unsigned_type_node));
11151 gimple_seq_add_stmt (body_p, g);
11152
11153 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11154 gimple_seq_add_stmt (body_p, g);
11155
11156 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11157 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11158 gimple_seq_add_stmt (body_p, g);
11159 g = gimple_build_label (lab9);
11160 gimple_seq_add_stmt (body_p, g);
11161 gimple_seq_add_seq (body_p, reduc_list);
11162 g = gimple_build_label (lab8);
11163 gimple_seq_add_stmt (body_p, g);
11164
11165 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11166 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11167 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11168 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11169 lab10, lab11);
11170 gimple_seq_add_stmt (body_p, g);
11171 g = gimple_build_label (lab10);
11172 gimple_seq_add_stmt (body_p, g);
11173 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11174 gimple_seq_add_stmt (body_p, g);
11175 g = gimple_build_goto (lab12);
11176 gimple_seq_add_stmt (body_p, g);
11177 g = gimple_build_label (lab11);
11178 gimple_seq_add_stmt (body_p, g);
11179 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11180 gimple_seq_add_stmt (body_p, g);
11181 g = gimple_build_label (lab12);
11182 gimple_seq_add_stmt (body_p, g);
11183
11184 g = omp_build_barrier (NULL);
11185 gimple_seq_add_stmt (body_p, g);
11186
11187 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11188 lab3, lab2);
11189 gimple_seq_add_stmt (body_p, g);
11190
11191 g = gimple_build_label (lab2);
11192 gimple_seq_add_stmt (body_p, g);
11193
11194 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11195 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11196 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11197 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11198 gimple_seq_add_stmt (body_p, g);
11199 g = gimple_build_label (lab1);
11200 gimple_seq_add_stmt (body_p, g);
11201 gimple_seq_add_seq (body_p, thr02_list);
11202 g = gimple_build_goto (lab3);
11203 gimple_seq_add_stmt (body_p, g);
11204 g = gimple_build_label (lab2);
11205 gimple_seq_add_stmt (body_p, g);
11206 gimple_seq_add_seq (body_p, thrn2_list);
11207 g = gimple_build_label (lab3);
11208 gimple_seq_add_stmt (body_p, g);
11209
11210 g = gimple_build_assign (ivar, size_zero_node);
11211 gimple_seq_add_stmt (body_p, g);
11212 gimple_seq_add_stmt (body_p, new_stmt);
11213 gimple_seq_add_seq (body_p, new_body);
11214
11215 gimple_seq new_dlist = NULL;
11216 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11217 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11218 tree num_threadsm1 = create_tmp_var (integer_type_node);
11219 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11220 integer_minus_one_node);
11221 gimple_seq_add_stmt (&new_dlist, g);
11222 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11223 gimple_seq_add_stmt (&new_dlist, g);
11224 g = gimple_build_label (lab1);
11225 gimple_seq_add_stmt (&new_dlist, g);
11226 gimple_seq_add_seq (&new_dlist, last_list);
11227 g = gimple_build_label (lab2);
11228 gimple_seq_add_stmt (&new_dlist, g);
11229 gimple_seq_add_seq (&new_dlist, *dlist);
11230 *dlist = new_dlist;
11231 }
11232
11233 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11234 the addresses of variables to be made private at the surrounding
11235 parallelism level. Such functions appear in the gimple code stream in two
11236 forms, e.g. for a partitioned loop:
11237
11238 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11239 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11240 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11241 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11242
11243 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11244 not as part of a HEAD_MARK sequence:
11245
11246 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11247
11248 For such stand-alone appearances, the 3rd argument is always 0, denoting
11249 gang partitioning. */
11250
11251 static gcall *
11252 lower_oacc_private_marker (omp_context *ctx)
11253 {
11254 if (ctx->oacc_privatization_candidates.length () == 0)
11255 return NULL;
11256
11257 auto_vec<tree, 5> args;
11258
11259 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11260 args.quick_push (integer_zero_node);
11261 args.quick_push (integer_minus_one_node);
11262
11263 int i;
11264 tree decl;
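  /* For each candidate, chase the context chain for the innermost remapped
     copy of the decl, so that the marker records the address of the
     privatized instance rather than of the original variable.  */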
11265 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11266 {
11267 for (omp_context *thisctx = ctx; thisctx; thisctx = thisctx->outer)
11268 {
11269 tree inner_decl = maybe_lookup_decl (decl, thisctx);
11270 if (inner_decl)
11271 {
11272 decl = inner_decl;
11273 break;
11274 }
11275 }
11276 gcc_checking_assert (decl);
11277
11278 tree addr = build_fold_addr_expr (decl);
11279 args.safe_push (addr);
11280 }
11281
11282 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11283 }
11284
11285 /* Lower code for an OMP loop directive. */
11286
11287 static void
11288 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11289 {
11290 tree *rhs_p, block;
11291 struct omp_for_data fd, *fdp = NULL;
11292 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11293 gbind *new_stmt;
11294 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11295 gimple_seq cnt_list = NULL, clist = NULL;
11296 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11297 size_t i;
11298
11299 push_gimplify_context ();
11300
11301 if (is_gimple_omp_oacc (ctx->stmt))
11302 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11303
11304 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11305
11306 block = make_node (BLOCK);
11307 new_stmt = gimple_build_bind (NULL, NULL, block);
11308   /* Replace at gsi right away, so that 'stmt' is no longer a member
11309      of a sequence, as we're going to add it to a different
11310      one below.  */
11311 gsi_replace (gsi_p, new_stmt, true);
11312
11313   /* Move the declarations of temporaries out of the loop body before we
11314      make the body go away.  */
11315 omp_for_body = gimple_omp_body (stmt);
11316 if (!gimple_seq_empty_p (omp_for_body)
11317 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11318 {
11319 gbind *inner_bind
11320 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11321 tree vars = gimple_bind_vars (inner_bind);
11322 if (is_gimple_omp_oacc (ctx->stmt))
11323 oacc_privatization_scan_decl_chain (ctx, vars);
11324 gimple_bind_append_vars (new_stmt, vars);
11325 	 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, so don't
11326 	    keep them on the inner_bind and its block.  */
11327 gimple_bind_set_vars (inner_bind, NULL_TREE);
11328 if (gimple_bind_block (inner_bind))
11329 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11330 }
11331
11332 if (gimple_omp_for_combined_into_p (stmt))
11333 {
11334 omp_extract_for_data (stmt, &fd, NULL);
11335 fdp = &fd;
11336
11337 /* We need two temporaries with fd.loop.v type (istart/iend)
11338 and then (fd.collapse - 1) temporaries with the same
11339 type for count2 ... countN-1 vars if not constant. */
11340 size_t count = 2;
11341 tree type = fd.iter_type;
11342 if (fd.collapse > 1
11343 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11344 count += fd.collapse - 1;
11345 size_t count2 = 0;
11346 tree type2 = NULL_TREE;
11347 bool taskreg_for
11348 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11349 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11350 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11351 tree simtc = NULL;
11352 tree clauses = *pc;
11353 if (fd.collapse > 1
11354 && fd.non_rect
11355 && fd.last_nonrect == fd.first_nonrect + 1
11356 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11357 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11358 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11359 {
11360 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11361 type2 = TREE_TYPE (v);
11362 count++;
11363 count2 = 3;
11364 }
11365 if (taskreg_for)
11366 outerc
11367 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11368 OMP_CLAUSE__LOOPTEMP_);
11369 if (ctx->simt_stmt)
11370 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11371 OMP_CLAUSE__LOOPTEMP_);
11372 for (i = 0; i < count + count2; i++)
11373 {
11374 tree temp;
11375 if (taskreg_for)
11376 {
11377 gcc_assert (outerc);
11378 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11379 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11380 OMP_CLAUSE__LOOPTEMP_);
11381 }
11382 else
11383 {
11384 	      /* If there are two adjacent SIMD stmts, one with a _simt_
11385 		 clause, another without, make sure they have the same
11386 decls in _looptemp_ clauses, because the outer stmt
11387 they are combined into will look up just one inner_stmt. */
11388 if (ctx->simt_stmt)
11389 temp = OMP_CLAUSE_DECL (simtc);
11390 else
11391 temp = create_tmp_var (i >= count ? type2 : type);
11392 insert_decl_map (&ctx->outer->cb, temp, temp);
11393 }
11394 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11395 OMP_CLAUSE_DECL (*pc) = temp;
11396 pc = &OMP_CLAUSE_CHAIN (*pc);
11397 if (ctx->simt_stmt)
11398 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11399 OMP_CLAUSE__LOOPTEMP_);
11400 }
11401 *pc = clauses;
11402 }
11403
11404 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11405 dlist = NULL;
11406 body = NULL;
11407 tree rclauses
11408 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11409 OMP_CLAUSE_REDUCTION);
11410 tree rtmp = NULL_TREE;
11411 if (rclauses)
11412 {
11413 tree type = build_pointer_type (pointer_sized_int_node);
11414 tree temp = create_tmp_var (type);
11415 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11416 OMP_CLAUSE_DECL (c) = temp;
11417 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11418 gimple_omp_for_set_clauses (stmt, c);
11419 lower_omp_task_reductions (ctx, OMP_FOR,
11420 gimple_omp_for_clauses (stmt),
11421 &tred_ilist, &tred_dlist);
11422 rclauses = c;
11423 rtmp = make_ssa_name (type);
11424 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11425 }
11426
11427 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11428 ctx);
11429
11430 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11431 fdp);
11432 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11433 gimple_omp_for_pre_body (stmt));
11434
11435 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11436
11437 gcall *private_marker = NULL;
11438 if (is_gimple_omp_oacc (ctx->stmt)
11439 && !gimple_seq_empty_p (omp_for_body))
11440 private_marker = lower_oacc_private_marker (ctx);
11441
11442 /* Lower the header expressions. At this point, we can assume that
11443 the header is of the form:
11444
11445 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11446
11447 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11448 using the .omp_data_s mapping, if needed. */
11449 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11450 {
11451 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11452 if (TREE_CODE (*rhs_p) == TREE_VEC)
11453 {
11454 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11455 TREE_VEC_ELT (*rhs_p, 1)
11456 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11457 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11458 TREE_VEC_ELT (*rhs_p, 2)
11459 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11460 }
11461 else if (!is_gimple_min_invariant (*rhs_p))
11462 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11463 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11464 recompute_tree_invariant_for_addr_expr (*rhs_p);
11465
11466 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11467 if (TREE_CODE (*rhs_p) == TREE_VEC)
11468 {
11469 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11470 TREE_VEC_ELT (*rhs_p, 1)
11471 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11472 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11473 TREE_VEC_ELT (*rhs_p, 2)
11474 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11475 }
11476 else if (!is_gimple_min_invariant (*rhs_p))
11477 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11478 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11479 recompute_tree_invariant_for_addr_expr (*rhs_p);
11480
11481 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11482 if (!is_gimple_min_invariant (*rhs_p))
11483 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11484 }
11485 if (rclauses)
11486 gimple_seq_add_seq (&tred_ilist, cnt_list);
11487 else
11488 gimple_seq_add_seq (&body, cnt_list);
11489
11490 /* Once lowered, extract the bounds and clauses. */
11491 omp_extract_for_data (stmt, &fd, NULL);
11492
11493 if (is_gimple_omp_oacc (ctx->stmt)
11494 && !ctx_in_oacc_kernels_region (ctx))
11495 lower_oacc_head_tail (gimple_location (stmt),
11496 gimple_omp_for_clauses (stmt), private_marker,
11497 &oacc_head, &oacc_tail, ctx);
11498
11499 /* Add OpenACC partitioning and reduction markers just before the loop. */
11500 if (oacc_head)
11501 gimple_seq_add_seq (&body, oacc_head);
11502
11503 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11504
11505 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11506 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11507 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11508 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11509 {
11510 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11511 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11512 OMP_CLAUSE_LINEAR_STEP (c)
11513 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11514 ctx);
11515 }
11516
11517 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11518 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11519 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11520 else
11521 {
11522 gimple_seq_add_stmt (&body, stmt);
11523 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11524 }
11525
11526 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11527 fd.loop.v));
11528
11529 /* After the loop, add exit clauses. */
11530 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11531
11532 if (clist)
11533 {
11534 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11535 gcall *g = gimple_build_call (fndecl, 0);
11536 gimple_seq_add_stmt (&body, g);
11537 gimple_seq_add_seq (&body, clist);
11538 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11539 g = gimple_build_call (fndecl, 0);
11540 gimple_seq_add_stmt (&body, g);
11541 }
11542
11543 if (ctx->cancellable)
11544 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11545
11546 gimple_seq_add_seq (&body, dlist);
11547
11548 if (rclauses)
11549 {
11550 gimple_seq_add_seq (&tred_ilist, body);
11551 body = tred_ilist;
11552 }
11553
11554 body = maybe_catch_exception (body);
11555
11556 /* Region exit marker goes at the end of the loop body. */
11557 gimple *g = gimple_build_omp_return (fd.have_nowait);
11558 gimple_seq_add_stmt (&body, g);
11559
11560 gimple_seq_add_seq (&body, tred_dlist);
11561
11562 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11563
11564 if (rclauses)
11565 OMP_CLAUSE_DECL (rclauses) = rtmp;
11566
11567 /* Add OpenACC joining and reduction markers just after the loop. */
11568 if (oacc_tail)
11569 gimple_seq_add_seq (&body, oacc_tail);
11570
11571 pop_gimplify_context (new_stmt);
11572
11573 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11574 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11575 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11576 if (BLOCK_VARS (block))
11577 TREE_USED (block) = 1;
11578
11579 gimple_bind_set_body (new_stmt, body);
11580 gimple_omp_set_body (stmt, NULL);
11581 gimple_omp_for_set_pre_body (stmt, NULL);
11582 }
11583
11584 /* Callback for walk_stmts. Check if the current statement only contains
11585 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11586
11587 static tree
11588 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11589 bool *handled_ops_p,
11590 struct walk_stmt_info *wi)
11591 {
11592 int *info = (int *) wi->info;
11593 gimple *stmt = gsi_stmt (*gsi_p);
11594
11595 *handled_ops_p = true;
11596 switch (gimple_code (stmt))
11597 {
11598 WALK_SUBSTMTS;
11599
11600 case GIMPLE_DEBUG:
11601 break;
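    /* The first worksharing construct moves the counter from 0 to 1; a
       second one, or any other statement, poisons it to -1, telling the
       caller the parallel is not a combined parallel-workshare.  */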
11602 case GIMPLE_OMP_FOR:
11603 case GIMPLE_OMP_SECTIONS:
11604 *info = *info == 0 ? 1 : -1;
11605 break;
11606 default:
11607 *info = -1;
11608 break;
11609 }
11610 return NULL;
11611 }
11612
11613 struct omp_taskcopy_context
11614 {
11615 /* This field must be at the beginning, as we do "inheritance": Some
11616 callback functions for tree-inline.c (e.g., omp_copy_decl)
11617 receive a copy_body_data pointer that is up-casted to an
11618 omp_context pointer. */
11619 copy_body_data cb;
11620 omp_context *ctx;
11621 };
11622
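/* copy_body_data callback for the task copyfn: variables that have a field
   in the sender record (sfield_map) get a fresh temporary in the copy
   function; everything else is left alone.  */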
11623 static tree
11624 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11625 {
11626 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11627
11628 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11629 return create_tmp_var (TREE_TYPE (var));
11630
11631 return var;
11632 }
11633
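/* Build a remapped variant of the record type ORIG_TYPE for the task
   copyfn, with the field types, sizes and offsets remapped through
   TCCTX->cb and the old-field -> new-field pairs recorded in its
   decl_map.  */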
11634 static tree
11635 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11636 {
11637 tree name, new_fields = NULL, type, f;
11638
11639 type = lang_hooks.types.make_type (RECORD_TYPE);
11640 name = DECL_NAME (TYPE_NAME (orig_type));
11641 name = build_decl (gimple_location (tcctx->ctx->stmt),
11642 TYPE_DECL, name, type);
11643 TYPE_NAME (type) = name;
11644
11645 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11646 {
11647 tree new_f = copy_node (f);
11648 DECL_CONTEXT (new_f) = type;
11649 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11650 TREE_CHAIN (new_f) = new_fields;
11651 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11652 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11653 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11654 &tcctx->cb, NULL);
11655 new_fields = new_f;
11656 tcctx->cb.decl_map->put (f, new_f);
11657 }
11658 TYPE_FIELDS (type) = nreverse (new_fields);
11659 layout_type (type);
11660 return type;
11661 }
11662
11663 /* Create the task copyfn, which copies shared variable pointers and copy-constructs firstprivate data into the task's data block.  */
11664
11665 static void
11666 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11667 {
11668 struct function *child_cfun;
11669 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11670 tree record_type, srecord_type, bind, list;
11671 bool record_needs_remap = false, srecord_needs_remap = false;
11672 splay_tree_node n;
11673 struct omp_taskcopy_context tcctx;
11674 location_t loc = gimple_location (task_stmt);
11675 size_t looptempno = 0;
11676
11677 child_fn = gimple_omp_task_copy_fn (task_stmt);
11678 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11679 gcc_assert (child_cfun->cfg == NULL);
11680 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11681
11682 /* Reset DECL_CONTEXT on function arguments. */
11683 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11684 DECL_CONTEXT (t) = child_fn;
11685
11686 /* Populate the function. */
11687 push_gimplify_context ();
11688 push_cfun (child_cfun);
11689
11690 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11691 TREE_SIDE_EFFECTS (bind) = 1;
11692 list = NULL;
11693 DECL_SAVED_TREE (child_fn) = bind;
11694 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11695
11696 /* Remap src and dst argument types if needed. */
11697 record_type = ctx->record_type;
11698 srecord_type = ctx->srecord_type;
11699 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11700 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11701 {
11702 record_needs_remap = true;
11703 break;
11704 }
11705 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11706 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11707 {
11708 srecord_needs_remap = true;
11709 break;
11710 }
11711
11712 if (record_needs_remap || srecord_needs_remap)
11713 {
11714 memset (&tcctx, '\0', sizeof (tcctx));
11715 tcctx.cb.src_fn = ctx->cb.src_fn;
11716 tcctx.cb.dst_fn = child_fn;
11717 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11718 gcc_checking_assert (tcctx.cb.src_node);
11719 tcctx.cb.dst_node = tcctx.cb.src_node;
11720 tcctx.cb.src_cfun = ctx->cb.src_cfun;
11721 tcctx.cb.copy_decl = task_copyfn_copy_decl;
11722 tcctx.cb.eh_lp_nr = 0;
11723 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11724 tcctx.cb.decl_map = new hash_map<tree, tree>;
11725 tcctx.ctx = ctx;
11726
11727 if (record_needs_remap)
11728 record_type = task_copyfn_remap_type (&tcctx, record_type);
11729 if (srecord_needs_remap)
11730 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11731 }
11732 else
11733 tcctx.cb.decl_map = NULL;
11734
11735 arg = DECL_ARGUMENTS (child_fn);
11736 TREE_TYPE (arg) = build_pointer_type (record_type);
11737 sarg = DECL_CHAIN (arg);
11738 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
11739
11740 /* First pass: initialize temporaries used in record_type and srecord_type
11741 sizes and field offsets. */
11742 if (tcctx.cb.decl_map)
11743 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11744 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11745 {
11746 tree *p;
11747
11748 decl = OMP_CLAUSE_DECL (c);
11749 p = tcctx.cb.decl_map->get (decl);
11750 if (p == NULL)
11751 continue;
11752 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11753 sf = (tree) n->value;
11754 sf = *tcctx.cb.decl_map->get (sf);
11755 src = build_simple_mem_ref_loc (loc, sarg);
11756 src = omp_build_component_ref (src, sf);
11757 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
11758 append_to_statement_list (t, &list);
11759 }
11760
11761   /* Second pass: copy shared var pointers and copy-construct non-VLA
11762      firstprivate vars.  */
11763 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11764 switch (OMP_CLAUSE_CODE (c))
11765 {
11766 splay_tree_key key;
11767 case OMP_CLAUSE_SHARED:
11768 decl = OMP_CLAUSE_DECL (c);
11769 key = (splay_tree_key) decl;
11770 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
11771 key = (splay_tree_key) &DECL_UID (decl);
11772 n = splay_tree_lookup (ctx->field_map, key);
11773 if (n == NULL)
11774 break;
11775 f = (tree) n->value;
11776 if (tcctx.cb.decl_map)
11777 f = *tcctx.cb.decl_map->get (f);
11778 n = splay_tree_lookup (ctx->sfield_map, key);
11779 sf = (tree) n->value;
11780 if (tcctx.cb.decl_map)
11781 sf = *tcctx.cb.decl_map->get (sf);
11782 src = build_simple_mem_ref_loc (loc, sarg);
11783 src = omp_build_component_ref (src, sf);
11784 dst = build_simple_mem_ref_loc (loc, arg);
11785 dst = omp_build_component_ref (dst, f);
11786 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11787 append_to_statement_list (t, &list);
11788 break;
11789 case OMP_CLAUSE_REDUCTION:
11790 case OMP_CLAUSE_IN_REDUCTION:
11791 decl = OMP_CLAUSE_DECL (c);
11792 if (TREE_CODE (decl) == MEM_REF)
11793 {
11794 decl = TREE_OPERAND (decl, 0);
11795 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
11796 decl = TREE_OPERAND (decl, 0);
11797 if (TREE_CODE (decl) == INDIRECT_REF
11798 || TREE_CODE (decl) == ADDR_EXPR)
11799 decl = TREE_OPERAND (decl, 0);
11800 }
11801 key = (splay_tree_key) decl;
11802 n = splay_tree_lookup (ctx->field_map, key);
11803 if (n == NULL)
11804 break;
11805 f = (tree) n->value;
11806 if (tcctx.cb.decl_map)
11807 f = *tcctx.cb.decl_map->get (f);
11808 n = splay_tree_lookup (ctx->sfield_map, key);
11809 sf = (tree) n->value;
11810 if (tcctx.cb.decl_map)
11811 sf = *tcctx.cb.decl_map->get (sf);
11812 src = build_simple_mem_ref_loc (loc, sarg);
11813 src = omp_build_component_ref (src, sf);
11814 if (decl != OMP_CLAUSE_DECL (c)
11815 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11816 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
11817 src = build_simple_mem_ref_loc (loc, src);
11818 dst = build_simple_mem_ref_loc (loc, arg);
11819 dst = omp_build_component_ref (dst, f);
11820 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11821 append_to_statement_list (t, &list);
11822 break;
11823 case OMP_CLAUSE__LOOPTEMP_:
11824 	/* Fields for the first two _looptemp_ clauses are initialized by
11825 	   GOMP_taskloop*; the rest are handled like firstprivate.  */
11826 if (looptempno < 2)
11827 {
11828 looptempno++;
11829 break;
11830 }
11831 /* FALLTHRU */
11832 case OMP_CLAUSE__REDUCTEMP_:
11833 case OMP_CLAUSE_FIRSTPRIVATE:
11834 decl = OMP_CLAUSE_DECL (c);
11835 if (is_variable_sized (decl))
11836 break;
11837 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11838 if (n == NULL)
11839 break;
11840 f = (tree) n->value;
11841 if (tcctx.cb.decl_map)
11842 f = *tcctx.cb.decl_map->get (f);
11843 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11844 if (n != NULL)
11845 {
11846 sf = (tree) n->value;
11847 if (tcctx.cb.decl_map)
11848 sf = *tcctx.cb.decl_map->get (sf);
11849 src = build_simple_mem_ref_loc (loc, sarg);
11850 src = omp_build_component_ref (src, sf);
11851 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
11852 src = build_simple_mem_ref_loc (loc, src);
11853 }
11854 else
11855 src = decl;
11856 dst = build_simple_mem_ref_loc (loc, arg);
11857 dst = omp_build_component_ref (dst, f);
11858 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
11859 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11860 else
11861 {
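	  /* If the firstprivate var is subject to an allocate clause, grab
	     the destination memory from GOMP_alloc with the requested
	     allocator and copy-construct into that instead.  */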
11862 if (ctx->allocate_map)
11863 if (tree *allocatorp = ctx->allocate_map->get (decl))
11864 {
11865 tree allocator = *allocatorp;
11866 if (TREE_CODE (allocator) != INTEGER_CST)
11867 {
11868 n = splay_tree_lookup (ctx->sfield_map,
11869 (splay_tree_key) allocator);
11870 allocator = (tree) n->value;
11871 if (tcctx.cb.decl_map)
11872 allocator = *tcctx.cb.decl_map->get (allocator);
11873 tree a = build_simple_mem_ref_loc (loc, sarg);
11874 allocator = omp_build_component_ref (a, allocator);
11875 }
11876 allocator = fold_convert (pointer_sized_int_node, allocator);
11877 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
11878 tree align = build_int_cst (size_type_node,
11879 DECL_ALIGN_UNIT (decl));
11880 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
11881 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
11882 allocator);
11883 ptr = fold_convert (TREE_TYPE (dst), ptr);
11884 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
11885 append_to_statement_list (t, &list);
11886 dst = build_simple_mem_ref_loc (loc, dst);
11887 }
11888 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11889 }
11890 append_to_statement_list (t, &list);
11891 break;
11892 case OMP_CLAUSE_PRIVATE:
11893 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
11894 break;
11895 decl = OMP_CLAUSE_DECL (c);
11896 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11897 f = (tree) n->value;
11898 if (tcctx.cb.decl_map)
11899 f = *tcctx.cb.decl_map->get (f);
11900 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11901 if (n != NULL)
11902 {
11903 sf = (tree) n->value;
11904 if (tcctx.cb.decl_map)
11905 sf = *tcctx.cb.decl_map->get (sf);
11906 src = build_simple_mem_ref_loc (loc, sarg);
11907 src = omp_build_component_ref (src, sf);
11908 if (use_pointer_for_field (decl, NULL))
11909 src = build_simple_mem_ref_loc (loc, src);
11910 }
11911 else
11912 src = decl;
11913 dst = build_simple_mem_ref_loc (loc, arg);
11914 dst = omp_build_component_ref (dst, f);
11915 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11916 append_to_statement_list (t, &list);
11917 break;
11918 default:
11919 break;
11920 }
11921
11922 /* Last pass: handle VLA firstprivates. */
11923 if (tcctx.cb.decl_map)
11924 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11925 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11926 {
11927 tree ind, ptr, df;
11928
11929 decl = OMP_CLAUSE_DECL (c);
11930 if (!is_variable_sized (decl))
11931 continue;
11932 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11933 if (n == NULL)
11934 continue;
11935 f = (tree) n->value;
11936 f = *tcctx.cb.decl_map->get (f);
11937 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
11938 ind = DECL_VALUE_EXPR (decl);
11939 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
11940 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
11941 n = splay_tree_lookup (ctx->sfield_map,
11942 (splay_tree_key) TREE_OPERAND (ind, 0));
11943 sf = (tree) n->value;
11944 sf = *tcctx.cb.decl_map->get (sf);
11945 src = build_simple_mem_ref_loc (loc, sarg);
11946 src = omp_build_component_ref (src, sf);
11947 src = build_simple_mem_ref_loc (loc, src);
11948 dst = build_simple_mem_ref_loc (loc, arg);
11949 dst = omp_build_component_ref (dst, f);
11950 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11951 append_to_statement_list (t, &list);
11952 n = splay_tree_lookup (ctx->field_map,
11953 (splay_tree_key) TREE_OPERAND (ind, 0));
11954 df = (tree) n->value;
11955 df = *tcctx.cb.decl_map->get (df);
11956 ptr = build_simple_mem_ref_loc (loc, arg);
11957 ptr = omp_build_component_ref (ptr, df);
11958 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
11959 build_fold_addr_expr_loc (loc, dst));
11960 append_to_statement_list (t, &list);
11961 }
11962
11963 t = build1 (RETURN_EXPR, void_type_node, NULL);
11964 append_to_statement_list (t, &list);
11965
11966 if (tcctx.cb.decl_map)
11967 delete tcctx.cb.decl_map;
11968 pop_gimplify_context (NULL);
11969 BIND_EXPR_BODY (bind) = list;
11970 pop_cfun ();
11971 }
11972
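/* Lower the OMP_CLAUSE_DEPEND clauses in *PCLAUSES into the flat array of
   addresses that the runtime expects, emitting its initialization into
   ISEQ and a final clobber into OSEQ, and prepending a new
   OMP_CLAUSE_DEPEND_LAST clause pointing at the array.  The array starts
   with the element counts (just the total and the out/inout count, or,
   when mutexinoutset or depobj entries are present, a 0 marker, the total
   and three per-kind counts), followed by the addresses grouped out/inout
   first, then mutexinoutset, then in, then depobj.  As an illustrative
   sketch,

     #pragma omp task depend(out: a) depend(in: b, c)

   yields the five-element array { 3, 1, &a, &b, &c }.  */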
11973 static void
11974 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
11975 {
11976 tree c, clauses;
11977 gimple *g;
11978 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
11979
11980 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
11981 gcc_assert (clauses);
11982 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11983 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
11984 switch (OMP_CLAUSE_DEPEND_KIND (c))
11985 {
11986 case OMP_CLAUSE_DEPEND_LAST:
11987 /* Lowering already done at gimplification. */
11988 return;
11989 case OMP_CLAUSE_DEPEND_IN:
11990 cnt[2]++;
11991 break;
11992 case OMP_CLAUSE_DEPEND_OUT:
11993 case OMP_CLAUSE_DEPEND_INOUT:
11994 cnt[0]++;
11995 break;
11996 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11997 cnt[1]++;
11998 break;
11999 case OMP_CLAUSE_DEPEND_DEPOBJ:
12000 cnt[3]++;
12001 break;
12002 case OMP_CLAUSE_DEPEND_SOURCE:
12003 case OMP_CLAUSE_DEPEND_SINK:
12004 /* FALLTHRU */
12005 default:
12006 gcc_unreachable ();
12007 }
12008 if (cnt[1] || cnt[3])
12009 idx = 5;
12010 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
12011 tree type = build_array_type_nelts (ptr_type_node, total + idx);
12012 tree array = create_tmp_var (type);
12013 TREE_ADDRESSABLE (array) = 1;
12014 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12015 NULL_TREE);
12016 if (idx == 5)
12017 {
12018 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12019 gimple_seq_add_stmt (iseq, g);
12020 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12021 NULL_TREE);
12022 }
12023 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12024 gimple_seq_add_stmt (iseq, g);
12025 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12026 {
12027 r = build4 (ARRAY_REF, ptr_type_node, array,
12028 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12029 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12030 gimple_seq_add_stmt (iseq, g);
12031 }
12032 for (i = 0; i < 4; i++)
12033 {
12034 if (cnt[i] == 0)
12035 continue;
12036 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12037 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12038 continue;
12039 else
12040 {
12041 switch (OMP_CLAUSE_DEPEND_KIND (c))
12042 {
12043 case OMP_CLAUSE_DEPEND_IN:
12044 if (i != 2)
12045 continue;
12046 break;
12047 case OMP_CLAUSE_DEPEND_OUT:
12048 case OMP_CLAUSE_DEPEND_INOUT:
12049 if (i != 0)
12050 continue;
12051 break;
12052 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12053 if (i != 1)
12054 continue;
12055 break;
12056 case OMP_CLAUSE_DEPEND_DEPOBJ:
12057 if (i != 3)
12058 continue;
12059 break;
12060 default:
12061 gcc_unreachable ();
12062 }
12063 tree t = OMP_CLAUSE_DECL (c);
12064 t = fold_convert (ptr_type_node, t);
12065 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12066 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12067 NULL_TREE, NULL_TREE);
12068 g = gimple_build_assign (r, t);
12069 gimple_seq_add_stmt (iseq, g);
12070 }
12071 }
12072 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12073 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12074 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12075 OMP_CLAUSE_CHAIN (c) = *pclauses;
12076 *pclauses = c;
12077 tree clobber = build_clobber (type);
12078 g = gimple_build_assign (array, clobber);
12079 gimple_seq_add_stmt (oseq, g);
12080 }
12081
12082 /* Lower the OpenMP parallel or task directive in the current statement
12083 in GSI_P. CTX holds context information for the directive. */
12084
12085 static void
12086 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12087 {
12088 tree clauses;
12089 tree child_fn, t;
12090 gimple *stmt = gsi_stmt (*gsi_p);
12091 gbind *par_bind, *bind, *dep_bind = NULL;
12092 gimple_seq par_body;
12093 location_t loc = gimple_location (stmt);
12094
12095 clauses = gimple_omp_taskreg_clauses (stmt);
12096 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12097 && gimple_omp_task_taskwait_p (stmt))
12098 {
12099 par_bind = NULL;
12100 par_body = NULL;
12101 }
12102 else
12103 {
12104 par_bind
12105 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12106 par_body = gimple_bind_body (par_bind);
12107 }
12108 child_fn = ctx->cb.dst_fn;
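/* Editor's note: a parallel whose body turns out to contain exactly one
   workshare construct (counted by check_combined_parallel) is marked
   combined here, so that expansion can use a combined runtime entry
   point (e.g. one of the GOMP_parallel_loop_* family) instead of a
   separate parallel.  */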
12109 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12110 && !gimple_omp_parallel_combined_p (stmt))
12111 {
12112 struct walk_stmt_info wi;
12113 int ws_num = 0;
12114
12115 memset (&wi, 0, sizeof (wi));
12116 wi.info = &ws_num;
12117 wi.val_only = true;
12118 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12119 if (ws_num == 1)
12120 gimple_omp_parallel_set_combined_p (stmt, true);
12121 }
12122 gimple_seq dep_ilist = NULL;
12123 gimple_seq dep_olist = NULL;
12124 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12125 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12126 {
12127 push_gimplify_context ();
12128 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12129 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12130 &dep_ilist, &dep_olist);
12131 }
12132
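/* A '#pragma omp taskwait' with depend clauses is represented as a
   body-less GIMPLE_OMP_TASK (gimple_omp_task_taskwait_p); it only needs
   the dependence array built and clobbered around it.  */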
12133 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12134 && gimple_omp_task_taskwait_p (stmt))
12135 {
12136 if (dep_bind)
12137 {
12138 gsi_replace (gsi_p, dep_bind, true);
12139 gimple_bind_add_seq (dep_bind, dep_ilist);
12140 gimple_bind_add_stmt (dep_bind, stmt);
12141 gimple_bind_add_seq (dep_bind, dep_olist);
12142 pop_gimplify_context (dep_bind);
12143 }
12144 return;
12145 }
12146
12147 if (ctx->srecord_type)
12148 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12149
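/* Task reductions (a taskloop reduction, or a parallel carrying an
   OMP_CLAUSE__REDUCTEMP_) need registration code ahead of the construct
   and teardown after it; collect those into TSKRED_ILIST/TSKRED_OLIST,
   wrapped by DEP_BIND at the end of this function.  */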
12150 gimple_seq tskred_ilist = NULL;
12151 gimple_seq tskred_olist = NULL;
12152 if ((is_task_ctx (ctx)
12153 && gimple_omp_task_taskloop_p (ctx->stmt)
12154 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12155 OMP_CLAUSE_REDUCTION))
12156 || (is_parallel_ctx (ctx)
12157 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12158 OMP_CLAUSE__REDUCTEMP_)))
12159 {
12160 if (dep_bind == NULL)
12161 {
12162 push_gimplify_context ();
12163 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12164 }
12165 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12166 : OMP_PARALLEL,
12167 gimple_omp_taskreg_clauses (ctx->stmt),
12168 &tskred_ilist, &tskred_olist);
12169 }
12170
12171 push_gimplify_context ();
12172
12173 gimple_seq par_olist = NULL;
12174 gimple_seq par_ilist = NULL;
12175 gimple_seq par_rlist = NULL;
12176 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12177 lower_omp (&par_body, ctx);
12178 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12179 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12180
12181 /* Declare all the variables created by mapping and the variables
12182 declared in the scope of the parallel body. */
12183 record_vars_into (ctx->block_vars, child_fn);
12184 maybe_remove_omp_member_access_dummy_vars (par_bind);
12185 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12186
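/* The send buffer .omp_data_o lives in the parent; the child function
   reads it back through ctx->receiver_decl (.omp_data_i), which is
   initialized at the top of NEW_BODY below.  */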
12187 if (ctx->record_type)
12188 {
12189 ctx->sender_decl
12190 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12191 : ctx->record_type, ".omp_data_o");
12192 DECL_NAMELESS (ctx->sender_decl) = 1;
12193 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12194 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12195 }
12196
12197 gimple_seq olist = NULL;
12198 gimple_seq ilist = NULL;
12199 lower_send_clauses (clauses, &ilist, &olist, ctx);
12200 lower_send_shared_vars (&ilist, &olist, ctx);
12201
12202 if (ctx->record_type)
12203 {
12204 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12205 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12206 clobber));
12207 }
12208
12209 /* Once all the expansions are done, sequence all the different
12210 fragments inside gimple_omp_body. */
12211
12212 gimple_seq new_body = NULL;
12213
12214 if (ctx->record_type)
12215 {
12216 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12217 /* fixup_child_record_type might have changed receiver_decl's type. */
12218 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12219 gimple_seq_add_stmt (&new_body,
12220 gimple_build_assign (ctx->receiver_decl, t));
12221 }
12222
12223 gimple_seq_add_seq (&new_body, par_ilist);
12224 gimple_seq_add_seq (&new_body, par_body);
12225 gimple_seq_add_seq (&new_body, par_rlist);
12226 if (ctx->cancellable)
12227 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12228 gimple_seq_add_seq (&new_body, par_olist);
12229 new_body = maybe_catch_exception (new_body);
12230 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12231 gimple_seq_add_stmt (&new_body,
12232 gimple_build_omp_continue (integer_zero_node,
12233 integer_zero_node));
12234 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12235 gimple_omp_set_body (stmt, new_body);
12236
12237 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12238 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12239 else
12240 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12241 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12242 gimple_bind_add_seq (bind, ilist);
12243 gimple_bind_add_stmt (bind, stmt);
12244 gimple_bind_add_seq (bind, olist);
12245
12246 pop_gimplify_context (NULL);
12247
12248 if (dep_bind)
12249 {
12250 gimple_bind_add_seq (dep_bind, dep_ilist);
12251 gimple_bind_add_seq (dep_bind, tskred_ilist);
12252 gimple_bind_add_stmt (dep_bind, bind);
12253 gimple_bind_add_seq (dep_bind, tskred_olist);
12254 gimple_bind_add_seq (dep_bind, dep_olist);
12255 pop_gimplify_context (dep_bind);
12256 }
12257 }
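/* Illustrative sketch (editor's note, not verbatim compiler output):
   after the function above, a directive such as

     #pragma omp parallel firstprivate(x)
       body;

   has roughly the shape

     .omp_data_o.x = x;              // ilist: send clauses
     #pragma omp parallel [child]    // stmt; its body now begins with
       .omp_data_i = &.omp_data_o;   // the receiver setup, then the
       ...lowered body...            // lowered body and olist, ending
       GIMPLE_OMP_RETURN             // in GIMPLE_OMP_RETURN
     .omp_data_o = {CLOBBER};        // olist  */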
12258
12259 /* Lower the GIMPLE_OMP_TARGET in the current statement
12260 in GSI_P. CTX holds context information for the directive. */
12261
12262 static void
12263 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12264 {
12265 tree clauses;
12266 tree child_fn, t, c;
12267 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12268 gbind *tgt_bind, *bind, *dep_bind = NULL;
12269 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12270 location_t loc = gimple_location (stmt);
12271 bool offloaded, data_region;
12272 unsigned int map_cnt = 0;
12273 tree in_reduction_clauses = NULL_TREE;
12274
12275 offloaded = is_gimple_omp_offloaded (stmt);
12276 switch (gimple_omp_target_kind (stmt))
12277 {
12278 case GF_OMP_TARGET_KIND_REGION:
12279 tree *p, *q;
12280 q = &in_reduction_clauses;
12281 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12282 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12283 {
12284 *q = *p;
12285 q = &OMP_CLAUSE_CHAIN (*q);
12286 *p = OMP_CLAUSE_CHAIN (*p);
12287 }
12288 else
12289 p = &OMP_CLAUSE_CHAIN (*p);
12290 *q = NULL_TREE;
12291 *p = in_reduction_clauses;
12292 /* FALLTHRU */
12293 case GF_OMP_TARGET_KIND_UPDATE:
12294 case GF_OMP_TARGET_KIND_ENTER_DATA:
12295 case GF_OMP_TARGET_KIND_EXIT_DATA:
12296 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12297 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12298 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12299 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12300 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12301 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12302 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12303 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12304 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12305 data_region = false;
12306 break;
12307 case GF_OMP_TARGET_KIND_DATA:
12308 case GF_OMP_TARGET_KIND_OACC_DATA:
12309 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12310 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12311 data_region = true;
12312 break;
12313 default:
12314 gcc_unreachable ();
12315 }
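/* From here on, OFFLOADED means the region's body is moved to a child
   function to be run on the device, while DATA_REGION constructs keep
   their body in place and only manage the mapping; both still build the
   address/size/kind arrays below when there is anything to map.  */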
12316
12317 clauses = gimple_omp_target_clauses (stmt);
12318
12319 gimple_seq dep_ilist = NULL;
12320 gimple_seq dep_olist = NULL;
12321 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12322 if (has_depend || in_reduction_clauses)
12323 {
12324 push_gimplify_context ();
12325 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12326 if (has_depend)
12327 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12328 &dep_ilist, &dep_olist);
12329 if (in_reduction_clauses)
12330 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12331 ctx, NULL);
12332 }
12333
12334 tgt_bind = NULL;
12335 tgt_body = NULL;
12336 if (offloaded)
12337 {
12338 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12339 tgt_body = gimple_bind_body (tgt_bind);
12340 }
12341 else if (data_region)
12342 tgt_body = gimple_omp_body (stmt);
12343 child_fn = ctx->cb.dst_fn;
12344
12345 push_gimplify_context ();
12346 fplist = NULL;
12347
12348 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12349 switch (OMP_CLAUSE_CODE (c))
12350 {
12351 tree var, x;
12352
12353 default:
12354 break;
12355 case OMP_CLAUSE_MAP:
12356 #if CHECKING_P
12357 /* First check what we're prepared to handle in the following. */
12358 switch (OMP_CLAUSE_MAP_KIND (c))
12359 {
12360 case GOMP_MAP_ALLOC:
12361 case GOMP_MAP_TO:
12362 case GOMP_MAP_FROM:
12363 case GOMP_MAP_TOFROM:
12364 case GOMP_MAP_POINTER:
12365 case GOMP_MAP_TO_PSET:
12366 case GOMP_MAP_DELETE:
12367 case GOMP_MAP_RELEASE:
12368 case GOMP_MAP_ALWAYS_TO:
12369 case GOMP_MAP_ALWAYS_FROM:
12370 case GOMP_MAP_ALWAYS_TOFROM:
12371 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12372 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12373 case GOMP_MAP_STRUCT:
12374 case GOMP_MAP_ALWAYS_POINTER:
12375 case GOMP_MAP_ATTACH:
12376 case GOMP_MAP_DETACH:
12377 break;
12378 case GOMP_MAP_IF_PRESENT:
12379 case GOMP_MAP_FORCE_ALLOC:
12380 case GOMP_MAP_FORCE_TO:
12381 case GOMP_MAP_FORCE_FROM:
12382 case GOMP_MAP_FORCE_TOFROM:
12383 case GOMP_MAP_FORCE_PRESENT:
12384 case GOMP_MAP_FORCE_DEVICEPTR:
12385 case GOMP_MAP_DEVICE_RESIDENT:
12386 case GOMP_MAP_LINK:
12387 case GOMP_MAP_FORCE_DETACH:
12388 gcc_assert (is_gimple_omp_oacc (stmt));
12389 break;
12390 default:
12391 gcc_unreachable ();
12392 }
12393 #endif
12394 /* FALLTHRU */
12395 case OMP_CLAUSE_TO:
12396 case OMP_CLAUSE_FROM:
12397 oacc_firstprivate:
12398 var = OMP_CLAUSE_DECL (c);
12399 if (!DECL_P (var))
12400 {
12401 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12402 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12403 && (OMP_CLAUSE_MAP_KIND (c)
12404 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12405 map_cnt++;
12406 continue;
12407 }
12408
12409 if (DECL_SIZE (var)
12410 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12411 {
12412 tree var2 = DECL_VALUE_EXPR (var);
12413 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12414 var2 = TREE_OPERAND (var2, 0);
12415 gcc_assert (DECL_P (var2));
12416 var = var2;
12417 }
12418
12419 if (offloaded
12420 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12421 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12422 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12423 {
12424 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12425 {
12426 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12427 && varpool_node::get_create (var)->offloadable)
12428 continue;
12429
12430 tree type = build_pointer_type (TREE_TYPE (var));
12431 tree new_var = lookup_decl (var, ctx);
12432 x = create_tmp_var_raw (type, get_name (new_var));
12433 gimple_add_tmp_var (x);
12434 x = build_simple_mem_ref (x);
12435 SET_DECL_VALUE_EXPR (new_var, x);
12436 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12437 }
12438 continue;
12439 }
12440
12441 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12442 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12443 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12444 && is_omp_target (stmt))
12445 {
12446 gcc_assert (maybe_lookup_field (c, ctx));
12447 map_cnt++;
12448 continue;
12449 }
12450
12451 if (!maybe_lookup_field (var, ctx))
12452 continue;
12453
12454 /* Don't remap compute constructs' reduction variables, because the
12455 intermediate result must be local to each gang. */
12456 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12457 && is_gimple_omp_oacc (ctx->stmt)
12458 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12459 {
12460 x = build_receiver_ref (var, true, ctx);
12461 tree new_var = lookup_decl (var, ctx);
12462
12463 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12464 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12465 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12466 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12467 x = build_simple_mem_ref (x);
12468 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12469 {
12470 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12471 if (omp_is_reference (new_var)
12472 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12473 || DECL_BY_REFERENCE (var)))
12474 {
12475 /* Create a local object to hold the instance
12476 value. */
12477 tree type = TREE_TYPE (TREE_TYPE (new_var));
12478 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12479 tree inst = create_tmp_var (type, id);
12480 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12481 x = build_fold_addr_expr (inst);
12482 }
12483 gimplify_assign (new_var, x, &fplist);
12484 }
12485 else if (DECL_P (new_var))
12486 {
12487 SET_DECL_VALUE_EXPR (new_var, x);
12488 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12489 }
12490 else
12491 gcc_unreachable ();
12492 }
12493 map_cnt++;
12494 break;
12495
12496 case OMP_CLAUSE_FIRSTPRIVATE:
12497 gcc_checking_assert (offloaded);
12498 if (is_gimple_omp_oacc (ctx->stmt))
12499 {
12500 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12501 gcc_checking_assert (!is_oacc_kernels (ctx));
12502 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12503 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12504
12505 goto oacc_firstprivate;
12506 }
12507 map_cnt++;
12508 var = OMP_CLAUSE_DECL (c);
12509 if (!omp_is_reference (var)
12510 && !is_gimple_reg_type (TREE_TYPE (var)))
12511 {
12512 tree new_var = lookup_decl (var, ctx);
12513 if (is_variable_sized (var))
12514 {
12515 tree pvar = DECL_VALUE_EXPR (var);
12516 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12517 pvar = TREE_OPERAND (pvar, 0);
12518 gcc_assert (DECL_P (pvar));
12519 tree new_pvar = lookup_decl (pvar, ctx);
12520 x = build_fold_indirect_ref (new_pvar);
12521 TREE_THIS_NOTRAP (x) = 1;
12522 }
12523 else
12524 x = build_receiver_ref (var, true, ctx);
12525 SET_DECL_VALUE_EXPR (new_var, x);
12526 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12527 }
12528 break;
12529
12530 case OMP_CLAUSE_PRIVATE:
12531 gcc_checking_assert (offloaded);
12532 if (is_gimple_omp_oacc (ctx->stmt))
12533 {
12534 /* No 'private' clauses on OpenACC 'kernels'. */
12535 gcc_checking_assert (!is_oacc_kernels (ctx));
12536 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12537 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12538
12539 break;
12540 }
12541 var = OMP_CLAUSE_DECL (c);
12542 if (is_variable_sized (var))
12543 {
12544 tree new_var = lookup_decl (var, ctx);
12545 tree pvar = DECL_VALUE_EXPR (var);
12546 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12547 pvar = TREE_OPERAND (pvar, 0);
12548 gcc_assert (DECL_P (pvar));
12549 tree new_pvar = lookup_decl (pvar, ctx);
12550 x = build_fold_indirect_ref (new_pvar);
12551 TREE_THIS_NOTRAP (x) = 1;
12552 SET_DECL_VALUE_EXPR (new_var, x);
12553 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12554 }
12555 break;
12556
12557 case OMP_CLAUSE_USE_DEVICE_PTR:
12558 case OMP_CLAUSE_USE_DEVICE_ADDR:
12559 case OMP_CLAUSE_IS_DEVICE_PTR:
12560 var = OMP_CLAUSE_DECL (c);
12561 map_cnt++;
12562 if (is_variable_sized (var))
12563 {
12564 tree new_var = lookup_decl (var, ctx);
12565 tree pvar = DECL_VALUE_EXPR (var);
12566 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12567 pvar = TREE_OPERAND (pvar, 0);
12568 gcc_assert (DECL_P (pvar));
12569 tree new_pvar = lookup_decl (pvar, ctx);
12570 x = build_fold_indirect_ref (new_pvar);
12571 TREE_THIS_NOTRAP (x) = 1;
12572 SET_DECL_VALUE_EXPR (new_var, x);
12573 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12574 }
12575 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12576 && !omp_is_reference (var)
12577 && !omp_is_allocatable_or_ptr (var)
12578 && !lang_hooks.decls.omp_array_data (var, true))
12579 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12580 {
12581 tree new_var = lookup_decl (var, ctx);
12582 tree type = build_pointer_type (TREE_TYPE (var));
12583 x = create_tmp_var_raw (type, get_name (new_var));
12584 gimple_add_tmp_var (x);
12585 x = build_simple_mem_ref (x);
12586 SET_DECL_VALUE_EXPR (new_var, x);
12587 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12588 }
12589 else
12590 {
12591 tree new_var = lookup_decl (var, ctx);
12592 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12593 gimple_add_tmp_var (x);
12594 SET_DECL_VALUE_EXPR (new_var, x);
12595 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12596 }
12597 break;
12598 }
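/* The loop above was only the first of two passes over the clauses: it
   counts the mappings (MAP_CNT) and installs DECL_VALUE_EXPRs and
   receiver refs for remapped variables.  The second pass, below under
   ctx->record_type, fills in the actual sizes, kinds and send
   buffer.  */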
12599
12600 if (offloaded)
12601 {
12602 target_nesting_level++;
12603 lower_omp (&tgt_body, ctx);
12604 target_nesting_level--;
12605 }
12606 else if (data_region)
12607 lower_omp (&tgt_body, ctx);
12608
12609 if (offloaded)
12610 {
12611 /* Declare all the variables created by mapping and the variables
12612 declared in the scope of the target body. */
12613 record_vars_into (ctx->block_vars, child_fn);
12614 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12615 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12616 }
12617
12618 olist = NULL;
12619 ilist = NULL;
12620 if (ctx->record_type)
12621 {
12622 ctx->sender_decl
12623 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12624 DECL_NAMELESS (ctx->sender_decl) = 1;
12625 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12626 t = make_tree_vec (3);
12627 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12628 TREE_VEC_ELT (t, 1)
12629 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12630 ".omp_data_sizes");
12631 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12632 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12633 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12634 tree tkind_type = short_unsigned_type_node;
12635 int talign_shift = 8;
12636 TREE_VEC_ELT (t, 2)
12637 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12638 ".omp_data_kinds");
12639 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12640 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12641 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12642 gimple_omp_target_set_data_arg (stmt, t);
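/* The data_arg triplet handed to the runtime: element 0 is the array of
   host addresses (.omp_data_arr), element 1 the byte sizes
   (.omp_data_sizes), element 2 the map kinds (.omp_data_kinds).  Each
   kind packs the GOMP_MAP_* code into the low TALIGN_SHIFT (8) bits
   with ceil_log2 of the alignment above them; sketch:
     kind = (ceil_log2 (align) << 8) | GOMP_MAP_TO;  */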
12643
12644 vec<constructor_elt, va_gc> *vsize;
12645 vec<constructor_elt, va_gc> *vkind;
12646 vec_alloc (vsize, map_cnt);
12647 vec_alloc (vkind, map_cnt);
12648 unsigned int map_idx = 0;
12649
12650 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12651 switch (OMP_CLAUSE_CODE (c))
12652 {
12653 tree ovar, nc, s, purpose, var, x, type;
12654 unsigned int talign;
12655
12656 default:
12657 break;
12658
12659 case OMP_CLAUSE_MAP:
12660 case OMP_CLAUSE_TO:
12661 case OMP_CLAUSE_FROM:
12662 oacc_firstprivate_map:
12663 nc = c;
12664 ovar = OMP_CLAUSE_DECL (c);
12665 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12666 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12667 || (OMP_CLAUSE_MAP_KIND (c)
12668 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12669 break;
12670 if (!DECL_P (ovar))
12671 {
12672 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12673 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
12674 {
12675 nc = OMP_CLAUSE_CHAIN (c);
12676 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
12677 == get_base_address (ovar));
12678 ovar = OMP_CLAUSE_DECL (nc);
12679 }
12680 else
12681 {
12682 tree x = build_sender_ref (ovar, ctx);
12683 tree v = ovar;
12684 if (in_reduction_clauses
12685 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12686 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12687 {
12688 v = unshare_expr (v);
12689 tree *p = &v;
12690 while (handled_component_p (*p)
12691 || TREE_CODE (*p) == INDIRECT_REF
12692 || TREE_CODE (*p) == ADDR_EXPR
12693 || TREE_CODE (*p) == MEM_REF
12694 || TREE_CODE (*p) == NON_LVALUE_EXPR)
12695 p = &TREE_OPERAND (*p, 0);
12696 tree d = *p;
12697 if (is_variable_sized (d))
12698 {
12699 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12700 d = DECL_VALUE_EXPR (d);
12701 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12702 d = TREE_OPERAND (d, 0);
12703 gcc_assert (DECL_P (d));
12704 }
12705 splay_tree_key key
12706 = (splay_tree_key) &DECL_CONTEXT (d);
12707 tree nd = (tree) splay_tree_lookup (ctx->field_map,
12708 key)->value;
12709 if (d == *p)
12710 *p = nd;
12711 else
12712 *p = build_fold_indirect_ref (nd);
12713 }
12714 v = build_fold_addr_expr_with_type (v, ptr_type_node);
12715 gimplify_assign (x, v, &ilist);
12716 nc = NULL_TREE;
12717 }
12718 }
12719 else
12720 {
12721 if (DECL_SIZE (ovar)
12722 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
12723 {
12724 tree ovar2 = DECL_VALUE_EXPR (ovar);
12725 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
12726 ovar2 = TREE_OPERAND (ovar2, 0);
12727 gcc_assert (DECL_P (ovar2));
12728 ovar = ovar2;
12729 }
12730 if (!maybe_lookup_field (ovar, ctx)
12731 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12732 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12733 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
12734 continue;
12735 }
12736
12737 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
12738 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
12739 talign = DECL_ALIGN_UNIT (ovar);
12740
12741 var = NULL_TREE;
12742 if (nc)
12743 {
12744 if (in_reduction_clauses
12745 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12746 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12747 {
12748 tree d = ovar;
12749 if (is_variable_sized (d))
12750 {
12751 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12752 d = DECL_VALUE_EXPR (d);
12753 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12754 d = TREE_OPERAND (d, 0);
12755 gcc_assert (DECL_P (d));
12756 }
12757 splay_tree_key key
12758 = (splay_tree_key) &DECL_CONTEXT (d);
12759 tree nd = (tree) splay_tree_lookup (ctx->field_map,
12760 key)->value;
12761 if (d == ovar)
12762 var = nd;
12763 else
12764 var = build_fold_indirect_ref (nd);
12765 }
12766 else
12767 var = lookup_decl_in_outer_ctx (ovar, ctx);
12768 }
12769 if (nc
12770 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12771 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12772 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12773 && is_omp_target (stmt))
12774 {
12775 x = build_sender_ref (c, ctx);
12776 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
12777 }
12778 else if (nc)
12779 {
12780 x = build_sender_ref (ovar, ctx);
12781
12782 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12783 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12784 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12785 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
12786 {
12787 gcc_assert (offloaded);
12788 tree avar
12789 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
12790 mark_addressable (avar);
12791 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
12792 talign = DECL_ALIGN_UNIT (avar);
12793 avar = build_fold_addr_expr (avar);
12794 gimplify_assign (x, avar, &ilist);
12795 }
12796 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12797 {
12798 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12799 if (!omp_is_reference (var))
12800 {
12801 if (is_gimple_reg (var)
12802 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12803 suppress_warning (var);
12804 var = build_fold_addr_expr (var);
12805 }
12806 else
12807 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12808 gimplify_assign (x, var, &ilist);
12809 }
12810 else if (is_gimple_reg (var))
12811 {
12812 gcc_assert (offloaded);
12813 tree avar = create_tmp_var (TREE_TYPE (var));
12814 mark_addressable (avar);
12815 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
12816 if (GOMP_MAP_COPY_TO_P (map_kind)
12817 || map_kind == GOMP_MAP_POINTER
12818 || map_kind == GOMP_MAP_TO_PSET
12819 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12820 {
12821 /* If we need to initialize a temporary
12822 with VAR because it is not addressable, and
12823 the variable hasn't been initialized yet, then
12824 we'll get a warning for the store to avar.
12825 Don't warn in that case, the mapping might
12826 be implicit. */
12827 suppress_warning (var, OPT_Wuninitialized);
12828 gimplify_assign (avar, var, &ilist);
12829 }
12830 avar = build_fold_addr_expr (avar);
12831 gimplify_assign (x, avar, &ilist);
12832 if ((GOMP_MAP_COPY_FROM_P (map_kind)
12833 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12834 && !TYPE_READONLY (TREE_TYPE (var)))
12835 {
12836 x = unshare_expr (x);
12837 x = build_simple_mem_ref (x);
12838 gimplify_assign (var, x, &olist);
12839 }
12840 }
12841 else
12842 {
12843 /* While MAP is handled explicitly by the FE,
12844 for 'target update', only the identified variable is passed. */
12845 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
12846 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
12847 && (omp_is_allocatable_or_ptr (var)
12848 && omp_check_optional_argument (var, false)))
12849 var = build_fold_indirect_ref (var);
12850 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
12851 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
12852 || (!omp_is_allocatable_or_ptr (var)
12853 && !omp_check_optional_argument (var, false)))
12854 var = build_fold_addr_expr (var);
12855 gimplify_assign (x, var, &ilist);
12856 }
12857 }
12858 s = NULL_TREE;
12859 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12860 {
12861 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12862 s = TREE_TYPE (ovar);
12863 if (TREE_CODE (s) == REFERENCE_TYPE
12864 || omp_check_optional_argument (ovar, false))
12865 s = TREE_TYPE (s);
12866 s = TYPE_SIZE_UNIT (s);
12867 }
12868 else
12869 s = OMP_CLAUSE_SIZE (c);
12870 if (s == NULL_TREE)
12871 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12872 s = fold_convert (size_type_node, s);
12873 purpose = size_int (map_idx++);
12874 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12875 if (TREE_CODE (s) != INTEGER_CST)
12876 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12877
12878 unsigned HOST_WIDE_INT tkind, tkind_zero;
12879 switch (OMP_CLAUSE_CODE (c))
12880 {
12881 case OMP_CLAUSE_MAP:
12882 tkind = OMP_CLAUSE_MAP_KIND (c);
12883 tkind_zero = tkind;
12884 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
12885 switch (tkind)
12886 {
12887 case GOMP_MAP_ALLOC:
12888 case GOMP_MAP_IF_PRESENT:
12889 case GOMP_MAP_TO:
12890 case GOMP_MAP_FROM:
12891 case GOMP_MAP_TOFROM:
12892 case GOMP_MAP_ALWAYS_TO:
12893 case GOMP_MAP_ALWAYS_FROM:
12894 case GOMP_MAP_ALWAYS_TOFROM:
12895 case GOMP_MAP_RELEASE:
12896 case GOMP_MAP_FORCE_TO:
12897 case GOMP_MAP_FORCE_FROM:
12898 case GOMP_MAP_FORCE_TOFROM:
12899 case GOMP_MAP_FORCE_PRESENT:
12900 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
12901 break;
12902 case GOMP_MAP_DELETE:
12903 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION; /* FALLTHRU */
12904 default:
12905 break;
12906 }
12907 if (tkind_zero != tkind)
12908 {
12909 if (integer_zerop (s))
12910 tkind = tkind_zero;
12911 else if (integer_nonzerop (s))
12912 tkind_zero = tkind;
12913 }
12914 break;
12915 case OMP_CLAUSE_FIRSTPRIVATE:
12916 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12917 tkind = GOMP_MAP_TO;
12918 tkind_zero = tkind;
12919 break;
12920 case OMP_CLAUSE_TO:
12921 tkind = GOMP_MAP_TO;
12922 tkind_zero = tkind;
12923 break;
12924 case OMP_CLAUSE_FROM:
12925 tkind = GOMP_MAP_FROM;
12926 tkind_zero = tkind;
12927 break;
12928 default:
12929 gcc_unreachable ();
12930 }
12931 gcc_checking_assert (tkind
12932 < (HOST_WIDE_INT_C (1U) << talign_shift));
12933 gcc_checking_assert (tkind_zero
12934 < (HOST_WIDE_INT_C (1U) << talign_shift));
12935 talign = ceil_log2 (talign);
12936 tkind |= talign << talign_shift;
12937 tkind_zero |= talign << talign_shift;
12938 gcc_checking_assert (tkind
12939 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12940 gcc_checking_assert (tkind_zero
12941 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
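/* For a maybe-zero-length array section whose size S is not a
   compile-time constant, the kind must be chosen at run time, roughly
     kind = (S == 0) ? tkind_zero : tkind;
   which forces the kinds array off the TREE_STATIC path.  */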
12942 if (tkind == tkind_zero)
12943 x = build_int_cstu (tkind_type, tkind);
12944 else
12945 {
12946 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
12947 x = build3 (COND_EXPR, tkind_type,
12948 fold_build2 (EQ_EXPR, boolean_type_node,
12949 unshare_expr (s), size_zero_node),
12950 build_int_cstu (tkind_type, tkind_zero),
12951 build_int_cstu (tkind_type, tkind));
12952 }
12953 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
12954 if (nc && nc != c)
12955 c = nc;
12956 break;
12957
12958 case OMP_CLAUSE_FIRSTPRIVATE:
12959 if (is_gimple_omp_oacc (ctx->stmt))
12960 goto oacc_firstprivate_map;
12961 ovar = OMP_CLAUSE_DECL (c);
12962 if (omp_is_reference (ovar))
12963 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12964 else
12965 talign = DECL_ALIGN_UNIT (ovar);
12966 var = lookup_decl_in_outer_ctx (ovar, ctx);
12967 x = build_sender_ref (ovar, ctx);
12968 tkind = GOMP_MAP_FIRSTPRIVATE;
12969 type = TREE_TYPE (ovar);
12970 if (omp_is_reference (ovar))
12971 type = TREE_TYPE (type);
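/* Scalars no wider than a pointer are passed by value:
   GOMP_MAP_FIRSTPRIVATE_INT sends the value itself in the address slot,
   so the size entry appended below can be 0.  */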
12972 if ((INTEGRAL_TYPE_P (type)
12973 && TYPE_PRECISION (type) <= POINTER_SIZE)
12974 || TREE_CODE (type) == POINTER_TYPE)
12975 {
12976 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12977 tree t = var;
12978 if (omp_is_reference (var))
12979 t = build_simple_mem_ref (var);
12980 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12981 suppress_warning (var);
12982 if (TREE_CODE (type) != POINTER_TYPE)
12983 t = fold_convert (pointer_sized_int_node, t);
12984 t = fold_convert (TREE_TYPE (x), t);
12985 gimplify_assign (x, t, &ilist);
12986 }
12987 else if (omp_is_reference (var))
12988 gimplify_assign (x, var, &ilist);
12989 else if (is_gimple_reg (var))
12990 {
12991 tree avar = create_tmp_var (TREE_TYPE (var));
12992 mark_addressable (avar);
12993 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12994 suppress_warning (var);
12995 gimplify_assign (avar, var, &ilist);
12996 avar = build_fold_addr_expr (avar);
12997 gimplify_assign (x, avar, &ilist);
12998 }
12999 else
13000 {
13001 var = build_fold_addr_expr (var);
13002 gimplify_assign (x, var, &ilist);
13003 }
13004 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13005 s = size_int (0);
13006 else if (omp_is_reference (ovar))
13007 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13008 else
13009 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13010 s = fold_convert (size_type_node, s);
13011 purpose = size_int (map_idx++);
13012 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13013 if (TREE_CODE (s) != INTEGER_CST)
13014 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13015
13016 gcc_checking_assert (tkind
13017 < (HOST_WIDE_INT_C (1U) << talign_shift));
13018 talign = ceil_log2 (talign);
13019 tkind |= talign << talign_shift;
13020 gcc_checking_assert (tkind
13021 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13022 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13023 build_int_cstu (tkind_type, tkind));
13024 break;
13025
13026 case OMP_CLAUSE_USE_DEVICE_PTR:
13027 case OMP_CLAUSE_USE_DEVICE_ADDR:
13028 case OMP_CLAUSE_IS_DEVICE_PTR:
13029 ovar = OMP_CLAUSE_DECL (c);
13030 var = lookup_decl_in_outer_ctx (ovar, ctx);
13031
13032 if (lang_hooks.decls.omp_array_data (ovar, true))
13033 {
13034 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13035 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13036 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13037 }
13038 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13039 {
13040 tkind = GOMP_MAP_USE_DEVICE_PTR;
13041 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13042 }
13043 else
13044 {
13045 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13046 x = build_sender_ref (ovar, ctx);
13047 }
13048
13049 if (is_gimple_omp_oacc (ctx->stmt))
13050 {
13051 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13052
13053 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13054 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13055 }
13056
13057 type = TREE_TYPE (ovar);
13058 if (lang_hooks.decls.omp_array_data (ovar, true))
13059 var = lang_hooks.decls.omp_array_data (ovar, false);
13060 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13061 && !omp_is_reference (ovar)
13062 && !omp_is_allocatable_or_ptr (ovar))
13063 || TREE_CODE (type) == ARRAY_TYPE)
13064 var = build_fold_addr_expr (var);
13065 else
13066 {
13067 if (omp_is_reference (ovar)
13068 || omp_check_optional_argument (ovar, false)
13069 || omp_is_allocatable_or_ptr (ovar))
13070 {
13071 type = TREE_TYPE (type);
13072 if (POINTER_TYPE_P (type)
13073 && TREE_CODE (type) != ARRAY_TYPE
13074 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13075 && !omp_is_allocatable_or_ptr (ovar))
13076 || (omp_is_reference (ovar)
13077 && omp_is_allocatable_or_ptr (ovar))))
13078 var = build_simple_mem_ref (var);
13079 var = fold_convert (TREE_TYPE (x), var);
13080 }
13081 }
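/* For (Fortran) optional dummy arguments, test presence at run time; an
   absent argument sends a NULL pointer instead, sketching
     x = present ? <address of VAR> : NULL;  */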
13082 tree present;
13083 present = omp_check_optional_argument (ovar, true);
13084 if (present)
13085 {
13086 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13087 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13088 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13089 tree new_x = unshare_expr (x);
13090 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13091 fb_rvalue);
13092 gcond *cond = gimple_build_cond_from_tree (present,
13093 notnull_label,
13094 null_label);
13095 gimple_seq_add_stmt (&ilist, cond);
13096 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13097 gimplify_assign (new_x, null_pointer_node, &ilist);
13098 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13099 gimple_seq_add_stmt (&ilist,
13100 gimple_build_label (notnull_label));
13101 gimplify_assign (x, var, &ilist);
13102 gimple_seq_add_stmt (&ilist,
13103 gimple_build_label (opt_arg_label));
13104 }
13105 else
13106 gimplify_assign (x, var, &ilist);
13107 s = size_int (0);
13108 purpose = size_int (map_idx++);
13109 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13110 gcc_checking_assert (tkind
13111 < (HOST_WIDE_INT_C (1U) << talign_shift));
13112 gcc_checking_assert (tkind
13113 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13114 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13115 build_int_cstu (tkind_type, tkind));
13116 break;
13117 }
13118
13119 gcc_assert (map_idx == map_cnt);
13120
13121 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13122 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13123 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13124 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
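/* If every size and kind was a compile-time constant, the two arrays
   stay TREE_STATIC and are emitted with the initializers built above;
   otherwise the loop below materializes them on the stack via DECL_EXPR
   and clobbers them once the region is done.  */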
13125 for (int i = 1; i <= 2; i++)
13126 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13127 {
13128 gimple_seq initlist = NULL;
13129 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13130 TREE_VEC_ELT (t, i)),
13131 &initlist, true, NULL_TREE);
13132 gimple_seq_add_seq (&ilist, initlist);
13133
13134 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13135 gimple_seq_add_stmt (&olist,
13136 gimple_build_assign (TREE_VEC_ELT (t, i),
13137 clobber));
13138 }
13139 else if (omp_maybe_offloaded_ctx (ctx->outer))
13140 {
13141 tree id = get_identifier ("omp declare target");
13142 tree decl = TREE_VEC_ELT (t, i);
13143 DECL_ATTRIBUTES (decl)
13144 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13145 varpool_node *node = varpool_node::get (decl);
13146 if (node)
13147 {
13148 node->offloadable = 1;
13149 if (ENABLE_OFFLOADING)
13150 {
13151 g->have_offload = true;
13152 vec_safe_push (offload_vars, t);
13153 }
13154 }
13155 }
13156
13157 tree clobber = build_clobber (ctx->record_type);
13158 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13159 clobber));
13160 }
13161
13162 /* Once all the expansions are done, sequence all the different
13163 fragments inside gimple_omp_body. */
13164
13165 new_body = NULL;
13166
13167 if (offloaded
13168 && ctx->record_type)
13169 {
13170 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13171 /* fixup_child_record_type might have changed receiver_decl's type. */
13172 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13173 gimple_seq_add_stmt (&new_body,
13174 gimple_build_assign (ctx->receiver_decl, t));
13175 }
13176 gimple_seq_add_seq (&new_body, fplist);
13177
13178 if (offloaded || data_region)
13179 {
13180 tree prev = NULL_TREE;
13181 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13182 switch (OMP_CLAUSE_CODE (c))
13183 {
13184 tree var, x;
13185 default:
13186 break;
13187 case OMP_CLAUSE_FIRSTPRIVATE:
13188 if (is_gimple_omp_oacc (ctx->stmt))
13189 break;
13190 var = OMP_CLAUSE_DECL (c);
13191 if (omp_is_reference (var)
13192 || is_gimple_reg_type (TREE_TYPE (var)))
13193 {
13194 tree new_var = lookup_decl (var, ctx);
13195 tree type;
13196 type = TREE_TYPE (var);
13197 if (omp_is_reference (var))
13198 type = TREE_TYPE (type);
13199 if ((INTEGRAL_TYPE_P (type)
13200 && TYPE_PRECISION (type) <= POINTER_SIZE)
13201 || TREE_CODE (type) == POINTER_TYPE)
13202 {
13203 x = build_receiver_ref (var, false, ctx);
13204 if (TREE_CODE (type) != POINTER_TYPE)
13205 x = fold_convert (pointer_sized_int_node, x);
13206 x = fold_convert (type, x);
13207 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13208 fb_rvalue);
13209 if (omp_is_reference (var))
13210 {
13211 tree v = create_tmp_var_raw (type, get_name (var));
13212 gimple_add_tmp_var (v);
13213 TREE_ADDRESSABLE (v) = 1;
13214 gimple_seq_add_stmt (&new_body,
13215 gimple_build_assign (v, x));
13216 x = build_fold_addr_expr (v);
13217 }
13218 gimple_seq_add_stmt (&new_body,
13219 gimple_build_assign (new_var, x));
13220 }
13221 else
13222 {
13223 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
13224 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13225 fb_rvalue);
13226 gimple_seq_add_stmt (&new_body,
13227 gimple_build_assign (new_var, x));
13228 }
13229 }
13230 else if (is_variable_sized (var))
13231 {
13232 tree pvar = DECL_VALUE_EXPR (var);
13233 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13234 pvar = TREE_OPERAND (pvar, 0);
13235 gcc_assert (DECL_P (pvar));
13236 tree new_var = lookup_decl (pvar, ctx);
13237 x = build_receiver_ref (var, false, ctx);
13238 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13239 gimple_seq_add_stmt (&new_body,
13240 gimple_build_assign (new_var, x));
13241 }
13242 break;
13243 case OMP_CLAUSE_PRIVATE:
13244 if (is_gimple_omp_oacc (ctx->stmt))
13245 break;
13246 var = OMP_CLAUSE_DECL (c);
13247 if (omp_is_reference (var))
13248 {
13249 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13250 tree new_var = lookup_decl (var, ctx);
13251 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13252 if (TREE_CONSTANT (x))
13253 {
13254 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13255 get_name (var));
13256 gimple_add_tmp_var (x);
13257 TREE_ADDRESSABLE (x) = 1;
13258 x = build_fold_addr_expr_loc (clause_loc, x);
13259 }
13260 else
13261 break;
13262
13263 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13264 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13265 gimple_seq_add_stmt (&new_body,
13266 gimple_build_assign (new_var, x));
13267 }
13268 break;
13269 case OMP_CLAUSE_USE_DEVICE_PTR:
13270 case OMP_CLAUSE_USE_DEVICE_ADDR:
13271 case OMP_CLAUSE_IS_DEVICE_PTR:
13272 tree new_var;
13273 gimple_seq assign_body;
13274 bool is_array_data;
13275 bool do_optional_check;
13276 assign_body = NULL;
13277 do_optional_check = false;
13278 var = OMP_CLAUSE_DECL (c);
13279 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13280
13281 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13282 x = build_sender_ref (is_array_data
13283 ? (splay_tree_key) &DECL_NAME (var)
13284 : (splay_tree_key) &DECL_UID (var), ctx);
13285 else
13286 x = build_receiver_ref (var, false, ctx);
13287
13288 if (is_array_data)
13289 {
13290 bool is_ref = omp_is_reference (var);
13291 do_optional_check = true;
13292 /* First, we copy the descriptor data from the host; then
13293 we update its data to point to the target address. */
13294 new_var = lookup_decl (var, ctx);
13295 new_var = DECL_VALUE_EXPR (new_var);
13296 tree v = new_var;
13297
13298 if (is_ref)
13299 {
13300 var = build_fold_indirect_ref (var);
13301 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
13302 fb_rvalue);
13303 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
13304 gimple_add_tmp_var (v);
13305 TREE_ADDRESSABLE (v) = 1;
13306 gimple_seq_add_stmt (&assign_body,
13307 gimple_build_assign (v, var));
13308 tree rhs = build_fold_addr_expr (v);
13309 gimple_seq_add_stmt (&assign_body,
13310 gimple_build_assign (new_var, rhs));
13311 }
13312 else
13313 gimple_seq_add_stmt (&assign_body,
13314 gimple_build_assign (new_var, var));
13315
13316 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13317 gcc_assert (v2);
13318 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13319 gimple_seq_add_stmt (&assign_body,
13320 gimple_build_assign (v2, x));
13321 }
13322 else if (is_variable_sized (var))
13323 {
13324 tree pvar = DECL_VALUE_EXPR (var);
13325 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13326 pvar = TREE_OPERAND (pvar, 0);
13327 gcc_assert (DECL_P (pvar));
13328 new_var = lookup_decl (pvar, ctx);
13329 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13330 gimple_seq_add_stmt (&assign_body,
13331 gimple_build_assign (new_var, x));
13332 }
13333 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13334 && !omp_is_reference (var)
13335 && !omp_is_allocatable_or_ptr (var))
13336 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13337 {
13338 new_var = lookup_decl (var, ctx);
13339 new_var = DECL_VALUE_EXPR (new_var);
13340 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13341 new_var = TREE_OPERAND (new_var, 0);
13342 gcc_assert (DECL_P (new_var));
13343 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13344 gimple_seq_add_stmt (&assign_body,
13345 gimple_build_assign (new_var, x));
13346 }
13347 else
13348 {
13349 tree type = TREE_TYPE (var);
13350 new_var = lookup_decl (var, ctx);
13351 if (omp_is_reference (var))
13352 {
13353 type = TREE_TYPE (type);
13354 if (POINTER_TYPE_P (type)
13355 && TREE_CODE (type) != ARRAY_TYPE
13356 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13357 || (omp_is_reference (var)
13358 && omp_is_allocatable_or_ptr (var))))
13359 {
13360 tree v = create_tmp_var_raw (type, get_name (var));
13361 gimple_add_tmp_var (v);
13362 TREE_ADDRESSABLE (v) = 1;
13363 x = fold_convert (type, x);
13364 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13365 fb_rvalue);
13366 gimple_seq_add_stmt (&assign_body,
13367 gimple_build_assign (v, x));
13368 x = build_fold_addr_expr (v);
13369 do_optional_check = true;
13370 }
13371 }
13372 new_var = DECL_VALUE_EXPR (new_var);
13373 x = fold_convert (TREE_TYPE (new_var), x);
13374 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13375 gimple_seq_add_stmt (&assign_body,
13376 gimple_build_assign (new_var, x));
13377 }
13378 tree present;
13379 present = (do_optional_check
13380 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13381 : NULL_TREE);
13382 if (present)
13383 {
13384 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13385 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13386 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13387 glabel *null_glabel = gimple_build_label (null_label);
13388 glabel *notnull_glabel = gimple_build_label (notnull_label);
13389 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13390 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13391 fb_rvalue);
13392 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13393 fb_rvalue);
13394 gcond *cond = gimple_build_cond_from_tree (present,
13395 notnull_label,
13396 null_label);
13397 gimple_seq_add_stmt (&new_body, cond);
13398 gimple_seq_add_stmt (&new_body, null_glabel);
13399 gimplify_assign (new_var, null_pointer_node, &new_body);
13400 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13401 gimple_seq_add_stmt (&new_body, notnull_glabel);
13402 gimple_seq_add_seq (&new_body, assign_body);
13403 gimple_seq_add_stmt (&new_body,
13404 gimple_build_label (opt_arg_label));
13405 }
13406 else
13407 gimple_seq_add_seq (&new_body, assign_body);
13408 break;
13409 }
13410 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
13411 so that any firstprivate vars holding the OMP_CLAUSE_SIZE are
13412 already handled. Similarly for OMP_CLAUSE_PRIVATE on VLAs
13413 or references to VLAs. */
13414 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13415 switch (OMP_CLAUSE_CODE (c))
13416 {
13417 tree var;
13418 default:
13419 break;
13420 case OMP_CLAUSE_MAP:
13421 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13422 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13423 {
13424 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13425 poly_int64 offset = 0;
13426 gcc_assert (prev);
13427 var = OMP_CLAUSE_DECL (c);
13428 if (DECL_P (var)
13429 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13430 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13431 ctx))
13432 && varpool_node::get_create (var)->offloadable)
13433 break;
13434 if (TREE_CODE (var) == INDIRECT_REF
13435 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13436 var = TREE_OPERAND (var, 0);
13437 if (TREE_CODE (var) == COMPONENT_REF)
13438 {
13439 var = get_addr_base_and_unit_offset (var, &offset);
13440 gcc_assert (var != NULL_TREE && DECL_P (var));
13441 }
13442 else if (DECL_SIZE (var)
13443 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13444 {
13445 tree var2 = DECL_VALUE_EXPR (var);
13446 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13447 var2 = TREE_OPERAND (var2, 0);
13448 gcc_assert (DECL_P (var2));
13449 var = var2;
13450 }
13451 tree new_var = lookup_decl (var, ctx), x;
13452 tree type = TREE_TYPE (new_var);
13453 bool is_ref;
13454 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13455 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13456 == COMPONENT_REF))
13457 {
13458 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13459 is_ref = true;
13460 new_var = build2 (MEM_REF, type,
13461 build_fold_addr_expr (new_var),
13462 build_int_cst (build_pointer_type (type),
13463 offset));
13464 }
13465 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13466 {
13467 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13468 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13469 new_var = build2 (MEM_REF, type,
13470 build_fold_addr_expr (new_var),
13471 build_int_cst (build_pointer_type (type),
13472 offset));
13473 }
13474 else
13475 is_ref = omp_is_reference (var);
13476 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13477 is_ref = false;
13478 bool ref_to_array = false;
13479 if (is_ref)
13480 {
13481 type = TREE_TYPE (type);
13482 if (TREE_CODE (type) == ARRAY_TYPE)
13483 {
13484 type = build_pointer_type (type);
13485 ref_to_array = true;
13486 }
13487 }
13488 else if (TREE_CODE (type) == ARRAY_TYPE)
13489 {
13490 tree decl2 = DECL_VALUE_EXPR (new_var);
13491 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13492 decl2 = TREE_OPERAND (decl2, 0);
13493 gcc_assert (DECL_P (decl2));
13494 new_var = decl2;
13495 type = TREE_TYPE (new_var);
13496 }
13497 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13498 x = fold_convert_loc (clause_loc, type, x);
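/* For FIRSTPRIVATE_{POINTER,REFERENCE}, OMP_CLAUSE_SIZE carries a bias
   rather than a size; subtract it from the received pointer to recover
   the base address (a POINTER_PLUS_EXPR with the negated bias).  */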
13499 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13500 {
13501 tree bias = OMP_CLAUSE_SIZE (c);
13502 if (DECL_P (bias))
13503 bias = lookup_decl (bias, ctx);
13504 bias = fold_convert_loc (clause_loc, sizetype, bias);
13505 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13506 bias);
13507 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13508 TREE_TYPE (x), x, bias);
13509 }
13510 if (ref_to_array)
13511 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13512 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13513 if (is_ref && !ref_to_array)
13514 {
13515 tree t = create_tmp_var_raw (type, get_name (var));
13516 gimple_add_tmp_var (t);
13517 TREE_ADDRESSABLE (t) = 1;
13518 gimple_seq_add_stmt (&new_body,
13519 gimple_build_assign (t, x));
13520 x = build_fold_addr_expr_loc (clause_loc, t);
13521 }
13522 gimple_seq_add_stmt (&new_body,
13523 gimple_build_assign (new_var, x));
13524 prev = NULL_TREE;
13525 }
13526 else if (OMP_CLAUSE_CHAIN (c)
13527 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13528 == OMP_CLAUSE_MAP
13529 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13530 == GOMP_MAP_FIRSTPRIVATE_POINTER
13531 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13532 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13533 prev = c;
13534 break;
13535 case OMP_CLAUSE_PRIVATE:
13536 var = OMP_CLAUSE_DECL (c);
13537 if (is_variable_sized (var))
13538 {
13539 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13540 tree new_var = lookup_decl (var, ctx);
13541 tree pvar = DECL_VALUE_EXPR (var);
13542 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13543 pvar = TREE_OPERAND (pvar, 0);
13544 gcc_assert (DECL_P (pvar));
13545 tree new_pvar = lookup_decl (pvar, ctx);
13546 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13547 tree al = size_int (DECL_ALIGN (var));
13548 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
13549 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13550 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
13551 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13552 gimple_seq_add_stmt (&new_body,
13553 gimple_build_assign (new_pvar, x));
13554 }
13555 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
13556 {
13557 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13558 tree new_var = lookup_decl (var, ctx);
13559 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13560 if (TREE_CONSTANT (x))
13561 break;
13562 else
13563 {
13564 tree atmp
13565 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13566 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13567 tree al = size_int (TYPE_ALIGN (rtype));
13568 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13569 }
13570
13571 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13572 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13573 gimple_seq_add_stmt (&new_body,
13574 gimple_build_assign (new_var, x));
13575 }
13576 break;
13577 }
13578
13579 gimple_seq fork_seq = NULL;
13580 gimple_seq join_seq = NULL;
13581
13582 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13583 {
13584 /* If there are reductions on the offloaded region itself, treat
13585 them as a dummy GANG loop. */
13586 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13587
13588 gcall *private_marker = lower_oacc_private_marker (ctx);
13589
13590 if (private_marker)
13591 gimple_call_set_arg (private_marker, 2, level);
13592
13593 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13594 false, NULL, private_marker, NULL, &fork_seq,
13595 &join_seq, ctx);
13596 }
13597
13598 gimple_seq_add_seq (&new_body, fork_seq);
13599 gimple_seq_add_seq (&new_body, tgt_body);
13600 gimple_seq_add_seq (&new_body, join_seq);
13601
13602 if (offloaded)
13603 {
13604 new_body = maybe_catch_exception (new_body);
13605 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13606 }
13607 gimple_omp_set_body (stmt, new_body);
13608 }
13609
13610 bind = gimple_build_bind (NULL, NULL,
13611 tgt_bind ? gimple_bind_block (tgt_bind)
13612 : NULL_TREE);
13613 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
13614 gimple_bind_add_seq (bind, ilist);
13615 gimple_bind_add_stmt (bind, stmt);
13616 gimple_bind_add_seq (bind, olist);
13617
13618 pop_gimplify_context (NULL);
13619
13620 if (dep_bind)
13621 {
13622 gimple_bind_add_seq (dep_bind, dep_ilist);
13623 gimple_bind_add_stmt (dep_bind, bind);
13624 gimple_bind_add_seq (dep_bind, dep_olist);
13625 pop_gimplify_context (dep_bind);
13626 }
13627 }
13628
13629 /* Expand code for an OpenMP teams directive. */
13630
13631 static void
13632 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13633 {
13634 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
13635 push_gimplify_context ();
13636
13637 tree block = make_node (BLOCK);
13638 gbind *bind = gimple_build_bind (NULL, NULL, block);
13639 gsi_replace (gsi_p, bind, true);
13640 gimple_seq bind_body = NULL;
13641 gimple_seq dlist = NULL;
13642 gimple_seq olist = NULL;
13643
13644 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13645 OMP_CLAUSE_NUM_TEAMS);
13646 if (num_teams == NULL_TREE)
13647 num_teams = build_int_cst (unsigned_type_node, 0);
13648 else
13649 {
13650 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
13651 num_teams = fold_convert (unsigned_type_node, num_teams);
13652 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
13653 }
13654 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13655 OMP_CLAUSE_THREAD_LIMIT);
13656 if (thread_limit == NULL_TREE)
13657 thread_limit = build_int_cst (unsigned_type_node, 0);
13658 else
13659 {
13660 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
13661 thread_limit = fold_convert (unsigned_type_node, thread_limit);
13662 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
13663 fb_rvalue);
13664 }
13665
13666 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
13667 &bind_body, &dlist, ctx, NULL);
13668 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
13669 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
13670 NULL, ctx);
13671 gimple_seq_add_stmt (&bind_body, teams_stmt);
13672
13673 location_t loc = gimple_location (teams_stmt);
13674 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
13675 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
13676 gimple_set_location (call, loc);
13677 gimple_seq_add_stmt (&bind_body, call);
13678
13679 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
13680 gimple_omp_set_body (teams_stmt, NULL);
13681 gimple_seq_add_seq (&bind_body, olist);
13682 gimple_seq_add_seq (&bind_body, dlist);
13683 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
13684 gimple_bind_set_body (bind, bind_body);
13685
13686 pop_gimplify_context (bind);
13687
13688 gimple_bind_append_vars (bind, ctx->block_vars);
13689 BLOCK_VARS (block) = ctx->block_vars;
13690 if (BLOCK_VARS (block))
13691 TREE_USED (block) = 1;
13692 }
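/* Editor's sketch (illustrative only): for

     #pragma omp teams num_teams(4)

   the bind body built above is roughly

     GIMPLE_OMP_TEAMS <clauses>
     __builtin_GOMP_teams (4, 0);
     ...lowered teams body...
     GIMPLE_OMP_RETURN

   with 0 standing in for the absent thread_limit clause.  */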
13693
13694 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
13695 regimplified. If DATA is non-NULL, lower_omp_1 is being called
13696 outside of an OMP context, but with task_shared_vars set. */
13697
13698 static tree
13699 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
13700 void *data)
13701 {
13702 tree t = *tp;
13703
13704 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
13705 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
13706 && data == NULL
13707 && DECL_HAS_VALUE_EXPR_P (t))
13708 return t;
13709
13710 if (task_shared_vars
13711 && DECL_P (t)
13712 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
13713 return t;
13714
13715 /* If a global variable has been privatized, TREE_CONSTANT on
13716 ADDR_EXPR might be wrong. */
13717 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
13718 recompute_tree_invariant_for_addr_expr (t);
13719
13720 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
13721 return NULL_TREE;
13722 }
13723
13724 /* Data to be communicated between lower_omp_regimplify_operands and
13725 lower_omp_regimplify_operands_p. */
13726
13727 struct lower_omp_regimplify_operands_data
13728 {
13729 omp_context *ctx;
13730 vec<tree> *decls;
13731 };
13732
13733 /* Helper function for lower_omp_regimplify_operands. Find
13734 omp_member_access_dummy_var vars and adjust temporarily their
13735 DECL_VALUE_EXPRs if needed. */
13736
13737 static tree
13738 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
13739 void *data)
13740 {
13741 tree t = omp_member_access_dummy_var (*tp);
13742 if (t)
13743 {
13744 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
13745 lower_omp_regimplify_operands_data *ldata
13746 = (lower_omp_regimplify_operands_data *) wi->info;
13747 tree o = maybe_lookup_decl (t, ldata->ctx);
13748 if (o != t)
13749 {
13750 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
13751 ldata->decls->safe_push (*tp);
13752 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
13753 SET_DECL_VALUE_EXPR (*tp, v);
13754 }
13755 }
13756 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
13757 return NULL_TREE;
13758 }
13759
13760 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
13761 of omp_member_access_dummy_var vars during regimplification. */
13762
13763 static void
13764 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
13765 gimple_stmt_iterator *gsi_p)
13766 {
13767 auto_vec<tree, 10> decls;
13768 if (ctx)
13769 {
13770 struct walk_stmt_info wi;
13771 memset (&wi, '\0', sizeof (wi));
13772 struct lower_omp_regimplify_operands_data data;
13773 data.ctx = ctx;
13774 data.decls = &decls;
13775 wi.info = &data;
13776 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
13777 }
13778 gimple_regimplify_operands (stmt, gsi_p);
13779 while (!decls.is_empty ())
13780 {
13781 tree t = decls.pop ();
13782 tree v = decls.pop ();
13783 SET_DECL_VALUE_EXPR (t, v);
13784 }
13785 }
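
/* Sketch of the effect: in a C++ member function, a data-sharing
   clause on a non-static data member is represented by an artificial
   dummy VAR_DECL whose DECL_VALUE_EXPR dereferences the enclosing
   object.  The walk above temporarily redirects that expression to
   the privatized copy found in CTX before regimplifying; the loop at
   the end then restores the original DECL_VALUE_EXPRs.  */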
13786
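/* Lower the statement pointed to by GSI_P.  CTX is the innermost
   enclosing OMP context, or NULL when only task_shared_vars
   processing is needed.  */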
13787 static void
13788 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13789 {
13790 gimple *stmt = gsi_stmt (*gsi_p);
13791 struct walk_stmt_info wi;
13792 gcall *call_stmt;
13793
13794 if (gimple_has_location (stmt))
13795 input_location = gimple_location (stmt);
13796
13797 if (task_shared_vars)
13798 memset (&wi, '\0', sizeof (wi));
13799
13800 /* If we have issued syntax errors, avoid doing any heavy lifting.
13801 Just replace the OMP directives with a NOP to avoid
13802 confusing RTL expansion. */
13803 if (seen_error () && is_gimple_omp (stmt))
13804 {
13805 gsi_replace (gsi_p, gimple_build_nop (), true);
13806 return;
13807 }
13808
13809 switch (gimple_code (stmt))
13810 {
13811 case GIMPLE_COND:
13812 {
13813 gcond *cond_stmt = as_a <gcond *> (stmt);
13814 if ((ctx || task_shared_vars)
13815 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
13816 lower_omp_regimplify_p,
13817 ctx ? NULL : &wi, NULL)
13818 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
13819 lower_omp_regimplify_p,
13820 ctx ? NULL : &wi, NULL)))
13821 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
13822 }
13823 break;
13824 case GIMPLE_CATCH:
13825 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
13826 break;
13827 case GIMPLE_EH_FILTER:
13828 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
13829 break;
13830 case GIMPLE_TRY:
13831 lower_omp (gimple_try_eval_ptr (stmt), ctx);
13832 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
13833 break;
13834 case GIMPLE_TRANSACTION:
13835 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
13836 ctx);
13837 break;
13838 case GIMPLE_BIND:
13839 if (ctx && is_gimple_omp_oacc (ctx->stmt))
13840 {
13841 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
13842 oacc_privatization_scan_decl_chain (ctx, vars);
13843 }
13844 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
13845 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
13846 break;
13847 case GIMPLE_OMP_PARALLEL:
13848 case GIMPLE_OMP_TASK:
13849 ctx = maybe_lookup_ctx (stmt);
13850 gcc_assert (ctx);
13851 if (ctx->cancellable)
13852 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
13853 lower_omp_taskreg (gsi_p, ctx);
13854 break;
13855 case GIMPLE_OMP_FOR:
13856 ctx = maybe_lookup_ctx (stmt);
13857 gcc_assert (ctx);
13858 if (ctx->cancellable)
13859 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
13860 lower_omp_for (gsi_p, ctx);
13861 break;
13862 case GIMPLE_OMP_SECTIONS:
13863 ctx = maybe_lookup_ctx (stmt);
13864 gcc_assert (ctx);
13865 if (ctx->cancellable)
13866 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
13867 lower_omp_sections (gsi_p, ctx);
13868 break;
13869 case GIMPLE_OMP_SINGLE:
13870 ctx = maybe_lookup_ctx (stmt);
13871 gcc_assert (ctx);
13872 lower_omp_single (gsi_p, ctx);
13873 break;
13874 case GIMPLE_OMP_MASTER:
13875 ctx = maybe_lookup_ctx (stmt);
13876 gcc_assert (ctx);
13877 lower_omp_master (gsi_p, ctx);
13878 break;
13879 case GIMPLE_OMP_TASKGROUP:
13880 ctx = maybe_lookup_ctx (stmt);
13881 gcc_assert (ctx);
13882 lower_omp_taskgroup (gsi_p, ctx);
13883 break;
13884 case GIMPLE_OMP_ORDERED:
13885 ctx = maybe_lookup_ctx (stmt);
13886 gcc_assert (ctx);
13887 lower_omp_ordered (gsi_p, ctx);
13888 break;
13889 case GIMPLE_OMP_SCAN:
13890 ctx = maybe_lookup_ctx (stmt);
13891 gcc_assert (ctx);
13892 lower_omp_scan (gsi_p, ctx);
13893 break;
13894 case GIMPLE_OMP_CRITICAL:
13895 ctx = maybe_lookup_ctx (stmt);
13896 gcc_assert (ctx);
13897 lower_omp_critical (gsi_p, ctx);
13898 break;
13899 case GIMPLE_OMP_ATOMIC_LOAD:
13900 if ((ctx || task_shared_vars)
13901 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
13902 as_a <gomp_atomic_load *> (stmt)),
13903 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
13904 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
13905 break;
13906 case GIMPLE_OMP_TARGET:
13907 ctx = maybe_lookup_ctx (stmt);
13908 gcc_assert (ctx);
13909 lower_omp_target (gsi_p, ctx);
13910 break;
13911 case GIMPLE_OMP_TEAMS:
13912 ctx = maybe_lookup_ctx (stmt);
13913 gcc_assert (ctx);
13914 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
13915 lower_omp_taskreg (gsi_p, ctx);
13916 else
13917 lower_omp_teams (gsi_p, ctx);
13918 break;
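/* A sketch of the cancellation handling below: in a cancellable
   region, a call GOMP_barrier () is rewritten into roughly

     tmp = GOMP_barrier_cancel ();
     if (tmp != 0) goto <cancel_label>; else goto <fallthru_label>;
     <fallthru_label>:

   so a cancellation request observed at the barrier branches to the
   region's cancellation label (temporary and label names invented
   for illustration).  */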
13919 case GIMPLE_CALL:
13920 tree fndecl;
13921 call_stmt = as_a <gcall *> (stmt);
13922 fndecl = gimple_call_fndecl (call_stmt);
13923 if (fndecl
13924 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13925 switch (DECL_FUNCTION_CODE (fndecl))
13926 {
13927 case BUILT_IN_GOMP_BARRIER:
13928 if (ctx == NULL)
13929 break;
13930 /* FALLTHRU */
13931 case BUILT_IN_GOMP_CANCEL:
13932 case BUILT_IN_GOMP_CANCELLATION_POINT:
13933 omp_context *cctx;
13934 cctx = ctx;
13935 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
13936 cctx = cctx->outer;
13937 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
13938 if (!cctx->cancellable)
13939 {
13940 if (DECL_FUNCTION_CODE (fndecl)
13941 == BUILT_IN_GOMP_CANCELLATION_POINT)
13942 {
13943 stmt = gimple_build_nop ();
13944 gsi_replace (gsi_p, stmt, false);
13945 }
13946 break;
13947 }
13948 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
13949 {
13950 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
13951 gimple_call_set_fndecl (call_stmt, fndecl);
13952 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
13953 }
13954 tree lhs;
13955 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
13956 gimple_call_set_lhs (call_stmt, lhs);
13957 tree fallthru_label;
13958 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
13959 gimple *g;
13960 g = gimple_build_label (fallthru_label);
13961 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
13962 g = gimple_build_cond (NE_EXPR, lhs,
13963 fold_convert (TREE_TYPE (lhs),
13964 boolean_false_node),
13965 cctx->cancel_label, fallthru_label);
13966 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
13967 break;
13968 default:
13969 break;
13970 }
13971 goto regimplify;
13972
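/* A sketch of the lastprivate (conditional:) handling below: for

     #pragma omp for lastprivate (conditional: x)

   each store to x inside the construct is followed by an assignment
   of the construct's _condtemp_ iteration counter to the conditional
   temporary recorded for x, roughly

     x = ...;
     <cond temp for x> = <_condtemp_ counter>;

   which the lastprivate epilogue compares to pick the store from the
   lexically last iteration (helper names illustrative).  */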
13973 case GIMPLE_ASSIGN:
13974 for (omp_context *up = ctx; up; up = up->outer)
13975 {
13976 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
13977 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
13978 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
13979 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
13980 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
13981 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
13982 && (gimple_omp_target_kind (up->stmt)
13983 == GF_OMP_TARGET_KIND_DATA)))
13984 continue;
13985 else if (!up->lastprivate_conditional_map)
13986 break;
13987 tree lhs = get_base_address (gimple_assign_lhs (stmt));
13988 if (TREE_CODE (lhs) == MEM_REF
13989 && DECL_P (TREE_OPERAND (lhs, 0))
13990 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
13991 0))) == REFERENCE_TYPE)
13992 lhs = TREE_OPERAND (lhs, 0);
13993 if (DECL_P (lhs))
13994 if (tree *v = up->lastprivate_conditional_map->get (lhs))
13995 {
13996 tree clauses;
13997 if (up->combined_into_simd_safelen1)
13998 {
13999 up = up->outer;
14000 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14001 up = up->outer;
14002 }
14003 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14004 clauses = gimple_omp_for_clauses (up->stmt);
14005 else
14006 clauses = gimple_omp_sections_clauses (up->stmt);
14007 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14008 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14009 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14010 OMP_CLAUSE__CONDTEMP_);
14011 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14012 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14013 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14014 }
14015 }
14016 /* FALLTHRU */
14017
14018 default:
14019 regimplify:
14020 if ((ctx || task_shared_vars)
14021 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14022 ctx ? NULL : &wi))
14023 {
14024 /* Just remove clobbers; this should happen only if we have
14025 "privatized" local addressable variables in SIMD regions.
14026 The clobber isn't needed in that case, and gimplifying the
14027 address of the ARRAY_REF into a pointer and creating a
14028 MEM_REF based clobber would create worse code than we get
14029 with the clobber dropped. */
14030 if (gimple_clobber_p (stmt))
14031 {
14032 gsi_replace (gsi_p, gimple_build_nop (), true);
14033 break;
14034 }
14035 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14036 }
14037 break;
14038 }
14039 }
14040
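/* Lower all OMP constructs in the gimple sequence *BODY.  CTX is the
   innermost enclosing OMP context, or NULL at the outermost level.  */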
14041 static void
14042 lower_omp (gimple_seq *body, omp_context *ctx)
14043 {
14044 location_t saved_location = input_location;
14045 gimple_stmt_iterator gsi;
14046 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14047 lower_omp_1 (&gsi, ctx);
14048 /* During gimplification, we haven't folded statements inside offloading
14049 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
14050 if (target_nesting_level || taskreg_nesting_level)
14051 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14052 fold_stmt (&gsi);
14053 input_location = saved_location;
14054 }
14055
14056 /* Main entry point. */
14057
14058 static unsigned int
14059 execute_lower_omp (void)
14060 {
14061 gimple_seq body;
14062 int i;
14063 omp_context *ctx;
14064
14065 /* This pass always runs, to provide PROP_gimple_lomp.
14066 But often, there is nothing to do. */
14067 if (flag_openacc == 0 && flag_openmp == 0
14068 && flag_openmp_simd == 0)
14069 return 0;
14070
14071 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14072 delete_omp_context);
14073
14074 body = gimple_body (current_function_decl);
14075
14076 scan_omp (&body, NULL);
14077 gcc_assert (taskreg_nesting_level == 0);
14078 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14079 finish_taskreg_scan (ctx);
14080 taskreg_contexts.release ();
14081
14082 if (all_contexts->root)
14083 {
14084 if (task_shared_vars)
14085 push_gimplify_context ();
14086 lower_omp (&body, NULL);
14087 if (task_shared_vars)
14088 pop_gimplify_context (NULL);
14089 }
14090
14091 if (all_contexts)
14092 {
14093 splay_tree_delete (all_contexts);
14094 all_contexts = NULL;
14095 }
14096 BITMAP_FREE (task_shared_vars);
14097 BITMAP_FREE (global_nonaddressable_vars);
14098
14099 /* If the current function is a method, remove the artificial dummy
14100 VAR_DECLs created for non-static data member privatization; they aren't
14101 needed for debug info or anything else, have already been replaced
14102 everywhere in the IL, and cause problems with LTO. */
14103 if (DECL_ARGUMENTS (current_function_decl)
14104 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14105 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14106 == POINTER_TYPE))
14107 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14108 return 0;
14109 }
14110
14111 namespace {
14112
14113 const pass_data pass_data_lower_omp =
14114 {
14115 GIMPLE_PASS, /* type */
14116 "omplower", /* name */
14117 OPTGROUP_OMP, /* optinfo_flags */
14118 TV_NONE, /* tv_id */
14119 PROP_gimple_any, /* properties_required */
14120 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
14121 0, /* properties_destroyed */
14122 0, /* todo_flags_start */
14123 0, /* todo_flags_finish */
14124 };
14125
14126 class pass_lower_omp : public gimple_opt_pass
14127 {
14128 public:
14129 pass_lower_omp (gcc::context *ctxt)
14130 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14131 {}
14132
14133 /* opt_pass methods: */
14134 virtual unsigned int execute (function *) { return execute_lower_omp (); }
14135
14136 }; // class pass_lower_omp
14137
14138 } // anon namespace
14139
14140 gimple_opt_pass *
14141 make_pass_lower_omp (gcc::context *ctxt)
14142 {
14143 return new pass_lower_omp (ctxt);
14144 }
14145 \f
14146 /* The following is a utility to diagnose structured block violations.
14147 It is not part of the "omplower" pass, as that's invoked too late. It
14148 should be invoked by the respective front ends after gimplification. */
14149
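/* For example, given

     #pragma omp parallel
     {
       goto out;
     }
     out:;

   pass 1 below records that "out" is defined outside any construct,
   and pass 2 pairs the goto's context (the parallel) with the label's
   context (none) and reports an invalid branch diagnostic.  */
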
14150 static splay_tree all_labels;
14151
14152 /* Check for mismatched contexts and generate an error if needed. Return
14153 true if an error is detected. */
14154
14155 static bool
14156 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14157 gimple *branch_ctx, gimple *label_ctx)
14158 {
14159 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14160 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14161
14162 if (label_ctx == branch_ctx)
14163 return false;
14164
14165 const char* kind = NULL;
14166
14167 if (flag_openacc)
14168 {
14169 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14170 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14171 {
14172 gcc_checking_assert (kind == NULL);
14173 kind = "OpenACC";
14174 }
14175 }
14176 if (kind == NULL)
14177 {
14178 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14179 kind = "OpenMP";
14180 }
14181
14182 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14183 so we could traverse it and issue a correct "exit" or "enter" error
14184 message upon a structured block violation.
14185
14186 We built the context by tree_cons'ing a list, but there is no easy
14187 counterpart in gimple tuples. It seems like far too much work
14188 for issuing exit/enter error messages. If someone really misses the
14189 distinct error message... patches welcome. */
14190
14191 #if 0
14192 /* Try to avoid confusing the user by producing an error message
14193 with correct "exit" or "enter" verbiage. We prefer "exit"
14194 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14195 if (branch_ctx == NULL)
14196 exit_p = false;
14197 else
14198 {
14199 while (label_ctx)
14200 {
14201 if (TREE_VALUE (label_ctx) == branch_ctx)
14202 {
14203 exit_p = false;
14204 break;
14205 }
14206 label_ctx = TREE_CHAIN (label_ctx);
14207 }
14208 }
14209
14210 if (exit_p)
14211 error ("invalid exit from %s structured block", kind);
14212 else
14213 error ("invalid entry to %s structured block", kind);
14214 #endif
14215
14216 /* If it's obvious we have an invalid entry, be specific about the error. */
14217 if (branch_ctx == NULL)
14218 error ("invalid entry to %s structured block", kind);
14219 else
14220 {
14221 /* Otherwise, be vague and lazy, but efficient. */
14222 error ("invalid branch to/from %s structured block", kind);
14223 }
14224
14225 gsi_replace (gsi_p, gimple_build_nop (), false);
14226 return true;
14227 }
14228
14229 /* Pass 1: Create a minimal tree of structured blocks, and record
14230 where each label is found. */
14231
14232 static tree
14233 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14234 struct walk_stmt_info *wi)
14235 {
14236 gimple *context = (gimple *) wi->info;
14237 gimple *inner_context;
14238 gimple *stmt = gsi_stmt (*gsi_p);
14239
14240 *handled_ops_p = true;
14241
14242 switch (gimple_code (stmt))
14243 {
14244 WALK_SUBSTMTS;
14245
14246 case GIMPLE_OMP_PARALLEL:
14247 case GIMPLE_OMP_TASK:
14248 case GIMPLE_OMP_SECTIONS:
14249 case GIMPLE_OMP_SINGLE:
14250 case GIMPLE_OMP_SECTION:
14251 case GIMPLE_OMP_MASTER:
14252 case GIMPLE_OMP_ORDERED:
14253 case GIMPLE_OMP_SCAN:
14254 case GIMPLE_OMP_CRITICAL:
14255 case GIMPLE_OMP_TARGET:
14256 case GIMPLE_OMP_TEAMS:
14257 case GIMPLE_OMP_TASKGROUP:
14258 /* The minimal context here is just the current OMP construct. */
14259 inner_context = stmt;
14260 wi->info = inner_context;
14261 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14262 wi->info = context;
14263 break;
14264
14265 case GIMPLE_OMP_FOR:
14266 inner_context = stmt;
14267 wi->info = inner_context;
14268 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14269 walk them. */
14270 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14271 diagnose_sb_1, NULL, wi);
14272 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14273 wi->info = context;
14274 break;
14275
14276 case GIMPLE_LABEL:
14277 splay_tree_insert (all_labels,
14278 (splay_tree_key) gimple_label_label (
14279 as_a <glabel *> (stmt)),
14280 (splay_tree_value) context);
14281 break;
14282
14283 default:
14284 break;
14285 }
14286
14287 return NULL_TREE;
14288 }
14289
14290 /* Pass 2: Check each branch and see if its context differs from that
14291 of the destination label. */
14292
14293 static tree
14294 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14295 struct walk_stmt_info *wi)
14296 {
14297 gimple *context = (gimple *) wi->info;
14298 splay_tree_node n;
14299 gimple *stmt = gsi_stmt (*gsi_p);
14300
14301 *handled_ops_p = true;
14302
14303 switch (gimple_code (stmt))
14304 {
14305 WALK_SUBSTMTS;
14306
14307 case GIMPLE_OMP_PARALLEL:
14308 case GIMPLE_OMP_TASK:
14309 case GIMPLE_OMP_SECTIONS:
14310 case GIMPLE_OMP_SINGLE:
14311 case GIMPLE_OMP_SECTION:
14312 case GIMPLE_OMP_MASTER:
14313 case GIMPLE_OMP_ORDERED:
14314 case GIMPLE_OMP_SCAN:
14315 case GIMPLE_OMP_CRITICAL:
14316 case GIMPLE_OMP_TARGET:
14317 case GIMPLE_OMP_TEAMS:
14318 case GIMPLE_OMP_TASKGROUP:
14319 wi->info = stmt;
14320 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14321 wi->info = context;
14322 break;
14323
14324 case GIMPLE_OMP_FOR:
14325 wi->info = stmt;
14326 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14327 walk them. */
14328 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
14329 diagnose_sb_2, NULL, wi);
14330 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14331 wi->info = context;
14332 break;
14333
14334 case GIMPLE_COND:
14335 {
14336 gcond *cond_stmt = as_a <gcond *> (stmt);
14337 tree lab = gimple_cond_true_label (cond_stmt);
14338 if (lab)
14339 {
14340 n = splay_tree_lookup (all_labels,
14341 (splay_tree_key) lab);
14342 diagnose_sb_0 (gsi_p, context,
14343 n ? (gimple *) n->value : NULL);
14344 }
14345 lab = gimple_cond_false_label (cond_stmt);
14346 if (lab)
14347 {
14348 n = splay_tree_lookup (all_labels,
14349 (splay_tree_key) lab);
14350 diagnose_sb_0 (gsi_p, context,
14351 n ? (gimple *) n->value : NULL);
14352 }
14353 }
14354 break;
14355
14356 case GIMPLE_GOTO:
14357 {
14358 tree lab = gimple_goto_dest (stmt);
14359 if (TREE_CODE (lab) != LABEL_DECL)
14360 break;
14361
14362 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14363 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
14364 }
14365 break;
14366
14367 case GIMPLE_SWITCH:
14368 {
14369 gswitch *switch_stmt = as_a <gswitch *> (stmt);
14370 unsigned int i;
14371 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
14372 {
14373 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
14374 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14375 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
14376 break;
14377 }
14378 }
14379 break;
14380
14381 case GIMPLE_RETURN:
14382 diagnose_sb_0 (gsi_p, context, NULL);
14383 break;
14384
14385 default:
14386 break;
14387 }
14388
14389 return NULL_TREE;
14390 }
14391
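/* Entry point for the structured block diagnostics: run the two
   walks above over the body of the current function.  */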
14392 static unsigned int
14393 diagnose_omp_structured_block_errors (void)
14394 {
14395 struct walk_stmt_info wi;
14396 gimple_seq body = gimple_body (current_function_decl);
14397
14398 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14399
14400 memset (&wi, 0, sizeof (wi));
14401 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14402
14403 memset (&wi, 0, sizeof (wi));
14404 wi.want_locations = true;
14405 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
14406
14407 gimple_set_body (current_function_decl, body);
14408
14409 splay_tree_delete (all_labels);
14410 all_labels = NULL;
14411
14412 return 0;
14413 }
14414
14415 namespace {
14416
14417 const pass_data pass_data_diagnose_omp_blocks =
14418 {
14419 GIMPLE_PASS, /* type */
14420 "*diagnose_omp_blocks", /* name */
14421 OPTGROUP_OMP, /* optinfo_flags */
14422 TV_NONE, /* tv_id */
14423 PROP_gimple_any, /* properties_required */
14424 0, /* properties_provided */
14425 0, /* properties_destroyed */
14426 0, /* todo_flags_start */
14427 0, /* todo_flags_finish */
14428 };
14429
14430 class pass_diagnose_omp_blocks : public gimple_opt_pass
14431 {
14432 public:
14433 pass_diagnose_omp_blocks (gcc::context *ctxt)
14434 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
14435 {}
14436
14437 /* opt_pass methods: */
14438 virtual bool gate (function *)
14439 {
14440 return flag_openacc || flag_openmp || flag_openmp_simd;
14441 }
14442 virtual unsigned int execute (function *)
14443 {
14444 return diagnose_omp_structured_block_errors ();
14445 }
14446
14447 }; // class pass_diagnose_omp_blocks
14448
14449 } // anon namespace
14450
14451 gimple_opt_pass *
14452 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
14453 {
14454 return new pass_diagnose_omp_blocks (ctxt);
14455 }
14456 \f
14457
14458 #include "gt-omp-low.h"