]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/omp-low.c
[Ada] Disable unwanted warnings in Assertion_Policy(Ignore) mode
[thirdparty/gcc.git] / gcc / omp-low.c
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2020 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "alloc-pool.h"
56 #include "symbol-summary.h"
57 #include "tree-nested.h"
58 #include "context.h"
59 #include "gomp-constants.h"
60 #include "gimple-pretty-print.h"
61 #include "hsa-common.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64
65 /* Lowering of OMP parallel and workshare constructs proceeds in two
66 phases. The first phase scans the function looking for OMP statements
67 and then for variables that must be replaced to satisfy data sharing
68 clauses. The second phase expands code for the constructs, as well as
69 re-gimplifying things when variables have been replaced with complex
70 expressions.
71
72 Final code generation is done by pass_expand_omp. The flowgraph is
73 scanned for regions which are then moved to a new
74 function, to be invoked by the thread library, or offloaded. */
75
76 /* Context structure. Used to store information about each parallel
77 directive in the code. */
78
79 struct omp_context
80 {
81 /* This field must be at the beginning, as we do "inheritance": Some
82 callback functions for tree-inline.c (e.g., omp_copy_decl)
83 receive a copy_body_data pointer that is up-casted to an
84 omp_context pointer. */
85 copy_body_data cb;
86
87 /* The tree of contexts corresponding to the encountered constructs. */
88 struct omp_context *outer;
89 gimple *stmt;
90
91 /* Map variables to fields in a structure that allows communication
92 between sending and receiving threads. */
93 splay_tree field_map;
94 tree record_type;
95 tree sender_decl;
96 tree receiver_decl;
97
98 /* These are used just by task contexts, if task firstprivate fn is
99 needed. srecord_type is used to communicate from the thread
100 that encountered the task construct to task firstprivate fn,
101 record_type is allocated by GOMP_task, initialized by task firstprivate
102 fn and passed to the task body fn. */
103 splay_tree sfield_map;
104 tree srecord_type;
105
106 /* A chain of variables to add to the top-level block surrounding the
107 construct. In the case of a parallel, this is in the child function. */
108 tree block_vars;
109
110 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
111 barriers should jump to during omplower pass. */
112 tree cancel_label;
113
114 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
115 otherwise. */
116 gimple *simt_stmt;
117
118 /* For task reductions registered in this context, a vector containing
119 the length of the private copies block (if constant, otherwise NULL)
120 and then offsets (if constant, otherwise NULL) for each entry. */
121 vec<tree> task_reductions;
122
123 /* A hash map from the reduction clauses to the registered array
124 elts. */
125 hash_map<tree, unsigned> *task_reduction_map;
126
127 /* And a hash map from the lastprivate(conditional:) variables to their
128 corresponding tracking loop iteration variables. */
129 hash_map<tree, tree> *lastprivate_conditional_map;
130
131 /* A tree_list of the reduction clauses in this context. This is
132 only used for checking the consistency of OpenACC reduction
133 clauses in scan_omp_for and is not guaranteed to contain a valid
134 value outside of this function. */
135 tree local_reduction_clauses;
136
137 /* A tree_list of the reduction clauses in outer contexts. This is
138 only used for checking the consistency of OpenACC reduction
139 clauses in scan_omp_for and is not guaranteed to contain a valid
140 value outside of this function. */
141 tree outer_reduction_clauses;
142
143 /* Nesting depth of this context. Used to beautify error messages re
144 invalid gotos. The outermost ctx is depth 1, with depth 0 being
145 reserved for the main body of the function. */
146 int depth;
147
148 /* True if this parallel directive is nested within another. */
149 bool is_nested;
150
151 /* True if this construct can be cancelled. */
152 bool cancellable;
153
154 /* True if lower_omp_1 should look up lastprivate conditional in parent
155 context. */
156 bool combined_into_simd_safelen1;
157
158 /* True if there is nested scan context with inclusive clause. */
159 bool scan_inclusive;
160
161 /* True if there is nested scan context with exclusive clause. */
162 bool scan_exclusive;
163
164 /* True in the second simd loop of for simd with inscan reductions. */
165 bool for_simd_scan_phase;
166
167 /* True if there is order(concurrent) clause on the construct. */
168 bool order_concurrent;
169
170 /* True if there is bind clause on the construct (i.e. a loop construct). */
171 bool loop_p;
172 };
173
174 static splay_tree all_contexts;
175 static int taskreg_nesting_level;
176 static int target_nesting_level;
177 static bitmap task_shared_vars;
178 static bitmap global_nonaddressable_vars;
179 static vec<omp_context *> taskreg_contexts;
180
181 static void scan_omp (gimple_seq *, omp_context *);
182 static tree scan_omp_1_op (tree *, int *, void *);
183
184 #define WALK_SUBSTMTS \
185 case GIMPLE_BIND: \
186 case GIMPLE_TRY: \
187 case GIMPLE_CATCH: \
188 case GIMPLE_EH_FILTER: \
189 case GIMPLE_TRANSACTION: \
190 /* The sub-statements for these should be walked. */ \
191 *handled_ops_p = false; \
192 break;
193
194 /* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
195 region. */
196
197 static bool
198 is_oacc_parallel_or_serial (omp_context *ctx)
199 {
200 enum gimple_code outer_type = gimple_code (ctx->stmt);
201 return ((outer_type == GIMPLE_OMP_TARGET)
202 && ((gimple_omp_target_kind (ctx->stmt)
203 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
204 || (gimple_omp_target_kind (ctx->stmt)
205 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
206 }
207
208 /* Return true if CTX corresponds to an oacc kernels region. */
209
210 static bool
211 is_oacc_kernels (omp_context *ctx)
212 {
213 enum gimple_code outer_type = gimple_code (ctx->stmt);
214 return ((outer_type == GIMPLE_OMP_TARGET)
215 && (gimple_omp_target_kind (ctx->stmt)
216 == GF_OMP_TARGET_KIND_OACC_KERNELS));
217 }
218
219 /* If DECL is the artificial dummy VAR_DECL created for non-static
220 data member privatization, return the underlying "this" parameter,
221 otherwise return NULL. */
222
223 tree
224 omp_member_access_dummy_var (tree decl)
225 {
226 if (!VAR_P (decl)
227 || !DECL_ARTIFICIAL (decl)
228 || !DECL_IGNORED_P (decl)
229 || !DECL_HAS_VALUE_EXPR_P (decl)
230 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
231 return NULL_TREE;
232
233 tree v = DECL_VALUE_EXPR (decl);
234 if (TREE_CODE (v) != COMPONENT_REF)
235 return NULL_TREE;
236
237 while (1)
238 switch (TREE_CODE (v))
239 {
240 case COMPONENT_REF:
241 case MEM_REF:
242 case INDIRECT_REF:
243 CASE_CONVERT:
244 case POINTER_PLUS_EXPR:
245 v = TREE_OPERAND (v, 0);
246 continue;
247 case PARM_DECL:
248 if (DECL_CONTEXT (v) == current_function_decl
249 && DECL_ARTIFICIAL (v)
250 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
251 return v;
252 return NULL_TREE;
253 default:
254 return NULL_TREE;
255 }
256 }
257
258 /* Helper for unshare_and_remap, called through walk_tree. */
259
260 static tree
261 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
262 {
263 tree *pair = (tree *) data;
264 if (*tp == pair[0])
265 {
266 *tp = unshare_expr (pair[1]);
267 *walk_subtrees = 0;
268 }
269 else if (IS_TYPE_OR_DECL_P (*tp))
270 *walk_subtrees = 0;
271 return NULL_TREE;
272 }
273
274 /* Return unshare_expr (X) with all occurrences of FROM
275 replaced with TO. */
276
277 static tree
278 unshare_and_remap (tree x, tree from, tree to)
279 {
280 tree pair[2] = { from, to };
281 x = unshare_expr (x);
282 walk_tree (&x, unshare_and_remap_1, pair, NULL);
283 return x;
284 }
285
286 /* Convenience function for calling scan_omp_1_op on tree operands. */
287
288 static inline tree
289 scan_omp_op (tree *tp, omp_context *ctx)
290 {
291 struct walk_stmt_info wi;
292
293 memset (&wi, 0, sizeof (wi));
294 wi.info = ctx;
295 wi.want_locations = true;
296
297 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
298 }
299
300 static void lower_omp (gimple_seq *, omp_context *);
301 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
302 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
303
304 /* Return true if CTX is for an omp parallel. */
305
306 static inline bool
307 is_parallel_ctx (omp_context *ctx)
308 {
309 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
310 }
311
312
313 /* Return true if CTX is for an omp task. */
314
315 static inline bool
316 is_task_ctx (omp_context *ctx)
317 {
318 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
319 }
320
321
322 /* Return true if CTX is for an omp taskloop. */
323
324 static inline bool
325 is_taskloop_ctx (omp_context *ctx)
326 {
327 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
328 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
329 }
330
331
332 /* Return true if CTX is for a host omp teams. */
333
334 static inline bool
335 is_host_teams_ctx (omp_context *ctx)
336 {
337 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
338 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
339 }
340
341 /* Return true if CTX is for an omp parallel or omp task or host omp teams
342 (the last one is strictly not a task region in OpenMP speak, but we
343 need to treat it similarly). */
344
345 static inline bool
346 is_taskreg_ctx (omp_context *ctx)
347 {
348 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
349 }
350
351 /* Return true if EXPR is variable sized. */
352
353 static inline bool
354 is_variable_sized (const_tree expr)
355 {
356 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
357 }
358
359 /* Lookup variables. The "maybe" form
360 allows for the variable form to not have been entered, otherwise we
361 assert that the variable must have been entered. */
362
363 static inline tree
364 lookup_decl (tree var, omp_context *ctx)
365 {
366 tree *n = ctx->cb.decl_map->get (var);
367 return *n;
368 }
369
370 static inline tree
371 maybe_lookup_decl (const_tree var, omp_context *ctx)
372 {
373 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
374 return n ? *n : NULL_TREE;
375 }
376
377 static inline tree
378 lookup_field (tree var, omp_context *ctx)
379 {
380 splay_tree_node n;
381 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
382 return (tree) n->value;
383 }
384
385 static inline tree
386 lookup_sfield (splay_tree_key key, omp_context *ctx)
387 {
388 splay_tree_node n;
389 n = splay_tree_lookup (ctx->sfield_map
390 ? ctx->sfield_map : ctx->field_map, key);
391 return (tree) n->value;
392 }
393
394 static inline tree
395 lookup_sfield (tree var, omp_context *ctx)
396 {
397 return lookup_sfield ((splay_tree_key) var, ctx);
398 }
399
400 static inline tree
401 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
402 {
403 splay_tree_node n;
404 n = splay_tree_lookup (ctx->field_map, key);
405 return n ? (tree) n->value : NULL_TREE;
406 }
407
408 static inline tree
409 maybe_lookup_field (tree var, omp_context *ctx)
410 {
411 return maybe_lookup_field ((splay_tree_key) var, ctx);
412 }
413
414 /* Return true if DECL should be copied by pointer. SHARED_CTX is
415 the parallel context if DECL is to be shared. */
416
417 static bool
418 use_pointer_for_field (tree decl, omp_context *shared_ctx)
419 {
420 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
421 || TYPE_ATOMIC (TREE_TYPE (decl)))
422 return true;
423
424 /* We can only use copy-in/copy-out semantics for shared variables
425 when we know the value is not accessible from an outer scope. */
426 if (shared_ctx)
427 {
428 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
429
430 /* ??? Trivially accessible from anywhere. But why would we even
431 be passing an address in this case? Should we simply assert
432 this to be false, or should we have a cleanup pass that removes
433 these from the list of mappings? */
434 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
435 return true;
436
437 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
438 without analyzing the expression whether or not its location
439 is accessible to anyone else. In the case of nested parallel
440 regions it certainly may be. */
441 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
442 return true;
443
444 /* Do not use copy-in/copy-out for variables that have their
445 address taken. */
446 if (is_global_var (decl))
447 {
448 /* For file scope vars, track whether we've seen them as
449 non-addressable initially and in that case, keep the same
450 answer for the duration of the pass, even when they are made
451 addressable later on e.g. through reduction expansion. Global
452 variables which weren't addressable before the pass will not
453 have their privatized copies address taken. See PR91216. */
454 if (!TREE_ADDRESSABLE (decl))
455 {
456 if (!global_nonaddressable_vars)
457 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
458 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
459 }
460 else if (!global_nonaddressable_vars
461 || !bitmap_bit_p (global_nonaddressable_vars,
462 DECL_UID (decl)))
463 return true;
464 }
465 else if (TREE_ADDRESSABLE (decl))
466 return true;
467
468 /* lower_send_shared_vars only uses copy-in, but not copy-out
469 for these. */
470 if (TREE_READONLY (decl)
471 || ((TREE_CODE (decl) == RESULT_DECL
472 || TREE_CODE (decl) == PARM_DECL)
473 && DECL_BY_REFERENCE (decl)))
474 return false;
475
476 /* Disallow copy-in/out in nested parallel if
477 decl is shared in outer parallel, otherwise
478 each thread could store the shared variable
479 in its own copy-in location, making the
480 variable no longer really shared. */
481 if (shared_ctx->is_nested)
482 {
483 omp_context *up;
484
485 for (up = shared_ctx->outer; up; up = up->outer)
486 if ((is_taskreg_ctx (up)
487 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
488 && is_gimple_omp_offloaded (up->stmt)))
489 && maybe_lookup_decl (decl, up))
490 break;
491
492 if (up)
493 {
494 tree c;
495
496 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
497 {
498 for (c = gimple_omp_target_clauses (up->stmt);
499 c; c = OMP_CLAUSE_CHAIN (c))
500 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
501 && OMP_CLAUSE_DECL (c) == decl)
502 break;
503 }
504 else
505 for (c = gimple_omp_taskreg_clauses (up->stmt);
506 c; c = OMP_CLAUSE_CHAIN (c))
507 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
508 && OMP_CLAUSE_DECL (c) == decl)
509 break;
510
511 if (c)
512 goto maybe_mark_addressable_and_ret;
513 }
514 }
515
516 /* For tasks avoid using copy-in/out. As tasks can be
517 deferred or executed in different thread, when GOMP_task
518 returns, the task hasn't necessarily terminated. */
519 if (is_task_ctx (shared_ctx))
520 {
521 tree outer;
522 maybe_mark_addressable_and_ret:
523 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
524 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
525 {
526 /* Taking address of OUTER in lower_send_shared_vars
527 might need regimplification of everything that uses the
528 variable. */
529 if (!task_shared_vars)
530 task_shared_vars = BITMAP_ALLOC (NULL);
531 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
532 TREE_ADDRESSABLE (outer) = 1;
533 }
534 return true;
535 }
536 }
537
538 return false;
539 }
540
541 /* Construct a new automatic decl similar to VAR. */
542
543 static tree
544 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
545 {
546 tree copy = copy_var_decl (var, name, type);
547
548 DECL_CONTEXT (copy) = current_function_decl;
549 DECL_CHAIN (copy) = ctx->block_vars;
550 /* If VAR is listed in task_shared_vars, it means it wasn't
551 originally addressable and is just because task needs to take
552 it's address. But we don't need to take address of privatizations
553 from that var. */
554 if (TREE_ADDRESSABLE (var)
555 && ((task_shared_vars
556 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
557 || (global_nonaddressable_vars
558 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
559 TREE_ADDRESSABLE (copy) = 0;
560 ctx->block_vars = copy;
561
562 return copy;
563 }
564
565 static tree
566 omp_copy_decl_1 (tree var, omp_context *ctx)
567 {
568 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
569 }
570
571 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
572 as appropriate. */
573 static tree
574 omp_build_component_ref (tree obj, tree field)
575 {
576 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
577 if (TREE_THIS_VOLATILE (field))
578 TREE_THIS_VOLATILE (ret) |= 1;
579 if (TREE_READONLY (field))
580 TREE_READONLY (ret) |= 1;
581 return ret;
582 }
583
584 /* Build tree nodes to access the field for VAR on the receiver side. */
585
586 static tree
587 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
588 {
589 tree x, field = lookup_field (var, ctx);
590
591 /* If the receiver record type was remapped in the child function,
592 remap the field into the new record type. */
593 x = maybe_lookup_field (field, ctx);
594 if (x != NULL)
595 field = x;
596
597 x = build_simple_mem_ref (ctx->receiver_decl);
598 TREE_THIS_NOTRAP (x) = 1;
599 x = omp_build_component_ref (x, field);
600 if (by_ref)
601 {
602 x = build_simple_mem_ref (x);
603 TREE_THIS_NOTRAP (x) = 1;
604 }
605
606 return x;
607 }
608
609 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
610 of a parallel, this is a component reference; for workshare constructs
611 this is some variable. */
612
613 static tree
614 build_outer_var_ref (tree var, omp_context *ctx,
615 enum omp_clause_code code = OMP_CLAUSE_ERROR)
616 {
617 tree x;
618 omp_context *outer = ctx->outer;
619 while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
620 outer = outer->outer;
621
622 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
623 x = var;
624 else if (is_variable_sized (var))
625 {
626 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
627 x = build_outer_var_ref (x, ctx, code);
628 x = build_simple_mem_ref (x);
629 }
630 else if (is_taskreg_ctx (ctx))
631 {
632 bool by_ref = use_pointer_for_field (var, NULL);
633 x = build_receiver_ref (var, by_ref, ctx);
634 }
635 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
636 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
637 || ctx->loop_p
638 || (code == OMP_CLAUSE_PRIVATE
639 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
640 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
641 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
642 {
643 /* #pragma omp simd isn't a worksharing construct, and can reference
644 even private vars in its linear etc. clauses.
645 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
646 to private vars in all worksharing constructs. */
647 x = NULL_TREE;
648 if (outer && is_taskreg_ctx (outer))
649 x = lookup_decl (var, outer);
650 else if (outer)
651 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
652 if (x == NULL_TREE)
653 x = var;
654 }
655 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
656 {
657 gcc_assert (outer);
658 splay_tree_node n
659 = splay_tree_lookup (outer->field_map,
660 (splay_tree_key) &DECL_UID (var));
661 if (n == NULL)
662 {
663 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
664 x = var;
665 else
666 x = lookup_decl (var, outer);
667 }
668 else
669 {
670 tree field = (tree) n->value;
671 /* If the receiver record type was remapped in the child function,
672 remap the field into the new record type. */
673 x = maybe_lookup_field (field, outer);
674 if (x != NULL)
675 field = x;
676
677 x = build_simple_mem_ref (outer->receiver_decl);
678 x = omp_build_component_ref (x, field);
679 if (use_pointer_for_field (var, outer))
680 x = build_simple_mem_ref (x);
681 }
682 }
683 else if (outer)
684 {
685 if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
686 {
687 outer = outer->outer;
688 gcc_assert (outer
689 && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
690 }
691 x = lookup_decl (var, outer);
692 }
693 else if (omp_is_reference (var))
694 /* This can happen with orphaned constructs. If var is reference, it is
695 possible it is shared and as such valid. */
696 x = var;
697 else if (omp_member_access_dummy_var (var))
698 x = var;
699 else
700 gcc_unreachable ();
701
702 if (x == var)
703 {
704 tree t = omp_member_access_dummy_var (var);
705 if (t)
706 {
707 x = DECL_VALUE_EXPR (var);
708 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
709 if (o != t)
710 x = unshare_and_remap (x, t, o);
711 else
712 x = unshare_expr (x);
713 }
714 }
715
716 if (omp_is_reference (var))
717 x = build_simple_mem_ref (x);
718
719 return x;
720 }
721
722 /* Build tree nodes to access the field for VAR on the sender side. */
723
724 static tree
725 build_sender_ref (splay_tree_key key, omp_context *ctx)
726 {
727 tree field = lookup_sfield (key, ctx);
728 return omp_build_component_ref (ctx->sender_decl, field);
729 }
730
731 static tree
732 build_sender_ref (tree var, omp_context *ctx)
733 {
734 return build_sender_ref ((splay_tree_key) var, ctx);
735 }
736
737 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
738 BASE_POINTERS_RESTRICT, declare the field with restrict. */
739
740 static void
741 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
742 {
743 tree field, type, sfield = NULL_TREE;
744 splay_tree_key key = (splay_tree_key) var;
745
746 if ((mask & 16) != 0)
747 {
748 key = (splay_tree_key) &DECL_NAME (var);
749 gcc_checking_assert (key != (splay_tree_key) var);
750 }
751 if ((mask & 8) != 0)
752 {
753 key = (splay_tree_key) &DECL_UID (var);
754 gcc_checking_assert (key != (splay_tree_key) var);
755 }
756 gcc_assert ((mask & 1) == 0
757 || !splay_tree_lookup (ctx->field_map, key));
758 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
759 || !splay_tree_lookup (ctx->sfield_map, key));
760 gcc_assert ((mask & 3) == 3
761 || !is_gimple_omp_oacc (ctx->stmt));
762
763 type = TREE_TYPE (var);
764 if ((mask & 16) != 0)
765 type = lang_hooks.decls.omp_array_data (var, true);
766
767 /* Prevent redeclaring the var in the split-off function with a restrict
768 pointer type. Note that we only clear type itself, restrict qualifiers in
769 the pointed-to type will be ignored by points-to analysis. */
770 if (POINTER_TYPE_P (type)
771 && TYPE_RESTRICT (type))
772 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
773
774 if (mask & 4)
775 {
776 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
777 type = build_pointer_type (build_pointer_type (type));
778 }
779 else if (by_ref)
780 type = build_pointer_type (type);
781 else if ((mask & 3) == 1 && omp_is_reference (var))
782 type = TREE_TYPE (type);
783
784 field = build_decl (DECL_SOURCE_LOCATION (var),
785 FIELD_DECL, DECL_NAME (var), type);
786
787 /* Remember what variable this field was created for. This does have a
788 side effect of making dwarf2out ignore this member, so for helpful
789 debugging we clear it later in delete_omp_context. */
790 DECL_ABSTRACT_ORIGIN (field) = var;
791 if ((mask & 16) == 0 && type == TREE_TYPE (var))
792 {
793 SET_DECL_ALIGN (field, DECL_ALIGN (var));
794 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
795 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
796 }
797 else
798 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
799
800 if ((mask & 3) == 3)
801 {
802 insert_field_into_struct (ctx->record_type, field);
803 if (ctx->srecord_type)
804 {
805 sfield = build_decl (DECL_SOURCE_LOCATION (var),
806 FIELD_DECL, DECL_NAME (var), type);
807 DECL_ABSTRACT_ORIGIN (sfield) = var;
808 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
809 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
810 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
811 insert_field_into_struct (ctx->srecord_type, sfield);
812 }
813 }
814 else
815 {
816 if (ctx->srecord_type == NULL_TREE)
817 {
818 tree t;
819
820 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
821 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
822 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
823 {
824 sfield = build_decl (DECL_SOURCE_LOCATION (t),
825 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
826 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
827 insert_field_into_struct (ctx->srecord_type, sfield);
828 splay_tree_insert (ctx->sfield_map,
829 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
830 (splay_tree_value) sfield);
831 }
832 }
833 sfield = field;
834 insert_field_into_struct ((mask & 1) ? ctx->record_type
835 : ctx->srecord_type, field);
836 }
837
838 if (mask & 1)
839 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
840 if ((mask & 2) && ctx->sfield_map)
841 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
842 }
843
844 static tree
845 install_var_local (tree var, omp_context *ctx)
846 {
847 tree new_var = omp_copy_decl_1 (var, ctx);
848 insert_decl_map (&ctx->cb, var, new_var);
849 return new_var;
850 }
851
852 /* Adjust the replacement for DECL in CTX for the new context. This means
853 copying the DECL_VALUE_EXPR, and fixing up the type. */
854
855 static void
856 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
857 {
858 tree new_decl, size;
859
860 new_decl = lookup_decl (decl, ctx);
861
862 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
863
864 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
865 && DECL_HAS_VALUE_EXPR_P (decl))
866 {
867 tree ve = DECL_VALUE_EXPR (decl);
868 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
869 SET_DECL_VALUE_EXPR (new_decl, ve);
870 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
871 }
872
873 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
874 {
875 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
876 if (size == error_mark_node)
877 size = TYPE_SIZE (TREE_TYPE (new_decl));
878 DECL_SIZE (new_decl) = size;
879
880 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
881 if (size == error_mark_node)
882 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
883 DECL_SIZE_UNIT (new_decl) = size;
884 }
885 }
886
887 /* The callback for remap_decl. Search all containing contexts for a
888 mapping of the variable; this avoids having to duplicate the splay
889 tree ahead of time. We know a mapping doesn't already exist in the
890 given context. Create new mappings to implement default semantics. */
891
892 static tree
893 omp_copy_decl (tree var, copy_body_data *cb)
894 {
895 omp_context *ctx = (omp_context *) cb;
896 tree new_var;
897
898 if (TREE_CODE (var) == LABEL_DECL)
899 {
900 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
901 return var;
902 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
903 DECL_CONTEXT (new_var) = current_function_decl;
904 insert_decl_map (&ctx->cb, var, new_var);
905 return new_var;
906 }
907
908 while (!is_taskreg_ctx (ctx))
909 {
910 ctx = ctx->outer;
911 if (ctx == NULL)
912 return var;
913 new_var = maybe_lookup_decl (var, ctx);
914 if (new_var)
915 return new_var;
916 }
917
918 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
919 return var;
920
921 return error_mark_node;
922 }
923
924 /* Create a new context, with OUTER_CTX being the surrounding context. */
925
926 static omp_context *
927 new_omp_context (gimple *stmt, omp_context *outer_ctx)
928 {
929 omp_context *ctx = XCNEW (omp_context);
930
931 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
932 (splay_tree_value) ctx);
933 ctx->stmt = stmt;
934
935 if (outer_ctx)
936 {
937 ctx->outer = outer_ctx;
938 ctx->cb = outer_ctx->cb;
939 ctx->cb.block = NULL;
940 ctx->depth = outer_ctx->depth + 1;
941 }
942 else
943 {
944 ctx->cb.src_fn = current_function_decl;
945 ctx->cb.dst_fn = current_function_decl;
946 ctx->cb.src_node = cgraph_node::get (current_function_decl);
947 gcc_checking_assert (ctx->cb.src_node);
948 ctx->cb.dst_node = ctx->cb.src_node;
949 ctx->cb.src_cfun = cfun;
950 ctx->cb.copy_decl = omp_copy_decl;
951 ctx->cb.eh_lp_nr = 0;
952 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
953 ctx->cb.adjust_array_error_bounds = true;
954 ctx->cb.dont_remap_vla_if_no_change = true;
955 ctx->depth = 1;
956 }
957
958 ctx->cb.decl_map = new hash_map<tree, tree>;
959
960 return ctx;
961 }
962
963 static gimple_seq maybe_catch_exception (gimple_seq);
964
965 /* Finalize task copyfn. */
966
967 static void
968 finalize_task_copyfn (gomp_task *task_stmt)
969 {
970 struct function *child_cfun;
971 tree child_fn;
972 gimple_seq seq = NULL, new_seq;
973 gbind *bind;
974
975 child_fn = gimple_omp_task_copy_fn (task_stmt);
976 if (child_fn == NULL_TREE)
977 return;
978
979 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
980 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
981
982 push_cfun (child_cfun);
983 bind = gimplify_body (child_fn, false);
984 gimple_seq_add_stmt (&seq, bind);
985 new_seq = maybe_catch_exception (seq);
986 if (new_seq != seq)
987 {
988 bind = gimple_build_bind (NULL, new_seq, NULL);
989 seq = NULL;
990 gimple_seq_add_stmt (&seq, bind);
991 }
992 gimple_set_body (child_fn, seq);
993 pop_cfun ();
994
995 /* Inform the callgraph about the new function. */
996 cgraph_node *node = cgraph_node::get_create (child_fn);
997 node->parallelized_function = 1;
998 cgraph_node::add_new_function (child_fn, false);
999 }
1000
1001 /* Destroy a omp_context data structures. Called through the splay tree
1002 value delete callback. */
1003
1004 static void
1005 delete_omp_context (splay_tree_value value)
1006 {
1007 omp_context *ctx = (omp_context *) value;
1008
1009 delete ctx->cb.decl_map;
1010
1011 if (ctx->field_map)
1012 splay_tree_delete (ctx->field_map);
1013 if (ctx->sfield_map)
1014 splay_tree_delete (ctx->sfield_map);
1015
1016 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1017 it produces corrupt debug information. */
1018 if (ctx->record_type)
1019 {
1020 tree t;
1021 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1022 DECL_ABSTRACT_ORIGIN (t) = NULL;
1023 }
1024 if (ctx->srecord_type)
1025 {
1026 tree t;
1027 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1028 DECL_ABSTRACT_ORIGIN (t) = NULL;
1029 }
1030
1031 if (is_task_ctx (ctx))
1032 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1033
1034 if (ctx->task_reduction_map)
1035 {
1036 ctx->task_reductions.release ();
1037 delete ctx->task_reduction_map;
1038 }
1039
1040 delete ctx->lastprivate_conditional_map;
1041
1042 XDELETE (ctx);
1043 }
1044
1045 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1046 context. */
1047
1048 static void
1049 fixup_child_record_type (omp_context *ctx)
1050 {
1051 tree f, type = ctx->record_type;
1052
1053 if (!ctx->receiver_decl)
1054 return;
1055 /* ??? It isn't sufficient to just call remap_type here, because
1056 variably_modified_type_p doesn't work the way we expect for
1057 record types. Testing each field for whether it needs remapping
1058 and creating a new record by hand works, however. */
1059 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1060 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1061 break;
1062 if (f)
1063 {
1064 tree name, new_fields = NULL;
1065
1066 type = lang_hooks.types.make_type (RECORD_TYPE);
1067 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1068 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1069 TYPE_DECL, name, type);
1070 TYPE_NAME (type) = name;
1071
1072 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1073 {
1074 tree new_f = copy_node (f);
1075 DECL_CONTEXT (new_f) = type;
1076 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1077 DECL_CHAIN (new_f) = new_fields;
1078 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1079 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1080 &ctx->cb, NULL);
1081 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1082 &ctx->cb, NULL);
1083 new_fields = new_f;
1084
1085 /* Arrange to be able to look up the receiver field
1086 given the sender field. */
1087 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1088 (splay_tree_value) new_f);
1089 }
1090 TYPE_FIELDS (type) = nreverse (new_fields);
1091 layout_type (type);
1092 }
1093
1094 /* In a target region we never modify any of the pointers in *.omp_data_i,
1095 so attempt to help the optimizers. */
1096 if (is_gimple_omp_offloaded (ctx->stmt))
1097 type = build_qualified_type (type, TYPE_QUAL_CONST);
1098
1099 TREE_TYPE (ctx->receiver_decl)
1100 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1101 }
1102
1103 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1104 specified by CLAUSES. */
1105
1106 static void
1107 scan_sharing_clauses (tree clauses, omp_context *ctx)
1108 {
1109 tree c, decl;
1110 bool scan_array_reductions = false;
1111
1112 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1113 {
1114 bool by_ref;
1115
1116 switch (OMP_CLAUSE_CODE (c))
1117 {
1118 case OMP_CLAUSE_PRIVATE:
1119 decl = OMP_CLAUSE_DECL (c);
1120 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1121 goto do_private;
1122 else if (!is_variable_sized (decl))
1123 install_var_local (decl, ctx);
1124 break;
1125
1126 case OMP_CLAUSE_SHARED:
1127 decl = OMP_CLAUSE_DECL (c);
1128 /* Ignore shared directives in teams construct inside of
1129 target construct. */
1130 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1131 && !is_host_teams_ctx (ctx))
1132 {
1133 /* Global variables don't need to be copied,
1134 the receiver side will use them directly. */
1135 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1136 if (is_global_var (odecl))
1137 break;
1138 insert_decl_map (&ctx->cb, decl, odecl);
1139 break;
1140 }
1141 gcc_assert (is_taskreg_ctx (ctx));
1142 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1143 || !is_variable_sized (decl));
1144 /* Global variables don't need to be copied,
1145 the receiver side will use them directly. */
1146 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1147 break;
1148 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1149 {
1150 use_pointer_for_field (decl, ctx);
1151 break;
1152 }
1153 by_ref = use_pointer_for_field (decl, NULL);
1154 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1155 || TREE_ADDRESSABLE (decl)
1156 || by_ref
1157 || omp_is_reference (decl))
1158 {
1159 by_ref = use_pointer_for_field (decl, ctx);
1160 install_var_field (decl, by_ref, 3, ctx);
1161 install_var_local (decl, ctx);
1162 break;
1163 }
1164 /* We don't need to copy const scalar vars back. */
1165 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1166 goto do_private;
1167
1168 case OMP_CLAUSE_REDUCTION:
1169 if (is_oacc_parallel_or_serial (ctx) || is_oacc_kernels (ctx))
1170 ctx->local_reduction_clauses
1171 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1172 /* FALLTHRU */
1173
1174 case OMP_CLAUSE_IN_REDUCTION:
1175 decl = OMP_CLAUSE_DECL (c);
1176 if (TREE_CODE (decl) == MEM_REF)
1177 {
1178 tree t = TREE_OPERAND (decl, 0);
1179 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1180 t = TREE_OPERAND (t, 0);
1181 if (TREE_CODE (t) == INDIRECT_REF
1182 || TREE_CODE (t) == ADDR_EXPR)
1183 t = TREE_OPERAND (t, 0);
1184 install_var_local (t, ctx);
1185 if (is_taskreg_ctx (ctx)
1186 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1187 || (is_task_ctx (ctx)
1188 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1189 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1190 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1191 == POINTER_TYPE)))))
1192 && !is_variable_sized (t)
1193 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1194 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1195 && !is_task_ctx (ctx))))
1196 {
1197 by_ref = use_pointer_for_field (t, NULL);
1198 if (is_task_ctx (ctx)
1199 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1200 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1201 {
1202 install_var_field (t, false, 1, ctx);
1203 install_var_field (t, by_ref, 2, ctx);
1204 }
1205 else
1206 install_var_field (t, by_ref, 3, ctx);
1207 }
1208 break;
1209 }
1210 if (is_task_ctx (ctx)
1211 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1212 && OMP_CLAUSE_REDUCTION_TASK (c)
1213 && is_parallel_ctx (ctx)))
1214 {
1215 /* Global variables don't need to be copied,
1216 the receiver side will use them directly. */
1217 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1218 {
1219 by_ref = use_pointer_for_field (decl, ctx);
1220 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1221 install_var_field (decl, by_ref, 3, ctx);
1222 }
1223 install_var_local (decl, ctx);
1224 break;
1225 }
1226 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1227 && OMP_CLAUSE_REDUCTION_TASK (c))
1228 {
1229 install_var_local (decl, ctx);
1230 break;
1231 }
1232 goto do_private;
1233
1234 case OMP_CLAUSE_LASTPRIVATE:
1235 /* Let the corresponding firstprivate clause create
1236 the variable. */
1237 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1238 break;
1239 /* FALLTHRU */
1240
1241 case OMP_CLAUSE_FIRSTPRIVATE:
1242 case OMP_CLAUSE_LINEAR:
1243 decl = OMP_CLAUSE_DECL (c);
1244 do_private:
1245 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1246 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1247 && is_gimple_omp_offloaded (ctx->stmt))
1248 {
1249 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1250 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1251 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1252 install_var_field (decl, true, 3, ctx);
1253 else
1254 install_var_field (decl, false, 3, ctx);
1255 }
1256 if (is_variable_sized (decl))
1257 {
1258 if (is_task_ctx (ctx))
1259 install_var_field (decl, false, 1, ctx);
1260 break;
1261 }
1262 else if (is_taskreg_ctx (ctx))
1263 {
1264 bool global
1265 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1266 by_ref = use_pointer_for_field (decl, NULL);
1267
1268 if (is_task_ctx (ctx)
1269 && (global || by_ref || omp_is_reference (decl)))
1270 {
1271 install_var_field (decl, false, 1, ctx);
1272 if (!global)
1273 install_var_field (decl, by_ref, 2, ctx);
1274 }
1275 else if (!global)
1276 install_var_field (decl, by_ref, 3, ctx);
1277 }
1278 install_var_local (decl, ctx);
1279 break;
1280
1281 case OMP_CLAUSE_USE_DEVICE_PTR:
1282 case OMP_CLAUSE_USE_DEVICE_ADDR:
1283 decl = OMP_CLAUSE_DECL (c);
1284
1285 /* Fortran array descriptors. */
1286 if (lang_hooks.decls.omp_array_data (decl, true))
1287 install_var_field (decl, false, 19, ctx);
1288 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1289 && !omp_is_reference (decl)
1290 && !omp_is_allocatable_or_ptr (decl))
1291 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1292 install_var_field (decl, true, 11, ctx);
1293 else
1294 install_var_field (decl, false, 11, ctx);
1295 if (DECL_SIZE (decl)
1296 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1297 {
1298 tree decl2 = DECL_VALUE_EXPR (decl);
1299 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1300 decl2 = TREE_OPERAND (decl2, 0);
1301 gcc_assert (DECL_P (decl2));
1302 install_var_local (decl2, ctx);
1303 }
1304 install_var_local (decl, ctx);
1305 break;
1306
1307 case OMP_CLAUSE_IS_DEVICE_PTR:
1308 decl = OMP_CLAUSE_DECL (c);
1309 goto do_private;
1310
1311 case OMP_CLAUSE__LOOPTEMP_:
1312 case OMP_CLAUSE__REDUCTEMP_:
1313 gcc_assert (is_taskreg_ctx (ctx));
1314 decl = OMP_CLAUSE_DECL (c);
1315 install_var_field (decl, false, 3, ctx);
1316 install_var_local (decl, ctx);
1317 break;
1318
1319 case OMP_CLAUSE_COPYPRIVATE:
1320 case OMP_CLAUSE_COPYIN:
1321 decl = OMP_CLAUSE_DECL (c);
1322 by_ref = use_pointer_for_field (decl, NULL);
1323 install_var_field (decl, by_ref, 3, ctx);
1324 break;
1325
1326 case OMP_CLAUSE_FINAL:
1327 case OMP_CLAUSE_IF:
1328 case OMP_CLAUSE_NUM_THREADS:
1329 case OMP_CLAUSE_NUM_TEAMS:
1330 case OMP_CLAUSE_THREAD_LIMIT:
1331 case OMP_CLAUSE_DEVICE:
1332 case OMP_CLAUSE_SCHEDULE:
1333 case OMP_CLAUSE_DIST_SCHEDULE:
1334 case OMP_CLAUSE_DEPEND:
1335 case OMP_CLAUSE_PRIORITY:
1336 case OMP_CLAUSE_GRAINSIZE:
1337 case OMP_CLAUSE_NUM_TASKS:
1338 case OMP_CLAUSE_NUM_GANGS:
1339 case OMP_CLAUSE_NUM_WORKERS:
1340 case OMP_CLAUSE_VECTOR_LENGTH:
1341 if (ctx->outer)
1342 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1343 break;
1344
1345 case OMP_CLAUSE_TO:
1346 case OMP_CLAUSE_FROM:
1347 case OMP_CLAUSE_MAP:
1348 if (ctx->outer)
1349 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1350 decl = OMP_CLAUSE_DECL (c);
1351 /* Global variables with "omp declare target" attribute
1352 don't need to be copied, the receiver side will use them
1353 directly. However, global variables with "omp declare target link"
1354 attribute need to be copied. Or when ALWAYS modifier is used. */
1355 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1356 && DECL_P (decl)
1357 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1358 && (OMP_CLAUSE_MAP_KIND (c)
1359 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1360 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1361 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1362 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1363 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1364 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1365 && varpool_node::get_create (decl)->offloadable
1366 && !lookup_attribute ("omp declare target link",
1367 DECL_ATTRIBUTES (decl)))
1368 break;
1369 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1370 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1371 {
1372 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1373 not offloaded; there is nothing to map for those. */
1374 if (!is_gimple_omp_offloaded (ctx->stmt)
1375 && !POINTER_TYPE_P (TREE_TYPE (decl))
1376 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1377 break;
1378 }
1379 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1380 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1381 || (OMP_CLAUSE_MAP_KIND (c)
1382 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1383 {
1384 if (TREE_CODE (decl) == COMPONENT_REF
1385 || (TREE_CODE (decl) == INDIRECT_REF
1386 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1387 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1388 == REFERENCE_TYPE)))
1389 break;
1390 if (DECL_SIZE (decl)
1391 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1392 {
1393 tree decl2 = DECL_VALUE_EXPR (decl);
1394 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1395 decl2 = TREE_OPERAND (decl2, 0);
1396 gcc_assert (DECL_P (decl2));
1397 install_var_local (decl2, ctx);
1398 }
1399 install_var_local (decl, ctx);
1400 break;
1401 }
1402 if (DECL_P (decl))
1403 {
1404 if (DECL_SIZE (decl)
1405 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1406 {
1407 tree decl2 = DECL_VALUE_EXPR (decl);
1408 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1409 decl2 = TREE_OPERAND (decl2, 0);
1410 gcc_assert (DECL_P (decl2));
1411 install_var_field (decl2, true, 3, ctx);
1412 install_var_local (decl2, ctx);
1413 install_var_local (decl, ctx);
1414 }
1415 else
1416 {
1417 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1418 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1419 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1420 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1421 install_var_field (decl, true, 7, ctx);
1422 else
1423 install_var_field (decl, true, 3, ctx);
1424 if (is_gimple_omp_offloaded (ctx->stmt)
1425 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1426 install_var_local (decl, ctx);
1427 }
1428 }
1429 else
1430 {
1431 tree base = get_base_address (decl);
1432 tree nc = OMP_CLAUSE_CHAIN (c);
1433 if (DECL_P (base)
1434 && nc != NULL_TREE
1435 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1436 && OMP_CLAUSE_DECL (nc) == base
1437 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1438 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1439 {
1440 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1441 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1442 }
1443 else
1444 {
1445 if (ctx->outer)
1446 {
1447 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1448 decl = OMP_CLAUSE_DECL (c);
1449 }
1450 gcc_assert (!splay_tree_lookup (ctx->field_map,
1451 (splay_tree_key) decl));
1452 tree field
1453 = build_decl (OMP_CLAUSE_LOCATION (c),
1454 FIELD_DECL, NULL_TREE, ptr_type_node);
1455 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1456 insert_field_into_struct (ctx->record_type, field);
1457 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1458 (splay_tree_value) field);
1459 }
1460 }
1461 break;
1462
1463 case OMP_CLAUSE__GRIDDIM_:
1464 if (ctx->outer)
1465 {
1466 scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1467 scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1468 }
1469 break;
1470
1471 case OMP_CLAUSE_ORDER:
1472 ctx->order_concurrent = true;
1473 break;
1474
1475 case OMP_CLAUSE_BIND:
1476 ctx->loop_p = true;
1477 break;
1478
1479 case OMP_CLAUSE_NOWAIT:
1480 case OMP_CLAUSE_ORDERED:
1481 case OMP_CLAUSE_COLLAPSE:
1482 case OMP_CLAUSE_UNTIED:
1483 case OMP_CLAUSE_MERGEABLE:
1484 case OMP_CLAUSE_PROC_BIND:
1485 case OMP_CLAUSE_SAFELEN:
1486 case OMP_CLAUSE_SIMDLEN:
1487 case OMP_CLAUSE_THREADS:
1488 case OMP_CLAUSE_SIMD:
1489 case OMP_CLAUSE_NOGROUP:
1490 case OMP_CLAUSE_DEFAULTMAP:
1491 case OMP_CLAUSE_ASYNC:
1492 case OMP_CLAUSE_WAIT:
1493 case OMP_CLAUSE_GANG:
1494 case OMP_CLAUSE_WORKER:
1495 case OMP_CLAUSE_VECTOR:
1496 case OMP_CLAUSE_INDEPENDENT:
1497 case OMP_CLAUSE_AUTO:
1498 case OMP_CLAUSE_SEQ:
1499 case OMP_CLAUSE_TILE:
1500 case OMP_CLAUSE__SIMT_:
1501 case OMP_CLAUSE_DEFAULT:
1502 case OMP_CLAUSE_NONTEMPORAL:
1503 case OMP_CLAUSE_IF_PRESENT:
1504 case OMP_CLAUSE_FINALIZE:
1505 case OMP_CLAUSE_TASK_REDUCTION:
1506 break;
1507
1508 case OMP_CLAUSE_ALIGNED:
1509 decl = OMP_CLAUSE_DECL (c);
1510 if (is_global_var (decl)
1511 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1512 install_var_local (decl, ctx);
1513 break;
1514
1515 case OMP_CLAUSE__CONDTEMP_:
1516 decl = OMP_CLAUSE_DECL (c);
1517 if (is_parallel_ctx (ctx))
1518 {
1519 install_var_field (decl, false, 3, ctx);
1520 install_var_local (decl, ctx);
1521 }
1522 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1523 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1524 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1525 install_var_local (decl, ctx);
1526 break;
1527
1528 case OMP_CLAUSE__CACHE_:
1529 default:
1530 gcc_unreachable ();
1531 }
1532 }
1533
1534 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1535 {
1536 switch (OMP_CLAUSE_CODE (c))
1537 {
1538 case OMP_CLAUSE_LASTPRIVATE:
1539 /* Let the corresponding firstprivate clause create
1540 the variable. */
1541 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1542 scan_array_reductions = true;
1543 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1544 break;
1545 /* FALLTHRU */
1546
1547 case OMP_CLAUSE_FIRSTPRIVATE:
1548 case OMP_CLAUSE_PRIVATE:
1549 case OMP_CLAUSE_LINEAR:
1550 case OMP_CLAUSE_IS_DEVICE_PTR:
1551 decl = OMP_CLAUSE_DECL (c);
1552 if (is_variable_sized (decl))
1553 {
1554 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1555 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1556 && is_gimple_omp_offloaded (ctx->stmt))
1557 {
1558 tree decl2 = DECL_VALUE_EXPR (decl);
1559 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1560 decl2 = TREE_OPERAND (decl2, 0);
1561 gcc_assert (DECL_P (decl2));
1562 install_var_local (decl2, ctx);
1563 fixup_remapped_decl (decl2, ctx, false);
1564 }
1565 install_var_local (decl, ctx);
1566 }
1567 fixup_remapped_decl (decl, ctx,
1568 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1569 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1570 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1571 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1572 scan_array_reductions = true;
1573 break;
1574
1575 case OMP_CLAUSE_REDUCTION:
1576 case OMP_CLAUSE_IN_REDUCTION:
1577 decl = OMP_CLAUSE_DECL (c);
1578 if (TREE_CODE (decl) != MEM_REF)
1579 {
1580 if (is_variable_sized (decl))
1581 install_var_local (decl, ctx);
1582 fixup_remapped_decl (decl, ctx, false);
1583 }
1584 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1585 scan_array_reductions = true;
1586 break;
1587
1588 case OMP_CLAUSE_TASK_REDUCTION:
1589 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1590 scan_array_reductions = true;
1591 break;
1592
1593 case OMP_CLAUSE_SHARED:
1594 /* Ignore shared directives in teams construct inside of
1595 target construct. */
1596 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1597 && !is_host_teams_ctx (ctx))
1598 break;
1599 decl = OMP_CLAUSE_DECL (c);
1600 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1601 break;
1602 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1603 {
1604 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1605 ctx->outer)))
1606 break;
1607 bool by_ref = use_pointer_for_field (decl, ctx);
1608 install_var_field (decl, by_ref, 11, ctx);
1609 break;
1610 }
1611 fixup_remapped_decl (decl, ctx, false);
1612 break;
1613
1614 case OMP_CLAUSE_MAP:
1615 if (!is_gimple_omp_offloaded (ctx->stmt))
1616 break;
1617 decl = OMP_CLAUSE_DECL (c);
1618 if (DECL_P (decl)
1619 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1620 && (OMP_CLAUSE_MAP_KIND (c)
1621 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1622 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1623 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1624 && varpool_node::get_create (decl)->offloadable)
1625 break;
1626 if (DECL_P (decl))
1627 {
1628 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1629 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1630 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1631 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1632 {
1633 tree new_decl = lookup_decl (decl, ctx);
1634 TREE_TYPE (new_decl)
1635 = remap_type (TREE_TYPE (decl), &ctx->cb);
1636 }
1637 else if (DECL_SIZE (decl)
1638 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1639 {
1640 tree decl2 = DECL_VALUE_EXPR (decl);
1641 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1642 decl2 = TREE_OPERAND (decl2, 0);
1643 gcc_assert (DECL_P (decl2));
1644 fixup_remapped_decl (decl2, ctx, false);
1645 fixup_remapped_decl (decl, ctx, true);
1646 }
1647 else
1648 fixup_remapped_decl (decl, ctx, false);
1649 }
1650 break;
1651
1652 case OMP_CLAUSE_COPYPRIVATE:
1653 case OMP_CLAUSE_COPYIN:
1654 case OMP_CLAUSE_DEFAULT:
1655 case OMP_CLAUSE_IF:
1656 case OMP_CLAUSE_NUM_THREADS:
1657 case OMP_CLAUSE_NUM_TEAMS:
1658 case OMP_CLAUSE_THREAD_LIMIT:
1659 case OMP_CLAUSE_DEVICE:
1660 case OMP_CLAUSE_SCHEDULE:
1661 case OMP_CLAUSE_DIST_SCHEDULE:
1662 case OMP_CLAUSE_NOWAIT:
1663 case OMP_CLAUSE_ORDERED:
1664 case OMP_CLAUSE_COLLAPSE:
1665 case OMP_CLAUSE_UNTIED:
1666 case OMP_CLAUSE_FINAL:
1667 case OMP_CLAUSE_MERGEABLE:
1668 case OMP_CLAUSE_PROC_BIND:
1669 case OMP_CLAUSE_SAFELEN:
1670 case OMP_CLAUSE_SIMDLEN:
1671 case OMP_CLAUSE_ALIGNED:
1672 case OMP_CLAUSE_DEPEND:
1673 case OMP_CLAUSE__LOOPTEMP_:
1674 case OMP_CLAUSE__REDUCTEMP_:
1675 case OMP_CLAUSE_TO:
1676 case OMP_CLAUSE_FROM:
1677 case OMP_CLAUSE_PRIORITY:
1678 case OMP_CLAUSE_GRAINSIZE:
1679 case OMP_CLAUSE_NUM_TASKS:
1680 case OMP_CLAUSE_THREADS:
1681 case OMP_CLAUSE_SIMD:
1682 case OMP_CLAUSE_NOGROUP:
1683 case OMP_CLAUSE_DEFAULTMAP:
1684 case OMP_CLAUSE_ORDER:
1685 case OMP_CLAUSE_BIND:
1686 case OMP_CLAUSE_USE_DEVICE_PTR:
1687 case OMP_CLAUSE_USE_DEVICE_ADDR:
1688 case OMP_CLAUSE_NONTEMPORAL:
1689 case OMP_CLAUSE_ASYNC:
1690 case OMP_CLAUSE_WAIT:
1691 case OMP_CLAUSE_NUM_GANGS:
1692 case OMP_CLAUSE_NUM_WORKERS:
1693 case OMP_CLAUSE_VECTOR_LENGTH:
1694 case OMP_CLAUSE_GANG:
1695 case OMP_CLAUSE_WORKER:
1696 case OMP_CLAUSE_VECTOR:
1697 case OMP_CLAUSE_INDEPENDENT:
1698 case OMP_CLAUSE_AUTO:
1699 case OMP_CLAUSE_SEQ:
1700 case OMP_CLAUSE_TILE:
1701 case OMP_CLAUSE__GRIDDIM_:
1702 case OMP_CLAUSE__SIMT_:
1703 case OMP_CLAUSE_IF_PRESENT:
1704 case OMP_CLAUSE_FINALIZE:
1705 case OMP_CLAUSE__CONDTEMP_:
1706 break;
1707
1708 case OMP_CLAUSE__CACHE_:
1709 default:
1710 gcc_unreachable ();
1711 }
1712 }
1713
1714 gcc_checking_assert (!scan_array_reductions
1715 || !is_gimple_omp_oacc (ctx->stmt));
1716 if (scan_array_reductions)
1717 {
1718 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1719 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1720 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1721 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1722 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1723 {
1724 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1725 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1726 }
1727 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1728 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1729 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1730 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1731 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1732 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1733 }
1734 }
1735
1736 /* Create a new name for omp child function. Returns an identifier. */
1737
1738 static tree
1739 create_omp_child_function_name (bool task_copy)
1740 {
1741 return clone_function_name_numbered (current_function_decl,
1742 task_copy ? "_omp_cpyfn" : "_omp_fn");
1743 }
1744
1745 /* Return true if CTX may belong to offloaded code: either if current function
1746 is offloaded, or any enclosing context corresponds to a target region. */
1747
1748 static bool
1749 omp_maybe_offloaded_ctx (omp_context *ctx)
1750 {
1751 if (cgraph_node::get (current_function_decl)->offloadable)
1752 return true;
1753 for (; ctx; ctx = ctx->outer)
1754 if (is_gimple_omp_offloaded (ctx->stmt))
1755 return true;
1756 return false;
1757 }
1758
1759 /* Build a decl for the omp child function. It'll not contain a body
1760 yet, just the bare decl. */
1761
1762 static void
1763 create_omp_child_function (omp_context *ctx, bool task_copy)
1764 {
1765 tree decl, type, name, t;
1766
1767 name = create_omp_child_function_name (task_copy);
1768 if (task_copy)
1769 type = build_function_type_list (void_type_node, ptr_type_node,
1770 ptr_type_node, NULL_TREE);
1771 else
1772 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1773
1774 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1775
1776 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1777 || !task_copy);
1778 if (!task_copy)
1779 ctx->cb.dst_fn = decl;
1780 else
1781 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1782
1783 TREE_STATIC (decl) = 1;
1784 TREE_USED (decl) = 1;
1785 DECL_ARTIFICIAL (decl) = 1;
1786 DECL_IGNORED_P (decl) = 0;
1787 TREE_PUBLIC (decl) = 0;
1788 DECL_UNINLINABLE (decl) = 1;
1789 DECL_EXTERNAL (decl) = 0;
1790 DECL_CONTEXT (decl) = NULL_TREE;
1791 DECL_INITIAL (decl) = make_node (BLOCK);
1792 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1793 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1794 /* Remove omp declare simd attribute from the new attributes. */
1795 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1796 {
1797 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1798 a = a2;
1799 a = TREE_CHAIN (a);
1800 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1801 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1802 *p = TREE_CHAIN (*p);
1803 else
1804 {
1805 tree chain = TREE_CHAIN (*p);
1806 *p = copy_node (*p);
1807 p = &TREE_CHAIN (*p);
1808 *p = chain;
1809 }
1810 }
1811 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1812 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1813 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1814 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1815 DECL_FUNCTION_VERSIONED (decl)
1816 = DECL_FUNCTION_VERSIONED (current_function_decl);
1817
1818 if (omp_maybe_offloaded_ctx (ctx))
1819 {
1820 cgraph_node::get_create (decl)->offloadable = 1;
1821 if (ENABLE_OFFLOADING)
1822 g->have_offload = true;
1823 }
1824
1825 if (cgraph_node::get_create (decl)->offloadable
1826 && !lookup_attribute ("omp declare target",
1827 DECL_ATTRIBUTES (current_function_decl)))
1828 {
1829 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1830 ? "omp target entrypoint"
1831 : "omp declare target");
1832 DECL_ATTRIBUTES (decl)
1833 = tree_cons (get_identifier (target_attr),
1834 NULL_TREE, DECL_ATTRIBUTES (decl));
1835 }
1836
1837 t = build_decl (DECL_SOURCE_LOCATION (decl),
1838 RESULT_DECL, NULL_TREE, void_type_node);
1839 DECL_ARTIFICIAL (t) = 1;
1840 DECL_IGNORED_P (t) = 1;
1841 DECL_CONTEXT (t) = decl;
1842 DECL_RESULT (decl) = t;
1843
1844 tree data_name = get_identifier (".omp_data_i");
1845 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1846 ptr_type_node);
1847 DECL_ARTIFICIAL (t) = 1;
1848 DECL_NAMELESS (t) = 1;
1849 DECL_ARG_TYPE (t) = ptr_type_node;
1850 DECL_CONTEXT (t) = current_function_decl;
1851 TREE_USED (t) = 1;
1852 TREE_READONLY (t) = 1;
1853 DECL_ARGUMENTS (decl) = t;
1854 if (!task_copy)
1855 ctx->receiver_decl = t;
1856 else
1857 {
1858 t = build_decl (DECL_SOURCE_LOCATION (decl),
1859 PARM_DECL, get_identifier (".omp_data_o"),
1860 ptr_type_node);
1861 DECL_ARTIFICIAL (t) = 1;
1862 DECL_NAMELESS (t) = 1;
1863 DECL_ARG_TYPE (t) = ptr_type_node;
1864 DECL_CONTEXT (t) = current_function_decl;
1865 TREE_USED (t) = 1;
1866 TREE_ADDRESSABLE (t) = 1;
1867 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1868 DECL_ARGUMENTS (decl) = t;
1869 }
1870
1871 /* Allocate memory for the function structure. The call to
1872 allocate_struct_function clobbers CFUN, so we need to restore
1873 it afterward. */
1874 push_struct_function (decl);
1875 cfun->function_end_locus = gimple_location (ctx->stmt);
1876 init_tree_ssa (cfun);
1877 pop_cfun ();
1878 }
1879
1880 /* Callback for walk_gimple_seq. Check if combined parallel
1881 contains gimple_omp_for_combined_into_p OMP_FOR. */
1882
1883 tree
1884 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1885 bool *handled_ops_p,
1886 struct walk_stmt_info *wi)
1887 {
1888 gimple *stmt = gsi_stmt (*gsi_p);
1889
1890 *handled_ops_p = true;
1891 switch (gimple_code (stmt))
1892 {
1893 WALK_SUBSTMTS;
1894
1895 case GIMPLE_OMP_FOR:
1896 if (gimple_omp_for_combined_into_p (stmt)
1897 && gimple_omp_for_kind (stmt)
1898 == *(const enum gf_mask *) (wi->info))
1899 {
1900 wi->info = stmt;
1901 return integer_zero_node;
1902 }
1903 break;
1904 default:
1905 break;
1906 }
1907 return NULL;
1908 }
1909
1910 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1911
1912 static void
1913 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1914 omp_context *outer_ctx)
1915 {
1916 struct walk_stmt_info wi;
1917
1918 memset (&wi, 0, sizeof (wi));
1919 wi.val_only = true;
1920 wi.info = (void *) &msk;
1921 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1922 if (wi.info != (void *) &msk)
1923 {
1924 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1925 struct omp_for_data fd;
1926 omp_extract_for_data (for_stmt, &fd, NULL);
1927 /* We need two temporaries with fd.loop.v type (istart/iend)
1928 and then (fd.collapse - 1) temporaries with the same
1929 type for count2 ... countN-1 vars if not constant. */
1930 size_t count = 2, i;
1931 tree type = fd.iter_type;
1932 if (fd.collapse > 1
1933 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1934 {
1935 count += fd.collapse - 1;
1936 /* If there are lastprivate clauses on the inner
1937 GIMPLE_OMP_FOR, add one more temporaries for the total number
1938 of iterations (product of count1 ... countN-1). */
1939 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1940 OMP_CLAUSE_LASTPRIVATE))
1941 count++;
1942 else if (msk == GF_OMP_FOR_KIND_FOR
1943 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1944 OMP_CLAUSE_LASTPRIVATE))
1945 count++;
1946 }
1947 for (i = 0; i < count; i++)
1948 {
1949 tree temp = create_tmp_var (type);
1950 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1951 insert_decl_map (&outer_ctx->cb, temp, temp);
1952 OMP_CLAUSE_DECL (c) = temp;
1953 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1954 gimple_omp_taskreg_set_clauses (stmt, c);
1955 }
1956 }
1957 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1958 && omp_find_clause (gimple_omp_task_clauses (stmt),
1959 OMP_CLAUSE_REDUCTION))
1960 {
1961 tree type = build_pointer_type (pointer_sized_int_node);
1962 tree temp = create_tmp_var (type);
1963 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1964 insert_decl_map (&outer_ctx->cb, temp, temp);
1965 OMP_CLAUSE_DECL (c) = temp;
1966 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1967 gimple_omp_task_set_clauses (stmt, c);
1968 }
1969 }
1970
1971 /* Scan an OpenMP parallel directive. */
1972
1973 static void
1974 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1975 {
1976 omp_context *ctx;
1977 tree name;
1978 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1979
1980 /* Ignore parallel directives with empty bodies, unless there
1981 are copyin clauses. */
1982 if (optimize > 0
1983 && empty_body_p (gimple_omp_body (stmt))
1984 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1985 OMP_CLAUSE_COPYIN) == NULL)
1986 {
1987 gsi_replace (gsi, gimple_build_nop (), false);
1988 return;
1989 }
1990
1991 if (gimple_omp_parallel_combined_p (stmt))
1992 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1993 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1994 OMP_CLAUSE_REDUCTION);
1995 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1996 if (OMP_CLAUSE_REDUCTION_TASK (c))
1997 {
1998 tree type = build_pointer_type (pointer_sized_int_node);
1999 tree temp = create_tmp_var (type);
2000 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2001 if (outer_ctx)
2002 insert_decl_map (&outer_ctx->cb, temp, temp);
2003 OMP_CLAUSE_DECL (c) = temp;
2004 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2005 gimple_omp_parallel_set_clauses (stmt, c);
2006 break;
2007 }
2008 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2009 break;
2010
2011 ctx = new_omp_context (stmt, outer_ctx);
2012 taskreg_contexts.safe_push (ctx);
2013 if (taskreg_nesting_level > 1)
2014 ctx->is_nested = true;
2015 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2016 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2017 name = create_tmp_var_name (".omp_data_s");
2018 name = build_decl (gimple_location (stmt),
2019 TYPE_DECL, name, ctx->record_type);
2020 DECL_ARTIFICIAL (name) = 1;
2021 DECL_NAMELESS (name) = 1;
2022 TYPE_NAME (ctx->record_type) = name;
2023 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2024 if (!gimple_omp_parallel_grid_phony (stmt))
2025 {
2026 create_omp_child_function (ctx, false);
2027 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2028 }
2029
2030 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2031 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2032
2033 if (TYPE_FIELDS (ctx->record_type) == NULL)
2034 ctx->record_type = ctx->receiver_decl = NULL;
2035 }
2036
2037 /* Scan an OpenMP task directive. */
2038
2039 static void
2040 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2041 {
2042 omp_context *ctx;
2043 tree name, t;
2044 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2045
2046 /* Ignore task directives with empty bodies, unless they have depend
2047 clause. */
2048 if (optimize > 0
2049 && gimple_omp_body (stmt)
2050 && empty_body_p (gimple_omp_body (stmt))
2051 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2052 {
2053 gsi_replace (gsi, gimple_build_nop (), false);
2054 return;
2055 }
2056
2057 if (gimple_omp_task_taskloop_p (stmt))
2058 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2059
2060 ctx = new_omp_context (stmt, outer_ctx);
2061
2062 if (gimple_omp_task_taskwait_p (stmt))
2063 {
2064 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2065 return;
2066 }
2067
2068 taskreg_contexts.safe_push (ctx);
2069 if (taskreg_nesting_level > 1)
2070 ctx->is_nested = true;
2071 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2072 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2073 name = create_tmp_var_name (".omp_data_s");
2074 name = build_decl (gimple_location (stmt),
2075 TYPE_DECL, name, ctx->record_type);
2076 DECL_ARTIFICIAL (name) = 1;
2077 DECL_NAMELESS (name) = 1;
2078 TYPE_NAME (ctx->record_type) = name;
2079 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2080 create_omp_child_function (ctx, false);
2081 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2082
2083 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2084
2085 if (ctx->srecord_type)
2086 {
2087 name = create_tmp_var_name (".omp_data_a");
2088 name = build_decl (gimple_location (stmt),
2089 TYPE_DECL, name, ctx->srecord_type);
2090 DECL_ARTIFICIAL (name) = 1;
2091 DECL_NAMELESS (name) = 1;
2092 TYPE_NAME (ctx->srecord_type) = name;
2093 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2094 create_omp_child_function (ctx, true);
2095 }
2096
2097 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2098
2099 if (TYPE_FIELDS (ctx->record_type) == NULL)
2100 {
2101 ctx->record_type = ctx->receiver_decl = NULL;
2102 t = build_int_cst (long_integer_type_node, 0);
2103 gimple_omp_task_set_arg_size (stmt, t);
2104 t = build_int_cst (long_integer_type_node, 1);
2105 gimple_omp_task_set_arg_align (stmt, t);
2106 }
2107 }
2108
2109 /* Helper function for finish_taskreg_scan, called through walk_tree.
2110 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2111 tree, replace it in the expression. */
2112
2113 static tree
2114 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2115 {
2116 if (VAR_P (*tp))
2117 {
2118 omp_context *ctx = (omp_context *) data;
2119 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2120 if (t != *tp)
2121 {
2122 if (DECL_HAS_VALUE_EXPR_P (t))
2123 t = unshare_expr (DECL_VALUE_EXPR (t));
2124 *tp = t;
2125 }
2126 *walk_subtrees = 0;
2127 }
2128 else if (IS_TYPE_OR_DECL_P (*tp))
2129 *walk_subtrees = 0;
2130 return NULL_TREE;
2131 }
2132
2133 /* If any decls have been made addressable during scan_omp,
2134 adjust their fields if needed, and layout record types
2135 of parallel/task constructs. */
2136
2137 static void
2138 finish_taskreg_scan (omp_context *ctx)
2139 {
2140 if (ctx->record_type == NULL_TREE)
2141 return;
2142
2143 /* If any task_shared_vars were needed, verify all
2144 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2145 statements if use_pointer_for_field hasn't changed
2146 because of that. If it did, update field types now. */
2147 if (task_shared_vars)
2148 {
2149 tree c;
2150
2151 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2152 c; c = OMP_CLAUSE_CHAIN (c))
2153 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2154 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2155 {
2156 tree decl = OMP_CLAUSE_DECL (c);
2157
2158 /* Global variables don't need to be copied,
2159 the receiver side will use them directly. */
2160 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2161 continue;
2162 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2163 || !use_pointer_for_field (decl, ctx))
2164 continue;
2165 tree field = lookup_field (decl, ctx);
2166 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2167 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2168 continue;
2169 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2170 TREE_THIS_VOLATILE (field) = 0;
2171 DECL_USER_ALIGN (field) = 0;
2172 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2173 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2174 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2175 if (ctx->srecord_type)
2176 {
2177 tree sfield = lookup_sfield (decl, ctx);
2178 TREE_TYPE (sfield) = TREE_TYPE (field);
2179 TREE_THIS_VOLATILE (sfield) = 0;
2180 DECL_USER_ALIGN (sfield) = 0;
2181 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2182 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2183 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2184 }
2185 }
2186 }
2187
2188 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2189 {
2190 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2191 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2192 if (c)
2193 {
2194 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2195 expects to find it at the start of data. */
2196 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2197 tree *p = &TYPE_FIELDS (ctx->record_type);
2198 while (*p)
2199 if (*p == f)
2200 {
2201 *p = DECL_CHAIN (*p);
2202 break;
2203 }
2204 else
2205 p = &DECL_CHAIN (*p);
2206 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2207 TYPE_FIELDS (ctx->record_type) = f;
2208 }
2209 layout_type (ctx->record_type);
2210 fixup_child_record_type (ctx);
2211 }
2212 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2213 {
2214 layout_type (ctx->record_type);
2215 fixup_child_record_type (ctx);
2216 }
2217 else
2218 {
2219 location_t loc = gimple_location (ctx->stmt);
2220 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2221 /* Move VLA fields to the end. */
2222 p = &TYPE_FIELDS (ctx->record_type);
2223 while (*p)
2224 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2225 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2226 {
2227 *q = *p;
2228 *p = TREE_CHAIN (*p);
2229 TREE_CHAIN (*q) = NULL_TREE;
2230 q = &TREE_CHAIN (*q);
2231 }
2232 else
2233 p = &DECL_CHAIN (*p);
2234 *p = vla_fields;
2235 if (gimple_omp_task_taskloop_p (ctx->stmt))
2236 {
2237 /* Move fields corresponding to first and second _looptemp_
2238 clause first. There are filled by GOMP_taskloop
2239 and thus need to be in specific positions. */
2240 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2241 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2242 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2243 OMP_CLAUSE__LOOPTEMP_);
2244 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2245 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2246 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2247 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2248 p = &TYPE_FIELDS (ctx->record_type);
2249 while (*p)
2250 if (*p == f1 || *p == f2 || *p == f3)
2251 *p = DECL_CHAIN (*p);
2252 else
2253 p = &DECL_CHAIN (*p);
2254 DECL_CHAIN (f1) = f2;
2255 if (c3)
2256 {
2257 DECL_CHAIN (f2) = f3;
2258 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2259 }
2260 else
2261 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2262 TYPE_FIELDS (ctx->record_type) = f1;
2263 if (ctx->srecord_type)
2264 {
2265 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2266 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2267 if (c3)
2268 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2269 p = &TYPE_FIELDS (ctx->srecord_type);
2270 while (*p)
2271 if (*p == f1 || *p == f2 || *p == f3)
2272 *p = DECL_CHAIN (*p);
2273 else
2274 p = &DECL_CHAIN (*p);
2275 DECL_CHAIN (f1) = f2;
2276 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2277 if (c3)
2278 {
2279 DECL_CHAIN (f2) = f3;
2280 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2281 }
2282 else
2283 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2284 TYPE_FIELDS (ctx->srecord_type) = f1;
2285 }
2286 }
2287 layout_type (ctx->record_type);
2288 fixup_child_record_type (ctx);
2289 if (ctx->srecord_type)
2290 layout_type (ctx->srecord_type);
2291 tree t = fold_convert_loc (loc, long_integer_type_node,
2292 TYPE_SIZE_UNIT (ctx->record_type));
2293 if (TREE_CODE (t) != INTEGER_CST)
2294 {
2295 t = unshare_expr (t);
2296 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2297 }
2298 gimple_omp_task_set_arg_size (ctx->stmt, t);
2299 t = build_int_cst (long_integer_type_node,
2300 TYPE_ALIGN_UNIT (ctx->record_type));
2301 gimple_omp_task_set_arg_align (ctx->stmt, t);
2302 }
2303 }
2304
2305 /* Find the enclosing offload context. */
2306
2307 static omp_context *
2308 enclosing_target_ctx (omp_context *ctx)
2309 {
2310 for (; ctx; ctx = ctx->outer)
2311 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2312 break;
2313
2314 return ctx;
2315 }
2316
2317 /* Return true if ctx is part of an oacc kernels region. */
2318
2319 static bool
2320 ctx_in_oacc_kernels_region (omp_context *ctx)
2321 {
2322 for (;ctx != NULL; ctx = ctx->outer)
2323 {
2324 gimple *stmt = ctx->stmt;
2325 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2326 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2327 return true;
2328 }
2329
2330 return false;
2331 }
2332
2333 /* Check the parallelism clauses inside a kernels regions.
2334 Until kernels handling moves to use the same loop indirection
2335 scheme as parallel, we need to do this checking early. */
2336
2337 static unsigned
2338 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2339 {
2340 bool checking = true;
2341 unsigned outer_mask = 0;
2342 unsigned this_mask = 0;
2343 bool has_seq = false, has_auto = false;
2344
2345 if (ctx->outer)
2346 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2347 if (!stmt)
2348 {
2349 checking = false;
2350 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2351 return outer_mask;
2352 stmt = as_a <gomp_for *> (ctx->stmt);
2353 }
2354
2355 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2356 {
2357 switch (OMP_CLAUSE_CODE (c))
2358 {
2359 case OMP_CLAUSE_GANG:
2360 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2361 break;
2362 case OMP_CLAUSE_WORKER:
2363 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2364 break;
2365 case OMP_CLAUSE_VECTOR:
2366 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2367 break;
2368 case OMP_CLAUSE_SEQ:
2369 has_seq = true;
2370 break;
2371 case OMP_CLAUSE_AUTO:
2372 has_auto = true;
2373 break;
2374 default:
2375 break;
2376 }
2377 }
2378
2379 if (checking)
2380 {
2381 if (has_seq && (this_mask || has_auto))
2382 error_at (gimple_location (stmt), "%<seq%> overrides other"
2383 " OpenACC loop specifiers");
2384 else if (has_auto && this_mask)
2385 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2386 " OpenACC loop specifiers");
2387
2388 if (this_mask & outer_mask)
2389 error_at (gimple_location (stmt), "inner loop uses same"
2390 " OpenACC parallelism as containing loop");
2391 }
2392
2393 return outer_mask | this_mask;
2394 }
2395
2396 /* Scan a GIMPLE_OMP_FOR. */
2397
2398 static omp_context *
2399 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2400 {
2401 omp_context *ctx;
2402 size_t i;
2403 tree clauses = gimple_omp_for_clauses (stmt);
2404
2405 ctx = new_omp_context (stmt, outer_ctx);
2406
2407 if (is_gimple_omp_oacc (stmt))
2408 {
2409 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2410
2411 if (!tgt || is_oacc_parallel_or_serial (tgt))
2412 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2413 {
2414 char const *check = NULL;
2415
2416 switch (OMP_CLAUSE_CODE (c))
2417 {
2418 case OMP_CLAUSE_GANG:
2419 check = "gang";
2420 break;
2421
2422 case OMP_CLAUSE_WORKER:
2423 check = "worker";
2424 break;
2425
2426 case OMP_CLAUSE_VECTOR:
2427 check = "vector";
2428 break;
2429
2430 default:
2431 break;
2432 }
2433
2434 if (check && OMP_CLAUSE_OPERAND (c, 0))
2435 error_at (gimple_location (stmt),
2436 "argument not permitted on %qs clause in"
2437 " OpenACC %<parallel%> or %<serial%>", check);
2438 }
2439
2440 if (tgt && is_oacc_kernels (tgt))
2441 {
2442 /* Strip out reductions, as they are not handled yet. */
2443 tree *prev_ptr = &clauses;
2444
2445 while (tree probe = *prev_ptr)
2446 {
2447 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2448
2449 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2450 *prev_ptr = *next_ptr;
2451 else
2452 prev_ptr = next_ptr;
2453 }
2454
2455 gimple_omp_for_set_clauses (stmt, clauses);
2456 check_oacc_kernel_gwv (stmt, ctx);
2457 }
2458
2459 /* Collect all variables named in reductions on this loop. Ensure
2460 that, if this loop has a reduction on some variable v, and there is
2461 a reduction on v somewhere in an outer context, then there is a
2462 reduction on v on all intervening loops as well. */
2463 tree local_reduction_clauses = NULL;
2464 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2465 {
2466 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2467 local_reduction_clauses
2468 = tree_cons (NULL, c, local_reduction_clauses);
2469 }
2470 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2471 ctx->outer_reduction_clauses
2472 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2473 ctx->outer->outer_reduction_clauses);
2474 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2475 tree local_iter = local_reduction_clauses;
2476 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2477 {
2478 tree local_clause = TREE_VALUE (local_iter);
2479 tree local_var = OMP_CLAUSE_DECL (local_clause);
2480 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2481 bool have_outer_reduction = false;
2482 tree ctx_iter = outer_reduction_clauses;
2483 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2484 {
2485 tree outer_clause = TREE_VALUE (ctx_iter);
2486 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2487 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2488 if (outer_var == local_var && outer_op != local_op)
2489 {
2490 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2491 "conflicting reduction operations for %qE",
2492 local_var);
2493 inform (OMP_CLAUSE_LOCATION (outer_clause),
2494 "location of the previous reduction for %qE",
2495 outer_var);
2496 }
2497 if (outer_var == local_var)
2498 {
2499 have_outer_reduction = true;
2500 break;
2501 }
2502 }
2503 if (have_outer_reduction)
2504 {
2505 /* There is a reduction on outer_var both on this loop and on
2506 some enclosing loop. Walk up the context tree until such a
2507 loop with a reduction on outer_var is found, and complain
2508 about all intervening loops that do not have such a
2509 reduction. */
2510 struct omp_context *curr_loop = ctx->outer;
2511 bool found = false;
2512 while (curr_loop != NULL)
2513 {
2514 tree curr_iter = curr_loop->local_reduction_clauses;
2515 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2516 {
2517 tree curr_clause = TREE_VALUE (curr_iter);
2518 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2519 if (curr_var == local_var)
2520 {
2521 found = true;
2522 break;
2523 }
2524 }
2525 if (!found)
2526 warning_at (gimple_location (curr_loop->stmt), 0,
2527 "nested loop in reduction needs "
2528 "reduction clause for %qE",
2529 local_var);
2530 else
2531 break;
2532 curr_loop = curr_loop->outer;
2533 }
2534 }
2535 }
2536 ctx->local_reduction_clauses = local_reduction_clauses;
2537 ctx->outer_reduction_clauses
2538 = chainon (unshare_expr (ctx->local_reduction_clauses),
2539 ctx->outer_reduction_clauses);
2540 }
2541
2542 scan_sharing_clauses (clauses, ctx);
2543
2544 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2545 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2546 {
2547 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2548 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2549 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2550 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2551 }
2552 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2553 return ctx;
2554 }
2555
2556 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2557
2558 static void
2559 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2560 omp_context *outer_ctx)
2561 {
2562 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2563 gsi_replace (gsi, bind, false);
2564 gimple_seq seq = NULL;
2565 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2566 tree cond = create_tmp_var_raw (integer_type_node);
2567 DECL_CONTEXT (cond) = current_function_decl;
2568 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2569 gimple_bind_set_vars (bind, cond);
2570 gimple_call_set_lhs (g, cond);
2571 gimple_seq_add_stmt (&seq, g);
2572 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2573 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2574 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2575 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2576 gimple_seq_add_stmt (&seq, g);
2577 g = gimple_build_label (lab1);
2578 gimple_seq_add_stmt (&seq, g);
2579 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2580 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2581 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2582 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2583 gimple_omp_for_set_clauses (new_stmt, clause);
2584 gimple_seq_add_stmt (&seq, new_stmt);
2585 g = gimple_build_goto (lab3);
2586 gimple_seq_add_stmt (&seq, g);
2587 g = gimple_build_label (lab2);
2588 gimple_seq_add_stmt (&seq, g);
2589 gimple_seq_add_stmt (&seq, stmt);
2590 g = gimple_build_label (lab3);
2591 gimple_seq_add_stmt (&seq, g);
2592 gimple_bind_set_body (bind, seq);
2593 update_stmt (bind);
2594 scan_omp_for (new_stmt, outer_ctx);
2595 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2596 }
2597
2598 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2599 struct walk_stmt_info *);
2600 static omp_context *maybe_lookup_ctx (gimple *);
2601
2602 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2603 for scan phase loop. */
2604
2605 static void
2606 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2607 omp_context *outer_ctx)
2608 {
2609 /* The only change between inclusive and exclusive scan will be
2610 within the first simd loop, so just use inclusive in the
2611 worksharing loop. */
2612 outer_ctx->scan_inclusive = true;
2613 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2614 OMP_CLAUSE_DECL (c) = integer_zero_node;
2615
2616 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2617 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2618 gsi_replace (gsi, input_stmt, false);
2619 gimple_seq input_body = NULL;
2620 gimple_seq_add_stmt (&input_body, stmt);
2621 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2622
2623 gimple_stmt_iterator input1_gsi = gsi_none ();
2624 struct walk_stmt_info wi;
2625 memset (&wi, 0, sizeof (wi));
2626 wi.val_only = true;
2627 wi.info = (void *) &input1_gsi;
2628 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2629 gcc_assert (!gsi_end_p (input1_gsi));
2630
2631 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2632 gsi_next (&input1_gsi);
2633 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2634 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2635 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2636 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2637 std::swap (input_stmt1, scan_stmt1);
2638
2639 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2640 gimple_omp_set_body (input_stmt1, NULL);
2641
2642 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2643 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2644
2645 gimple_omp_set_body (input_stmt1, input_body1);
2646 gimple_omp_set_body (scan_stmt1, NULL);
2647
2648 gimple_stmt_iterator input2_gsi = gsi_none ();
2649 memset (&wi, 0, sizeof (wi));
2650 wi.val_only = true;
2651 wi.info = (void *) &input2_gsi;
2652 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2653 NULL, &wi);
2654 gcc_assert (!gsi_end_p (input2_gsi));
2655
2656 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2657 gsi_next (&input2_gsi);
2658 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2659 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2660 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2661 std::swap (input_stmt2, scan_stmt2);
2662
2663 gimple_omp_set_body (input_stmt2, NULL);
2664
2665 gimple_omp_set_body (input_stmt, input_body);
2666 gimple_omp_set_body (scan_stmt, scan_body);
2667
2668 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2669 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2670
2671 ctx = new_omp_context (scan_stmt, outer_ctx);
2672 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2673
2674 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2675 }
2676
2677 /* Scan an OpenMP sections directive. */
2678
2679 static void
2680 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2681 {
2682 omp_context *ctx;
2683
2684 ctx = new_omp_context (stmt, outer_ctx);
2685 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2686 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2687 }
2688
2689 /* Scan an OpenMP single directive. */
2690
2691 static void
2692 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2693 {
2694 omp_context *ctx;
2695 tree name;
2696
2697 ctx = new_omp_context (stmt, outer_ctx);
2698 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2699 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2700 name = create_tmp_var_name (".omp_copy_s");
2701 name = build_decl (gimple_location (stmt),
2702 TYPE_DECL, name, ctx->record_type);
2703 TYPE_NAME (ctx->record_type) = name;
2704
2705 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2706 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2707
2708 if (TYPE_FIELDS (ctx->record_type) == NULL)
2709 ctx->record_type = NULL;
2710 else
2711 layout_type (ctx->record_type);
2712 }
2713
2714 /* Scan a GIMPLE_OMP_TARGET. */
2715
2716 static void
2717 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2718 {
2719 omp_context *ctx;
2720 tree name;
2721 bool offloaded = is_gimple_omp_offloaded (stmt);
2722 tree clauses = gimple_omp_target_clauses (stmt);
2723
2724 ctx = new_omp_context (stmt, outer_ctx);
2725 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2726 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2727 name = create_tmp_var_name (".omp_data_t");
2728 name = build_decl (gimple_location (stmt),
2729 TYPE_DECL, name, ctx->record_type);
2730 DECL_ARTIFICIAL (name) = 1;
2731 DECL_NAMELESS (name) = 1;
2732 TYPE_NAME (ctx->record_type) = name;
2733 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2734
2735 if (offloaded)
2736 {
2737 create_omp_child_function (ctx, false);
2738 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2739 }
2740
2741 scan_sharing_clauses (clauses, ctx);
2742 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2743
2744 if (TYPE_FIELDS (ctx->record_type) == NULL)
2745 ctx->record_type = ctx->receiver_decl = NULL;
2746 else
2747 {
2748 TYPE_FIELDS (ctx->record_type)
2749 = nreverse (TYPE_FIELDS (ctx->record_type));
2750 if (flag_checking)
2751 {
2752 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2753 for (tree field = TYPE_FIELDS (ctx->record_type);
2754 field;
2755 field = DECL_CHAIN (field))
2756 gcc_assert (DECL_ALIGN (field) == align);
2757 }
2758 layout_type (ctx->record_type);
2759 if (offloaded)
2760 fixup_child_record_type (ctx);
2761 }
2762 }
2763
2764 /* Scan an OpenMP teams directive. */
2765
2766 static void
2767 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2768 {
2769 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2770
2771 if (!gimple_omp_teams_host (stmt))
2772 {
2773 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2774 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2775 return;
2776 }
2777 taskreg_contexts.safe_push (ctx);
2778 gcc_assert (taskreg_nesting_level == 1);
2779 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2780 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2781 tree name = create_tmp_var_name (".omp_data_s");
2782 name = build_decl (gimple_location (stmt),
2783 TYPE_DECL, name, ctx->record_type);
2784 DECL_ARTIFICIAL (name) = 1;
2785 DECL_NAMELESS (name) = 1;
2786 TYPE_NAME (ctx->record_type) = name;
2787 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2788 create_omp_child_function (ctx, false);
2789 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2790
2791 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2792 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2793
2794 if (TYPE_FIELDS (ctx->record_type) == NULL)
2795 ctx->record_type = ctx->receiver_decl = NULL;
2796 }
2797
2798 /* Check nesting restrictions. */
2799 static bool
2800 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2801 {
2802 tree c;
2803
2804 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2805 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2806 the original copy of its contents. */
2807 return true;
2808
2809 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2810 inside an OpenACC CTX. */
2811 if (!(is_gimple_omp (stmt)
2812 && is_gimple_omp_oacc (stmt))
2813 /* Except for atomic codes that we share with OpenMP. */
2814 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2815 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2816 {
2817 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2818 {
2819 error_at (gimple_location (stmt),
2820 "non-OpenACC construct inside of OpenACC routine");
2821 return false;
2822 }
2823 else
2824 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2825 if (is_gimple_omp (octx->stmt)
2826 && is_gimple_omp_oacc (octx->stmt))
2827 {
2828 error_at (gimple_location (stmt),
2829 "non-OpenACC construct inside of OpenACC region");
2830 return false;
2831 }
2832 }
2833
2834 if (ctx != NULL)
2835 {
2836 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2837 && ctx->outer
2838 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2839 ctx = ctx->outer;
2840 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2841 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2842 && !ctx->loop_p)
2843 {
2844 c = NULL_TREE;
2845 if (ctx->order_concurrent
2846 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2847 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2848 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2849 {
2850 error_at (gimple_location (stmt),
2851 "OpenMP constructs other than %<parallel%>, %<loop%>"
2852 " or %<simd%> may not be nested inside a region with"
2853 " the %<order(concurrent)%> clause");
2854 return false;
2855 }
2856 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2857 {
2858 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2859 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2860 {
2861 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2862 && (ctx->outer == NULL
2863 || !gimple_omp_for_combined_into_p (ctx->stmt)
2864 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2865 || (gimple_omp_for_kind (ctx->outer->stmt)
2866 != GF_OMP_FOR_KIND_FOR)
2867 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2868 {
2869 error_at (gimple_location (stmt),
2870 "%<ordered simd threads%> must be closely "
2871 "nested inside of %<for simd%> region");
2872 return false;
2873 }
2874 return true;
2875 }
2876 }
2877 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2878 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2879 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2880 return true;
2881 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2882 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2883 return true;
2884 error_at (gimple_location (stmt),
2885 "OpenMP constructs other than "
2886 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2887 "not be nested inside %<simd%> region");
2888 return false;
2889 }
2890 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2891 {
2892 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2893 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2894 && gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
2895 && omp_find_clause (gimple_omp_for_clauses (stmt),
2896 OMP_CLAUSE_BIND) == NULL_TREE))
2897 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2898 {
2899 error_at (gimple_location (stmt),
2900 "only %<distribute%>, %<parallel%> or %<loop%> "
2901 "regions are allowed to be strictly nested inside "
2902 "%<teams%> region");
2903 return false;
2904 }
2905 }
2906 else if (ctx->order_concurrent
2907 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2908 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2909 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2910 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2911 {
2912 if (ctx->loop_p)
2913 error_at (gimple_location (stmt),
2914 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2915 "%<simd%> may not be nested inside a %<loop%> region");
2916 else
2917 error_at (gimple_location (stmt),
2918 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2919 "%<simd%> may not be nested inside a region with "
2920 "the %<order(concurrent)%> clause");
2921 return false;
2922 }
2923 }
2924 switch (gimple_code (stmt))
2925 {
2926 case GIMPLE_OMP_FOR:
2927 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2928 return true;
2929 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2930 {
2931 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2932 {
2933 error_at (gimple_location (stmt),
2934 "%<distribute%> region must be strictly nested "
2935 "inside %<teams%> construct");
2936 return false;
2937 }
2938 return true;
2939 }
2940 /* We split taskloop into task and nested taskloop in it. */
2941 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2942 return true;
2943 /* For now, hope this will change and loop bind(parallel) will not
2944 be allowed in lots of contexts. */
2945 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
2946 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
2947 return true;
2948 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2949 {
2950 bool ok = false;
2951
2952 if (ctx)
2953 switch (gimple_code (ctx->stmt))
2954 {
2955 case GIMPLE_OMP_FOR:
2956 ok = (gimple_omp_for_kind (ctx->stmt)
2957 == GF_OMP_FOR_KIND_OACC_LOOP);
2958 break;
2959
2960 case GIMPLE_OMP_TARGET:
2961 switch (gimple_omp_target_kind (ctx->stmt))
2962 {
2963 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2964 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2965 case GF_OMP_TARGET_KIND_OACC_SERIAL:
2966 ok = true;
2967 break;
2968
2969 default:
2970 break;
2971 }
2972
2973 default:
2974 break;
2975 }
2976 else if (oacc_get_fn_attrib (current_function_decl))
2977 ok = true;
2978 if (!ok)
2979 {
2980 error_at (gimple_location (stmt),
2981 "OpenACC loop directive must be associated with"
2982 " an OpenACC compute region");
2983 return false;
2984 }
2985 }
2986 /* FALLTHRU */
2987 case GIMPLE_CALL:
2988 if (is_gimple_call (stmt)
2989 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2990 == BUILT_IN_GOMP_CANCEL
2991 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2992 == BUILT_IN_GOMP_CANCELLATION_POINT))
2993 {
2994 const char *bad = NULL;
2995 const char *kind = NULL;
2996 const char *construct
2997 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2998 == BUILT_IN_GOMP_CANCEL)
2999 ? "cancel"
3000 : "cancellation point";
3001 if (ctx == NULL)
3002 {
3003 error_at (gimple_location (stmt), "orphaned %qs construct",
3004 construct);
3005 return false;
3006 }
3007 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3008 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3009 : 0)
3010 {
3011 case 1:
3012 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3013 bad = "parallel";
3014 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3015 == BUILT_IN_GOMP_CANCEL
3016 && !integer_zerop (gimple_call_arg (stmt, 1)))
3017 ctx->cancellable = true;
3018 kind = "parallel";
3019 break;
3020 case 2:
3021 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3022 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3023 bad = "for";
3024 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3025 == BUILT_IN_GOMP_CANCEL
3026 && !integer_zerop (gimple_call_arg (stmt, 1)))
3027 {
3028 ctx->cancellable = true;
3029 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3030 OMP_CLAUSE_NOWAIT))
3031 warning_at (gimple_location (stmt), 0,
3032 "%<cancel for%> inside "
3033 "%<nowait%> for construct");
3034 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3035 OMP_CLAUSE_ORDERED))
3036 warning_at (gimple_location (stmt), 0,
3037 "%<cancel for%> inside "
3038 "%<ordered%> for construct");
3039 }
3040 kind = "for";
3041 break;
3042 case 4:
3043 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3044 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3045 bad = "sections";
3046 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3047 == BUILT_IN_GOMP_CANCEL
3048 && !integer_zerop (gimple_call_arg (stmt, 1)))
3049 {
3050 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3051 {
3052 ctx->cancellable = true;
3053 if (omp_find_clause (gimple_omp_sections_clauses
3054 (ctx->stmt),
3055 OMP_CLAUSE_NOWAIT))
3056 warning_at (gimple_location (stmt), 0,
3057 "%<cancel sections%> inside "
3058 "%<nowait%> sections construct");
3059 }
3060 else
3061 {
3062 gcc_assert (ctx->outer
3063 && gimple_code (ctx->outer->stmt)
3064 == GIMPLE_OMP_SECTIONS);
3065 ctx->outer->cancellable = true;
3066 if (omp_find_clause (gimple_omp_sections_clauses
3067 (ctx->outer->stmt),
3068 OMP_CLAUSE_NOWAIT))
3069 warning_at (gimple_location (stmt), 0,
3070 "%<cancel sections%> inside "
3071 "%<nowait%> sections construct");
3072 }
3073 }
3074 kind = "sections";
3075 break;
3076 case 8:
3077 if (!is_task_ctx (ctx)
3078 && (!is_taskloop_ctx (ctx)
3079 || ctx->outer == NULL
3080 || !is_task_ctx (ctx->outer)))
3081 bad = "task";
3082 else
3083 {
3084 for (omp_context *octx = ctx->outer;
3085 octx; octx = octx->outer)
3086 {
3087 switch (gimple_code (octx->stmt))
3088 {
3089 case GIMPLE_OMP_TASKGROUP:
3090 break;
3091 case GIMPLE_OMP_TARGET:
3092 if (gimple_omp_target_kind (octx->stmt)
3093 != GF_OMP_TARGET_KIND_REGION)
3094 continue;
3095 /* FALLTHRU */
3096 case GIMPLE_OMP_PARALLEL:
3097 case GIMPLE_OMP_TEAMS:
3098 error_at (gimple_location (stmt),
3099 "%<%s taskgroup%> construct not closely "
3100 "nested inside of %<taskgroup%> region",
3101 construct);
3102 return false;
3103 case GIMPLE_OMP_TASK:
3104 if (gimple_omp_task_taskloop_p (octx->stmt)
3105 && octx->outer
3106 && is_taskloop_ctx (octx->outer))
3107 {
3108 tree clauses
3109 = gimple_omp_for_clauses (octx->outer->stmt);
3110 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3111 break;
3112 }
3113 continue;
3114 default:
3115 continue;
3116 }
3117 break;
3118 }
3119 ctx->cancellable = true;
3120 }
3121 kind = "taskgroup";
3122 break;
3123 default:
3124 error_at (gimple_location (stmt), "invalid arguments");
3125 return false;
3126 }
3127 if (bad)
3128 {
3129 error_at (gimple_location (stmt),
3130 "%<%s %s%> construct not closely nested inside of %qs",
3131 construct, kind, bad);
3132 return false;
3133 }
3134 }
3135 /* FALLTHRU */
3136 case GIMPLE_OMP_SECTIONS:
3137 case GIMPLE_OMP_SINGLE:
3138 for (; ctx != NULL; ctx = ctx->outer)
3139 switch (gimple_code (ctx->stmt))
3140 {
3141 case GIMPLE_OMP_FOR:
3142 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3143 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3144 break;
3145 /* FALLTHRU */
3146 case GIMPLE_OMP_SECTIONS:
3147 case GIMPLE_OMP_SINGLE:
3148 case GIMPLE_OMP_ORDERED:
3149 case GIMPLE_OMP_MASTER:
3150 case GIMPLE_OMP_TASK:
3151 case GIMPLE_OMP_CRITICAL:
3152 if (is_gimple_call (stmt))
3153 {
3154 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3155 != BUILT_IN_GOMP_BARRIER)
3156 return true;
3157 error_at (gimple_location (stmt),
3158 "barrier region may not be closely nested inside "
3159 "of work-sharing, %<loop%>, %<critical%>, "
3160 "%<ordered%>, %<master%>, explicit %<task%> or "
3161 "%<taskloop%> region");
3162 return false;
3163 }
3164 error_at (gimple_location (stmt),
3165 "work-sharing region may not be closely nested inside "
3166 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3167 "%<master%>, explicit %<task%> or %<taskloop%> region");
3168 return false;
3169 case GIMPLE_OMP_PARALLEL:
3170 case GIMPLE_OMP_TEAMS:
3171 return true;
3172 case GIMPLE_OMP_TARGET:
3173 if (gimple_omp_target_kind (ctx->stmt)
3174 == GF_OMP_TARGET_KIND_REGION)
3175 return true;
3176 break;
3177 default:
3178 break;
3179 }
3180 break;
3181 case GIMPLE_OMP_MASTER:
3182 for (; ctx != NULL; ctx = ctx->outer)
3183 switch (gimple_code (ctx->stmt))
3184 {
3185 case GIMPLE_OMP_FOR:
3186 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3187 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3188 break;
3189 /* FALLTHRU */
3190 case GIMPLE_OMP_SECTIONS:
3191 case GIMPLE_OMP_SINGLE:
3192 case GIMPLE_OMP_TASK:
3193 error_at (gimple_location (stmt),
3194 "%<master%> region may not be closely nested inside "
3195 "of work-sharing, %<loop%>, explicit %<task%> or "
3196 "%<taskloop%> region");
3197 return false;
3198 case GIMPLE_OMP_PARALLEL:
3199 case GIMPLE_OMP_TEAMS:
3200 return true;
3201 case GIMPLE_OMP_TARGET:
3202 if (gimple_omp_target_kind (ctx->stmt)
3203 == GF_OMP_TARGET_KIND_REGION)
3204 return true;
3205 break;
3206 default:
3207 break;
3208 }
3209 break;
3210 case GIMPLE_OMP_TASK:
3211 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3212 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3213 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3214 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3215 {
3216 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3217 error_at (OMP_CLAUSE_LOCATION (c),
3218 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3219 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3220 return false;
3221 }
3222 break;
3223 case GIMPLE_OMP_ORDERED:
3224 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3225 c; c = OMP_CLAUSE_CHAIN (c))
3226 {
3227 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3228 {
3229 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3230 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3231 continue;
3232 }
3233 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3234 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3235 || kind == OMP_CLAUSE_DEPEND_SINK)
3236 {
3237 tree oclause;
3238 /* Look for containing ordered(N) loop. */
3239 if (ctx == NULL
3240 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3241 || (oclause
3242 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3243 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3244 {
3245 error_at (OMP_CLAUSE_LOCATION (c),
3246 "%<ordered%> construct with %<depend%> clause "
3247 "must be closely nested inside an %<ordered%> "
3248 "loop");
3249 return false;
3250 }
3251 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3252 {
3253 error_at (OMP_CLAUSE_LOCATION (c),
3254 "%<ordered%> construct with %<depend%> clause "
3255 "must be closely nested inside a loop with "
3256 "%<ordered%> clause with a parameter");
3257 return false;
3258 }
3259 }
3260 else
3261 {
3262 error_at (OMP_CLAUSE_LOCATION (c),
3263 "invalid depend kind in omp %<ordered%> %<depend%>");
3264 return false;
3265 }
3266 }
3267 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3268 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3269 {
3270 /* ordered simd must be closely nested inside of simd region,
3271 and simd region must not encounter constructs other than
3272 ordered simd, therefore ordered simd may be either orphaned,
3273 or ctx->stmt must be simd. The latter case is handled already
3274 earlier. */
3275 if (ctx != NULL)
3276 {
3277 error_at (gimple_location (stmt),
3278 "%<ordered%> %<simd%> must be closely nested inside "
3279 "%<simd%> region");
3280 return false;
3281 }
3282 }
3283 for (; ctx != NULL; ctx = ctx->outer)
3284 switch (gimple_code (ctx->stmt))
3285 {
3286 case GIMPLE_OMP_CRITICAL:
3287 case GIMPLE_OMP_TASK:
3288 case GIMPLE_OMP_ORDERED:
3289 ordered_in_taskloop:
3290 error_at (gimple_location (stmt),
3291 "%<ordered%> region may not be closely nested inside "
3292 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3293 "%<taskloop%> region");
3294 return false;
3295 case GIMPLE_OMP_FOR:
3296 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3297 goto ordered_in_taskloop;
3298 tree o;
3299 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3300 OMP_CLAUSE_ORDERED);
3301 if (o == NULL)
3302 {
3303 error_at (gimple_location (stmt),
3304 "%<ordered%> region must be closely nested inside "
3305 "a loop region with an %<ordered%> clause");
3306 return false;
3307 }
3308 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3309 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3310 {
3311 error_at (gimple_location (stmt),
3312 "%<ordered%> region without %<depend%> clause may "
3313 "not be closely nested inside a loop region with "
3314 "an %<ordered%> clause with a parameter");
3315 return false;
3316 }
3317 return true;
3318 case GIMPLE_OMP_TARGET:
3319 if (gimple_omp_target_kind (ctx->stmt)
3320 != GF_OMP_TARGET_KIND_REGION)
3321 break;
3322 /* FALLTHRU */
3323 case GIMPLE_OMP_PARALLEL:
3324 case GIMPLE_OMP_TEAMS:
3325 error_at (gimple_location (stmt),
3326 "%<ordered%> region must be closely nested inside "
3327 "a loop region with an %<ordered%> clause");
3328 return false;
3329 default:
3330 break;
3331 }
3332 break;
3333 case GIMPLE_OMP_CRITICAL:
3334 {
3335 tree this_stmt_name
3336 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3337 for (; ctx != NULL; ctx = ctx->outer)
3338 if (gomp_critical *other_crit
3339 = dyn_cast <gomp_critical *> (ctx->stmt))
3340 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3341 {
3342 error_at (gimple_location (stmt),
3343 "%<critical%> region may not be nested inside "
3344 "a %<critical%> region with the same name");
3345 return false;
3346 }
3347 }
3348 break;
3349 case GIMPLE_OMP_TEAMS:
3350 if (ctx == NULL)
3351 break;
3352 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3353 || (gimple_omp_target_kind (ctx->stmt)
3354 != GF_OMP_TARGET_KIND_REGION))
3355 {
3356 /* Teams construct can appear either strictly nested inside of
3357 target construct with no intervening stmts, or can be encountered
3358 only by initial task (so must not appear inside any OpenMP
3359 construct. */
3360 error_at (gimple_location (stmt),
3361 "%<teams%> construct must be closely nested inside of "
3362 "%<target%> construct or not nested in any OpenMP "
3363 "construct");
3364 return false;
3365 }
3366 break;
3367 case GIMPLE_OMP_TARGET:
3368 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3369 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3370 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3371 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3372 {
3373 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3374 error_at (OMP_CLAUSE_LOCATION (c),
3375 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3376 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3377 return false;
3378 }
3379 if (is_gimple_omp_offloaded (stmt)
3380 && oacc_get_fn_attrib (cfun->decl) != NULL)
3381 {
3382 error_at (gimple_location (stmt),
3383 "OpenACC region inside of OpenACC routine, nested "
3384 "parallelism not supported yet");
3385 return false;
3386 }
3387 for (; ctx != NULL; ctx = ctx->outer)
3388 {
3389 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3390 {
3391 if (is_gimple_omp (stmt)
3392 && is_gimple_omp_oacc (stmt)
3393 && is_gimple_omp (ctx->stmt))
3394 {
3395 error_at (gimple_location (stmt),
3396 "OpenACC construct inside of non-OpenACC region");
3397 return false;
3398 }
3399 continue;
3400 }
3401
3402 const char *stmt_name, *ctx_stmt_name;
3403 switch (gimple_omp_target_kind (stmt))
3404 {
3405 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3406 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3407 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3408 case GF_OMP_TARGET_KIND_ENTER_DATA:
3409 stmt_name = "target enter data"; break;
3410 case GF_OMP_TARGET_KIND_EXIT_DATA:
3411 stmt_name = "target exit data"; break;
3412 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3413 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3414 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3415 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3416 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3417 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3418 stmt_name = "enter/exit data"; break;
3419 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3420 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3421 break;
3422 default: gcc_unreachable ();
3423 }
3424 switch (gimple_omp_target_kind (ctx->stmt))
3425 {
3426 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3427 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3428 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3429 ctx_stmt_name = "parallel"; break;
3430 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3431 ctx_stmt_name = "kernels"; break;
3432 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3433 ctx_stmt_name = "serial"; break;
3434 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3435 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3436 ctx_stmt_name = "host_data"; break;
3437 default: gcc_unreachable ();
3438 }
3439
3440 /* OpenACC/OpenMP mismatch? */
3441 if (is_gimple_omp_oacc (stmt)
3442 != is_gimple_omp_oacc (ctx->stmt))
3443 {
3444 error_at (gimple_location (stmt),
3445 "%s %qs construct inside of %s %qs region",
3446 (is_gimple_omp_oacc (stmt)
3447 ? "OpenACC" : "OpenMP"), stmt_name,
3448 (is_gimple_omp_oacc (ctx->stmt)
3449 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3450 return false;
3451 }
3452 if (is_gimple_omp_offloaded (ctx->stmt))
3453 {
3454 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3455 if (is_gimple_omp_oacc (ctx->stmt))
3456 {
3457 error_at (gimple_location (stmt),
3458 "%qs construct inside of %qs region",
3459 stmt_name, ctx_stmt_name);
3460 return false;
3461 }
3462 else
3463 {
3464 warning_at (gimple_location (stmt), 0,
3465 "%qs construct inside of %qs region",
3466 stmt_name, ctx_stmt_name);
3467 }
3468 }
3469 }
3470 break;
3471 default:
3472 break;
3473 }
3474 return true;
3475 }
3476
3477
3478 /* Helper function scan_omp.
3479
3480 Callback for walk_tree or operators in walk_gimple_stmt used to
3481 scan for OMP directives in TP. */
3482
3483 static tree
3484 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3485 {
3486 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3487 omp_context *ctx = (omp_context *) wi->info;
3488 tree t = *tp;
3489
3490 switch (TREE_CODE (t))
3491 {
3492 case VAR_DECL:
3493 case PARM_DECL:
3494 case LABEL_DECL:
3495 case RESULT_DECL:
3496 if (ctx)
3497 {
3498 tree repl = remap_decl (t, &ctx->cb);
3499 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3500 *tp = repl;
3501 }
3502 break;
3503
3504 default:
3505 if (ctx && TYPE_P (t))
3506 *tp = remap_type (t, &ctx->cb);
3507 else if (!DECL_P (t))
3508 {
3509 *walk_subtrees = 1;
3510 if (ctx)
3511 {
3512 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3513 if (tem != TREE_TYPE (t))
3514 {
3515 if (TREE_CODE (t) == INTEGER_CST)
3516 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3517 else
3518 TREE_TYPE (t) = tem;
3519 }
3520 }
3521 }
3522 break;
3523 }
3524
3525 return NULL_TREE;
3526 }
3527
3528 /* Return true if FNDECL is a setjmp or a longjmp. */
3529
3530 static bool
3531 setjmp_or_longjmp_p (const_tree fndecl)
3532 {
3533 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3534 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3535 return true;
3536
3537 tree declname = DECL_NAME (fndecl);
3538 if (!declname
3539 || (DECL_CONTEXT (fndecl) != NULL_TREE
3540 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3541 || !TREE_PUBLIC (fndecl))
3542 return false;
3543
3544 const char *name = IDENTIFIER_POINTER (declname);
3545 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3546 }
3547
3548 /* Return true if FNDECL is an omp_* runtime API call. */
3549
3550 static bool
3551 omp_runtime_api_call (const_tree fndecl)
3552 {
3553 tree declname = DECL_NAME (fndecl);
3554 if (!declname
3555 || (DECL_CONTEXT (fndecl) != NULL_TREE
3556 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3557 || !TREE_PUBLIC (fndecl))
3558 return false;
3559
3560 const char *name = IDENTIFIER_POINTER (declname);
3561 if (strncmp (name, "omp_", 4) != 0)
3562 return false;
3563
3564 static const char *omp_runtime_apis[] =
3565 {
3566 /* This array has 3 sections.  First, omp_* calls that don't
3567 have any suffixes.  */
3568 "target_alloc",
3569 "target_associate_ptr",
3570 "target_disassociate_ptr",
3571 "target_free",
3572 "target_is_present",
3573 "target_memcpy",
3574 "target_memcpy_rect",
3575 NULL,
3576 /* Now omp_* calls that are available as omp_* and omp_*_. */
3577 "capture_affinity",
3578 "destroy_lock",
3579 "destroy_nest_lock",
3580 "display_affinity",
3581 "get_active_level",
3582 "get_affinity_format",
3583 "get_cancellation",
3584 "get_default_device",
3585 "get_dynamic",
3586 "get_initial_device",
3587 "get_level",
3588 "get_max_active_levels",
3589 "get_max_task_priority",
3590 "get_max_threads",
3591 "get_nested",
3592 "get_num_devices",
3593 "get_num_places",
3594 "get_num_procs",
3595 "get_num_teams",
3596 "get_num_threads",
3597 "get_partition_num_places",
3598 "get_place_num",
3599 "get_proc_bind",
3600 "get_team_num",
3601 "get_thread_limit",
3602 "get_thread_num",
3603 "get_wtick",
3604 "get_wtime",
3605 "in_final",
3606 "in_parallel",
3607 "init_lock",
3608 "init_nest_lock",
3609 "is_initial_device",
3610 "pause_resource",
3611 "pause_resource_all",
3612 "set_affinity_format",
3613 "set_lock",
3614 "set_nest_lock",
3615 "test_lock",
3616 "test_nest_lock",
3617 "unset_lock",
3618 "unset_nest_lock",
3619 NULL,
3620 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3621 "get_ancestor_thread_num",
3622 "get_partition_place_nums",
3623 "get_place_num_procs",
3624 "get_place_proc_ids",
3625 "get_schedule",
3626 "get_team_size",
3627 "set_default_device",
3628 "set_dynamic",
3629 "set_max_active_levels",
3630 "set_nested",
3631 "set_num_threads",
3632 "set_schedule"
3633 };
3634
3635 int mode = 0;
3636 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3637 {
3638 if (omp_runtime_apis[i] == NULL)
3639 {
3640 mode++;
3641 continue;
3642 }
3643 size_t len = strlen (omp_runtime_apis[i]);
3644 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3645 && (name[4 + len] == '\0'
3646 || (mode > 0
3647 && name[4 + len] == '_'
3648 && (name[4 + len + 1] == '\0'
3649 || (mode > 1
3650 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3651 return true;
3652 }
3653 return false;
3654 }
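
/* To illustrate the suffix handling above (example names only):

     omp_target_alloc       matches in section 1; omp_target_alloc_
                            does not, because MODE is still 0 there
     omp_get_num_threads    matches in section 2, as does
                            omp_get_num_threads_ (the trailing
                            underscore form, as e.g. Fortran name
                            mangling produces)
     omp_set_num_threads    matches in section 3, which additionally
                            accepts omp_set_num_threads_8_  */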
3655
3656 /* Helper function for scan_omp.
3657
3658 Callback for walk_gimple_stmt used to scan for OMP directives in
3659 the current statement in GSI. */
3660
3661 static tree
3662 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3663 struct walk_stmt_info *wi)
3664 {
3665 gimple *stmt = gsi_stmt (*gsi);
3666 omp_context *ctx = (omp_context *) wi->info;
3667
3668 if (gimple_has_location (stmt))
3669 input_location = gimple_location (stmt);
3670
3671 /* Check the nesting restrictions. */
3672 bool remove = false;
3673 if (is_gimple_omp (stmt))
3674 remove = !check_omp_nesting_restrictions (stmt, ctx);
3675 else if (is_gimple_call (stmt))
3676 {
3677 tree fndecl = gimple_call_fndecl (stmt);
3678 if (fndecl)
3679 {
3680 if (ctx
3681 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3682 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3683 && setjmp_or_longjmp_p (fndecl)
3684 && !ctx->loop_p)
3685 {
3686 remove = true;
3687 error_at (gimple_location (stmt),
3688 "setjmp/longjmp inside %<simd%> construct");
3689 }
3690 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3691 switch (DECL_FUNCTION_CODE (fndecl))
3692 {
3693 case BUILT_IN_GOMP_BARRIER:
3694 case BUILT_IN_GOMP_CANCEL:
3695 case BUILT_IN_GOMP_CANCELLATION_POINT:
3696 case BUILT_IN_GOMP_TASKYIELD:
3697 case BUILT_IN_GOMP_TASKWAIT:
3698 case BUILT_IN_GOMP_TASKGROUP_START:
3699 case BUILT_IN_GOMP_TASKGROUP_END:
3700 remove = !check_omp_nesting_restrictions (stmt, ctx);
3701 break;
3702 default:
3703 break;
3704 }
3705 else if (ctx)
3706 {
3707 omp_context *octx = ctx;
3708 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3709 octx = ctx->outer;
3710 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3711 {
3712 remove = true;
3713 error_at (gimple_location (stmt),
3714 "OpenMP runtime API call %qD in a region with "
3715 "%<order(concurrent)%> clause", fndecl);
3716 }
3717 }
3718 }
3719 }
3720 if (remove)
3721 {
3722 stmt = gimple_build_nop ();
3723 gsi_replace (gsi, stmt, false);
3724 }
3725
3726 *handled_ops_p = true;
3727
3728 switch (gimple_code (stmt))
3729 {
3730 case GIMPLE_OMP_PARALLEL:
3731 taskreg_nesting_level++;
3732 scan_omp_parallel (gsi, ctx);
3733 taskreg_nesting_level--;
3734 break;
3735
3736 case GIMPLE_OMP_TASK:
3737 taskreg_nesting_level++;
3738 scan_omp_task (gsi, ctx);
3739 taskreg_nesting_level--;
3740 break;
3741
3742 case GIMPLE_OMP_FOR:
3743 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3744 == GF_OMP_FOR_KIND_SIMD)
3745 && gimple_omp_for_combined_into_p (stmt)
3746 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3747 {
3748 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3749 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3750 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3751 {
3752 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3753 break;
3754 }
3755 }
3756 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3757 == GF_OMP_FOR_KIND_SIMD)
3758 && omp_maybe_offloaded_ctx (ctx)
3759 && omp_max_simt_vf ())
3760 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3761 else
3762 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3763 break;
3764
3765 case GIMPLE_OMP_SECTIONS:
3766 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3767 break;
3768
3769 case GIMPLE_OMP_SINGLE:
3770 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3771 break;
3772
3773 case GIMPLE_OMP_SCAN:
3774 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3775 {
3776 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3777 ctx->scan_inclusive = true;
3778 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3779 ctx->scan_exclusive = true;
3780 }
3781 /* FALLTHRU */
3782 case GIMPLE_OMP_SECTION:
3783 case GIMPLE_OMP_MASTER:
3784 case GIMPLE_OMP_ORDERED:
3785 case GIMPLE_OMP_CRITICAL:
3786 case GIMPLE_OMP_GRID_BODY:
3787 ctx = new_omp_context (stmt, ctx);
3788 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3789 break;
3790
3791 case GIMPLE_OMP_TASKGROUP:
3792 ctx = new_omp_context (stmt, ctx);
3793 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3794 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3795 break;
3796
3797 case GIMPLE_OMP_TARGET:
3798 if (is_gimple_omp_offloaded (stmt))
3799 {
3800 taskreg_nesting_level++;
3801 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3802 taskreg_nesting_level--;
3803 }
3804 else
3805 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3806 break;
3807
3808 case GIMPLE_OMP_TEAMS:
3809 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3810 {
3811 taskreg_nesting_level++;
3812 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3813 taskreg_nesting_level--;
3814 }
3815 else
3816 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3817 break;
3818
3819 case GIMPLE_BIND:
3820 {
3821 tree var;
3822
3823 *handled_ops_p = false;
3824 if (ctx)
3825 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3826 var ;
3827 var = DECL_CHAIN (var))
3828 insert_decl_map (&ctx->cb, var, var);
3829 }
3830 break;
3831 default:
3832 *handled_ops_p = false;
3833 break;
3834 }
3835
3836 return NULL_TREE;
3837 }
3838
3839
3840 /* Scan all the statements starting at the current statement. CTX
3841 contains context information about the OMP directives and
3842 clauses found during the scan. */
3843
3844 static void
3845 scan_omp (gimple_seq *body_p, omp_context *ctx)
3846 {
3847 location_t saved_location;
3848 struct walk_stmt_info wi;
3849
3850 memset (&wi, 0, sizeof (wi));
3851 wi.info = ctx;
3852 wi.want_locations = true;
3853
3854 saved_location = input_location;
3855 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3856 input_location = saved_location;
3857 }
3858 \f
3859 /* Re-gimplification and code generation routines. */
3860
3861 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3862 of BIND if in a method. */
3863
3864 static void
3865 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3866 {
3867 if (DECL_ARGUMENTS (current_function_decl)
3868 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3869 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3870 == POINTER_TYPE))
3871 {
3872 tree vars = gimple_bind_vars (bind);
3873 for (tree *pvar = &vars; *pvar; )
3874 if (omp_member_access_dummy_var (*pvar))
3875 *pvar = DECL_CHAIN (*pvar);
3876 else
3877 pvar = &DECL_CHAIN (*pvar);
3878 gimple_bind_set_vars (bind, vars);
3879 }
3880 }
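
/* The guard above is a cheap "are we in a method?" test: the first
   parameter of the current function being an artificial POINTER_TYPE
   decl is how a C++ `this' parameter presents itself, and only then
   can the bind contain member-access dummy variables worth pruning.  */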
3881
3882 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3883 block and its subblocks. */
3884
3885 static void
3886 remove_member_access_dummy_vars (tree block)
3887 {
3888 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3889 if (omp_member_access_dummy_var (*pvar))
3890 *pvar = DECL_CHAIN (*pvar);
3891 else
3892 pvar = &DECL_CHAIN (*pvar);
3893
3894 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3895 remove_member_access_dummy_vars (block);
3896 }
3897
3898 /* If a context was created for STMT when it was scanned, return it. */
3899
3900 static omp_context *
3901 maybe_lookup_ctx (gimple *stmt)
3902 {
3903 splay_tree_node n;
3904 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3905 return n ? (omp_context *) n->value : NULL;
3906 }
3907
3908
3909 /* Find the mapping for DECL in CTX or the immediately enclosing
3910 context that has a mapping for DECL.
3911
3912 If CTX is a nested parallel directive, we may have to use the decl
3913 mappings created in CTX's parent context. Suppose that we have the
3914 following parallel nesting (variable UIDs shown for clarity):
3915
3916 iD.1562 = 0;
3917 #omp parallel shared(iD.1562) -> outer parallel
3918 iD.1562 = iD.1562 + 1;
3919
3920 #omp parallel shared (iD.1562) -> inner parallel
3921 iD.1562 = iD.1562 - 1;
3922
3923 Each parallel structure will create a distinct .omp_data_s structure
3924 for copying iD.1562 in/out of the directive:
3925
3926 outer parallel .omp_data_s.1.i -> iD.1562
3927 inner parallel .omp_data_s.2.i -> iD.1562
3928
3929 A shared variable mapping will produce a copy-out operation before
3930 the parallel directive and a copy-in operation after it. So, in
3931 this case we would have:
3932
3933 iD.1562 = 0;
3934 .omp_data_o.1.i = iD.1562;
3935 #omp parallel shared(iD.1562) -> outer parallel
3936 .omp_data_i.1 = &.omp_data_o.1
3937 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3938
3939 .omp_data_o.2.i = iD.1562; -> **
3940 #omp parallel shared(iD.1562) -> inner parallel
3941 .omp_data_i.2 = &.omp_data_o.2
3942 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3943
3944
3945 ** This is a problem. The symbol iD.1562 cannot be referenced
3946 inside the body of the outer parallel region. But since we are
3947 emitting this copy operation while expanding the inner parallel
3948 directive, we need to access the CTX structure of the outer
3949 parallel directive to get the correct mapping:
3950
3951 .omp_data_o.2.i = .omp_data_i.1->i
3952
3953 Since there may be other workshare or parallel directives enclosing
3954 the parallel directive, it may be necessary to walk up the context
3955 parent chain. This is not a problem in general because nested
3956 parallelism happens only rarely. */
3957
3958 static tree
3959 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3960 {
3961 tree t;
3962 omp_context *up;
3963
3964 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3965 t = maybe_lookup_decl (decl, up);
3966
3967 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3968
3969 return t ? t : decl;
3970 }
3971
3972
3973 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3974 in outer contexts. */
3975
3976 static tree
3977 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3978 {
3979 tree t = NULL;
3980 omp_context *up;
3981
3982 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3983 t = maybe_lookup_decl (decl, up);
3984
3985 return t ? t : decl;
3986 }
3987
3988
3989 /* Construct the initialization value for reduction operation OP. */
3990
3991 tree
3992 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3993 {
3994 switch (op)
3995 {
3996 case PLUS_EXPR:
3997 case MINUS_EXPR:
3998 case BIT_IOR_EXPR:
3999 case BIT_XOR_EXPR:
4000 case TRUTH_OR_EXPR:
4001 case TRUTH_ORIF_EXPR:
4002 case TRUTH_XOR_EXPR:
4003 case NE_EXPR:
4004 return build_zero_cst (type);
4005
4006 case MULT_EXPR:
4007 case TRUTH_AND_EXPR:
4008 case TRUTH_ANDIF_EXPR:
4009 case EQ_EXPR:
4010 return fold_convert_loc (loc, type, integer_one_node);
4011
4012 case BIT_AND_EXPR:
4013 return fold_convert_loc (loc, type, integer_minus_one_node);
4014
4015 case MAX_EXPR:
4016 if (SCALAR_FLOAT_TYPE_P (type))
4017 {
4018 REAL_VALUE_TYPE max, min;
4019 if (HONOR_INFINITIES (type))
4020 {
4021 real_inf (&max);
4022 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4023 }
4024 else
4025 real_maxval (&min, 1, TYPE_MODE (type));
4026 return build_real (type, min);
4027 }
4028 else if (POINTER_TYPE_P (type))
4029 {
4030 wide_int min
4031 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4032 return wide_int_to_tree (type, min);
4033 }
4034 else
4035 {
4036 gcc_assert (INTEGRAL_TYPE_P (type));
4037 return TYPE_MIN_VALUE (type);
4038 }
4039
4040 case MIN_EXPR:
4041 if (SCALAR_FLOAT_TYPE_P (type))
4042 {
4043 REAL_VALUE_TYPE max;
4044 if (HONOR_INFINITIES (type))
4045 real_inf (&max);
4046 else
4047 real_maxval (&max, 0, TYPE_MODE (type));
4048 return build_real (type, max);
4049 }
4050 else if (POINTER_TYPE_P (type))
4051 {
4052 wide_int max
4053 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4054 return wide_int_to_tree (type, max);
4055 }
4056 else
4057 {
4058 gcc_assert (INTEGRAL_TYPE_P (type));
4059 return TYPE_MAX_VALUE (type);
4060 }
4061
4062 default:
4063 gcc_unreachable ();
4064 }
4065 }
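
/* As a quick summary of the identities chosen above, e.g. for int:

     +, -, |, ^, ||  ->  0          &        ->  -1 (all bits set)
     *, &&           ->  1          max/min  ->  TYPE_MIN/TYPE_MAX

   MINUS_EXPR gets 0 because each thread accumulates partial results
   that are later combined with +, as noted where the merge code is
   generated below.  */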
4066
4067 /* Construct the initialization value for reduction CLAUSE. */
4068
4069 tree
4070 omp_reduction_init (tree clause, tree type)
4071 {
4072 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4073 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4074 }
4075
4076 /* Return alignment to be assumed for var in CLAUSE, which should be
4077 OMP_CLAUSE_ALIGNED. */
4078
4079 static tree
4080 omp_clause_aligned_alignment (tree clause)
4081 {
4082 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4083 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4084
4085 /* Otherwise return the implementation-defined alignment.  */
4086 unsigned int al = 1;
4087 opt_scalar_mode mode_iter;
4088 auto_vector_modes modes;
4089 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4090 static enum mode_class classes[]
4091 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4092 for (int i = 0; i < 4; i += 2)
4093 /* The for loop above dictates that we only walk through scalar classes. */
4094 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4095 {
4096 scalar_mode mode = mode_iter.require ();
4097 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4098 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4099 continue;
4100 machine_mode alt_vmode;
4101 for (unsigned int j = 0; j < modes.length (); ++j)
4102 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4103 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4104 vmode = alt_vmode;
4105
4106 tree type = lang_hooks.types.type_for_mode (mode, 1);
4107 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4108 continue;
4109 type = build_vector_type_for_mode (type, vmode);
4110 if (TYPE_MODE (type) != vmode)
4111 continue;
4112 if (TYPE_ALIGN_UNIT (type) > al)
4113 al = TYPE_ALIGN_UNIT (type);
4114 }
4115 return build_int_cst (integer_type_node, al);
4116 }
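
/* For example, on a target whose widest supported vector mode for
   some scalar mode is 256 bits, the loop above settles on AL = 32,
   so a bare

     #pragma omp simd aligned (p)

   lets the vectorizer assume 32-byte alignment for *p.  The value is
   deliberately target-dependent; only an explicit alignment argument
   in the clause is portable.  */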
4117
4118
4119 /* This structure is part of the interface between lower_rec_simd_input_clauses
4120 and lower_rec_input_clauses. */
4121
4122 class omplow_simd_context {
4123 public:
4124 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4125 tree idx;
4126 tree lane;
4127 tree lastlane;
4128 vec<tree, va_heap> simt_eargs;
4129 gimple_seq simt_dlist;
4130 poly_uint64_pod max_vf;
4131 bool is_simt;
4132 };
4133
4134 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4135 privatization. */
4136
4137 static bool
4138 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4139 omplow_simd_context *sctx, tree &ivar,
4140 tree &lvar, tree *rvar = NULL,
4141 tree *rvar2 = NULL)
4142 {
4143 if (known_eq (sctx->max_vf, 0U))
4144 {
4145 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4146 if (maybe_gt (sctx->max_vf, 1U))
4147 {
4148 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4149 OMP_CLAUSE_SAFELEN);
4150 if (c)
4151 {
4152 poly_uint64 safe_len;
4153 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4154 || maybe_lt (safe_len, 1U))
4155 sctx->max_vf = 1;
4156 else
4157 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4158 }
4159 }
4160 if (maybe_gt (sctx->max_vf, 1U))
4161 {
4162 sctx->idx = create_tmp_var (unsigned_type_node);
4163 sctx->lane = create_tmp_var (unsigned_type_node);
4164 }
4165 }
4166 if (known_eq (sctx->max_vf, 1U))
4167 return false;
4168
4169 if (sctx->is_simt)
4170 {
4171 if (is_gimple_reg (new_var))
4172 {
4173 ivar = lvar = new_var;
4174 return true;
4175 }
4176 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4177 ivar = lvar = create_tmp_var (type);
4178 TREE_ADDRESSABLE (ivar) = 1;
4179 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4180 NULL, DECL_ATTRIBUTES (ivar));
4181 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4182 tree clobber = build_clobber (type);
4183 gimple *g = gimple_build_assign (ivar, clobber);
4184 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4185 }
4186 else
4187 {
4188 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4189 tree avar = create_tmp_var_raw (atype);
4190 if (TREE_ADDRESSABLE (new_var))
4191 TREE_ADDRESSABLE (avar) = 1;
4192 DECL_ATTRIBUTES (avar)
4193 = tree_cons (get_identifier ("omp simd array"), NULL,
4194 DECL_ATTRIBUTES (avar));
4195 gimple_add_tmp_var (avar);
4196 tree iavar = avar;
4197 if (rvar && !ctx->for_simd_scan_phase)
4198 {
4199 /* For inscan reductions, create another array temporary,
4200 which will hold the reduced value. */
4201 iavar = create_tmp_var_raw (atype);
4202 if (TREE_ADDRESSABLE (new_var))
4203 TREE_ADDRESSABLE (iavar) = 1;
4204 DECL_ATTRIBUTES (iavar)
4205 = tree_cons (get_identifier ("omp simd array"), NULL,
4206 tree_cons (get_identifier ("omp simd inscan"), NULL,
4207 DECL_ATTRIBUTES (iavar)));
4208 gimple_add_tmp_var (iavar);
4209 ctx->cb.decl_map->put (avar, iavar);
4210 if (sctx->lastlane == NULL_TREE)
4211 sctx->lastlane = create_tmp_var (unsigned_type_node);
4212 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4213 sctx->lastlane, NULL_TREE, NULL_TREE);
4214 TREE_THIS_NOTRAP (*rvar) = 1;
4215
4216 if (ctx->scan_exclusive)
4217 {
4218 /* And for exclusive scan yet another one, which will
4219 hold the value during the scan phase. */
4220 tree savar = create_tmp_var_raw (atype);
4221 if (TREE_ADDRESSABLE (new_var))
4222 TREE_ADDRESSABLE (savar) = 1;
4223 DECL_ATTRIBUTES (savar)
4224 = tree_cons (get_identifier ("omp simd array"), NULL,
4225 tree_cons (get_identifier ("omp simd inscan "
4226 "exclusive"), NULL,
4227 DECL_ATTRIBUTES (savar)));
4228 gimple_add_tmp_var (savar);
4229 ctx->cb.decl_map->put (iavar, savar);
4230 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4231 sctx->idx, NULL_TREE, NULL_TREE);
4232 TREE_THIS_NOTRAP (*rvar2) = 1;
4233 }
4234 }
4235 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4236 NULL_TREE, NULL_TREE);
4237 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4238 NULL_TREE, NULL_TREE);
4239 TREE_THIS_NOTRAP (ivar) = 1;
4240 TREE_THIS_NOTRAP (lvar) = 1;
4241 }
4242 if (DECL_P (new_var))
4243 {
4244 SET_DECL_VALUE_EXPR (new_var, lvar);
4245 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4246 }
4247 return true;
4248 }
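
/* A sketch of the non-SIMT transformation above (names illustrative):
   a privatized scalar

     int x;

   becomes an "omp simd array"

     int x_array[max_vf];

   where IVAR = x_array[sctx->idx] is used for the per-iteration
   accesses in the loop body and LVAR = x_array[sctx->lane] is the
   single per-lane instance; the DECL_VALUE_EXPR of the original
   variable is redirected to LVAR.  Inscan reductions get a second
   (and exclusive scan a third) such array for the scan results.  */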
4249
4250 /* Helper function of lower_rec_input_clauses. For a reference
4251 in simd reduction, add an underlying variable it will reference. */
4252
4253 static void
4254 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4255 {
4256 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4257 if (TREE_CONSTANT (z))
4258 {
4259 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4260 get_name (new_vard));
4261 gimple_add_tmp_var (z);
4262 TREE_ADDRESSABLE (z) = 1;
4263 z = build_fold_addr_expr_loc (loc, z);
4264 gimplify_assign (new_vard, z, ilist);
4265 }
4266 }
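
/* I.e., for a constant-sized referenced type this emits roughly

     T tmp;                    <-- addressable temporary
     new_vard = &tmp;

   giving the reference backing storage before the SIMD lowering
   dereferences it; variable-sized types are left to the caller.  */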
4267
4268 /* Helper function for lower_rec_input_clauses.  Emit code into the
4269 ILIST sequence to compute (type) (tskred_temp[idx]).  */
4270
4271 static tree
4272 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4273 unsigned idx)
4274 {
4275 unsigned HOST_WIDE_INT sz
4276 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4277 tree r = build2 (MEM_REF, pointer_sized_int_node,
4278 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4279 idx * sz));
4280 tree v = create_tmp_var (pointer_sized_int_node);
4281 gimple *g = gimple_build_assign (v, r);
4282 gimple_seq_add_stmt (ilist, g);
4283 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4284 {
4285 v = create_tmp_var (type);
4286 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4287 gimple_seq_add_stmt (ilist, g);
4288 }
4289 return v;
4290 }
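
/* The emitted sequence is roughly (types abbreviated):

     uintptr v1 = MEM[(uintptr *) tskred_temp + IDX * sizeof (void *)];
     type v2 = (type) v1;      <-- only if the conversion is needed

   i.e. read slot IDX of the pointer-sized task reduction descriptor
   array and convert it to the requested type.  */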
4291
4292 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4293 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4294 private variables. Initialization statements go in ILIST, while calls
4295 to destructors go in DLIST. */
4296
4297 static void
4298 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4299 omp_context *ctx, struct omp_for_data *fd)
4300 {
4301 tree c, copyin_seq, x, ptr;
4302 bool copyin_by_ref = false;
4303 bool lastprivate_firstprivate = false;
4304 bool reduction_omp_orig_ref = false;
4305 int pass;
4306 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4307 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4308 omplow_simd_context sctx = omplow_simd_context ();
4309 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4310 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4311 gimple_seq llist[4] = { };
4312 tree nonconst_simd_if = NULL_TREE;
4313
4314 copyin_seq = NULL;
4315 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4316
4317 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4318 with data sharing clauses referencing variable sized vars. That
4319 is unnecessarily hard to support and very unlikely to result in
4320 vectorized code anyway. */
4321 if (is_simd)
4322 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4323 switch (OMP_CLAUSE_CODE (c))
4324 {
4325 case OMP_CLAUSE_LINEAR:
4326 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4327 sctx.max_vf = 1;
4328 /* FALLTHRU */
4329 case OMP_CLAUSE_PRIVATE:
4330 case OMP_CLAUSE_FIRSTPRIVATE:
4331 case OMP_CLAUSE_LASTPRIVATE:
4332 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4333 sctx.max_vf = 1;
4334 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4335 {
4336 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4337 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4338 sctx.max_vf = 1;
4339 }
4340 break;
4341 case OMP_CLAUSE_REDUCTION:
4342 case OMP_CLAUSE_IN_REDUCTION:
4343 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4344 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4345 sctx.max_vf = 1;
4346 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4347 {
4348 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4349 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4350 sctx.max_vf = 1;
4351 }
4352 break;
4353 case OMP_CLAUSE_IF:
4354 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4355 sctx.max_vf = 1;
4356 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4357 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4358 break;
4359 case OMP_CLAUSE_SIMDLEN:
4360 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4361 sctx.max_vf = 1;
4362 break;
4363 case OMP_CLAUSE__CONDTEMP_:
4364 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4365 if (sctx.is_simt)
4366 sctx.max_vf = 1;
4367 break;
4368 default:
4369 continue;
4370 }
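
/* E.g. a loop such as

     #pragma omp simd private (vla)

   with VLA a variable length array, or one carrying if(0) or
   simdlen(1), is clamped to max_vf = 1 here, which disables the
   SIMD-array privatization below.  */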
4371
4372 /* Add a placeholder for simduid. */
4373 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4374 sctx.simt_eargs.safe_push (NULL_TREE);
4375
4376 unsigned task_reduction_cnt = 0;
4377 unsigned task_reduction_cntorig = 0;
4378 unsigned task_reduction_cnt_full = 0;
4379 unsigned task_reduction_cntorig_full = 0;
4380 unsigned task_reduction_other_cnt = 0;
4381 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4382 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4383 /* Do all the fixed-sized types in the first pass, and the variable-sized
4384 types in the second pass.  This makes sure that the scalar arguments to
4385 the variable-sized types are processed before we use them in the
4386 variable-sized operations.  For task reductions we use 4 passes: in the
4387 first two we ignore them, in the third we gather arguments for the
4388 GOMP_task_reduction_remap call, and in the last pass we actually handle
4389 the task reductions.  */
4390 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4391 ? 4 : 2); ++pass)
4392 {
4393 if (pass == 2 && task_reduction_cnt)
4394 {
4395 tskred_atype
4396 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4397 + task_reduction_cntorig);
4398 tskred_avar = create_tmp_var_raw (tskred_atype);
4399 gimple_add_tmp_var (tskred_avar);
4400 TREE_ADDRESSABLE (tskred_avar) = 1;
4401 task_reduction_cnt_full = task_reduction_cnt;
4402 task_reduction_cntorig_full = task_reduction_cntorig;
4403 }
4404 else if (pass == 3 && task_reduction_cnt)
4405 {
4406 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4407 gimple *g
4408 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4409 size_int (task_reduction_cntorig),
4410 build_fold_addr_expr (tskred_avar));
4411 gimple_seq_add_stmt (ilist, g);
4412 }
4413 if (pass == 3 && task_reduction_other_cnt)
4414 {
4415 /* For reduction clauses, build
4416 tskred_base = (void *) tskred_temp[2]
4417 + omp_get_thread_num () * tskred_temp[1]
4418 or, if tskred_temp[1] is known to be constant, use that constant
4419 directly.  This is the start of the private reduction copy block
4420 for the current thread.  */
4421 tree v = create_tmp_var (integer_type_node);
4422 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4423 gimple *g = gimple_build_call (x, 0);
4424 gimple_call_set_lhs (g, v);
4425 gimple_seq_add_stmt (ilist, g);
4426 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4427 tskred_temp = OMP_CLAUSE_DECL (c);
4428 if (is_taskreg_ctx (ctx))
4429 tskred_temp = lookup_decl (tskred_temp, ctx);
4430 tree v2 = create_tmp_var (sizetype);
4431 g = gimple_build_assign (v2, NOP_EXPR, v);
4432 gimple_seq_add_stmt (ilist, g);
4433 if (ctx->task_reductions[0])
4434 v = fold_convert (sizetype, ctx->task_reductions[0]);
4435 else
4436 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4437 tree v3 = create_tmp_var (sizetype);
4438 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4439 gimple_seq_add_stmt (ilist, g);
4440 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4441 tskred_base = create_tmp_var (ptr_type_node);
4442 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4443 gimple_seq_add_stmt (ilist, g);
4444 }
4445 task_reduction_cnt = 0;
4446 task_reduction_cntorig = 0;
4447 task_reduction_other_cnt = 0;
4448 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4449 {
4450 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4451 tree var, new_var;
4452 bool by_ref;
4453 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4454 bool task_reduction_p = false;
4455 bool task_reduction_needs_orig_p = false;
4456 tree cond = NULL_TREE;
4457
4458 switch (c_kind)
4459 {
4460 case OMP_CLAUSE_PRIVATE:
4461 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4462 continue;
4463 break;
4464 case OMP_CLAUSE_SHARED:
4465 /* Ignore shared directives in teams construct inside
4466 of target construct. */
4467 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4468 && !is_host_teams_ctx (ctx))
4469 continue;
4470 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4471 {
4472 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4473 || is_global_var (OMP_CLAUSE_DECL (c)));
4474 continue;
4475 }
/* FALLTHRU */
4476 case OMP_CLAUSE_FIRSTPRIVATE:
4477 case OMP_CLAUSE_COPYIN:
4478 break;
4479 case OMP_CLAUSE_LINEAR:
4480 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4481 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4482 lastprivate_firstprivate = true;
4483 break;
4484 case OMP_CLAUSE_REDUCTION:
4485 case OMP_CLAUSE_IN_REDUCTION:
4486 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4487 {
4488 task_reduction_p = true;
4489 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4490 {
4491 task_reduction_other_cnt++;
4492 if (pass == 2)
4493 continue;
4494 }
4495 else
4496 task_reduction_cnt++;
4497 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4498 {
4499 var = OMP_CLAUSE_DECL (c);
4500 /* If var is a global variable that isn't privatized
4501 in outer contexts, we don't need to look up the
4502 original address; it is always the address of the
4503 global variable itself.  */
4504 if (!DECL_P (var)
4505 || omp_is_reference (var)
4506 || !is_global_var
4507 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4508 {
4509 task_reduction_needs_orig_p = true;
4510 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4511 task_reduction_cntorig++;
4512 }
4513 }
4514 }
4515 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4516 reduction_omp_orig_ref = true;
4517 break;
4518 case OMP_CLAUSE__REDUCTEMP_:
4519 if (!is_taskreg_ctx (ctx))
4520 continue;
4521 /* FALLTHRU */
4522 case OMP_CLAUSE__LOOPTEMP_:
4523 /* Handle _looptemp_/_reductemp_ clauses only on
4524 parallel/task. */
4525 if (fd)
4526 continue;
4527 break;
4528 case OMP_CLAUSE_LASTPRIVATE:
4529 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4530 {
4531 lastprivate_firstprivate = true;
4532 if (pass != 0 || is_taskloop_ctx (ctx))
4533 continue;
4534 }
4535 /* Even without a corresponding firstprivate, if the
4536 decl is a Fortran allocatable, it needs an outer var
4537 reference.  */
4538 else if (pass == 0
4539 && lang_hooks.decls.omp_private_outer_ref
4540 (OMP_CLAUSE_DECL (c)))
4541 lastprivate_firstprivate = true;
4542 break;
4543 case OMP_CLAUSE_ALIGNED:
4544 if (pass != 1)
4545 continue;
4546 var = OMP_CLAUSE_DECL (c);
4547 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4548 && !is_global_var (var))
4549 {
4550 new_var = maybe_lookup_decl (var, ctx);
4551 if (new_var == NULL_TREE)
4552 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4553 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4554 tree alarg = omp_clause_aligned_alignment (c);
4555 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4556 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4557 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4558 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4559 gimplify_and_add (x, ilist);
4560 }
4561 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4562 && is_global_var (var))
4563 {
4564 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4565 new_var = lookup_decl (var, ctx);
4566 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4567 t = build_fold_addr_expr_loc (clause_loc, t);
4568 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4569 tree alarg = omp_clause_aligned_alignment (c);
4570 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4571 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4572 t = fold_convert_loc (clause_loc, ptype, t);
4573 x = create_tmp_var (ptype);
4574 t = build2 (MODIFY_EXPR, ptype, x, t);
4575 gimplify_and_add (t, ilist);
4576 t = build_simple_mem_ref_loc (clause_loc, x);
4577 SET_DECL_VALUE_EXPR (new_var, t);
4578 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4579 }
4580 continue;
4581 case OMP_CLAUSE__CONDTEMP_:
4582 if (is_parallel_ctx (ctx)
4583 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4584 break;
4585 continue;
4586 default:
4587 continue;
4588 }
4589
4590 if (task_reduction_p != (pass >= 2))
4591 continue;
4592
4593 new_var = var = OMP_CLAUSE_DECL (c);
4594 if ((c_kind == OMP_CLAUSE_REDUCTION
4595 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4596 && TREE_CODE (var) == MEM_REF)
4597 {
4598 var = TREE_OPERAND (var, 0);
4599 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4600 var = TREE_OPERAND (var, 0);
4601 if (TREE_CODE (var) == INDIRECT_REF
4602 || TREE_CODE (var) == ADDR_EXPR)
4603 var = TREE_OPERAND (var, 0);
4604 if (is_variable_sized (var))
4605 {
4606 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4607 var = DECL_VALUE_EXPR (var);
4608 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4609 var = TREE_OPERAND (var, 0);
4610 gcc_assert (DECL_P (var));
4611 }
4612 new_var = var;
4613 }
4614 if (c_kind != OMP_CLAUSE_COPYIN)
4615 new_var = lookup_decl (var, ctx);
4616
4617 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4618 {
4619 if (pass != 0)
4620 continue;
4621 }
4622 /* C/C++ array section reductions. */
4623 else if ((c_kind == OMP_CLAUSE_REDUCTION
4624 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4625 && var != OMP_CLAUSE_DECL (c))
4626 {
4627 if (pass == 0)
4628 continue;
4629
4630 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4631 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4632
4633 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4634 {
4635 tree b = TREE_OPERAND (orig_var, 1);
4636 b = maybe_lookup_decl (b, ctx);
4637 if (b == NULL)
4638 {
4639 b = TREE_OPERAND (orig_var, 1);
4640 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4641 }
4642 if (integer_zerop (bias))
4643 bias = b;
4644 else
4645 {
4646 bias = fold_convert_loc (clause_loc,
4647 TREE_TYPE (b), bias);
4648 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4649 TREE_TYPE (b), b, bias);
4650 }
4651 orig_var = TREE_OPERAND (orig_var, 0);
4652 }
4653 if (pass == 2)
4654 {
4655 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4656 if (is_global_var (out)
4657 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4658 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4659 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4660 != POINTER_TYPE)))
4661 x = var;
4662 else
4663 {
4664 bool by_ref = use_pointer_for_field (var, NULL);
4665 x = build_receiver_ref (var, by_ref, ctx);
4666 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4667 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4668 == POINTER_TYPE))
4669 x = build_fold_addr_expr (x);
4670 }
4671 if (TREE_CODE (orig_var) == INDIRECT_REF)
4672 x = build_simple_mem_ref (x);
4673 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4674 {
4675 if (var == TREE_OPERAND (orig_var, 0))
4676 x = build_fold_addr_expr (x);
4677 }
4678 bias = fold_convert (sizetype, bias);
4679 x = fold_convert (ptr_type_node, x);
4680 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4681 TREE_TYPE (x), x, bias);
4682 unsigned cnt = task_reduction_cnt - 1;
4683 if (!task_reduction_needs_orig_p)
4684 cnt += (task_reduction_cntorig_full
4685 - task_reduction_cntorig);
4686 else
4687 cnt = task_reduction_cntorig - 1;
4688 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4689 size_int (cnt), NULL_TREE, NULL_TREE);
4690 gimplify_assign (r, x, ilist);
4691 continue;
4692 }
4693
4694 if (TREE_CODE (orig_var) == INDIRECT_REF
4695 || TREE_CODE (orig_var) == ADDR_EXPR)
4696 orig_var = TREE_OPERAND (orig_var, 0);
4697 tree d = OMP_CLAUSE_DECL (c);
4698 tree type = TREE_TYPE (d);
4699 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4700 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4701 const char *name = get_name (orig_var);
4702 if (pass == 3)
4703 {
4704 tree xv = create_tmp_var (ptr_type_node);
4705 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4706 {
4707 unsigned cnt = task_reduction_cnt - 1;
4708 if (!task_reduction_needs_orig_p)
4709 cnt += (task_reduction_cntorig_full
4710 - task_reduction_cntorig);
4711 else
4712 cnt = task_reduction_cntorig - 1;
4713 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4714 size_int (cnt), NULL_TREE, NULL_TREE);
4715
4716 gimple *g = gimple_build_assign (xv, x);
4717 gimple_seq_add_stmt (ilist, g);
4718 }
4719 else
4720 {
4721 unsigned int idx = *ctx->task_reduction_map->get (c);
4722 tree off;
4723 if (ctx->task_reductions[1 + idx])
4724 off = fold_convert (sizetype,
4725 ctx->task_reductions[1 + idx]);
4726 else
4727 off = task_reduction_read (ilist, tskred_temp, sizetype,
4728 7 + 3 * idx + 1);
4729 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4730 tskred_base, off);
4731 gimple_seq_add_stmt (ilist, g);
4732 }
4733 x = fold_convert (build_pointer_type (boolean_type_node),
4734 xv);
4735 if (TREE_CONSTANT (v))
4736 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4737 TYPE_SIZE_UNIT (type));
4738 else
4739 {
4740 tree t = maybe_lookup_decl (v, ctx);
4741 if (t)
4742 v = t;
4743 else
4744 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4745 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4746 fb_rvalue);
4747 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4748 TREE_TYPE (v), v,
4749 build_int_cst (TREE_TYPE (v), 1));
4750 t = fold_build2_loc (clause_loc, MULT_EXPR,
4751 TREE_TYPE (v), t,
4752 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4753 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4754 }
4755 cond = create_tmp_var (TREE_TYPE (x));
4756 gimplify_assign (cond, x, ilist);
4757 x = xv;
4758 }
4759 else if (TREE_CONSTANT (v))
4760 {
4761 x = create_tmp_var_raw (type, name);
4762 gimple_add_tmp_var (x);
4763 TREE_ADDRESSABLE (x) = 1;
4764 x = build_fold_addr_expr_loc (clause_loc, x);
4765 }
4766 else
4767 {
4768 tree atmp
4769 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4770 tree t = maybe_lookup_decl (v, ctx);
4771 if (t)
4772 v = t;
4773 else
4774 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4775 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4776 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4777 TREE_TYPE (v), v,
4778 build_int_cst (TREE_TYPE (v), 1));
4779 t = fold_build2_loc (clause_loc, MULT_EXPR,
4780 TREE_TYPE (v), t,
4781 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4782 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4783 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4784 }
4785
4786 tree ptype = build_pointer_type (TREE_TYPE (type));
4787 x = fold_convert_loc (clause_loc, ptype, x);
4788 tree y = create_tmp_var (ptype, name);
4789 gimplify_assign (y, x, ilist);
4790 x = y;
4791 tree yb = y;
4792
4793 if (!integer_zerop (bias))
4794 {
4795 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4796 bias);
4797 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4798 x);
4799 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4800 pointer_sized_int_node, yb, bias);
4801 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4802 yb = create_tmp_var (ptype, name);
4803 gimplify_assign (yb, x, ilist);
4804 x = yb;
4805 }
4806
4807 d = TREE_OPERAND (d, 0);
4808 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4809 d = TREE_OPERAND (d, 0);
4810 if (TREE_CODE (d) == ADDR_EXPR)
4811 {
4812 if (orig_var != var)
4813 {
4814 gcc_assert (is_variable_sized (orig_var));
4815 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4816 x);
4817 gimplify_assign (new_var, x, ilist);
4818 tree new_orig_var = lookup_decl (orig_var, ctx);
4819 tree t = build_fold_indirect_ref (new_var);
4820 DECL_IGNORED_P (new_var) = 0;
4821 TREE_THIS_NOTRAP (t) = 1;
4822 SET_DECL_VALUE_EXPR (new_orig_var, t);
4823 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4824 }
4825 else
4826 {
4827 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4828 build_int_cst (ptype, 0));
4829 SET_DECL_VALUE_EXPR (new_var, x);
4830 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4831 }
4832 }
4833 else
4834 {
4835 gcc_assert (orig_var == var);
4836 if (TREE_CODE (d) == INDIRECT_REF)
4837 {
4838 x = create_tmp_var (ptype, name);
4839 TREE_ADDRESSABLE (x) = 1;
4840 gimplify_assign (x, yb, ilist);
4841 x = build_fold_addr_expr_loc (clause_loc, x);
4842 }
4843 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4844 gimplify_assign (new_var, x, ilist);
4845 }
4846 /* GOMP_taskgroup_reduction_register memsets the whole
4847 array to zero. If the initializer is zero, we don't
4848 need to initialize it again, just mark it as ever
4849 used unconditionally, i.e. cond = true. */
4850 if (cond
4851 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4852 && initializer_zerop (omp_reduction_init (c,
4853 TREE_TYPE (type))))
4854 {
4855 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4856 boolean_true_node);
4857 gimple_seq_add_stmt (ilist, g);
4858 continue;
4859 }
4860 tree end = create_artificial_label (UNKNOWN_LOCATION);
4861 if (cond)
4862 {
4863 gimple *g;
4864 if (!is_parallel_ctx (ctx))
4865 {
4866 tree condv = create_tmp_var (boolean_type_node);
4867 g = gimple_build_assign (condv,
4868 build_simple_mem_ref (cond));
4869 gimple_seq_add_stmt (ilist, g);
4870 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4871 g = gimple_build_cond (NE_EXPR, condv,
4872 boolean_false_node, end, lab1);
4873 gimple_seq_add_stmt (ilist, g);
4874 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4875 }
4876 g = gimple_build_assign (build_simple_mem_ref (cond),
4877 boolean_true_node);
4878 gimple_seq_add_stmt (ilist, g);
4879 }
4880
4881 tree y1 = create_tmp_var (ptype);
4882 gimplify_assign (y1, y, ilist);
4883 tree i2 = NULL_TREE, y2 = NULL_TREE;
4884 tree body2 = NULL_TREE, end2 = NULL_TREE;
4885 tree y3 = NULL_TREE, y4 = NULL_TREE;
4886 if (task_reduction_needs_orig_p)
4887 {
4888 y3 = create_tmp_var (ptype);
4889 tree ref;
4890 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4891 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4892 size_int (task_reduction_cnt_full
4893 + task_reduction_cntorig - 1),
4894 NULL_TREE, NULL_TREE);
4895 else
4896 {
4897 unsigned int idx = *ctx->task_reduction_map->get (c);
4898 ref = task_reduction_read (ilist, tskred_temp, ptype,
4899 7 + 3 * idx);
4900 }
4901 gimplify_assign (y3, ref, ilist);
4902 }
4903 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4904 {
4905 if (pass != 3)
4906 {
4907 y2 = create_tmp_var (ptype);
4908 gimplify_assign (y2, y, ilist);
4909 }
4910 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4911 {
4912 tree ref = build_outer_var_ref (var, ctx);
4913 /* For a reference, build_outer_var_ref already performs this.  */
4914 if (TREE_CODE (d) == INDIRECT_REF)
4915 gcc_assert (omp_is_reference (var));
4916 else if (TREE_CODE (d) == ADDR_EXPR)
4917 ref = build_fold_addr_expr (ref);
4918 else if (omp_is_reference (var))
4919 ref = build_fold_addr_expr (ref);
4920 ref = fold_convert_loc (clause_loc, ptype, ref);
4921 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4922 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4923 {
4924 y3 = create_tmp_var (ptype);
4925 gimplify_assign (y3, unshare_expr (ref), ilist);
4926 }
4927 if (is_simd)
4928 {
4929 y4 = create_tmp_var (ptype);
4930 gimplify_assign (y4, ref, dlist);
4931 }
4932 }
4933 }
4934 tree i = create_tmp_var (TREE_TYPE (v));
4935 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4936 tree body = create_artificial_label (UNKNOWN_LOCATION);
4937 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4938 if (y2)
4939 {
4940 i2 = create_tmp_var (TREE_TYPE (v));
4941 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4942 body2 = create_artificial_label (UNKNOWN_LOCATION);
4943 end2 = create_artificial_label (UNKNOWN_LOCATION);
4944 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4945 }
4946 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4947 {
4948 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4949 tree decl_placeholder
4950 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4951 SET_DECL_VALUE_EXPR (decl_placeholder,
4952 build_simple_mem_ref (y1));
4953 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4954 SET_DECL_VALUE_EXPR (placeholder,
4955 y3 ? build_simple_mem_ref (y3)
4956 : error_mark_node);
4957 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4958 x = lang_hooks.decls.omp_clause_default_ctor
4959 (c, build_simple_mem_ref (y1),
4960 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4961 if (x)
4962 gimplify_and_add (x, ilist);
4963 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4964 {
4965 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4966 lower_omp (&tseq, ctx);
4967 gimple_seq_add_seq (ilist, tseq);
4968 }
4969 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4970 if (is_simd)
4971 {
4972 SET_DECL_VALUE_EXPR (decl_placeholder,
4973 build_simple_mem_ref (y2));
4974 SET_DECL_VALUE_EXPR (placeholder,
4975 build_simple_mem_ref (y4));
4976 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4977 lower_omp (&tseq, ctx);
4978 gimple_seq_add_seq (dlist, tseq);
4979 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4980 }
4981 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4982 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4983 if (y2)
4984 {
4985 x = lang_hooks.decls.omp_clause_dtor
4986 (c, build_simple_mem_ref (y2));
4987 if (x)
4988 gimplify_and_add (x, dlist);
4989 }
4990 }
4991 else
4992 {
4993 x = omp_reduction_init (c, TREE_TYPE (type));
4994 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4995
4996 /* reduction(-:var) sums up the partial results, so it
4997 acts identically to reduction(+:var). */
4998 if (code == MINUS_EXPR)
4999 code = PLUS_EXPR;
5000
5001 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5002 if (is_simd)
5003 {
5004 x = build2 (code, TREE_TYPE (type),
5005 build_simple_mem_ref (y4),
5006 build_simple_mem_ref (y2));
5007 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5008 }
5009 }
5010 gimple *g
5011 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5012 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5013 gimple_seq_add_stmt (ilist, g);
5014 if (y3)
5015 {
5016 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5017 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5018 gimple_seq_add_stmt (ilist, g);
5019 }
5020 g = gimple_build_assign (i, PLUS_EXPR, i,
5021 build_int_cst (TREE_TYPE (i), 1));
5022 gimple_seq_add_stmt (ilist, g);
5023 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5024 gimple_seq_add_stmt (ilist, g);
5025 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5026 if (y2)
5027 {
5028 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5029 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5030 gimple_seq_add_stmt (dlist, g);
5031 if (y4)
5032 {
5033 g = gimple_build_assign
5034 (y4, POINTER_PLUS_EXPR, y4,
5035 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5036 gimple_seq_add_stmt (dlist, g);
5037 }
5038 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5039 build_int_cst (TREE_TYPE (i2), 1));
5040 gimple_seq_add_stmt (dlist, g);
5041 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5042 gimple_seq_add_stmt (dlist, g);
5043 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5044 }
5045 continue;
5046 }
5047 else if (pass == 2)
5048 {
5049 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5050 x = var;
5051 else
5052 {
5053 bool by_ref = use_pointer_for_field (var, ctx);
5054 x = build_receiver_ref (var, by_ref, ctx);
5055 }
5056 if (!omp_is_reference (var))
5057 x = build_fold_addr_expr (x);
5058 x = fold_convert (ptr_type_node, x);
5059 unsigned cnt = task_reduction_cnt - 1;
5060 if (!task_reduction_needs_orig_p)
5061 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5062 else
5063 cnt = task_reduction_cntorig - 1;
5064 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5065 size_int (cnt), NULL_TREE, NULL_TREE);
5066 gimplify_assign (r, x, ilist);
5067 continue;
5068 }
5069 else if (pass == 3)
5070 {
5071 tree type = TREE_TYPE (new_var);
5072 if (!omp_is_reference (var))
5073 type = build_pointer_type (type);
5074 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5075 {
5076 unsigned cnt = task_reduction_cnt - 1;
5077 if (!task_reduction_needs_orig_p)
5078 cnt += (task_reduction_cntorig_full
5079 - task_reduction_cntorig);
5080 else
5081 cnt = task_reduction_cntorig - 1;
5082 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5083 size_int (cnt), NULL_TREE, NULL_TREE);
5084 }
5085 else
5086 {
5087 unsigned int idx = *ctx->task_reduction_map->get (c);
5088 tree off;
5089 if (ctx->task_reductions[1 + idx])
5090 off = fold_convert (sizetype,
5091 ctx->task_reductions[1 + idx]);
5092 else
5093 off = task_reduction_read (ilist, tskred_temp, sizetype,
5094 7 + 3 * idx + 1);
5095 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5096 tskred_base, off);
5097 }
5098 x = fold_convert (type, x);
5099 tree t;
5100 if (omp_is_reference (var))
5101 {
5102 gimplify_assign (new_var, x, ilist);
5103 t = new_var;
5104 new_var = build_simple_mem_ref (new_var);
5105 }
5106 else
5107 {
5108 t = create_tmp_var (type);
5109 gimplify_assign (t, x, ilist);
5110 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5111 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5112 }
5113 t = fold_convert (build_pointer_type (boolean_type_node), t);
5114 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5115 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5116 cond = create_tmp_var (TREE_TYPE (t));
5117 gimplify_assign (cond, t, ilist);
5118 }
5119 else if (is_variable_sized (var))
5120 {
5121 /* For variable sized types, we need to allocate the
5122 actual storage here. Call alloca and store the
5123 result in the pointer decl that we created elsewhere. */
5124 if (pass == 0)
5125 continue;
5126
5127 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5128 {
5129 gcall *stmt;
5130 tree tmp, atmp;
5131
5132 ptr = DECL_VALUE_EXPR (new_var);
5133 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5134 ptr = TREE_OPERAND (ptr, 0);
5135 gcc_assert (DECL_P (ptr));
5136 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5137
5138 /* void *tmp = __builtin_alloca_with_align (size, align);  */
5139 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5140 stmt = gimple_build_call (atmp, 2, x,
5141 size_int (DECL_ALIGN (var)));
5142 tmp = create_tmp_var_raw (ptr_type_node);
5143 gimple_add_tmp_var (tmp);
5144 gimple_call_set_lhs (stmt, tmp);
5145
5146 gimple_seq_add_stmt (ilist, stmt);
5147
5148 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5149 gimplify_assign (ptr, x, ilist);
5150 }
5151 }
5152 else if (omp_is_reference (var)
5153 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5154 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5155 {
5156 /* For references that are being privatized for Fortran,
5157 allocate new backing storage for the new pointer
5158 variable. This allows us to avoid changing all the
5159 code that expects a pointer to something that expects
5160 a direct variable. */
5161 if (pass == 0)
5162 continue;
5163
5164 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5165 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5166 {
5167 x = build_receiver_ref (var, false, ctx);
5168 x = build_fold_addr_expr_loc (clause_loc, x);
5169 }
5170 else if (TREE_CONSTANT (x))
5171 {
5172 /* For a reduction in a SIMD loop, defer adding the
5173 initialization of the reference, because if we decide
5174 to use a SIMD array for it, the initialization could cause
5175 an expansion ICE.  Ditto for other privatization clauses.  */
5176 if (is_simd)
5177 x = NULL_TREE;
5178 else
5179 {
5180 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5181 get_name (var));
5182 gimple_add_tmp_var (x);
5183 TREE_ADDRESSABLE (x) = 1;
5184 x = build_fold_addr_expr_loc (clause_loc, x);
5185 }
5186 }
5187 else
5188 {
5189 tree atmp
5190 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5191 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5192 tree al = size_int (TYPE_ALIGN (rtype));
5193 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5194 }
5195
5196 if (x)
5197 {
5198 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5199 gimplify_assign (new_var, x, ilist);
5200 }
5201
5202 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5203 }
5204 else if ((c_kind == OMP_CLAUSE_REDUCTION
5205 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5206 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5207 {
5208 if (pass == 0)
5209 continue;
5210 }
5211 else if (pass != 0)
5212 continue;
5213
5214 switch (OMP_CLAUSE_CODE (c))
5215 {
5216 case OMP_CLAUSE_SHARED:
5217 /* Ignore shared directives in teams construct inside
5218 target construct. */
5219 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5220 && !is_host_teams_ctx (ctx))
5221 continue;
5222 /* Shared global vars are just accessed directly. */
5223 if (is_global_var (new_var))
5224 break;
5225 /* For taskloop firstprivate/lastprivate, represented
5226 as firstprivate and shared clause on the task, new_var
5227 is the firstprivate var. */
5228 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5229 break;
5230 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5231 needs to be delayed until after fixup_child_record_type so
5232 that we get the correct type during the dereference. */
5233 by_ref = use_pointer_for_field (var, ctx);
5234 x = build_receiver_ref (var, by_ref, ctx);
5235 SET_DECL_VALUE_EXPR (new_var, x);
5236 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5237
5238 /* ??? If VAR is not passed by reference, and the variable
5239 hasn't been initialized yet, then we'll get a warning for
5240 the store into the omp_data_s structure. Ideally, we'd be
5241 able to notice this and not store anything at all, but
5242 we're generating code too early. Suppress the warning. */
5243 if (!by_ref)
5244 TREE_NO_WARNING (var) = 1;
5245 break;
5246
5247 case OMP_CLAUSE__CONDTEMP_:
5248 if (is_parallel_ctx (ctx))
5249 {
5250 x = build_receiver_ref (var, false, ctx);
5251 SET_DECL_VALUE_EXPR (new_var, x);
5252 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5253 }
5254 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5255 {
5256 x = build_zero_cst (TREE_TYPE (var));
5257 goto do_private;
5258 }
5259 break;
5260
5261 case OMP_CLAUSE_LASTPRIVATE:
5262 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5263 break;
5264 /* FALLTHRU */
5265
5266 case OMP_CLAUSE_PRIVATE:
5267 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5268 x = build_outer_var_ref (var, ctx);
5269 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5270 {
5271 if (is_task_ctx (ctx))
5272 x = build_receiver_ref (var, false, ctx);
5273 else
5274 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5275 }
5276 else
5277 x = NULL;
5278 do_private:
5279 tree nx;
5280 bool copy_ctor;
5281 copy_ctor = false;
5282 nx = unshare_expr (new_var);
5283 if (is_simd
5284 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5285 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5286 copy_ctor = true;
5287 if (copy_ctor)
5288 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5289 else
5290 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5291 if (is_simd)
5292 {
5293 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5294 if ((TREE_ADDRESSABLE (new_var) || nx || y
5295 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5296 && (gimple_omp_for_collapse (ctx->stmt) != 1
5297 || (gimple_omp_for_index (ctx->stmt, 0)
5298 != new_var)))
5299 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5300 || omp_is_reference (var))
5301 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5302 ivar, lvar))
5303 {
5304 if (omp_is_reference (var))
5305 {
5306 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5307 tree new_vard = TREE_OPERAND (new_var, 0);
5308 gcc_assert (DECL_P (new_vard));
5309 SET_DECL_VALUE_EXPR (new_vard,
5310 build_fold_addr_expr (lvar));
5311 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5312 }
5313
5314 if (nx)
5315 {
5316 tree iv = unshare_expr (ivar);
5317 if (copy_ctor)
5318 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5319 x);
5320 else
5321 x = lang_hooks.decls.omp_clause_default_ctor (c,
5322 iv,
5323 x);
5324 }
5325 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5326 {
5327 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5328 unshare_expr (ivar), x);
5329 nx = x;
5330 }
5331 if (nx && x)
5332 gimplify_and_add (x, &llist[0]);
5333 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5334 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5335 {
5336 tree v = new_var;
5337 if (!DECL_P (v))
5338 {
5339 gcc_assert (TREE_CODE (v) == MEM_REF);
5340 v = TREE_OPERAND (v, 0);
5341 gcc_assert (DECL_P (v));
5342 }
5343 v = *ctx->lastprivate_conditional_map->get (v);
5344 tree t = create_tmp_var (TREE_TYPE (v));
5345 tree z = build_zero_cst (TREE_TYPE (v));
5346 tree orig_v
5347 = build_outer_var_ref (var, ctx,
5348 OMP_CLAUSE_LASTPRIVATE);
5349 gimple_seq_add_stmt (dlist,
5350 gimple_build_assign (t, z));
5351 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5352 tree civar = DECL_VALUE_EXPR (v);
5353 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5354 civar = unshare_expr (civar);
5355 TREE_OPERAND (civar, 1) = sctx.idx;
5356 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5357 unshare_expr (civar));
5358 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5359 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5360 orig_v, unshare_expr (ivar)));
5361 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5362 civar);
5363 x = build3 (COND_EXPR, void_type_node, cond, x,
5364 void_node);
5365 gimple_seq tseq = NULL;
5366 gimplify_and_add (x, &tseq);
5367 if (ctx->outer)
5368 lower_omp (&tseq, ctx->outer);
5369 gimple_seq_add_seq (&llist[1], tseq);
5370 }
5371 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5372 && ctx->for_simd_scan_phase)
5373 {
5374 x = unshare_expr (ivar);
5375 tree orig_v
5376 = build_outer_var_ref (var, ctx,
5377 OMP_CLAUSE_LASTPRIVATE);
5378 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5379 orig_v);
5380 gimplify_and_add (x, &llist[0]);
5381 }
5382 if (y)
5383 {
5384 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5385 if (y)
5386 gimplify_and_add (y, &llist[1]);
5387 }
5388 break;
5389 }
5390 if (omp_is_reference (var))
5391 {
5392 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5393 tree new_vard = TREE_OPERAND (new_var, 0);
5394 gcc_assert (DECL_P (new_vard));
5395 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5396 x = TYPE_SIZE_UNIT (type);
5397 if (TREE_CONSTANT (x))
5398 {
5399 x = create_tmp_var_raw (type, get_name (var));
5400 gimple_add_tmp_var (x);
5401 TREE_ADDRESSABLE (x) = 1;
5402 x = build_fold_addr_expr_loc (clause_loc, x);
5403 x = fold_convert_loc (clause_loc,
5404 TREE_TYPE (new_vard), x);
5405 gimplify_assign (new_vard, x, ilist);
5406 }
5407 }
5408 }
5409 if (nx)
5410 gimplify_and_add (nx, ilist);
5411 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5412 && is_simd
5413 && ctx->for_simd_scan_phase)
5414 {
5415 tree orig_v = build_outer_var_ref (var, ctx,
5416 OMP_CLAUSE_LASTPRIVATE);
5417 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5418 orig_v);
5419 gimplify_and_add (x, ilist);
5420 }
5421 /* FALLTHRU */
5422
5423 do_dtor:
5424 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5425 if (x)
5426 gimplify_and_add (x, dlist);
5427 break;
5428
5429 case OMP_CLAUSE_LINEAR:
5430 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5431 goto do_firstprivate;
5432 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5433 x = NULL;
5434 else
5435 x = build_outer_var_ref (var, ctx);
5436 goto do_private;
5437
5438 case OMP_CLAUSE_FIRSTPRIVATE:
5439 if (is_task_ctx (ctx))
5440 {
5441 if ((omp_is_reference (var)
5442 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5443 || is_variable_sized (var))
5444 goto do_dtor;
5445 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5446 ctx))
5447 || use_pointer_for_field (var, NULL))
5448 {
5449 x = build_receiver_ref (var, false, ctx);
5450 SET_DECL_VALUE_EXPR (new_var, x);
5451 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5452 goto do_dtor;
5453 }
5454 }
5455 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5456 && omp_is_reference (var))
5457 {
5458 x = build_outer_var_ref (var, ctx);
5459 gcc_assert (TREE_CODE (x) == MEM_REF
5460 && integer_zerop (TREE_OPERAND (x, 1)));
5461 x = TREE_OPERAND (x, 0);
5462 x = lang_hooks.decls.omp_clause_copy_ctor
5463 (c, unshare_expr (new_var), x);
5464 gimplify_and_add (x, ilist);
5465 goto do_dtor;
5466 }
5467 do_firstprivate:
5468 x = build_outer_var_ref (var, ctx);
5469 if (is_simd)
5470 {
5471 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5472 && gimple_omp_for_combined_into_p (ctx->stmt))
5473 {
5474 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5475 tree stept = TREE_TYPE (t);
5476 tree ct = omp_find_clause (clauses,
5477 OMP_CLAUSE__LOOPTEMP_);
5478 gcc_assert (ct);
5479 tree l = OMP_CLAUSE_DECL (ct);
5480 tree n1 = fd->loop.n1;
5481 tree step = fd->loop.step;
5482 tree itype = TREE_TYPE (l);
5483 if (POINTER_TYPE_P (itype))
5484 itype = signed_type_for (itype);
5485 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5486 if (TYPE_UNSIGNED (itype)
5487 && fd->loop.cond_code == GT_EXPR)
5488 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5489 fold_build1 (NEGATE_EXPR, itype, l),
5490 fold_build1 (NEGATE_EXPR,
5491 itype, step));
5492 else
5493 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5494 t = fold_build2 (MULT_EXPR, stept,
5495 fold_convert (stept, l), t);
5496
5497 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5498 {
5499 if (omp_is_reference (var))
5500 {
5501 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5502 tree new_vard = TREE_OPERAND (new_var, 0);
5503 gcc_assert (DECL_P (new_vard));
5504 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5505 nx = TYPE_SIZE_UNIT (type);
5506 if (TREE_CONSTANT (nx))
5507 {
5508 nx = create_tmp_var_raw (type,
5509 get_name (var));
5510 gimple_add_tmp_var (nx);
5511 TREE_ADDRESSABLE (nx) = 1;
5512 nx = build_fold_addr_expr_loc (clause_loc,
5513 nx);
5514 nx = fold_convert_loc (clause_loc,
5515 TREE_TYPE (new_vard),
5516 nx);
5517 gimplify_assign (new_vard, nx, ilist);
5518 }
5519 }
5520
5521 x = lang_hooks.decls.omp_clause_linear_ctor
5522 (c, new_var, x, t);
5523 gimplify_and_add (x, ilist);
5524 goto do_dtor;
5525 }
5526
5527 if (POINTER_TYPE_P (TREE_TYPE (x)))
5528 x = fold_build2 (POINTER_PLUS_EXPR,
5529 TREE_TYPE (x), x, t);
5530 else
5531 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5532 }
5533
5534 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5535 || TREE_ADDRESSABLE (new_var)
5536 || omp_is_reference (var))
5537 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5538 ivar, lvar))
5539 {
5540 if (omp_is_reference (var))
5541 {
5542 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5543 tree new_vard = TREE_OPERAND (new_var, 0);
5544 gcc_assert (DECL_P (new_vard));
5545 SET_DECL_VALUE_EXPR (new_vard,
5546 build_fold_addr_expr (lvar));
5547 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5548 }
5549 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5550 {
5551 tree iv = create_tmp_var (TREE_TYPE (new_var));
5552 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5553 gimplify_and_add (x, ilist);
5554 gimple_stmt_iterator gsi
5555 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5556 gassign *g
5557 = gimple_build_assign (unshare_expr (lvar), iv);
5558 gsi_insert_before_without_update (&gsi, g,
5559 GSI_SAME_STMT);
5560 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5561 enum tree_code code = PLUS_EXPR;
5562 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5563 code = POINTER_PLUS_EXPR;
5564 g = gimple_build_assign (iv, code, iv, t);
5565 gsi_insert_before_without_update (&gsi, g,
5566 GSI_SAME_STMT);
5567 break;
5568 }
5569 x = lang_hooks.decls.omp_clause_copy_ctor
5570 (c, unshare_expr (ivar), x);
5571 gimplify_and_add (x, &llist[0]);
5572 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5573 if (x)
5574 gimplify_and_add (x, &llist[1]);
5575 break;
5576 }
5577 if (omp_is_reference (var))
5578 {
5579 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5580 tree new_vard = TREE_OPERAND (new_var, 0);
5581 gcc_assert (DECL_P (new_vard));
5582 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5583 nx = TYPE_SIZE_UNIT (type);
5584 if (TREE_CONSTANT (nx))
5585 {
5586 nx = create_tmp_var_raw (type, get_name (var));
5587 gimple_add_tmp_var (nx);
5588 TREE_ADDRESSABLE (nx) = 1;
5589 nx = build_fold_addr_expr_loc (clause_loc, nx);
5590 nx = fold_convert_loc (clause_loc,
5591 TREE_TYPE (new_vard), nx);
5592 gimplify_assign (new_vard, nx, ilist);
5593 }
5594 }
5595 }
5596 x = lang_hooks.decls.omp_clause_copy_ctor
5597 (c, unshare_expr (new_var), x);
5598 gimplify_and_add (x, ilist);
5599 goto do_dtor;
5600
5601 case OMP_CLAUSE__LOOPTEMP_:
5602 case OMP_CLAUSE__REDUCTEMP_:
5603 gcc_assert (is_taskreg_ctx (ctx));
5604 x = build_outer_var_ref (var, ctx);
5605 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5606 gimplify_and_add (x, ilist);
5607 break;
5608
5609 case OMP_CLAUSE_COPYIN:
5610 by_ref = use_pointer_for_field (var, NULL);
5611 x = build_receiver_ref (var, by_ref, ctx);
5612 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5613 append_to_statement_list (x, &copyin_seq);
5614 copyin_by_ref |= by_ref;
5615 break;
5616
5617 case OMP_CLAUSE_REDUCTION:
5618 case OMP_CLAUSE_IN_REDUCTION:
5619 /* OpenACC reductions are initialized using the
5620 GOACC_REDUCTION internal function. */
5621 if (is_gimple_omp_oacc (ctx->stmt))
5622 break;
5623 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5624 {
5625 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5626 gimple *tseq;
5627 tree ptype = TREE_TYPE (placeholder);
5628 if (cond)
5629 {
5630 x = error_mark_node;
5631 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5632 && !task_reduction_needs_orig_p)
5633 x = var;
5634 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5635 {
5636 tree pptype = build_pointer_type (ptype);
5637 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5638 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5639 size_int (task_reduction_cnt_full
5640 + task_reduction_cntorig - 1),
5641 NULL_TREE, NULL_TREE);
5642 else
5643 {
5644 unsigned int idx
5645 = *ctx->task_reduction_map->get (c);
5646 x = task_reduction_read (ilist, tskred_temp,
5647 pptype, 7 + 3 * idx);
5648 }
5649 x = fold_convert (pptype, x);
5650 x = build_simple_mem_ref (x);
5651 }
5652 }
5653 else
5654 {
5655 x = build_outer_var_ref (var, ctx);
5656
5657 if (omp_is_reference (var)
5658 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5659 x = build_fold_addr_expr_loc (clause_loc, x);
5660 }
5661 SET_DECL_VALUE_EXPR (placeholder, x);
5662 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5663 tree new_vard = new_var;
5664 if (omp_is_reference (var))
5665 {
5666 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5667 new_vard = TREE_OPERAND (new_var, 0);
5668 gcc_assert (DECL_P (new_vard));
5669 }
5670 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5671 if (is_simd
5672 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5673 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5674 rvarp = &rvar;
5675 if (is_simd
5676 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5677 ivar, lvar, rvarp,
5678 &rvar2))
5679 {
5680 if (new_vard == new_var)
5681 {
5682 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5683 SET_DECL_VALUE_EXPR (new_var, ivar);
5684 }
5685 else
5686 {
5687 SET_DECL_VALUE_EXPR (new_vard,
5688 build_fold_addr_expr (ivar));
5689 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5690 }
5691 x = lang_hooks.decls.omp_clause_default_ctor
5692 (c, unshare_expr (ivar),
5693 build_outer_var_ref (var, ctx));
5694 if (rvarp && ctx->for_simd_scan_phase)
5695 {
5696 if (x)
5697 gimplify_and_add (x, &llist[0]);
5698 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5699 if (x)
5700 gimplify_and_add (x, &llist[1]);
5701 break;
5702 }
5703 else if (rvarp)
5704 {
5705 if (x)
5706 {
5707 gimplify_and_add (x, &llist[0]);
5708
5709 tree ivar2 = unshare_expr (lvar);
5710 TREE_OPERAND (ivar2, 1) = sctx.idx;
5711 x = lang_hooks.decls.omp_clause_default_ctor
5712 (c, ivar2, build_outer_var_ref (var, ctx));
5713 gimplify_and_add (x, &llist[0]);
5714
5715 if (rvar2)
5716 {
5717 x = lang_hooks.decls.omp_clause_default_ctor
5718 (c, unshare_expr (rvar2),
5719 build_outer_var_ref (var, ctx));
5720 gimplify_and_add (x, &llist[0]);
5721 }
5722
5723 /* For types that need construction, add another
5724 private var which will be default constructed
5725 and optionally initialized with
5726 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the
5727 loop we then want to assign this value instead of
5728 constructing and destructing one in each
5729 iteration. */
5730 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5731 gimple_add_tmp_var (nv);
5732 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5733 ? rvar2
5734 : ivar, 0),
5735 nv);
5736 x = lang_hooks.decls.omp_clause_default_ctor
5737 (c, nv, build_outer_var_ref (var, ctx));
5738 gimplify_and_add (x, ilist);
5739
5740 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5741 {
5742 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5743 x = DECL_VALUE_EXPR (new_vard);
5744 tree vexpr = nv;
5745 if (new_vard != new_var)
5746 vexpr = build_fold_addr_expr (nv);
5747 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5748 lower_omp (&tseq, ctx);
5749 SET_DECL_VALUE_EXPR (new_vard, x);
5750 gimple_seq_add_seq (ilist, tseq);
5751 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5752 }
5753
5754 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5755 if (x)
5756 gimplify_and_add (x, dlist);
5757 }
5758
5759 tree ref = build_outer_var_ref (var, ctx);
5760 x = unshare_expr (ivar);
5761 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5762 ref);
5763 gimplify_and_add (x, &llist[0]);
5764
5765 ref = build_outer_var_ref (var, ctx);
5766 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5767 rvar);
5768 gimplify_and_add (x, &llist[3]);
5769
5770 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5771 if (new_vard == new_var)
5772 SET_DECL_VALUE_EXPR (new_var, lvar);
5773 else
5774 SET_DECL_VALUE_EXPR (new_vard,
5775 build_fold_addr_expr (lvar));
5776
5777 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5778 if (x)
5779 gimplify_and_add (x, &llist[1]);
5780
5781 tree ivar2 = unshare_expr (lvar);
5782 TREE_OPERAND (ivar2, 1) = sctx.idx;
5783 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5784 if (x)
5785 gimplify_and_add (x, &llist[1]);
5786
5787 if (rvar2)
5788 {
5789 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5790 if (x)
5791 gimplify_and_add (x, &llist[1]);
5792 }
5793 break;
5794 }
5795 if (x)
5796 gimplify_and_add (x, &llist[0]);
5797 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5798 {
5799 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5800 lower_omp (&tseq, ctx);
5801 gimple_seq_add_seq (&llist[0], tseq);
5802 }
5803 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5804 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5805 lower_omp (&tseq, ctx);
5806 gimple_seq_add_seq (&llist[1], tseq);
5807 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5808 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5809 if (new_vard == new_var)
5810 SET_DECL_VALUE_EXPR (new_var, lvar);
5811 else
5812 SET_DECL_VALUE_EXPR (new_vard,
5813 build_fold_addr_expr (lvar));
5814 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5815 if (x)
5816 gimplify_and_add (x, &llist[1]);
5817 break;
5818 }
5819 /* If this is a reference to a constant-size reduction var
5820 with a placeholder, we haven't emitted the initializer
5821 for it yet because that is undesirable if SIMD arrays are used.
5822 But if they aren't used, we need to emit the deferred
5823 initialization now. */
5824 else if (omp_is_reference (var) && is_simd)
5825 handle_simd_reference (clause_loc, new_vard, ilist);
5826
5827 tree lab2 = NULL_TREE;
5828 if (cond)
5829 {
5830 gimple *g;
5831 if (!is_parallel_ctx (ctx))
5832 {
5833 tree condv = create_tmp_var (boolean_type_node);
5834 tree m = build_simple_mem_ref (cond);
5835 g = gimple_build_assign (condv, m);
5836 gimple_seq_add_stmt (ilist, g);
5837 tree lab1
5838 = create_artificial_label (UNKNOWN_LOCATION);
5839 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5840 g = gimple_build_cond (NE_EXPR, condv,
5841 boolean_false_node,
5842 lab2, lab1);
5843 gimple_seq_add_stmt (ilist, g);
5844 gimple_seq_add_stmt (ilist,
5845 gimple_build_label (lab1));
5846 }
5847 g = gimple_build_assign (build_simple_mem_ref (cond),
5848 boolean_true_node);
5849 gimple_seq_add_stmt (ilist, g);
5850 }
5851 x = lang_hooks.decls.omp_clause_default_ctor
5852 (c, unshare_expr (new_var),
5853 cond ? NULL_TREE
5854 : build_outer_var_ref (var, ctx));
5855 if (x)
5856 gimplify_and_add (x, ilist);
5857
5858 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5859 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5860 {
5861 if (ctx->for_simd_scan_phase)
5862 goto do_dtor;
5863 if (x || (!is_simd
5864 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5865 {
5866 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5867 gimple_add_tmp_var (nv);
5868 ctx->cb.decl_map->put (new_vard, nv);
5869 x = lang_hooks.decls.omp_clause_default_ctor
5870 (c, nv, build_outer_var_ref (var, ctx));
5871 if (x)
5872 gimplify_and_add (x, ilist);
5873 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5874 {
5875 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5876 tree vexpr = nv;
5877 if (new_vard != new_var)
5878 vexpr = build_fold_addr_expr (nv);
5879 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5880 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5881 lower_omp (&tseq, ctx);
5882 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5883 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5884 gimple_seq_add_seq (ilist, tseq);
5885 }
5886 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5887 if (is_simd && ctx->scan_exclusive)
5888 {
5889 tree nv2
5890 = create_tmp_var_raw (TREE_TYPE (new_var));
5891 gimple_add_tmp_var (nv2);
5892 ctx->cb.decl_map->put (nv, nv2);
5893 x = lang_hooks.decls.omp_clause_default_ctor
5894 (c, nv2, build_outer_var_ref (var, ctx));
5895 gimplify_and_add (x, ilist);
5896 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5897 if (x)
5898 gimplify_and_add (x, dlist);
5899 }
5900 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5901 if (x)
5902 gimplify_and_add (x, dlist);
5903 }
5904 else if (is_simd
5905 && ctx->scan_exclusive
5906 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5907 {
5908 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5909 gimple_add_tmp_var (nv2);
5910 ctx->cb.decl_map->put (new_vard, nv2);
5911 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5912 if (x)
5913 gimplify_and_add (x, dlist);
5914 }
5915 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5916 goto do_dtor;
5917 }
5918
5919 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5920 {
5921 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5922 lower_omp (&tseq, ctx);
5923 gimple_seq_add_seq (ilist, tseq);
5924 }
5925 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5926 if (is_simd)
5927 {
5928 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5929 lower_omp (&tseq, ctx);
5930 gimple_seq_add_seq (dlist, tseq);
5931 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5932 }
5933 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5934 if (cond)
5935 {
5936 if (lab2)
5937 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5938 break;
5939 }
5940 goto do_dtor;
5941 }
5942 else
5943 {
5944 x = omp_reduction_init (c, TREE_TYPE (new_var));
5945 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5946 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5947
5948 if (cond)
5949 {
5950 gimple *g;
5951 tree lab2 = NULL_TREE;
5952 /* GOMP_taskgroup_reduction_register memsets the whole
5953 array to zero. If the initializer is zero, we don't
5954 need to initialize it again, just mark it as ever
5955 used unconditionally, i.e. cond = true. */
5956 if (initializer_zerop (x))
5957 {
5958 g = gimple_build_assign (build_simple_mem_ref (cond),
5959 boolean_true_node);
5960 gimple_seq_add_stmt (ilist, g);
5961 break;
5962 }
5963
5964 /* Otherwise, emit
5965 if (!cond) { cond = true; new_var = x; } */
5966 if (!is_parallel_ctx (ctx))
5967 {
5968 tree condv = create_tmp_var (boolean_type_node);
5969 tree m = build_simple_mem_ref (cond);
5970 g = gimple_build_assign (condv, m);
5971 gimple_seq_add_stmt (ilist, g);
5972 tree lab1
5973 = create_artificial_label (UNKNOWN_LOCATION);
5974 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5975 g = gimple_build_cond (NE_EXPR, condv,
5976 boolean_false_node,
5977 lab2, lab1);
5978 gimple_seq_add_stmt (ilist, g);
5979 gimple_seq_add_stmt (ilist,
5980 gimple_build_label (lab1));
5981 }
5982 g = gimple_build_assign (build_simple_mem_ref (cond),
5983 boolean_true_node);
5984 gimple_seq_add_stmt (ilist, g);
5985 gimplify_assign (new_var, x, ilist);
5986 if (lab2)
5987 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5988 break;
5989 }
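   /* Editor's sketch (illustrative) of the statements built above for
      a non-parallel context:
        condv = *cond;
        if (condv != 0) goto lab2; else goto lab1;
      lab1:
        *cond = 1;
        new_var = x;
      lab2:
      In a parallel context the test is skipped and the store and
      initialization are emitted unconditionally.  */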
5990
5991 /* reduction(-:var) sums up the partial results, so it
5992 acts identically to reduction(+:var). */
5993 if (code == MINUS_EXPR)
5994 code = PLUS_EXPR;
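   /* Illustrative example (editor's note, not from the source): with
        #pragma omp parallel for reduction(-:s)
      each thread accumulates into its private s, and the merge emitted
      below is  s = s + s_private;  exactly as for reduction(+:s),
      which is why MINUS_EXPR is canonicalized to PLUS_EXPR here.  */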
5995
5996 tree new_vard = new_var;
5997 if (is_simd && omp_is_reference (var))
5998 {
5999 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6000 new_vard = TREE_OPERAND (new_var, 0);
6001 gcc_assert (DECL_P (new_vard));
6002 }
6003 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6004 if (is_simd
6005 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6006 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6007 rvarp = &rvar;
6008 if (is_simd
6009 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6010 ivar, lvar, rvarp,
6011 &rvar2))
6012 {
6013 if (new_vard != new_var)
6014 {
6015 SET_DECL_VALUE_EXPR (new_vard,
6016 build_fold_addr_expr (lvar));
6017 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6018 }
6019
6020 tree ref = build_outer_var_ref (var, ctx);
6021
6022 if (rvarp)
6023 {
6024 if (ctx->for_simd_scan_phase)
6025 break;
6026 gimplify_assign (ivar, ref, &llist[0]);
6027 ref = build_outer_var_ref (var, ctx);
6028 gimplify_assign (ref, rvar, &llist[3]);
6029 break;
6030 }
6031
6032 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6033
6034 if (sctx.is_simt)
6035 {
6036 if (!simt_lane)
6037 simt_lane = create_tmp_var (unsigned_type_node);
6038 x = build_call_expr_internal_loc
6039 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6040 TREE_TYPE (ivar), 2, ivar, simt_lane);
6041 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6042 gimplify_assign (ivar, x, &llist[2]);
6043 }
6044 x = build2 (code, TREE_TYPE (ref), ref, ivar);
6045 ref = build_outer_var_ref (var, ctx);
6046 gimplify_assign (ref, x, &llist[1]);
6047
6048 }
6049 else
6050 {
6051 if (omp_is_reference (var) && is_simd)
6052 handle_simd_reference (clause_loc, new_vard, ilist);
6053 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6054 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6055 break;
6056 gimplify_assign (new_var, x, ilist);
6057 if (is_simd)
6058 {
6059 tree ref = build_outer_var_ref (var, ctx);
6060
6061 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6062 ref = build_outer_var_ref (var, ctx);
6063 gimplify_assign (ref, x, dlist);
6064 }
6065 }
6066 }
6067 break;
6068
6069 default:
6070 gcc_unreachable ();
6071 }
6072 }
6073 }
6074 if (tskred_avar)
6075 {
6076 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6077 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6078 }
6079
6080 if (known_eq (sctx.max_vf, 1U))
6081 {
6082 sctx.is_simt = false;
6083 if (ctx->lastprivate_conditional_map)
6084 {
6085 if (gimple_omp_for_combined_into_p (ctx->stmt))
6086 {
6087 /* Signal to lower_omp_1 that it should use the parent context. */
6088 ctx->combined_into_simd_safelen1 = true;
6089 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6090 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6091 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6092 {
6093 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6094 omp_context *outer = ctx->outer;
6095 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6096 outer = outer->outer;
6097 tree *v = ctx->lastprivate_conditional_map->get (o);
6098 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6099 tree *pv = outer->lastprivate_conditional_map->get (po);
6100 *v = *pv;
6101 }
6102 }
6103 else
6104 {
6105 /* When not vectorized, treat lastprivate(conditional:) like
6106 normal lastprivate, as there will be just one simd lane
6107 writing the privatized variable. */
6108 delete ctx->lastprivate_conditional_map;
6109 ctx->lastprivate_conditional_map = NULL;
6110 }
6111 }
6112 }
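   /* Editor's note (hedged): max_vf == 1 means each privatized SIMD
      array would have a single element, i.e. one lane per thread, so
      the lastprivate(conditional:) bookkeeping is either forwarded to
      the parent context (the combined case above) or dropped in favor
      of plain lastprivate handling.  */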
6113
6114 if (nonconst_simd_if)
6115 {
6116 if (sctx.lane == NULL_TREE)
6117 {
6118 sctx.idx = create_tmp_var (unsigned_type_node);
6119 sctx.lane = create_tmp_var (unsigned_type_node);
6120 }
6121 /* FIXME: For now. */
6122 sctx.is_simt = false;
6123 }
6124
6125 if (sctx.lane || sctx.is_simt)
6126 {
6127 uid = create_tmp_var (ptr_type_node, "simduid");
6128 /* We don't want uninit warnings on simduid; it is always uninitialized,
6129 but we use it only for its DECL_UID, not for its value. */
6130 TREE_NO_WARNING (uid) = 1;
6131 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6132 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6133 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6134 gimple_omp_for_set_clauses (ctx->stmt, c);
6135 }
6136 /* Emit calls that denote the privatized variables and that initialize a
6137 pointer to the structure holding the private variables as fields; they are expanded after the ompdevlow pass. */
6138 if (sctx.is_simt)
6139 {
6140 sctx.simt_eargs[0] = uid;
6141 gimple *g
6142 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6143 gimple_call_set_lhs (g, uid);
6144 gimple_seq_add_stmt (ilist, g);
6145 sctx.simt_eargs.release ();
6146
6147 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6148 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6149 gimple_call_set_lhs (g, simtrec);
6150 gimple_seq_add_stmt (ilist, g);
6151 }
6152 if (sctx.lane)
6153 {
6154 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6155 2 + (nonconst_simd_if != NULL),
6156 uid, integer_zero_node,
6157 nonconst_simd_if);
6158 gimple_call_set_lhs (g, sctx.lane);
6159 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6160 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6161 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6162 build_int_cst (unsigned_type_node, 0));
6163 gimple_seq_add_stmt (ilist, g);
6164 if (sctx.lastlane)
6165 {
6166 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6167 2, uid, sctx.lane);
6168 gimple_call_set_lhs (g, sctx.lastlane);
6169 gimple_seq_add_stmt (dlist, g);
6170 gimple_seq_add_seq (dlist, llist[3]);
6171 }
6172 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6173 if (llist[2])
6174 {
6175 tree simt_vf = create_tmp_var (unsigned_type_node);
6176 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6177 gimple_call_set_lhs (g, simt_vf);
6178 gimple_seq_add_stmt (dlist, g);
6179
6180 tree t = build_int_cst (unsigned_type_node, 1);
6181 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6182 gimple_seq_add_stmt (dlist, g);
6183
6184 t = build_int_cst (unsigned_type_node, 0);
6185 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6186 gimple_seq_add_stmt (dlist, g);
6187
6188 tree body = create_artificial_label (UNKNOWN_LOCATION);
6189 tree header = create_artificial_label (UNKNOWN_LOCATION);
6190 tree end = create_artificial_label (UNKNOWN_LOCATION);
6191 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6192 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6193
6194 gimple_seq_add_seq (dlist, llist[2]);
6195
6196 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6197 gimple_seq_add_stmt (dlist, g);
6198
6199 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6200 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6201 gimple_seq_add_stmt (dlist, g);
6202
6203 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6204 }
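   /* Editor's sketch of the butterfly built above (illustrative):
      with simt_vf == 4 the loop body runs for simt_lane == 1 and 2;
      each pass combines a lane's partial value with the one from the
      lane simt_lane positions away via GOMP_SIMT_XCHG_BFLY, so after
      log2(simt_vf) passes every lane holds the full reduction.  */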
6205 for (int i = 0; i < 2; i++)
6206 if (llist[i])
6207 {
6208 tree vf = create_tmp_var (unsigned_type_node);
6209 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6210 gimple_call_set_lhs (g, vf);
6211 gimple_seq *seq = i == 0 ? ilist : dlist;
6212 gimple_seq_add_stmt (seq, g);
6213 tree t = build_int_cst (unsigned_type_node, 0);
6214 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6215 gimple_seq_add_stmt (seq, g);
6216 tree body = create_artificial_label (UNKNOWN_LOCATION);
6217 tree header = create_artificial_label (UNKNOWN_LOCATION);
6218 tree end = create_artificial_label (UNKNOWN_LOCATION);
6219 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6220 gimple_seq_add_stmt (seq, gimple_build_label (body));
6221 gimple_seq_add_seq (seq, llist[i]);
6222 t = build_int_cst (unsigned_type_node, 1);
6223 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6224 gimple_seq_add_stmt (seq, g);
6225 gimple_seq_add_stmt (seq, gimple_build_label (header));
6226 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6227 gimple_seq_add_stmt (seq, g);
6228 gimple_seq_add_stmt (seq, gimple_build_label (end));
6229 }
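   /* Editor's sketch (illustrative) of the loop emitted above for
      each list, walking the per-lane SIMD array elements:
        vf = GOMP_SIMD_VF (simduid);
        sctx.idx = 0;
        goto header;
      body:
        ... llist[i] statements for lane sctx.idx ...
        sctx.idx = sctx.idx + 1;
      header:
        if (sctx.idx < vf) goto body; else goto end;
      end:
      llist[0] is emitted into ilist (initialization), llist[1] into
      dlist (finalization).  */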
6230 }
6231 if (sctx.is_simt)
6232 {
6233 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6234 gimple *g
6235 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6236 gimple_seq_add_stmt (dlist, g);
6237 }
6238
6239 /* The copyin sequence is not to be executed by the main thread, since
6240 that would result in self-copies. That is perhaps not visible for
6241 scalars, but it certainly is for a C++ operator=. */
6242 if (copyin_seq)
6243 {
6244 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6245 0);
6246 x = build2 (NE_EXPR, boolean_type_node, x,
6247 build_int_cst (TREE_TYPE (x), 0));
6248 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6249 gimplify_and_add (x, ilist);
6250 }
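   /* Editor's sketch (illustrative) of the guard built above:
        if (omp_get_thread_num () != 0)
          <copyin_seq>;
      so only the non-master threads copy the master's threadprivate
      values into their own copies.  */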
6251
6252 /* If any copyin variable is passed by reference, we must ensure the
6253 master thread doesn't modify it before it is copied over in all
6254 threads. Similarly, for variables in both firstprivate and
6255 lastprivate clauses we need to ensure that the lastprivate copying
6256 happens after the firstprivate copying in all threads. And similarly
6257 for UDRs if the initializer expression refers to omp_orig. */
6258 if (copyin_by_ref || lastprivate_firstprivate
6259 || (reduction_omp_orig_ref
6260 && !ctx->scan_inclusive
6261 && !ctx->scan_exclusive))
6262 {
6263 /* Don't add any barrier for #pragma omp simd or
6264 #pragma omp distribute. */
6265 if (!is_task_ctx (ctx)
6266 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6267 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6268 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6269 }
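   /* Illustrative example (editor's note): for
        #pragma omp for firstprivate(x) lastprivate(x)
      the barrier keeps one thread from performing its lastprivate
      store into x while another thread is still reading x to
      initialize its firstprivate copy.  */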
6270
6271 /* If max_vf is non-zero, then we can use only a vectorization factor
6272 up to the max_vf we chose. So stick it into the safelen clause. */
6273 if (maybe_ne (sctx.max_vf, 0U))
6274 {
6275 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6276 OMP_CLAUSE_SAFELEN);
6277 poly_uint64 safe_len;
6278 if (c == NULL_TREE
6279 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6280 && maybe_gt (safe_len, sctx.max_vf)))
6281 {
6282 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6283 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6284 sctx.max_vf);
6285 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6286 gimple_omp_for_set_clauses (ctx->stmt, c);
6287 }
6288 }
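   /* Editor's note (hedged illustration): if the user wrote
      safelen(16) but the privatization above sized its SIMD arrays
      for max_vf == 8, a safelen(8) clause is prepended so that the
      vectorizer never uses more lanes than were allocated.  */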
6289 }
6290
6291 /* Create the temporary variables for the lastprivate(conditional:)
6292 implementation in context CTX with CLAUSES. */
6293
6294 static void
6295 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6296 {
6297 tree iter_type = NULL_TREE;
6298 tree cond_ptr = NULL_TREE;
6299 tree iter_var = NULL_TREE;
6300 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6301 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6302 tree next = *clauses;
6303 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6304 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6305 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6306 {
6307 if (is_simd)
6308 {
6309 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6310 gcc_assert (cc);
6311 if (iter_type == NULL_TREE)
6312 {
6313 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6314 iter_var = create_tmp_var_raw (iter_type);
6315 DECL_CONTEXT (iter_var) = current_function_decl;
6316 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6317 DECL_CHAIN (iter_var) = ctx->block_vars;
6318 ctx->block_vars = iter_var;
6319 tree c3
6320 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6321 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6322 OMP_CLAUSE_DECL (c3) = iter_var;
6323 OMP_CLAUSE_CHAIN (c3) = *clauses;
6324 *clauses = c3;
6325 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6326 }
6327 next = OMP_CLAUSE_CHAIN (cc);
6328 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6329 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6330 ctx->lastprivate_conditional_map->put (o, v);
6331 continue;
6332 }
6333 if (iter_type == NULL)
6334 {
6335 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6336 {
6337 struct omp_for_data fd;
6338 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6339 NULL);
6340 iter_type = unsigned_type_for (fd.iter_type);
6341 }
6342 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6343 iter_type = unsigned_type_node;
6344 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6345 if (c2)
6346 {
6347 cond_ptr
6348 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6349 OMP_CLAUSE_DECL (c2) = cond_ptr;
6350 }
6351 else
6352 {
6353 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6354 DECL_CONTEXT (cond_ptr) = current_function_decl;
6355 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6356 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6357 ctx->block_vars = cond_ptr;
6358 c2 = build_omp_clause (UNKNOWN_LOCATION,
6359 OMP_CLAUSE__CONDTEMP_);
6360 OMP_CLAUSE_DECL (c2) = cond_ptr;
6361 OMP_CLAUSE_CHAIN (c2) = *clauses;
6362 *clauses = c2;
6363 }
6364 iter_var = create_tmp_var_raw (iter_type);
6365 DECL_CONTEXT (iter_var) = current_function_decl;
6366 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6367 DECL_CHAIN (iter_var) = ctx->block_vars;
6368 ctx->block_vars = iter_var;
6369 tree c3
6370 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6371 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6372 OMP_CLAUSE_DECL (c3) = iter_var;
6373 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6374 OMP_CLAUSE_CHAIN (c2) = c3;
6375 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6376 }
6377 tree v = create_tmp_var_raw (iter_type);
6378 DECL_CONTEXT (v) = current_function_decl;
6379 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6380 DECL_CHAIN (v) = ctx->block_vars;
6381 ctx->block_vars = v;
6382 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6383 ctx->lastprivate_conditional_map->put (o, v);
6384 }
6385 }
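/* Editor's sketch (illustrative) of the non-SIMD result: the clause
   chain gains
     _condtemp_ (cond_ptr)  _condtemp_ (iter_var, iterator)
   and every lastprivate(conditional:) decl gets a counter temporary
   recorded in ctx->lastprivate_conditional_map, to be compared later
   against the per-thread slots reachable through cond_ptr.  */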
6386
6387
6388 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6389 both parallel and workshare constructs. PREDICATE may be NULL if it's
6390 always true. BODY_P is the sequence in which to insert early
6391 initialization if needed, STMT_LIST is where the non-conditional
6392 lastprivate handling goes, and CSTMT_LIST is a sequence that needs
6393 to be run in a critical section. */
6394
6395 static void
6396 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6397 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6398 omp_context *ctx)
6399 {
6400 tree x, c, label = NULL, orig_clauses = clauses;
6401 bool par_clauses = false;
6402 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6403 unsigned HOST_WIDE_INT conditional_off = 0;
6404 gimple_seq post_stmt_list = NULL;
6405
6406 /* Early exit if there are no lastprivate or linear clauses. */
6407 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6408 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6409 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6410 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6411 break;
6412 if (clauses == NULL)
6413 {
6414 /* If this was a workshare clause, see if it had been combined
6415 with its parallel. In that case, look for the clauses on the
6416 parallel statement itself. */
6417 if (is_parallel_ctx (ctx))
6418 return;
6419
6420 ctx = ctx->outer;
6421 if (ctx == NULL || !is_parallel_ctx (ctx))
6422 return;
6423
6424 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6425 OMP_CLAUSE_LASTPRIVATE);
6426 if (clauses == NULL)
6427 return;
6428 par_clauses = true;
6429 }
6430
6431 bool maybe_simt = false;
6432 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6433 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6434 {
6435 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6436 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6437 if (simduid)
6438 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6439 }
6440
6441 if (predicate)
6442 {
6443 gcond *stmt;
6444 tree label_true, arm1, arm2;
6445 enum tree_code pred_code = TREE_CODE (predicate);
6446
6447 label = create_artificial_label (UNKNOWN_LOCATION);
6448 label_true = create_artificial_label (UNKNOWN_LOCATION);
6449 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6450 {
6451 arm1 = TREE_OPERAND (predicate, 0);
6452 arm2 = TREE_OPERAND (predicate, 1);
6453 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6454 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6455 }
6456 else
6457 {
6458 arm1 = predicate;
6459 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6460 arm2 = boolean_false_node;
6461 pred_code = NE_EXPR;
6462 }
6463 if (maybe_simt)
6464 {
6465 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6466 c = fold_convert (integer_type_node, c);
6467 simtcond = create_tmp_var (integer_type_node);
6468 gimplify_assign (simtcond, c, stmt_list);
6469 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6470 1, simtcond);
6471 c = create_tmp_var (integer_type_node);
6472 gimple_call_set_lhs (g, c);
6473 gimple_seq_add_stmt (stmt_list, g);
6474 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6475 label_true, label);
6476 }
6477 else
6478 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6479 gimple_seq_add_stmt (stmt_list, stmt);
6480 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6481 }
6482
6483 tree cond_ptr = NULL_TREE;
6484 for (c = clauses; c ;)
6485 {
6486 tree var, new_var;
6487 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6488 gimple_seq *this_stmt_list = stmt_list;
6489 tree lab2 = NULL_TREE;
6490
6491 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6492 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6493 && ctx->lastprivate_conditional_map
6494 && !ctx->combined_into_simd_safelen1)
6495 {
6496 gcc_assert (body_p);
6497 if (simduid)
6498 goto next;
6499 if (cond_ptr == NULL_TREE)
6500 {
6501 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6502 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6503 }
6504 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6505 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6506 tree v = *ctx->lastprivate_conditional_map->get (o);
6507 gimplify_assign (v, build_zero_cst (type), body_p);
6508 this_stmt_list = cstmt_list;
6509 tree mem;
6510 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6511 {
6512 mem = build2 (MEM_REF, type, cond_ptr,
6513 build_int_cst (TREE_TYPE (cond_ptr),
6514 conditional_off));
6515 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6516 }
6517 else
6518 mem = build4 (ARRAY_REF, type, cond_ptr,
6519 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6520 tree mem2 = copy_node (mem);
6521 gimple_seq seq = NULL;
6522 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6523 gimple_seq_add_seq (this_stmt_list, seq);
6524 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6525 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6526 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6527 gimple_seq_add_stmt (this_stmt_list, g);
6528 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6529 gimplify_assign (mem2, v, this_stmt_list);
6530 }
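   /* Editor's sketch (illustrative) of the check built above, run in
      the critical section:
        if (v > mem) { mem = v; <copy the private value out>; }
      i.e. the contender with the highest recorded iteration count
      wins and publishes both its counter and its private value.  */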
6531 else if (predicate
6532 && ctx->combined_into_simd_safelen1
6533 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6534 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6535 && ctx->lastprivate_conditional_map)
6536 this_stmt_list = &post_stmt_list;
6537
6538 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6539 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6540 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6541 {
6542 var = OMP_CLAUSE_DECL (c);
6543 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6544 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6545 && is_taskloop_ctx (ctx))
6546 {
6547 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6548 new_var = lookup_decl (var, ctx->outer);
6549 }
6550 else
6551 {
6552 new_var = lookup_decl (var, ctx);
6553 /* Avoid uninitialized warnings for lastprivate and
6554 for linear iterators. */
6555 if (predicate
6556 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6557 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6558 TREE_NO_WARNING (new_var) = 1;
6559 }
6560
6561 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6562 {
6563 tree val = DECL_VALUE_EXPR (new_var);
6564 if (TREE_CODE (val) == ARRAY_REF
6565 && VAR_P (TREE_OPERAND (val, 0))
6566 && lookup_attribute ("omp simd array",
6567 DECL_ATTRIBUTES (TREE_OPERAND (val,
6568 0))))
6569 {
6570 if (lastlane == NULL)
6571 {
6572 lastlane = create_tmp_var (unsigned_type_node);
6573 gcall *g
6574 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6575 2, simduid,
6576 TREE_OPERAND (val, 1));
6577 gimple_call_set_lhs (g, lastlane);
6578 gimple_seq_add_stmt (this_stmt_list, g);
6579 }
6580 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6581 TREE_OPERAND (val, 0), lastlane,
6582 NULL_TREE, NULL_TREE);
6583 TREE_THIS_NOTRAP (new_var) = 1;
6584 }
6585 }
6586 else if (maybe_simt)
6587 {
6588 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6589 ? DECL_VALUE_EXPR (new_var)
6590 : new_var);
6591 if (simtlast == NULL)
6592 {
6593 simtlast = create_tmp_var (unsigned_type_node);
6594 gcall *g = gimple_build_call_internal
6595 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6596 gimple_call_set_lhs (g, simtlast);
6597 gimple_seq_add_stmt (this_stmt_list, g);
6598 }
6599 x = build_call_expr_internal_loc
6600 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6601 TREE_TYPE (val), 2, val, simtlast);
6602 new_var = unshare_expr (new_var);
6603 gimplify_assign (new_var, x, this_stmt_list);
6604 new_var = unshare_expr (new_var);
6605 }
6606
6607 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6608 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6609 {
6610 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6611 gimple_seq_add_seq (this_stmt_list,
6612 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6613 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6614 }
6615 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6616 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6617 {
6618 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6619 gimple_seq_add_seq (this_stmt_list,
6620 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6621 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6622 }
6623
6624 x = NULL_TREE;
6625 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6626 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6627 && is_taskloop_ctx (ctx))
6628 {
6629 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6630 ctx->outer->outer);
6631 if (is_global_var (ovar))
6632 x = ovar;
6633 }
6634 if (!x)
6635 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6636 if (omp_is_reference (var))
6637 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6638 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6639 gimplify_and_add (x, this_stmt_list);
6640
6641 if (lab2)
6642 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6643 }
6644
6645 next:
6646 c = OMP_CLAUSE_CHAIN (c);
6647 if (c == NULL && !par_clauses)
6648 {
6649 /* If this was a workshare clause, see if it had been combined
6650 with its parallel. In that case, continue looking for the
6651 clauses also on the parallel statement itself. */
6652 if (is_parallel_ctx (ctx))
6653 break;
6654
6655 ctx = ctx->outer;
6656 if (ctx == NULL || !is_parallel_ctx (ctx))
6657 break;
6658
6659 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6660 OMP_CLAUSE_LASTPRIVATE);
6661 par_clauses = true;
6662 }
6663 }
6664
6665 if (label)
6666 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6667 gimple_seq_add_seq (stmt_list, post_stmt_list);
6668 }
6669
6670 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6671 (which might be a placeholder). INNER is true if this is an inner
6672 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6673 join markers. Generate the before-loop forking sequence in
6674 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
6675 general form of these sequences is
6676
6677 GOACC_REDUCTION_SETUP
6678 GOACC_FORK
6679 GOACC_REDUCTION_INIT
6680 ...
6681 GOACC_REDUCTION_FINI
6682 GOACC_JOIN
6683 GOACC_REDUCTION_TEARDOWN. */
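/* Illustrative source-level example (editor's note, not from this
   file):

     #pragma acc parallel loop reduction(+:sum)
     for (i = 0; i < n; i++)
       sum += a[i];

   is lowered to the SETUP/INIT/FINI/TEARDOWN calls bracketing the
   GOACC_FORK and GOACC_JOIN markers as shown above; a later
   device-lowering pass expands them for the chosen compute axis.  */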
6684
6685 static void
6686 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6687 gcall *fork, gcall *join, gimple_seq *fork_seq,
6688 gimple_seq *join_seq, omp_context *ctx)
6689 {
6690 gimple_seq before_fork = NULL;
6691 gimple_seq after_fork = NULL;
6692 gimple_seq before_join = NULL;
6693 gimple_seq after_join = NULL;
6694 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6695 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6696 unsigned offset = 0;
6697
6698 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6699 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6700 {
6701 tree orig = OMP_CLAUSE_DECL (c);
6702 tree var = maybe_lookup_decl (orig, ctx);
6703 tree ref_to_res = NULL_TREE;
6704 tree incoming, outgoing, v1, v2, v3;
6705 bool is_private = false;
6706
6707 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6708 if (rcode == MINUS_EXPR)
6709 rcode = PLUS_EXPR;
6710 else if (rcode == TRUTH_ANDIF_EXPR)
6711 rcode = BIT_AND_EXPR;
6712 else if (rcode == TRUTH_ORIF_EXPR)
6713 rcode = BIT_IOR_EXPR;
6714 tree op = build_int_cst (unsigned_type_node, rcode);
6715
6716 if (!var)
6717 var = orig;
6718
6719 incoming = outgoing = var;
6720
6721 if (!inner)
6722 {
6723 /* See if an outer construct also reduces this variable. */
6724 omp_context *outer = ctx;
6725
6726 while (omp_context *probe = outer->outer)
6727 {
6728 enum gimple_code type = gimple_code (probe->stmt);
6729 tree cls;
6730
6731 switch (type)
6732 {
6733 case GIMPLE_OMP_FOR:
6734 cls = gimple_omp_for_clauses (probe->stmt);
6735 break;
6736
6737 case GIMPLE_OMP_TARGET:
6738 if ((gimple_omp_target_kind (probe->stmt)
6739 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6740 && (gimple_omp_target_kind (probe->stmt)
6741 != GF_OMP_TARGET_KIND_OACC_SERIAL))
6742 goto do_lookup;
6743
6744 cls = gimple_omp_target_clauses (probe->stmt);
6745 break;
6746
6747 default:
6748 goto do_lookup;
6749 }
6750
6751 outer = probe;
6752 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6753 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6754 && orig == OMP_CLAUSE_DECL (cls))
6755 {
6756 incoming = outgoing = lookup_decl (orig, probe);
6757 goto has_outer_reduction;
6758 }
6759 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6760 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6761 && orig == OMP_CLAUSE_DECL (cls))
6762 {
6763 is_private = true;
6764 goto do_lookup;
6765 }
6766 }
6767
6768 do_lookup:
6769 /* This is the outermost construct with this reduction;
6770 see if there's a mapping for it. */
6771 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6772 && maybe_lookup_field (orig, outer) && !is_private)
6773 {
6774 ref_to_res = build_receiver_ref (orig, false, outer);
6775 if (omp_is_reference (orig))
6776 ref_to_res = build_simple_mem_ref (ref_to_res);
6777
6778 tree type = TREE_TYPE (var);
6779 if (POINTER_TYPE_P (type))
6780 type = TREE_TYPE (type);
6781
6782 outgoing = var;
6783 incoming = omp_reduction_init_op (loc, rcode, type);
6784 }
6785 else
6786 {
6787 /* Look for the reduction var in the enclosing contexts and
6788 use the original if no mapping is found. */
6789 tree t = NULL_TREE;
6790 omp_context *c = ctx->outer;
6791 while (c && !t)
6792 {
6793 t = maybe_lookup_decl (orig, c);
6794 c = c->outer;
6795 }
6796 incoming = outgoing = (t ? t : orig);
6797 }
6798
6799 has_outer_reduction:;
6800 }
6801
6802 if (!ref_to_res)
6803 ref_to_res = integer_zero_node;
6804
6805 if (omp_is_reference (orig))
6806 {
6807 tree type = TREE_TYPE (var);
6808 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6809
6810 if (!inner)
6811 {
6812 tree x = create_tmp_var (TREE_TYPE (type), id);
6813 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6814 }
6815
6816 v1 = create_tmp_var (type, id);
6817 v2 = create_tmp_var (type, id);
6818 v3 = create_tmp_var (type, id);
6819
6820 gimplify_assign (v1, var, fork_seq);
6821 gimplify_assign (v2, var, fork_seq);
6822 gimplify_assign (v3, var, fork_seq);
6823
6824 var = build_simple_mem_ref (var);
6825 v1 = build_simple_mem_ref (v1);
6826 v2 = build_simple_mem_ref (v2);
6827 v3 = build_simple_mem_ref (v3);
6828 outgoing = build_simple_mem_ref (outgoing);
6829
6830 if (!TREE_CONSTANT (incoming))
6831 incoming = build_simple_mem_ref (incoming);
6832 }
6833 else
6834 v1 = v2 = v3 = var;
6835
6836 /* Determine the position in the reduction buffer, which may be
6837 used by the target. The parser has ensured that this is not a
6838 variable-sized type. */
6839 fixed_size_mode mode
6840 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6841 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6842 offset = (offset + align - 1) & ~(align - 1);
6843 tree off = build_int_cst (sizetype, offset);
6844 offset += GET_MODE_SIZE (mode);
6845
6846 if (!init_code)
6847 {
6848 init_code = build_int_cst (integer_type_node,
6849 IFN_GOACC_REDUCTION_INIT);
6850 fini_code = build_int_cst (integer_type_node,
6851 IFN_GOACC_REDUCTION_FINI);
6852 setup_code = build_int_cst (integer_type_node,
6853 IFN_GOACC_REDUCTION_SETUP);
6854 teardown_code = build_int_cst (integer_type_node,
6855 IFN_GOACC_REDUCTION_TEARDOWN);
6856 }
6857
6858 tree setup_call
6859 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6860 TREE_TYPE (var), 6, setup_code,
6861 unshare_expr (ref_to_res),
6862 incoming, level, op, off);
6863 tree init_call
6864 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6865 TREE_TYPE (var), 6, init_code,
6866 unshare_expr (ref_to_res),
6867 v1, level, op, off);
6868 tree fini_call
6869 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6870 TREE_TYPE (var), 6, fini_code,
6871 unshare_expr (ref_to_res),
6872 v2, level, op, off);
6873 tree teardown_call
6874 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6875 TREE_TYPE (var), 6, teardown_code,
6876 ref_to_res, v3, level, op, off);
6877
6878 gimplify_assign (v1, setup_call, &before_fork);
6879 gimplify_assign (v2, init_call, &after_fork);
6880 gimplify_assign (v3, fini_call, &before_join);
6881 gimplify_assign (outgoing, teardown_call, &after_join);
6882 }
6883
6884 /* Now stitch things together. */
6885 gimple_seq_add_seq (fork_seq, before_fork);
6886 if (fork)
6887 gimple_seq_add_stmt (fork_seq, fork);
6888 gimple_seq_add_seq (fork_seq, after_fork);
6889
6890 gimple_seq_add_seq (join_seq, before_join);
6891 if (join)
6892 gimple_seq_add_stmt (join_seq, join);
6893 gimple_seq_add_seq (join_seq, after_join);
6894 }
6895
6896 /* Generate code to implement the REDUCTION clauses and append it
6897 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
6898 that should also be emitted inside of the critical section;
6899 in that case clear *CLIST afterwards, otherwise leave it as is
6900 and let the caller emit it itself. */
6901
6902 static void
6903 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6904 gimple_seq *clist, omp_context *ctx)
6905 {
6906 gimple_seq sub_seq = NULL;
6907 gimple *stmt;
6908 tree x, c;
6909 int count = 0;
6910
6911 /* OpenACC loop reductions are handled elsewhere. */
6912 if (is_gimple_omp_oacc (ctx->stmt))
6913 return;
6914
6915 /* SIMD reductions are handled in lower_rec_input_clauses. */
6916 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6917 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6918 return;
6919
6920 /* inscan reductions are handled elsewhere. */
6921 if (ctx->scan_inclusive || ctx->scan_exclusive)
6922 return;
6923
6924 /* First see if there is exactly one reduction clause. Use an OMP_ATOMIC
6925 update in that case, otherwise use a lock. */
6926 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6927 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6928 && !OMP_CLAUSE_REDUCTION_TASK (c))
6929 {
6930 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6931 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6932 {
6933 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6934 count = -1;
6935 break;
6936 }
6937 count++;
6938 }
6939
6940 if (count == 0)
6941 return;
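   /* Editor's note (illustrative): a single scalar clause such as
        #pragma omp parallel reduction(+:s)
      is merged with one relaxed OMP_ATOMIC update (count == 1), while
      several reductions, array sections or UDRs (count == -1 or >= 2)
      are merged between GOMP_atomic_start/GOMP_atomic_end below.  */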
6942
6943 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6944 {
6945 tree var, ref, new_var, orig_var;
6946 enum tree_code code;
6947 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6948
6949 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6950 || OMP_CLAUSE_REDUCTION_TASK (c))
6951 continue;
6952
6953 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6954 orig_var = var = OMP_CLAUSE_DECL (c);
6955 if (TREE_CODE (var) == MEM_REF)
6956 {
6957 var = TREE_OPERAND (var, 0);
6958 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6959 var = TREE_OPERAND (var, 0);
6960 if (TREE_CODE (var) == ADDR_EXPR)
6961 var = TREE_OPERAND (var, 0);
6962 else
6963 {
6964 /* If this is a pointer- or reference-based array
6965 section, the var could be private in the outer
6966 context, e.g. on an orphaned loop construct. Pretend this
6967 is the private variable's outer reference. */
6968 ccode = OMP_CLAUSE_PRIVATE;
6969 if (TREE_CODE (var) == INDIRECT_REF)
6970 var = TREE_OPERAND (var, 0);
6971 }
6972 orig_var = var;
6973 if (is_variable_sized (var))
6974 {
6975 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6976 var = DECL_VALUE_EXPR (var);
6977 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6978 var = TREE_OPERAND (var, 0);
6979 gcc_assert (DECL_P (var));
6980 }
6981 }
6982 new_var = lookup_decl (var, ctx);
6983 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6984 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6985 ref = build_outer_var_ref (var, ctx, ccode);
6986 code = OMP_CLAUSE_REDUCTION_CODE (c);
6987
6988 /* reduction(-:var) sums up the partial results, so it acts
6989 identically to reduction(+:var). */
6990 if (code == MINUS_EXPR)
6991 code = PLUS_EXPR;
6992
6993 if (count == 1)
6994 {
6995 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6996
6997 addr = save_expr (addr);
6998 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6999 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
7000 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7001 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7002 gimplify_and_add (x, stmt_seqp);
7003 return;
7004 }
7005 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7006 {
7007 tree d = OMP_CLAUSE_DECL (c);
7008 tree type = TREE_TYPE (d);
7009 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7010 tree i = create_tmp_var (TREE_TYPE (v));
7011 tree ptype = build_pointer_type (TREE_TYPE (type));
7012 tree bias = TREE_OPERAND (d, 1);
7013 d = TREE_OPERAND (d, 0);
7014 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7015 {
7016 tree b = TREE_OPERAND (d, 1);
7017 b = maybe_lookup_decl (b, ctx);
7018 if (b == NULL)
7019 {
7020 b = TREE_OPERAND (d, 1);
7021 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7022 }
7023 if (integer_zerop (bias))
7024 bias = b;
7025 else
7026 {
7027 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7028 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7029 TREE_TYPE (b), b, bias);
7030 }
7031 d = TREE_OPERAND (d, 0);
7032 }
7033 /* For ref, build_outer_var_ref has already performed the
7034 dereference, so only new_var needs one. */
7035 if (TREE_CODE (d) == INDIRECT_REF)
7036 {
7037 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7038 gcc_assert (omp_is_reference (var) && var == orig_var);
7039 }
7040 else if (TREE_CODE (d) == ADDR_EXPR)
7041 {
7042 if (orig_var == var)
7043 {
7044 new_var = build_fold_addr_expr (new_var);
7045 ref = build_fold_addr_expr (ref);
7046 }
7047 }
7048 else
7049 {
7050 gcc_assert (orig_var == var);
7051 if (omp_is_reference (var))
7052 ref = build_fold_addr_expr (ref);
7053 }
7054 if (DECL_P (v))
7055 {
7056 tree t = maybe_lookup_decl (v, ctx);
7057 if (t)
7058 v = t;
7059 else
7060 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7061 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7062 }
7063 if (!integer_zerop (bias))
7064 {
7065 bias = fold_convert_loc (clause_loc, sizetype, bias);
7066 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7067 TREE_TYPE (new_var), new_var,
7068 unshare_expr (bias));
7069 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7070 TREE_TYPE (ref), ref, bias);
7071 }
7072 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7073 ref = fold_convert_loc (clause_loc, ptype, ref);
7074 tree m = create_tmp_var (ptype);
7075 gimplify_assign (m, new_var, stmt_seqp);
7076 new_var = m;
7077 m = create_tmp_var (ptype);
7078 gimplify_assign (m, ref, stmt_seqp);
7079 ref = m;
7080 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7081 tree body = create_artificial_label (UNKNOWN_LOCATION);
7082 tree end = create_artificial_label (UNKNOWN_LOCATION);
7083 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7084 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7085 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7086 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7087 {
7088 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7089 tree decl_placeholder
7090 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7091 SET_DECL_VALUE_EXPR (placeholder, out);
7092 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7093 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7094 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7095 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7096 gimple_seq_add_seq (&sub_seq,
7097 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7098 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7099 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7100 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7101 }
7102 else
7103 {
7104 x = build2 (code, TREE_TYPE (out), out, priv);
7105 out = unshare_expr (out);
7106 gimplify_assign (out, x, &sub_seq);
7107 }
7108 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7109 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7110 gimple_seq_add_stmt (&sub_seq, g);
7111 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7112 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7113 gimple_seq_add_stmt (&sub_seq, g);
7114 g = gimple_build_assign (i, PLUS_EXPR, i,
7115 build_int_cst (TREE_TYPE (i), 1));
7116 gimple_seq_add_stmt (&sub_seq, g);
7117 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7118 gimple_seq_add_stmt (&sub_seq, g);
7119 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7120 }
7121 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7122 {
7123 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7124
7125 if (omp_is_reference (var)
7126 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7127 TREE_TYPE (ref)))
7128 ref = build_fold_addr_expr_loc (clause_loc, ref);
7129 SET_DECL_VALUE_EXPR (placeholder, ref);
7130 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7131 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7132 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7133 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7134 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7135 }
7136 else
7137 {
7138 x = build2 (code, TREE_TYPE (ref), ref, new_var);
7139 ref = build_outer_var_ref (var, ctx);
7140 gimplify_assign (ref, x, &sub_seq);
7141 }
7142 }
7143
7144 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7145 0);
7146 gimple_seq_add_stmt (stmt_seqp, stmt);
7147
7148 gimple_seq_add_seq (stmt_seqp, sub_seq);
7149
7150 if (clist)
7151 {
7152 gimple_seq_add_seq (stmt_seqp, *clist);
7153 *clist = NULL;
7154 }
7155
7156 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7157 0);
7158 gimple_seq_add_stmt (stmt_seqp, stmt);
7159 }
7160
7161
7162 /* Generate code to implement the COPYPRIVATE clauses. */
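/* A sketch of the effect (not the exact generated GIMPLE): for
     #pragma omp single copyprivate (x)
   the executing thread stores into the communication record,
     .omp_copy_o.x = x;     // or &x when by_ref
   and the receiving threads copy it back out,
     x = .omp_copy_i->x;    // with an extra dereference when by_ref
   using the sender/receiver decls set up by lower_omp_single_copy.  */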
7163
7164 static void
7165 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7166 omp_context *ctx)
7167 {
7168 tree c;
7169
7170 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7171 {
7172 tree var, new_var, ref, x;
7173 bool by_ref;
7174 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7175
7176 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7177 continue;
7178
7179 var = OMP_CLAUSE_DECL (c);
7180 by_ref = use_pointer_for_field (var, NULL);
7181
7182 ref = build_sender_ref (var, ctx);
7183 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7184 if (by_ref)
7185 {
7186 x = build_fold_addr_expr_loc (clause_loc, new_var);
7187 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7188 }
7189 gimplify_assign (ref, x, slist);
7190
7191 ref = build_receiver_ref (var, false, ctx);
7192 if (by_ref)
7193 {
7194 ref = fold_convert_loc (clause_loc,
7195 build_pointer_type (TREE_TYPE (new_var)),
7196 ref);
7197 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7198 }
7199 if (omp_is_reference (var))
7200 {
7201 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7202 ref = build_simple_mem_ref_loc (clause_loc, ref);
7203 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7204 }
7205 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7206 gimplify_and_add (x, rlist);
7207 }
7208 }
7209
7210
7211 /* Generate code to implement the FIRSTPRIVATE, COPYIN, LASTPRIVATE
7212 and REDUCTION clauses from the sender (aka parent) side. */
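/* A sketch of the effect, using GCC's usual .omp_data_o record naming
   (an assumption here; the decl itself is created elsewhere): for
     #pragma omp parallel firstprivate (i) lastprivate (j)
   the "in" list (ILIST) receives   .omp_data_o.i = i;
   and the "out" list (OLIST)       j = .omp_data_o.j;
   with an address stored instead when the field is by reference.  */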
7213
7214 static void
7215 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7216 omp_context *ctx)
7217 {
7218 tree c, t;
7219 int ignored_looptemp = 0;
7220 bool is_taskloop = false;
7221
7222 /* For taskloop, ignore the first two _looptemp_ clauses; those are
7223 initialized by GOMP_taskloop. */
7224 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7225 {
7226 ignored_looptemp = 2;
7227 is_taskloop = true;
7228 }
7229
7230 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7231 {
7232 tree val, ref, x, var;
7233 bool by_ref, do_in = false, do_out = false;
7234 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7235
7236 switch (OMP_CLAUSE_CODE (c))
7237 {
7238 case OMP_CLAUSE_PRIVATE:
7239 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7240 break;
7241 continue;
7242 case OMP_CLAUSE_FIRSTPRIVATE:
7243 case OMP_CLAUSE_COPYIN:
7244 case OMP_CLAUSE_LASTPRIVATE:
7245 case OMP_CLAUSE_IN_REDUCTION:
7246 case OMP_CLAUSE__REDUCTEMP_:
7247 break;
7248 case OMP_CLAUSE_REDUCTION:
7249 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7250 continue;
7251 break;
7252 case OMP_CLAUSE_SHARED:
7253 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7254 break;
7255 continue;
7256 case OMP_CLAUSE__LOOPTEMP_:
7257 if (ignored_looptemp)
7258 {
7259 ignored_looptemp--;
7260 continue;
7261 }
7262 break;
7263 default:
7264 continue;
7265 }
7266
7267 val = OMP_CLAUSE_DECL (c);
7268 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7269 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7270 && TREE_CODE (val) == MEM_REF)
7271 {
7272 val = TREE_OPERAND (val, 0);
7273 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7274 val = TREE_OPERAND (val, 0);
7275 if (TREE_CODE (val) == INDIRECT_REF
7276 || TREE_CODE (val) == ADDR_EXPR)
7277 val = TREE_OPERAND (val, 0);
7278 if (is_variable_sized (val))
7279 continue;
7280 }
7281
7282 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7283 outer taskloop region. */
7284 omp_context *ctx_for_o = ctx;
7285 if (is_taskloop
7286 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7287 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7288 ctx_for_o = ctx->outer;
7289
7290 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7291
7292 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7293 && is_global_var (var)
7294 && (val == OMP_CLAUSE_DECL (c)
7295 || !is_task_ctx (ctx)
7296 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7297 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7298 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7299 != POINTER_TYPE)))))
7300 continue;
7301
7302 t = omp_member_access_dummy_var (var);
7303 if (t)
7304 {
7305 var = DECL_VALUE_EXPR (var);
7306 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7307 if (o != t)
7308 var = unshare_and_remap (var, t, o);
7309 else
7310 var = unshare_expr (var);
7311 }
7312
7313 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7314 {
7315 /* Handle taskloop firstprivate/lastprivate, where the
7316 lastprivate on GIMPLE_OMP_TASK is represented as
7317 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7318 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7319 x = omp_build_component_ref (ctx->sender_decl, f);
7320 if (use_pointer_for_field (val, ctx))
7321 var = build_fold_addr_expr (var);
7322 gimplify_assign (x, var, ilist);
7323 DECL_ABSTRACT_ORIGIN (f) = NULL;
7324 continue;
7325 }
7326
7327 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7328 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7329 || val == OMP_CLAUSE_DECL (c))
7330 && is_variable_sized (val))
7331 continue;
7332 by_ref = use_pointer_for_field (val, NULL);
7333
7334 switch (OMP_CLAUSE_CODE (c))
7335 {
7336 case OMP_CLAUSE_FIRSTPRIVATE:
7337 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7338 && !by_ref
7339 && is_task_ctx (ctx))
7340 TREE_NO_WARNING (var) = 1;
7341 do_in = true;
7342 break;
7343
7344 case OMP_CLAUSE_PRIVATE:
7345 case OMP_CLAUSE_COPYIN:
7346 case OMP_CLAUSE__LOOPTEMP_:
7347 case OMP_CLAUSE__REDUCTEMP_:
7348 do_in = true;
7349 break;
7350
7351 case OMP_CLAUSE_LASTPRIVATE:
7352 if (by_ref || omp_is_reference (val))
7353 {
7354 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7355 continue;
7356 do_in = true;
7357 }
7358 else
7359 {
7360 do_out = true;
7361 if (lang_hooks.decls.omp_private_outer_ref (val))
7362 do_in = true;
7363 }
7364 break;
7365
7366 case OMP_CLAUSE_REDUCTION:
7367 case OMP_CLAUSE_IN_REDUCTION:
7368 do_in = true;
7369 if (val == OMP_CLAUSE_DECL (c))
7370 {
7371 if (is_task_ctx (ctx))
7372 by_ref = use_pointer_for_field (val, ctx);
7373 else
7374 do_out = !(by_ref || omp_is_reference (val));
7375 }
7376 else
7377 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7378 break;
7379
7380 default:
7381 gcc_unreachable ();
7382 }
7383
7384 if (do_in)
7385 {
7386 ref = build_sender_ref (val, ctx);
7387 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7388 gimplify_assign (ref, x, ilist);
7389 if (is_task_ctx (ctx))
7390 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7391 }
7392
7393 if (do_out)
7394 {
7395 ref = build_sender_ref (val, ctx);
7396 gimplify_assign (var, ref, olist);
7397 }
7398 }
7399 }
7400
7401 /* Generate code to implement SHARED from the sender (aka parent)
7402 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7403 list things that got automatically shared. */
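/* Roughly: for each shared variable V that the child function
   accesses through the communication record, emit
     .omp_data_o.V = V;    // or = &V when use_pointer_for_field
   into ILIST, and, unless V is read-only or a by-reference
   RESULT_DECL/PARM_DECL,
     V = .omp_data_o.V;
   into OLIST to copy any modification back out.  */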
7404
7405 static void
7406 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7407 {
7408 tree var, ovar, nvar, t, f, x, record_type;
7409
7410 if (ctx->record_type == NULL)
7411 return;
7412
7413 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7414 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7415 {
7416 ovar = DECL_ABSTRACT_ORIGIN (f);
7417 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7418 continue;
7419
7420 nvar = maybe_lookup_decl (ovar, ctx);
7421 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7422 continue;
7423
7424 /* If CTX is a nested parallel directive, find the immediately
7425 enclosing parallel or workshare construct that contains a
7426 mapping for OVAR. */
7427 var = lookup_decl_in_outer_ctx (ovar, ctx);
7428
7429 t = omp_member_access_dummy_var (var);
7430 if (t)
7431 {
7432 var = DECL_VALUE_EXPR (var);
7433 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7434 if (o != t)
7435 var = unshare_and_remap (var, t, o);
7436 else
7437 var = unshare_expr (var);
7438 }
7439
7440 if (use_pointer_for_field (ovar, ctx))
7441 {
7442 x = build_sender_ref (ovar, ctx);
7443 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7444 && TREE_TYPE (f) == TREE_TYPE (ovar))
7445 {
7446 gcc_assert (is_parallel_ctx (ctx)
7447 && DECL_ARTIFICIAL (ovar));
7448 /* _condtemp_ clause. */
7449 var = build_constructor (TREE_TYPE (x), NULL);
7450 }
7451 else
7452 var = build_fold_addr_expr (var);
7453 gimplify_assign (x, var, ilist);
7454 }
7455 else
7456 {
7457 x = build_sender_ref (ovar, ctx);
7458 gimplify_assign (x, var, ilist);
7459
7460 if (!TREE_READONLY (var)
7461 /* We don't need to receive a new reference to a result
7462 or parm decl. In fact we may not store to it, as we would
7463 invalidate any pending return slot optimization (RSO) and
7464 generate wrong gimple during inlining. */
7465 && !((TREE_CODE (var) == RESULT_DECL
7466 || TREE_CODE (var) == PARM_DECL)
7467 && DECL_BY_REFERENCE (var)))
7468 {
7469 x = build_sender_ref (ovar, ctx);
7470 gimplify_assign (var, x, olist);
7471 }
7472 }
7473 }
7474 }
7475
7476 /* Emit an OpenACC head marker call, encapsulating the partitioning and
7477 other information that must be processed by the target compiler.
7478 Return the maximum number of dimensions the associated loop might
7479 be partitioned over. */
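/* The marker is emitted as an internal-function call; in GIMPLE dumps
   it looks roughly like (a sketch, exact rendering varies):
     .data_dep = .UNIQUE (OACC_HEAD_MARK, .data_dep, LEVELS, TAG
                          [, GANG_STATIC]);
   where TAG packs the OLF_* bits derived from the loop clauses.  */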
7480
7481 static unsigned
7482 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7483 gimple_seq *seq, omp_context *ctx)
7484 {
7485 unsigned levels = 0;
7486 unsigned tag = 0;
7487 tree gang_static = NULL_TREE;
7488 auto_vec<tree, 5> args;
7489
7490 args.quick_push (build_int_cst
7491 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7492 args.quick_push (ddvar);
7493 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7494 {
7495 switch (OMP_CLAUSE_CODE (c))
7496 {
7497 case OMP_CLAUSE_GANG:
7498 tag |= OLF_DIM_GANG;
7499 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7500 /* static:* is represented by -1, and we can ignore it, as
7501 scheduling is always static. */
7502 if (gang_static && integer_minus_onep (gang_static))
7503 gang_static = NULL_TREE;
7504 levels++;
7505 break;
7506
7507 case OMP_CLAUSE_WORKER:
7508 tag |= OLF_DIM_WORKER;
7509 levels++;
7510 break;
7511
7512 case OMP_CLAUSE_VECTOR:
7513 tag |= OLF_DIM_VECTOR;
7514 levels++;
7515 break;
7516
7517 case OMP_CLAUSE_SEQ:
7518 tag |= OLF_SEQ;
7519 break;
7520
7521 case OMP_CLAUSE_AUTO:
7522 tag |= OLF_AUTO;
7523 break;
7524
7525 case OMP_CLAUSE_INDEPENDENT:
7526 tag |= OLF_INDEPENDENT;
7527 break;
7528
7529 case OMP_CLAUSE_TILE:
7530 tag |= OLF_TILE;
7531 break;
7532
7533 default:
7534 continue;
7535 }
7536 }
7537
7538 if (gang_static)
7539 {
7540 if (DECL_P (gang_static))
7541 gang_static = build_outer_var_ref (gang_static, ctx);
7542 tag |= OLF_GANG_STATIC;
7543 }
7544
7545 /* In a parallel region, loops are implicitly INDEPENDENT. */
7546 omp_context *tgt = enclosing_target_ctx (ctx);
7547 if (!tgt || is_oacc_parallel_or_serial (tgt))
7548 tag |= OLF_INDEPENDENT;
7549
7550 if (tag & OLF_TILE)
7551 /* Tiling could use all 3 levels. */
7552 levels = 3;
7553 else
7554 {
7555 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7556 Ensure at least one level, or 2 for possible auto
7557 partitioning. */
7558 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7559 << OLF_DIM_BASE) | OLF_SEQ));
7560
7561 if (levels < 1u + maybe_auto)
7562 levels = 1u + maybe_auto;
7563 }
7564
7565 args.quick_push (build_int_cst (integer_type_node, levels));
7566 args.quick_push (build_int_cst (integer_type_node, tag));
7567 if (gang_static)
7568 args.quick_push (gang_static);
7569
7570 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7571 gimple_set_location (call, loc);
7572 gimple_set_lhs (call, ddvar);
7573 gimple_seq_add_stmt (seq, call);
7574
7575 return levels;
7576 }
7577
7578 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
7579 non-NULL, encodes the partitioning level of the enclosed region. */
7580
7581 static void
7582 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7583 tree tofollow, gimple_seq *seq)
7584 {
7585 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7586 : IFN_UNIQUE_OACC_TAIL_MARK);
7587 tree marker = build_int_cst (integer_type_node, marker_kind);
7588 int nargs = 2 + (tofollow != NULL_TREE);
7589 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7590 marker, ddvar, tofollow);
7591 gimple_set_location (call, loc);
7592 gimple_set_lhs (call, ddvar);
7593 gimple_seq_add_stmt (seq, call);
7594 }
7595
7596 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7597 the loop clauses, from which we extract reductions. Initialize
7598 HEAD and TAIL. */
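/* Rough shape of the result: for each partitioning level, HEAD gains
   a head marker, an IFN_UNIQUE OACC_FORK and the fork-side reduction
   setup, while the matching join-side reductions, OACC_JOIN and tail
   marker are prepended to TAIL, so the joins unwind the forks in
   reverse order.  */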
7599
7600 static void
7601 lower_oacc_head_tail (location_t loc, tree clauses,
7602 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7603 {
7604 bool inner = false;
7605 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7606 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7607
7608 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7609 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7610 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7611
7612 gcc_assert (count);
7613 for (unsigned done = 1; count; count--, done++)
7614 {
7615 gimple_seq fork_seq = NULL;
7616 gimple_seq join_seq = NULL;
7617
7618 tree place = build_int_cst (integer_type_node, -1);
7619 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7620 fork_kind, ddvar, place);
7621 gimple_set_location (fork, loc);
7622 gimple_set_lhs (fork, ddvar);
7623
7624 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7625 join_kind, ddvar, place);
7626 gimple_set_location (join, loc);
7627 gimple_set_lhs (join, ddvar);
7628
7629 /* Mark the beginning of this level sequence. */
7630 if (inner)
7631 lower_oacc_loop_marker (loc, ddvar, true,
7632 build_int_cst (integer_type_node, count),
7633 &fork_seq);
7634 lower_oacc_loop_marker (loc, ddvar, false,
7635 build_int_cst (integer_type_node, done),
7636 &join_seq);
7637
7638 lower_oacc_reductions (loc, clauses, place, inner,
7639 fork, join, &fork_seq, &join_seq, ctx);
7640
7641 /* Append this level to head. */
7642 gimple_seq_add_seq (head, fork_seq);
7643 /* Prepend it to tail. */
7644 gimple_seq_add_seq (&join_seq, *tail);
7645 *tail = join_seq;
7646
7647 inner = true;
7648 }
7649
7650 /* Mark the end of the sequence. */
7651 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7652 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7653 }
7654
7655 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7656 catch handler and return it. This prevents programs from violating the
7657 structured block semantics with throws. */
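/* Schematically (a sketch, not literal GIMPLE):
     try { BODY }
     catch MUST_NOT_THROW { eh_protect_cleanup_actions ()
                            or __builtin_trap (); }
   so an exception escaping BODY terminates the program rather than
   unwinding out of the structured block.  */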
7658
7659 static gimple_seq
7660 maybe_catch_exception (gimple_seq body)
7661 {
7662 gimple *g;
7663 tree decl;
7664
7665 if (!flag_exceptions)
7666 return body;
7667
7668 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7669 decl = lang_hooks.eh_protect_cleanup_actions ();
7670 else
7671 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7672
7673 g = gimple_build_eh_must_not_throw (decl);
7674 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7675 GIMPLE_TRY_CATCH);
7676
7677 return gimple_seq_alloc_with_stmt (g);
7678 }
7679
7680 \f
7681 /* Routines to lower OMP directives into OMP-GIMPLE. */
7682
7683 /* If CTX is a worksharing context inside a cancellable parallel
7684 region and it isn't nowait, add a LHS to its GIMPLE_OMP_RETURN
7685 and a conditional branch to the parallel's cancel_label to handle
7686 cancellation in the implicit barrier. */
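/* A sketch of the emitted check:
     lhs = GIMPLE_OMP_RETURN;      // cancellation status of the barrier
     if (lhs != 0) goto cancel_label;
     fallthru_label:
   where LHS receives the result of the cancellation-aware implicit
   barrier that is emitted for the GIMPLE_OMP_RETURN when the region
   is expanded later.  */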
7687
7688 static void
7689 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7690 gimple_seq *body)
7691 {
7692 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7693 if (gimple_omp_return_nowait_p (omp_return))
7694 return;
7695 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7696 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7697 && outer->cancellable)
7698 {
7699 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7700 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7701 tree lhs = create_tmp_var (c_bool_type);
7702 gimple_omp_return_set_lhs (omp_return, lhs);
7703 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7704 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7705 fold_convert (c_bool_type,
7706 boolean_false_node),
7707 outer->cancel_label, fallthru_label);
7708 gimple_seq_add_stmt (body, g);
7709 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7710 }
7711 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7712 return;
7713 }
7714
7715 /* Find the first task_reduction or reduction clause, or return NULL_TREE
7716 if there are none. */
7717
7718 static inline tree
7719 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7720 enum omp_clause_code ccode)
7721 {
7722 while (1)
7723 {
7724 clauses = omp_find_clause (clauses, ccode);
7725 if (clauses == NULL_TREE)
7726 return NULL_TREE;
7727 if (ccode != OMP_CLAUSE_REDUCTION
7728 || code == OMP_TASKLOOP
7729 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7730 return clauses;
7731 clauses = OMP_CLAUSE_CHAIN (clauses);
7732 }
7733 }
7734
7735 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7736 gimple_seq *, gimple_seq *);
7737
7738 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7739 CTX is the enclosing OMP context for the current statement. */
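/* Simplified sketch of the lowered result (NEW_BODY below):
     <ilist: firstprivate/reduction setup>
     GIMPLE_OMP_SECTIONS <clauses, .section control var>
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { lowered GIMPLE_OMP_SECTION bodies }
     GIMPLE_OMP_CONTINUE (control, control)
     <olist: reductions>  <dlist: destructors>
     GIMPLE_OMP_RETURN [nowait]  */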
7740
7741 static void
7742 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7743 {
7744 tree block, control;
7745 gimple_stmt_iterator tgsi;
7746 gomp_sections *stmt;
7747 gimple *t;
7748 gbind *new_stmt, *bind;
7749 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7750
7751 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7752
7753 push_gimplify_context ();
7754
7755 dlist = NULL;
7756 ilist = NULL;
7757
7758 tree rclauses
7759 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7760 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7761 tree rtmp = NULL_TREE;
7762 if (rclauses)
7763 {
7764 tree type = build_pointer_type (pointer_sized_int_node);
7765 tree temp = create_tmp_var (type);
7766 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7767 OMP_CLAUSE_DECL (c) = temp;
7768 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7769 gimple_omp_sections_set_clauses (stmt, c);
7770 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7771 gimple_omp_sections_clauses (stmt),
7772 &ilist, &tred_dlist);
7773 rclauses = c;
7774 rtmp = make_ssa_name (type);
7775 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7776 }
7777
7778 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7779 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7780
7781 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7782 &ilist, &dlist, ctx, NULL);
7783
7784 control = create_tmp_var (unsigned_type_node, ".section");
7785 gimple_omp_sections_set_control (stmt, control);
7786
7787 new_body = gimple_omp_body (stmt);
7788 gimple_omp_set_body (stmt, NULL);
7789 tgsi = gsi_start (new_body);
7790 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7791 {
7792 omp_context *sctx;
7793 gimple *sec_start;
7794
7795 sec_start = gsi_stmt (tgsi);
7796 sctx = maybe_lookup_ctx (sec_start);
7797 gcc_assert (sctx);
7798
7799 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7800 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7801 GSI_CONTINUE_LINKING);
7802 gimple_omp_set_body (sec_start, NULL);
7803
7804 if (gsi_one_before_end_p (tgsi))
7805 {
7806 gimple_seq l = NULL;
7807 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7808 &ilist, &l, &clist, ctx);
7809 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7810 gimple_omp_section_set_last (sec_start);
7811 }
7812
7813 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7814 GSI_CONTINUE_LINKING);
7815 }
7816
7817 block = make_node (BLOCK);
7818 bind = gimple_build_bind (NULL, new_body, block);
7819
7820 olist = NULL;
7821 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7822 &clist, ctx);
7823 if (clist)
7824 {
7825 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7826 gcall *g = gimple_build_call (fndecl, 0);
7827 gimple_seq_add_stmt (&olist, g);
7828 gimple_seq_add_seq (&olist, clist);
7829 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7830 g = gimple_build_call (fndecl, 0);
7831 gimple_seq_add_stmt (&olist, g);
7832 }
7833
7834 block = make_node (BLOCK);
7835 new_stmt = gimple_build_bind (NULL, NULL, block);
7836 gsi_replace (gsi_p, new_stmt, true);
7837
7838 pop_gimplify_context (new_stmt);
7839 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7840 BLOCK_VARS (block) = gimple_bind_vars (bind);
7841 if (BLOCK_VARS (block))
7842 TREE_USED (block) = 1;
7843
7844 new_body = NULL;
7845 gimple_seq_add_seq (&new_body, ilist);
7846 gimple_seq_add_stmt (&new_body, stmt);
7847 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7848 gimple_seq_add_stmt (&new_body, bind);
7849
7850 t = gimple_build_omp_continue (control, control);
7851 gimple_seq_add_stmt (&new_body, t);
7852
7853 gimple_seq_add_seq (&new_body, olist);
7854 if (ctx->cancellable)
7855 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7856 gimple_seq_add_seq (&new_body, dlist);
7857
7858 new_body = maybe_catch_exception (new_body);
7859
7860 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7861 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7862 t = gimple_build_omp_return (nowait);
7863 gimple_seq_add_stmt (&new_body, t);
7864 gimple_seq_add_seq (&new_body, tred_dlist);
7865 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7866
7867 if (rclauses)
7868 OMP_CLAUSE_DECL (rclauses) = rtmp;
7869
7870 gimple_bind_set_body (new_stmt, new_body);
7871 }
7872
7873
7874 /* A subroutine of lower_omp_single. Expand the simple form of
7875 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7876
7877 if (GOMP_single_start ())
7878 BODY;
7879 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7880
7881 FIXME. It may be better to delay expanding the logic of this until
7882 pass_expand_omp. The expanded logic may make the job more difficult
7883 for a synchronization analysis pass. */
7884
7885 static void
7886 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7887 {
7888 location_t loc = gimple_location (single_stmt);
7889 tree tlabel = create_artificial_label (loc);
7890 tree flabel = create_artificial_label (loc);
7891 gimple *call, *cond;
7892 tree lhs, decl;
7893
7894 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7895 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7896 call = gimple_build_call (decl, 0);
7897 gimple_call_set_lhs (call, lhs);
7898 gimple_seq_add_stmt (pre_p, call);
7899
7900 cond = gimple_build_cond (EQ_EXPR, lhs,
7901 fold_convert_loc (loc, TREE_TYPE (lhs),
7902 boolean_true_node),
7903 tlabel, flabel);
7904 gimple_seq_add_stmt (pre_p, cond);
7905 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7906 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7907 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7908 }
7909
7910
7911 /* A subroutine of lower_omp_single. Expand the copyprivate form of
7912 a GIMPLE_OMP_SINGLE, i.e. one with a copyprivate clause:
7913
7914 #pragma omp single copyprivate (a, b, c)
7915
7916 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7917
7918 {
7919 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7920 {
7921 BODY;
7922 copyout.a = a;
7923 copyout.b = b;
7924 copyout.c = c;
7925 GOMP_single_copy_end (&copyout);
7926 }
7927 else
7928 {
7929 a = copyout_p->a;
7930 b = copyout_p->b;
7931 c = copyout_p->c;
7932 }
7933 GOMP_barrier ();
7934 }
7935
7936 FIXME. It may be better to delay expanding the logic of this until
7937 pass_expand_omp. The expanded logic may make the job more difficult
7938 for a synchronization analysis pass. */
7939
7940 static void
7941 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7942 omp_context *ctx)
7943 {
7944 tree ptr_type, t, l0, l1, l2, bfn_decl;
7945 gimple_seq copyin_seq;
7946 location_t loc = gimple_location (single_stmt);
7947
7948 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7949
7950 ptr_type = build_pointer_type (ctx->record_type);
7951 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7952
7953 l0 = create_artificial_label (loc);
7954 l1 = create_artificial_label (loc);
7955 l2 = create_artificial_label (loc);
7956
7957 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7958 t = build_call_expr_loc (loc, bfn_decl, 0);
7959 t = fold_convert_loc (loc, ptr_type, t);
7960 gimplify_assign (ctx->receiver_decl, t, pre_p);
7961
7962 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7963 build_int_cst (ptr_type, 0));
7964 t = build3 (COND_EXPR, void_type_node, t,
7965 build_and_jump (&l0), build_and_jump (&l1));
7966 gimplify_and_add (t, pre_p);
7967
7968 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7969
7970 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7971
7972 copyin_seq = NULL;
7973 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7974 &copyin_seq, ctx);
7975
7976 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7977 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7978 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7979 gimplify_and_add (t, pre_p);
7980
7981 t = build_and_jump (&l2);
7982 gimplify_and_add (t, pre_p);
7983
7984 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7985
7986 gimple_seq_add_seq (pre_p, copyin_seq);
7987
7988 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7989 }
7990
7991
7992 /* Expand code for an OpenMP single directive. */
7993
7994 static void
7995 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7996 {
7997 tree block;
7998 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7999 gbind *bind;
8000 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8001
8002 push_gimplify_context ();
8003
8004 block = make_node (BLOCK);
8005 bind = gimple_build_bind (NULL, NULL, block);
8006 gsi_replace (gsi_p, bind, true);
8007 bind_body = NULL;
8008 dlist = NULL;
8009 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8010 &bind_body, &dlist, ctx, NULL);
8011 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8012
8013 gimple_seq_add_stmt (&bind_body, single_stmt);
8014
8015 if (ctx->record_type)
8016 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8017 else
8018 lower_omp_single_simple (single_stmt, &bind_body);
8019
8020 gimple_omp_set_body (single_stmt, NULL);
8021
8022 gimple_seq_add_seq (&bind_body, dlist);
8023
8024 bind_body = maybe_catch_exception (bind_body);
8025
8026 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8027 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8028 gimple *g = gimple_build_omp_return (nowait);
8029 gimple_seq_add_stmt (&bind_body_tail, g);
8030 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8031 if (ctx->record_type)
8032 {
8033 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8034 tree clobber = build_clobber (ctx->record_type);
8035 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8036 clobber), GSI_SAME_STMT);
8037 }
8038 gimple_seq_add_seq (&bind_body, bind_body_tail);
8039 gimple_bind_set_body (bind, bind_body);
8040
8041 pop_gimplify_context (bind);
8042
8043 gimple_bind_append_vars (bind, ctx->block_vars);
8044 BLOCK_VARS (block) = ctx->block_vars;
8045 if (BLOCK_VARS (block))
8046 TREE_USED (block) = 1;
8047 }
8048
8049
8050 /* Expand code for an OpenMP master directive. */
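/* The check is emitted inline; schematically:
     if (omp_get_thread_num () == 0)
       BODY;
     lab:
     GIMPLE_OMP_RETURN (nowait);
   i.e. only thread 0 runs the body and, per the OpenMP spec, master
   has no implied barrier.  */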
8051
8052 static void
8053 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8054 {
8055 tree block, lab = NULL, x, bfn_decl;
8056 gimple *stmt = gsi_stmt (*gsi_p);
8057 gbind *bind;
8058 location_t loc = gimple_location (stmt);
8059 gimple_seq tseq;
8060
8061 push_gimplify_context ();
8062
8063 block = make_node (BLOCK);
8064 bind = gimple_build_bind (NULL, NULL, block);
8065 gsi_replace (gsi_p, bind, true);
8066 gimple_bind_add_stmt (bind, stmt);
8067
8068 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8069 x = build_call_expr_loc (loc, bfn_decl, 0);
8070 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8071 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8072 tseq = NULL;
8073 gimplify_and_add (x, &tseq);
8074 gimple_bind_add_seq (bind, tseq);
8075
8076 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8077 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8078 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8079 gimple_omp_set_body (stmt, NULL);
8080
8081 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8082
8083 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8084
8085 pop_gimplify_context (bind);
8086
8087 gimple_bind_append_vars (bind, ctx->block_vars);
8088 BLOCK_VARS (block) = ctx->block_vars;
8089 }
8090
8091 /* Helper function for lower_omp_task_reductions: for a given PASS,
8092 find the next clause to process (pass 0: constant-size decls; pass 1:
8093 MEM_REFs and variable-sized ones), or return false when done. */
8094
8095 static inline bool
8096 omp_task_reduction_iterate (int pass, enum tree_code code,
8097 enum omp_clause_code ccode, tree *c, tree *decl,
8098 tree *type, tree *next)
8099 {
8100 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8101 {
8102 if (ccode == OMP_CLAUSE_REDUCTION
8103 && code != OMP_TASKLOOP
8104 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8105 continue;
8106 *decl = OMP_CLAUSE_DECL (*c);
8107 *type = TREE_TYPE (*decl);
8108 if (TREE_CODE (*decl) == MEM_REF)
8109 {
8110 if (pass != 1)
8111 continue;
8112 }
8113 else
8114 {
8115 if (omp_is_reference (*decl))
8116 *type = TREE_TYPE (*type);
8117 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8118 continue;
8119 }
8120 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8121 return true;
8122 }
8123 *decl = NULL_TREE;
8124 *type = NULL_TREE;
8125 *next = NULL_TREE;
8126 return false;
8127 }
8128
8129 /* Lower task_reduction and reduction clauses (the latter, unless CODE
8130 is OMP_TASKGROUP, only those with the task modifier). Register them
8131 in the START sequence; reduce and unregister them in the END sequence. */
8132
8133 static void
8134 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8135 gimple_seq *start, gimple_seq *end)
8136 {
8137 enum omp_clause_code ccode
8138 = (code == OMP_TASKGROUP
8139 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8140 tree cancellable = NULL_TREE;
8141 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8142 if (clauses == NULL_TREE)
8143 return;
8144 if (code == OMP_FOR || code == OMP_SECTIONS)
8145 {
8146 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8147 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8148 && outer->cancellable)
8149 {
8150 cancellable = error_mark_node;
8151 break;
8152 }
8153 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8154 break;
8155 }
8156 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8157 tree *last = &TYPE_FIELDS (record_type);
8158 unsigned cnt = 0;
8159 if (cancellable)
8160 {
8161 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8162 ptr_type_node);
8163 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8164 integer_type_node);
8165 *last = field;
8166 DECL_CHAIN (field) = ifield;
8167 last = &DECL_CHAIN (ifield);
8168 DECL_CONTEXT (field) = record_type;
8169 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8170 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8171 DECL_CONTEXT (ifield) = record_type;
8172 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8173 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8174 }
8175 for (int pass = 0; pass < 2; pass++)
8176 {
8177 tree decl, type, next;
8178 for (tree c = clauses;
8179 omp_task_reduction_iterate (pass, code, ccode,
8180 &c, &decl, &type, &next); c = next)
8181 {
8182 ++cnt;
8183 tree new_type = type;
8184 if (ctx->outer)
8185 new_type = remap_type (type, &ctx->outer->cb);
8186 tree field
8187 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8188 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8189 new_type);
8190 if (DECL_P (decl) && type == TREE_TYPE (decl))
8191 {
8192 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8193 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8194 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8195 }
8196 else
8197 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8198 DECL_CONTEXT (field) = record_type;
8199 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8200 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8201 *last = field;
8202 last = &DECL_CHAIN (field);
8203 tree bfield
8204 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8205 boolean_type_node);
8206 DECL_CONTEXT (bfield) = record_type;
8207 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8208 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8209 *last = bfield;
8210 last = &DECL_CHAIN (bfield);
8211 }
8212 }
8213 *last = NULL_TREE;
8214 layout_type (record_type);
8215
8216 /* Build up an array used to register all the reductions with the
8217 runtime and to deregister them at the end. Format documented in libgomp/task.c. */
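/* A sketch of the array layout as built below (libgomp/task.c has the
   authoritative description; slots [5], [6] and the third per-entry
   slot are not written here, presumably left to the runtime):
     [0]  number of reductions (CNT)
     [1]  per-thread chunk size, rounded up to a cache line
     [2]  chunk alignment (at least CACHESZ)
     [3]  -1,  [4]  0
     [7 + 3*i]      address of the i-th reduction's original variable
     [7 + 3*i + 1]  byte offset of its field within the chunk.  */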
8218 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8219 tree avar = create_tmp_var_raw (atype);
8220 gimple_add_tmp_var (avar);
8221 TREE_ADDRESSABLE (avar) = 1;
8222 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8223 NULL_TREE, NULL_TREE);
8224 tree t = build_int_cst (pointer_sized_int_node, cnt);
8225 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8226 gimple_seq seq = NULL;
8227 tree sz = fold_convert (pointer_sized_int_node,
8228 TYPE_SIZE_UNIT (record_type));
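/* Round the per-thread chunk size up to a multiple of CACHESZ, i.e.
   sz = (sz + 63) & ~63; presumably so that different threads' chunks
   land on separate cache lines and avoid false sharing.  */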
8229 int cachesz = 64;
8230 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8231 build_int_cst (pointer_sized_int_node, cachesz - 1));
8232 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8233 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8234 ctx->task_reductions.create (1 + cnt);
8235 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8236 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8237 ? sz : NULL_TREE);
8238 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8239 gimple_seq_add_seq (start, seq);
8240 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8241 NULL_TREE, NULL_TREE);
8242 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8243 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8244 NULL_TREE, NULL_TREE);
8245 t = build_int_cst (pointer_sized_int_node,
8246 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8247 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8248 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8249 NULL_TREE, NULL_TREE);
8250 t = build_int_cst (pointer_sized_int_node, -1);
8251 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8252 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8253 NULL_TREE, NULL_TREE);
8254 t = build_int_cst (pointer_sized_int_node, 0);
8255 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8256
8257 /* In END, build a loop that iterates from 0 to omp_get_num_threads () - 1
8258 and for each task reduction checks a bool right after the private variable
8259 within that thread's chunk; if the bool is clear, the variable hasn't been
8260 initialized and thus isn't going to be reduced nor destructed; otherwise,
8261 reduce and destruct it. */
8262 tree idx = create_tmp_var (size_type_node);
8263 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8264 tree num_thr_sz = create_tmp_var (size_type_node);
8265 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8266 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8267 tree lab3 = NULL_TREE;
8268 gimple *g;
8269 if (code == OMP_FOR || code == OMP_SECTIONS)
8270 {
8271 /* For worksharing constructs, only perform this in the master thread,
8272 with the exception of cancelled implicit barriers, where only the
8273 current thread is handled. */
8274 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8275 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8276 tree thr_num = create_tmp_var (integer_type_node);
8277 g = gimple_build_call (t, 0);
8278 gimple_call_set_lhs (g, thr_num);
8279 gimple_seq_add_stmt (end, g);
8280 if (cancellable)
8281 {
8282 tree c;
8283 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8284 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8285 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8286 if (code == OMP_FOR)
8287 c = gimple_omp_for_clauses (ctx->stmt);
8288 else /* if (code == OMP_SECTIONS) */
8289 c = gimple_omp_sections_clauses (ctx->stmt);
8290 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8291 cancellable = c;
8292 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8293 lab5, lab6);
8294 gimple_seq_add_stmt (end, g);
8295 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8296 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8297 gimple_seq_add_stmt (end, g);
8298 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8299 build_one_cst (TREE_TYPE (idx)));
8300 gimple_seq_add_stmt (end, g);
8301 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8302 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8303 }
8304 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8305 gimple_seq_add_stmt (end, g);
8306 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8307 }
8308 if (code != OMP_PARALLEL)
8309 {
8310 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8311 tree num_thr = create_tmp_var (integer_type_node);
8312 g = gimple_build_call (t, 0);
8313 gimple_call_set_lhs (g, num_thr);
8314 gimple_seq_add_stmt (end, g);
8315 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8316 gimple_seq_add_stmt (end, g);
8317 if (cancellable)
8318 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8319 }
8320 else
8321 {
8322 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8323 OMP_CLAUSE__REDUCTEMP_);
8324 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8325 t = fold_convert (size_type_node, t);
8326 gimplify_assign (num_thr_sz, t, end);
8327 }
8328 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8329 NULL_TREE, NULL_TREE);
8330 tree data = create_tmp_var (pointer_sized_int_node);
8331 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8332 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8333 tree ptr;
8334 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8335 ptr = create_tmp_var (build_pointer_type (record_type));
8336 else
8337 ptr = create_tmp_var (ptr_type_node);
8338 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8339
8340 tree field = TYPE_FIELDS (record_type);
8341 cnt = 0;
8342 if (cancellable)
8343 field = DECL_CHAIN (DECL_CHAIN (field));
8344 for (int pass = 0; pass < 2; pass++)
8345 {
8346 tree decl, type, next;
8347 for (tree c = clauses;
8348 omp_task_reduction_iterate (pass, code, ccode,
8349 &c, &decl, &type, &next); c = next)
8350 {
8351 tree var = decl, ref;
8352 if (TREE_CODE (decl) == MEM_REF)
8353 {
8354 var = TREE_OPERAND (var, 0);
8355 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8356 var = TREE_OPERAND (var, 0);
8357 tree v = var;
8358 if (TREE_CODE (var) == ADDR_EXPR)
8359 var = TREE_OPERAND (var, 0);
8360 else if (TREE_CODE (var) == INDIRECT_REF)
8361 var = TREE_OPERAND (var, 0);
8362 tree orig_var = var;
8363 if (is_variable_sized (var))
8364 {
8365 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8366 var = DECL_VALUE_EXPR (var);
8367 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8368 var = TREE_OPERAND (var, 0);
8369 gcc_assert (DECL_P (var));
8370 }
8371 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8372 if (orig_var != var)
8373 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8374 else if (TREE_CODE (v) == ADDR_EXPR)
8375 t = build_fold_addr_expr (t);
8376 else if (TREE_CODE (v) == INDIRECT_REF)
8377 t = build_fold_indirect_ref (t);
8378 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8379 {
8380 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8381 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8382 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8383 }
8384 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8385 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8386 fold_convert (size_type_node,
8387 TREE_OPERAND (decl, 1)));
8388 }
8389 else
8390 {
8391 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8392 if (!omp_is_reference (decl))
8393 t = build_fold_addr_expr (t);
8394 }
8395 t = fold_convert (pointer_sized_int_node, t);
8396 seq = NULL;
8397 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8398 gimple_seq_add_seq (start, seq);
8399 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8400 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8401 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8402 t = unshare_expr (byte_position (field));
8403 t = fold_convert (pointer_sized_int_node, t);
8404 ctx->task_reduction_map->put (c, cnt);
8405 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8406 ? t : NULL_TREE);
8407 seq = NULL;
8408 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8409 gimple_seq_add_seq (start, seq);
8410 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8411 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8412 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8413
8414 tree bfield = DECL_CHAIN (field);
8415 tree cond;
8416 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8417 /* In parallel or worksharing regions, all threads unconditionally
8418 initialize all their task reduction private variables. */
8419 cond = boolean_true_node;
8420 else if (TREE_TYPE (ptr) == ptr_type_node)
8421 {
8422 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8423 unshare_expr (byte_position (bfield)));
8424 seq = NULL;
8425 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8426 gimple_seq_add_seq (end, seq);
8427 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8428 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8429 build_int_cst (pbool, 0));
8430 }
8431 else
8432 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8433 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8434 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8435 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8436 tree condv = create_tmp_var (boolean_type_node);
8437 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8438 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8439 lab3, lab4);
8440 gimple_seq_add_stmt (end, g);
8441 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8442 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8443 {
8444 /* If this reduction doesn't need destruction and parallel
8445 has been cancelled, there is nothing to do for this
8446 reduction, so jump around the merge operation. */
8447 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8448 g = gimple_build_cond (NE_EXPR, cancellable,
8449 build_zero_cst (TREE_TYPE (cancellable)),
8450 lab4, lab5);
8451 gimple_seq_add_stmt (end, g);
8452 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8453 }
8454
8455 tree new_var;
8456 if (TREE_TYPE (ptr) == ptr_type_node)
8457 {
8458 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8459 unshare_expr (byte_position (field)));
8460 seq = NULL;
8461 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8462 gimple_seq_add_seq (end, seq);
8463 tree pbool = build_pointer_type (TREE_TYPE (field));
8464 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8465 build_int_cst (pbool, 0));
8466 }
8467 else
8468 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8469 build_simple_mem_ref (ptr), field, NULL_TREE);
8470
8471 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8472 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8473 ref = build_simple_mem_ref (ref);
8474 /* reduction(-:var) sums up the partial results, so it acts
8475 identically to reduction(+:var). */
8476 if (rcode == MINUS_EXPR)
8477 rcode = PLUS_EXPR;
8478 if (TREE_CODE (decl) == MEM_REF)
8479 {
8480 tree type = TREE_TYPE (new_var);
8481 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8482 tree i = create_tmp_var (TREE_TYPE (v));
8483 tree ptype = build_pointer_type (TREE_TYPE (type));
8484 if (DECL_P (v))
8485 {
8486 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8487 tree vv = create_tmp_var (TREE_TYPE (v));
8488 gimplify_assign (vv, v, start);
8489 v = vv;
8490 }
8491 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8492 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8493 new_var = build_fold_addr_expr (new_var);
8494 new_var = fold_convert (ptype, new_var);
8495 ref = fold_convert (ptype, ref);
8496 tree m = create_tmp_var (ptype);
8497 gimplify_assign (m, new_var, end);
8498 new_var = m;
8499 m = create_tmp_var (ptype);
8500 gimplify_assign (m, ref, end);
8501 ref = m;
8502 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8503 tree body = create_artificial_label (UNKNOWN_LOCATION);
8504 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8505 gimple_seq_add_stmt (end, gimple_build_label (body));
8506 tree priv = build_simple_mem_ref (new_var);
8507 tree out = build_simple_mem_ref (ref);
8508 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8509 {
8510 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8511 tree decl_placeholder
8512 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8513 tree lab6 = NULL_TREE;
8514 if (cancellable)
8515 {
8516 /* If this reduction needs destruction and parallel
8517 has been cancelled, jump around the merge operation
8518 to the destruction. */
8519 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8520 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8521 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8522 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8523 lab6, lab5);
8524 gimple_seq_add_stmt (end, g);
8525 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8526 }
8527 SET_DECL_VALUE_EXPR (placeholder, out);
8528 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8529 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8530 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8531 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8532 gimple_seq_add_seq (end,
8533 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8534 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8535 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8536 {
8537 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8538 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8539 }
8540 if (cancellable)
8541 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8542 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8543 if (x)
8544 {
8545 gimple_seq tseq = NULL;
8546 gimplify_stmt (&x, &tseq);
8547 gimple_seq_add_seq (end, tseq);
8548 }
8549 }
8550 else
8551 {
8552 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8553 out = unshare_expr (out);
8554 gimplify_assign (out, x, end);
8555 }
8556 gimple *g
8557 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8558 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8559 gimple_seq_add_stmt (end, g);
8560 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8561 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8562 gimple_seq_add_stmt (end, g);
8563 g = gimple_build_assign (i, PLUS_EXPR, i,
8564 build_int_cst (TREE_TYPE (i), 1));
8565 gimple_seq_add_stmt (end, g);
8566 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8567 gimple_seq_add_stmt (end, g);
8568 gimple_seq_add_stmt (end, gimple_build_label (endl));
8569 }
8570 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8571 {
8572 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8573 tree oldv = NULL_TREE;
8574 tree lab6 = NULL_TREE;
8575 if (cancellable)
8576 {
8577 /* If this reduction needs destruction and parallel
8578 has been cancelled, jump around the merge operation
8579 to the destruction. */
8580 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8581 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8582 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8583 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8584 lab6, lab5);
8585 gimple_seq_add_stmt (end, g);
8586 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8587 }
8588 if (omp_is_reference (decl)
8589 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8590 TREE_TYPE (ref)))
8591 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8592 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8593 tree refv = create_tmp_var (TREE_TYPE (ref));
8594 gimplify_assign (refv, ref, end);
8595 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8596 SET_DECL_VALUE_EXPR (placeholder, ref);
8597 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8598 tree d = maybe_lookup_decl (decl, ctx);
8599 gcc_assert (d);
8600 if (DECL_HAS_VALUE_EXPR_P (d))
8601 oldv = DECL_VALUE_EXPR (d);
8602 if (omp_is_reference (var))
8603 {
8604 tree v = fold_convert (TREE_TYPE (d),
8605 build_fold_addr_expr (new_var));
8606 SET_DECL_VALUE_EXPR (d, v);
8607 }
8608 else
8609 SET_DECL_VALUE_EXPR (d, new_var);
8610 DECL_HAS_VALUE_EXPR_P (d) = 1;
8611 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8612 if (oldv)
8613 SET_DECL_VALUE_EXPR (d, oldv);
8614 else
8615 {
8616 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8617 DECL_HAS_VALUE_EXPR_P (d) = 0;
8618 }
8619 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8620 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8621 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8622 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8623 if (cancellable)
8624 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8625 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8626 if (x)
8627 {
8628 gimple_seq tseq = NULL;
8629 gimplify_stmt (&x, &tseq);
8630 gimple_seq_add_seq (end, tseq);
8631 }
8632 }
8633 else
8634 {
8635 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8636 ref = unshare_expr (ref);
8637 gimplify_assign (ref, x, end);
8638 }
8639 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8640 ++cnt;
8641 field = DECL_CHAIN (bfield);
8642 }
8643 }
8644
8645 if (code == OMP_TASKGROUP)
8646 {
8647 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8648 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8649 gimple_seq_add_stmt (start, g);
8650 }
8651 else
8652 {
8653 tree c;
8654 if (code == OMP_FOR)
8655 c = gimple_omp_for_clauses (ctx->stmt);
8656 else if (code == OMP_SECTIONS)
8657 c = gimple_omp_sections_clauses (ctx->stmt);
8658 else
8659 c = gimple_omp_taskreg_clauses (ctx->stmt);
8660 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8661 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8662 build_fold_addr_expr (avar));
8663 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8664 }
8665
8666 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8667 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8668 size_one_node));
8669 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8670 gimple_seq_add_stmt (end, g);
8671 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8672 if (code == OMP_FOR || code == OMP_SECTIONS)
8673 {
8674 enum built_in_function bfn
8675 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8676 t = builtin_decl_explicit (bfn);
8677 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8678 tree arg;
8679 if (cancellable)
8680 {
8681 arg = create_tmp_var (c_bool_type);
8682 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8683 cancellable));
8684 }
8685 else
8686 arg = build_int_cst (c_bool_type, 0);
8687 g = gimple_build_call (t, 1, arg);
8688 }
8689 else
8690 {
8691 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8692 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8693 }
8694 gimple_seq_add_stmt (end, g);
8695 t = build_constructor (atype, NULL);
8696 TREE_THIS_VOLATILE (t) = 1;
8697 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8698 }
8699
8700 /* Expand code for an OpenMP taskgroup directive. */
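/* Schematic result (a sketch):
     bind {
       GOMP_taskgroup_start ();
       <task-reduction registration, if any>
       BODY
       GIMPLE_OMP_RETURN   // paired with a taskgroup-end call when
                           // the region is expanded later
       <task-reduction unregistration (DSEQ)>
     }  */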
8701
8702 static void
8703 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8704 {
8705 gimple *stmt = gsi_stmt (*gsi_p);
8706 gcall *x;
8707 gbind *bind;
8708 gimple_seq dseq = NULL;
8709 tree block = make_node (BLOCK);
8710
8711 bind = gimple_build_bind (NULL, NULL, block);
8712 gsi_replace (gsi_p, bind, true);
8713 gimple_bind_add_stmt (bind, stmt);
8714
8715 push_gimplify_context ();
8716
8717 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8718 0);
8719 gimple_bind_add_stmt (bind, x);
8720
8721 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8722 gimple_omp_taskgroup_clauses (stmt),
8723 gimple_bind_body_ptr (bind), &dseq);
8724
8725 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8726 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8727 gimple_omp_set_body (stmt, NULL);
8728
8729 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8730 gimple_bind_add_seq (bind, dseq);
8731
8732 pop_gimplify_context (bind);
8733
8734 gimple_bind_append_vars (bind, ctx->block_vars);
8735 BLOCK_VARS (block) = ctx->block_vars;
8736 }
8737
8738
8739 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in ORD_STMT if possible. */
8740
8741 static void
8742 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8743 omp_context *ctx)
8744 {
8745 struct omp_for_data fd;
8746 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8747 return;
8748
8749 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8750 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8751 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8752 if (!fd.ordered)
8753 return;
8754
8755 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8756 tree c = gimple_omp_ordered_clauses (ord_stmt);
8757 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8758 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8759 {
8760 /* Merge depend clauses from multiple adjacent
8761 #pragma omp ordered depend(sink:...) constructs
8762 into one #pragma omp ordered depend(sink:...), so that
8763 we can optimize them together. */
8764 gimple_stmt_iterator gsi = *gsi_p;
8765 gsi_next (&gsi);
8766 while (!gsi_end_p (gsi))
8767 {
8768 gimple *stmt = gsi_stmt (gsi);
8769 if (is_gimple_debug (stmt)
8770 || gimple_code (stmt) == GIMPLE_NOP)
8771 {
8772 gsi_next (&gsi);
8773 continue;
8774 }
8775 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8776 break;
8777 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8778 c = gimple_omp_ordered_clauses (ord_stmt2);
8779 if (c == NULL_TREE
8780 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8781 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8782 break;
8783 while (*list_p)
8784 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8785 *list_p = c;
8786 gsi_remove (&gsi, true);
8787 }
8788 }
8789
8790 /* Canonicalize sink dependence clauses into one folded clause if
8791 possible.
8792
8793 The basic algorithm is to create a sink vector whose first
8794 element is the GCD of all the first elements, and whose remaining
8795 elements are the minimum of the subsequent columns.
8796
8797 We ignore dependence vectors whose first element is zero because
8798 such dependencies are known to be executed by the same thread.
8799
8800 We take into account the direction of the loop, so a minimum
8801 becomes a maximum if the loop is iterating forwards. We also
8802 ignore sink clauses where the loop direction is unknown, or where
8803 the offsets are clearly invalid because they are not a multiple
8804 of the loop increment.
8805
8806 For example:
8807
8808 #pragma omp for ordered(2)
8809 for (i=0; i < N; ++i)
8810 for (j=0; j < M; ++j)
8811 {
8812 #pragma omp ordered \
8813 depend(sink:i-8,j-2) \
8814 depend(sink:i,j-1) \ // Completely ignored because i+0.
8815 depend(sink:i-4,j-3) \
8816 depend(sink:i-6,j-4)
8817 #pragma omp ordered depend(source)
8818 }
8819
8820 Folded clause is:
8821
8822 depend(sink:-gcd(8,4,6),-min(2,3,4))
8823 -or-
8824 depend(sink:-2,-2)
8825 */
8826
8827 /* FIXME: Computing GCD's where the first element is zero is
8828 non-trivial in the presence of collapsed loops. Do this later. */
8829 if (fd.collapse > 1)
8830 return;
8831
8832 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8833
8834 /* wide_int is not a POD so it must be default-constructed. */
8835 for (unsigned i = 0; i != 2 * len - 1; ++i)
8836 new (static_cast<void*>(folded_deps + i)) wide_int ();
8837
8838 tree folded_dep = NULL_TREE;
8839 /* TRUE if the first dimension's offset is negative. */
8840 bool neg_offset_p = false;
8841
8842 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8843 unsigned int i;
8844 while ((c = *list_p) != NULL)
8845 {
8846 bool remove = false;
8847
8848 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8849 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8850 goto next_ordered_clause;
8851
8852 tree vec;
8853 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8854 vec && TREE_CODE (vec) == TREE_LIST;
8855 vec = TREE_CHAIN (vec), ++i)
8856 {
8857 gcc_assert (i < len);
8858
8859 /* omp_extract_for_data has canonicalized the condition. */
8860 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8861 || fd.loops[i].cond_code == GT_EXPR);
8862 bool forward = fd.loops[i].cond_code == LT_EXPR;
8863 bool maybe_lexically_later = true;
8864
8865 /* While the committee makes up its mind, bail if we have any
8866 non-constant steps. */
8867 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8868 goto lower_omp_ordered_ret;
8869
8870 tree itype = TREE_TYPE (TREE_VALUE (vec));
8871 if (POINTER_TYPE_P (itype))
8872 itype = sizetype;
8873 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8874 TYPE_PRECISION (itype),
8875 TYPE_SIGN (itype));
8876
8877 /* Ignore invalid offsets that are not multiples of the step. */
8878 if (!wi::multiple_of_p (wi::abs (offset),
8879 wi::abs (wi::to_wide (fd.loops[i].step)),
8880 UNSIGNED))
8881 {
8882 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8883 "ignoring sink clause with offset that is not "
8884 "a multiple of the loop step");
8885 remove = true;
8886 goto next_ordered_clause;
8887 }
8888
8889 /* Calculate the first dimension. The first dimension of
8890 the folded dependency vector is the GCD of the first
8891 elements, while ignoring any first elements whose offset
8892 is 0. */
8893 if (i == 0)
8894 {
8895 /* Ignore dependence vectors whose first dimension is 0. */
8896 if (offset == 0)
8897 {
8898 remove = true;
8899 goto next_ordered_clause;
8900 }
8901 else
8902 {
8903 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8904 {
8905 error_at (OMP_CLAUSE_LOCATION (c),
8906 "first offset must be in opposite direction "
8907 "of loop iterations");
8908 goto lower_omp_ordered_ret;
8909 }
8910 if (forward)
8911 offset = -offset;
8912 neg_offset_p = forward;
8913 /* Initialize the first time around. */
8914 if (folded_dep == NULL_TREE)
8915 {
8916 folded_dep = c;
8917 folded_deps[0] = offset;
8918 }
8919 else
8920 folded_deps[0] = wi::gcd (folded_deps[0],
8921 offset, UNSIGNED);
8922 }
8923 }
8924 /* Calculate minimum for the remaining dimensions. */
8925 else
8926 {
8927 folded_deps[len + i - 1] = offset;
8928 if (folded_dep == c)
8929 folded_deps[i] = offset;
8930 else if (maybe_lexically_later
8931 && !wi::eq_p (folded_deps[i], offset))
8932 {
8933 if (forward ^ wi::gts_p (folded_deps[i], offset))
8934 {
8935 unsigned int j;
8936 folded_dep = c;
8937 for (j = 1; j <= i; j++)
8938 folded_deps[j] = folded_deps[len + j - 1];
8939 }
8940 else
8941 maybe_lexically_later = false;
8942 }
8943 }
8944 }
8945 gcc_assert (i == len);
8946
8947 remove = true;
8948
8949 next_ordered_clause:
8950 if (remove)
8951 *list_p = OMP_CLAUSE_CHAIN (c);
8952 else
8953 list_p = &OMP_CLAUSE_CHAIN (c);
8954 }
8955
8956 if (folded_dep)
8957 {
8958 if (neg_offset_p)
8959 folded_deps[0] = -folded_deps[0];
8960
8961 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8962 if (POINTER_TYPE_P (itype))
8963 itype = sizetype;
8964
8965 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8966 = wide_int_to_tree (itype, folded_deps[0]);
8967 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8968 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8969 }
8970
8971 lower_omp_ordered_ret:
8972
8973 /* Ordered without clauses is #pragma omp threads, while we want
8974 a nop instead if we remove all clauses. */
8975 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8976 gsi_replace (gsi_p, gimple_build_nop (), true);
8977 }
8978
8979
8980 /* Expand code for an OpenMP ordered directive. */
8981
8982 static void
8983 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8984 {
8985 tree block;
8986 gimple *stmt = gsi_stmt (*gsi_p), *g;
8987 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8988 gcall *x;
8989 gbind *bind;
8990 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8991 OMP_CLAUSE_SIMD);
8992 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8993 loop. */
8994 bool maybe_simt
8995 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8996 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8997 OMP_CLAUSE_THREADS);
8998
8999 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9000 OMP_CLAUSE_DEPEND))
9001 {
9002 /* FIXME: This is needs to be moved to the expansion to verify various
9003 conditions only testable on cfg with dominators computed, and also
9004 all the depend clauses to be merged still might need to be available
9005 for the runtime checks. */
9006 if (0)
9007 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9008 return;
9009 }
9010
9011 push_gimplify_context ();
9012
9013 block = make_node (BLOCK);
9014 bind = gimple_build_bind (NULL, NULL, block);
9015 gsi_replace (gsi_p, bind, true);
9016 gimple_bind_add_stmt (bind, stmt);
9017
9018 if (simd)
9019 {
9020 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9021 build_int_cst (NULL_TREE, threads));
9022 cfun->has_simduid_loops = true;
9023 }
9024 else
9025 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9026 0);
9027 gimple_bind_add_stmt (bind, x);
9028
9029 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9030 if (maybe_simt)
9031 {
9032 counter = create_tmp_var (integer_type_node);
9033 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9034 gimple_call_set_lhs (g, counter);
9035 gimple_bind_add_stmt (bind, g);
9036
9037 body = create_artificial_label (UNKNOWN_LOCATION);
9038 test = create_artificial_label (UNKNOWN_LOCATION);
9039 gimple_bind_add_stmt (bind, gimple_build_label (body));
9040
9041 tree simt_pred = create_tmp_var (integer_type_node);
9042 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9043 gimple_call_set_lhs (g, simt_pred);
9044 gimple_bind_add_stmt (bind, g);
9045
9046 tree t = create_artificial_label (UNKNOWN_LOCATION);
9047 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9048 gimple_bind_add_stmt (bind, g);
9049
9050 gimple_bind_add_stmt (bind, gimple_build_label (t));
9051 }
9052 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9053 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9054 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9055 gimple_omp_set_body (stmt, NULL);
9056
9057 if (maybe_simt)
9058 {
9059 gimple_bind_add_stmt (bind, gimple_build_label (test));
9060 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9061 gimple_bind_add_stmt (bind, g);
9062
9063 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9064 tree nonneg = create_tmp_var (integer_type_node);
9065 gimple_seq tseq = NULL;
9066 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9067 gimple_bind_add_seq (bind, tseq);
9068
9069 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9070 gimple_call_set_lhs (g, nonneg);
9071 gimple_bind_add_stmt (bind, g);
9072
9073 tree end = create_artificial_label (UNKNOWN_LOCATION);
9074 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9075 gimple_bind_add_stmt (bind, g);
9076
9077 gimple_bind_add_stmt (bind, gimple_build_label (end));
9078 }
9079 if (simd)
9080 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9081 build_int_cst (NULL_TREE, threads));
9082 else
9083 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9084 0);
9085 gimple_bind_add_stmt (bind, x);
9086
9087 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9088
9089 pop_gimplify_context (bind);
9090
9091 gimple_bind_append_vars (bind, ctx->block_vars);
9092 BLOCK_VARS (block) = gimple_bind_vars (bind);
9093 }
9094
9095
9096 /* Expand code for an OpenMP scan directive and the structured block
9097 before the scan directive. */
9098
9099 static void
9100 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9101 {
9102 gimple *stmt = gsi_stmt (*gsi_p);
9103 bool has_clauses
9104 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9105 tree lane = NULL_TREE;
9106 gimple_seq before = NULL;
9107 omp_context *octx = ctx->outer;
9108 gcc_assert (octx);
9109 if (octx->scan_exclusive && !has_clauses)
9110 {
9111 gimple_stmt_iterator gsi2 = *gsi_p;
9112 gsi_next (&gsi2);
9113 gimple *stmt2 = gsi_stmt (gsi2);
9114 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9115 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9116 the one with exclusive clause(s), comes first. */
9117 if (stmt2
9118 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9119 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9120 {
9121 gsi_remove (gsi_p, false);
9122 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9123 ctx = maybe_lookup_ctx (stmt2);
9124 gcc_assert (ctx);
9125 lower_omp_scan (gsi_p, ctx);
9126 return;
9127 }
9128 }
9129
9130 bool input_phase = has_clauses ^ octx->scan_inclusive;
9131 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9132 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9133 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9134 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9135 && !gimple_omp_for_combined_p (octx->stmt));
9136 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9137 if (is_for_simd && octx->for_simd_scan_phase)
9138 is_simd = false;
9139 if (is_simd)
9140 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9141 OMP_CLAUSE__SIMDUID_))
9142 {
9143 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9144 lane = create_tmp_var (unsigned_type_node);
9145 tree t = build_int_cst (integer_type_node,
9146 input_phase ? 1
9147 : octx->scan_inclusive ? 2 : 3);
9148 gimple *g
9149 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9150 gimple_call_set_lhs (g, lane);
9151 gimple_seq_add_stmt (&before, g);
9152 }
9153
9154 if (is_simd || is_for)
9155 {
9156 for (tree c = gimple_omp_for_clauses (octx->stmt);
9157 c; c = OMP_CLAUSE_CHAIN (c))
9158 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9159 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9160 {
9161 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9162 tree var = OMP_CLAUSE_DECL (c);
9163 tree new_var = lookup_decl (var, octx);
9164 tree val = new_var;
9165 tree var2 = NULL_TREE;
9166 tree var3 = NULL_TREE;
9167 tree var4 = NULL_TREE;
9168 tree lane0 = NULL_TREE;
9169 tree new_vard = new_var;
9170 if (omp_is_reference (var))
9171 {
9172 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9173 val = new_var;
9174 }
9175 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9176 {
9177 val = DECL_VALUE_EXPR (new_vard);
9178 if (new_vard != new_var)
9179 {
9180 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9181 val = TREE_OPERAND (val, 0);
9182 }
9183 if (TREE_CODE (val) == ARRAY_REF
9184 && VAR_P (TREE_OPERAND (val, 0)))
9185 {
9186 tree v = TREE_OPERAND (val, 0);
9187 if (lookup_attribute ("omp simd array",
9188 DECL_ATTRIBUTES (v)))
9189 {
9190 val = unshare_expr (val);
9191 lane0 = TREE_OPERAND (val, 1);
9192 TREE_OPERAND (val, 1) = lane;
9193 var2 = lookup_decl (v, octx);
9194 if (octx->scan_exclusive)
9195 var4 = lookup_decl (var2, octx);
9196 if (input_phase
9197 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9198 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9199 if (!input_phase)
9200 {
9201 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9202 var2, lane, NULL_TREE, NULL_TREE);
9203 TREE_THIS_NOTRAP (var2) = 1;
9204 if (octx->scan_exclusive)
9205 {
9206 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9207 var4, lane, NULL_TREE,
9208 NULL_TREE);
9209 TREE_THIS_NOTRAP (var4) = 1;
9210 }
9211 }
9212 else
9213 var2 = val;
9214 }
9215 }
9216 gcc_assert (var2);
9217 }
9218 else
9219 {
9220 var2 = build_outer_var_ref (var, octx);
9221 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9222 {
9223 var3 = maybe_lookup_decl (new_vard, octx);
9224 if (var3 == new_vard || var3 == NULL_TREE)
9225 var3 = NULL_TREE;
9226 else if (is_simd && octx->scan_exclusive && !input_phase)
9227 {
9228 var4 = maybe_lookup_decl (var3, octx);
9229 if (var4 == var3 || var4 == NULL_TREE)
9230 {
9231 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9232 {
9233 var4 = var3;
9234 var3 = NULL_TREE;
9235 }
9236 else
9237 var4 = NULL_TREE;
9238 }
9239 }
9240 }
9241 if (is_simd
9242 && octx->scan_exclusive
9243 && !input_phase
9244 && var4 == NULL_TREE)
9245 var4 = create_tmp_var (TREE_TYPE (val));
9246 }
9247 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9248 {
9249 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9250 if (input_phase)
9251 {
9252 if (var3)
9253 {
9254 /* If we've added a separate identity element
9255 variable, copy it over into val. */
9256 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9257 var3);
9258 gimplify_and_add (x, &before);
9259 }
9260 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9261 {
9262 /* Otherwise, assign to it the identity element. */
9263 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9264 if (is_for)
9265 tseq = copy_gimple_seq_and_replace_locals (tseq);
9266 tree ref = build_outer_var_ref (var, octx);
9267 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9268 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9269 if (x)
9270 {
9271 if (new_vard != new_var)
9272 val = build_fold_addr_expr_loc (clause_loc, val);
9273 SET_DECL_VALUE_EXPR (new_vard, val);
9274 }
9275 SET_DECL_VALUE_EXPR (placeholder, ref);
9276 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9277 lower_omp (&tseq, octx);
9278 if (x)
9279 SET_DECL_VALUE_EXPR (new_vard, x);
9280 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9281 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9282 gimple_seq_add_seq (&before, tseq);
9283 if (is_simd)
9284 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9285 }
9286 }
9287 else if (is_simd)
9288 {
9289 tree x;
9290 if (octx->scan_exclusive)
9291 {
9292 tree v4 = unshare_expr (var4);
9293 tree v2 = unshare_expr (var2);
9294 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9295 gimplify_and_add (x, &before);
9296 }
9297 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9298 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9299 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9300 tree vexpr = val;
9301 if (x && new_vard != new_var)
9302 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9303 if (x)
9304 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9305 SET_DECL_VALUE_EXPR (placeholder, var2);
9306 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9307 lower_omp (&tseq, octx);
9308 gimple_seq_add_seq (&before, tseq);
9309 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9310 if (x)
9311 SET_DECL_VALUE_EXPR (new_vard, x);
9312 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9313 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9314 if (octx->scan_inclusive)
9315 {
9316 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9317 var2);
9318 gimplify_and_add (x, &before);
9319 }
9320 else if (lane0 == NULL_TREE)
9321 {
9322 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9323 var4);
9324 gimplify_and_add (x, &before);
9325 }
9326 }
9327 }
9328 else
9329 {
9330 if (input_phase)
9331 {
9332 /* input phase. Set val to initializer before
9333 the body. */
9334 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9335 gimplify_assign (val, x, &before);
9336 }
9337 else if (is_simd)
9338 {
9339 /* scan phase. */
9340 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9341 if (code == MINUS_EXPR)
9342 code = PLUS_EXPR;
9343
9344 tree x = build2 (code, TREE_TYPE (var2),
9345 unshare_expr (var2), unshare_expr (val));
9346 if (octx->scan_inclusive)
9347 {
9348 gimplify_assign (unshare_expr (var2), x, &before);
9349 gimplify_assign (val, var2, &before);
9350 }
9351 else
9352 {
9353 gimplify_assign (unshare_expr (var4),
9354 unshare_expr (var2), &before);
9355 gimplify_assign (var2, x, &before);
9356 if (lane0 == NULL_TREE)
9357 gimplify_assign (val, var4, &before);
9358 }
9359 }
9360 }
9361 if (octx->scan_exclusive && !input_phase && lane0)
9362 {
9363 tree vexpr = unshare_expr (var4);
9364 TREE_OPERAND (vexpr, 1) = lane0;
9365 if (new_vard != new_var)
9366 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9367 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9368 }
9369 }
9370 }
9371 if (is_simd && !is_for_simd)
9372 {
9373 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9374 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9375 gsi_replace (gsi_p, gimple_build_nop (), true);
9376 return;
9377 }
9378 lower_omp (gimple_omp_body_ptr (stmt), octx);
9379 if (before)
9380 {
9381 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9382 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9383 }
9384 }
9385
9386
9387 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9388 substitution of a couple of function calls. But in the NAMED case,
9389 requires that languages coordinate a symbol name. It is therefore
9390 best put here in common code. */
9391
9392 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9393
9394 static void
9395 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9396 {
9397 tree block;
9398 tree name, lock, unlock;
9399 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9400 gbind *bind;
9401 location_t loc = gimple_location (stmt);
9402 gimple_seq tbody;
9403
9404 name = gimple_omp_critical_name (stmt);
9405 if (name)
9406 {
9407 tree decl;
9408
9409 if (!critical_name_mutexes)
9410 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9411
9412 tree *n = critical_name_mutexes->get (name);
9413 if (n == NULL)
9414 {
9415 char *new_str;
9416
9417 decl = create_tmp_var_raw (ptr_type_node);
9418
9419 new_str = ACONCAT ((".gomp_critical_user_",
9420 IDENTIFIER_POINTER (name), NULL));
9421 DECL_NAME (decl) = get_identifier (new_str);
9422 TREE_PUBLIC (decl) = 1;
9423 TREE_STATIC (decl) = 1;
9424 DECL_COMMON (decl) = 1;
9425 DECL_ARTIFICIAL (decl) = 1;
9426 DECL_IGNORED_P (decl) = 1;
9427
9428 varpool_node::finalize_decl (decl);
9429
9430 critical_name_mutexes->put (name, decl);
9431 }
9432 else
9433 decl = *n;
9434
9435 /* If '#pragma omp critical' is inside offloaded region or
9436 inside function marked as offloadable, the symbol must be
9437 marked as offloadable too. */
9438 omp_context *octx;
9439 if (cgraph_node::get (current_function_decl)->offloadable)
9440 varpool_node::get_create (decl)->offloadable = 1;
9441 else
9442 for (octx = ctx->outer; octx; octx = octx->outer)
9443 if (is_gimple_omp_offloaded (octx->stmt))
9444 {
9445 varpool_node::get_create (decl)->offloadable = 1;
9446 break;
9447 }
9448
9449 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9450 lock = build_call_expr_loc (loc, lock, 1,
9451 build_fold_addr_expr_loc (loc, decl));
9452
9453 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9454 unlock = build_call_expr_loc (loc, unlock, 1,
9455 build_fold_addr_expr_loc (loc, decl));
9456 }
9457 else
9458 {
9459 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9460 lock = build_call_expr_loc (loc, lock, 0);
9461
9462 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9463 unlock = build_call_expr_loc (loc, unlock, 0);
9464 }
9465
9466 push_gimplify_context ();
9467
9468 block = make_node (BLOCK);
9469 bind = gimple_build_bind (NULL, NULL, block);
9470 gsi_replace (gsi_p, bind, true);
9471 gimple_bind_add_stmt (bind, stmt);
9472
9473 tbody = gimple_bind_body (bind);
9474 gimplify_and_add (lock, &tbody);
9475 gimple_bind_set_body (bind, tbody);
9476
9477 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9478 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9479 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9480 gimple_omp_set_body (stmt, NULL);
9481
9482 tbody = gimple_bind_body (bind);
9483 gimplify_and_add (unlock, &tbody);
9484 gimple_bind_set_body (bind, tbody);
9485
9486 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9487
9488 pop_gimplify_context (bind);
9489 gimple_bind_append_vars (bind, ctx->block_vars);
9490 BLOCK_VARS (block) = gimple_bind_vars (bind);
9491 }
9492
9493 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9494 for a lastprivate clause. Given a loop control predicate of (V
9495 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9496 is appended to *DLIST, iterator initialization is appended to
9497 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9498 to be emitted in a critical section. */
9499
9500 static void
9501 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9502 gimple_seq *dlist, gimple_seq *clist,
9503 struct omp_context *ctx)
9504 {
9505 tree clauses, cond, vinit;
9506 enum tree_code cond_code;
9507 gimple_seq stmts;
9508
9509 cond_code = fd->loop.cond_code;
9510 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9511
9512 /* When possible, use a strict equality expression. This can let VRP
9513 type optimizations deduce the value and remove a copy. */
9514 if (tree_fits_shwi_p (fd->loop.step))
9515 {
9516 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9517 if (step == 1 || step == -1)
9518 cond_code = EQ_EXPR;
9519 }
9520
9521 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
9522 || gimple_omp_for_grid_phony (fd->for_stmt))
9523 cond = omp_grid_lastprivate_predicate (fd);
9524 else
9525 {
9526 tree n2 = fd->loop.n2;
9527 if (fd->collapse > 1
9528 && TREE_CODE (n2) != INTEGER_CST
9529 && gimple_omp_for_combined_into_p (fd->for_stmt))
9530 {
9531 struct omp_context *taskreg_ctx = NULL;
9532 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9533 {
9534 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9535 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9536 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9537 {
9538 if (gimple_omp_for_combined_into_p (gfor))
9539 {
9540 gcc_assert (ctx->outer->outer
9541 && is_parallel_ctx (ctx->outer->outer));
9542 taskreg_ctx = ctx->outer->outer;
9543 }
9544 else
9545 {
9546 struct omp_for_data outer_fd;
9547 omp_extract_for_data (gfor, &outer_fd, NULL);
9548 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9549 }
9550 }
9551 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9552 taskreg_ctx = ctx->outer->outer;
9553 }
9554 else if (is_taskreg_ctx (ctx->outer))
9555 taskreg_ctx = ctx->outer;
9556 if (taskreg_ctx)
9557 {
9558 int i;
9559 tree taskreg_clauses
9560 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9561 tree innerc = omp_find_clause (taskreg_clauses,
9562 OMP_CLAUSE__LOOPTEMP_);
9563 gcc_assert (innerc);
9564 for (i = 0; i < fd->collapse; i++)
9565 {
9566 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9567 OMP_CLAUSE__LOOPTEMP_);
9568 gcc_assert (innerc);
9569 }
9570 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9571 OMP_CLAUSE__LOOPTEMP_);
9572 if (innerc)
9573 n2 = fold_convert (TREE_TYPE (n2),
9574 lookup_decl (OMP_CLAUSE_DECL (innerc),
9575 taskreg_ctx));
9576 }
9577 }
9578 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9579 }
9580
9581 clauses = gimple_omp_for_clauses (fd->for_stmt);
9582 stmts = NULL;
9583 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9584 if (!gimple_seq_empty_p (stmts))
9585 {
9586 gimple_seq_add_seq (&stmts, *dlist);
9587 *dlist = stmts;
9588
9589 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9590 vinit = fd->loop.n1;
9591 if (cond_code == EQ_EXPR
9592 && tree_fits_shwi_p (fd->loop.n2)
9593 && ! integer_zerop (fd->loop.n2))
9594 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9595 else
9596 vinit = unshare_expr (vinit);
9597
9598 /* Initialize the iterator variable, so that threads that don't execute
9599 any iterations don't execute the lastprivate clauses by accident. */
9600 gimplify_assign (fd->loop.v, vinit, body_p);
9601 }
9602 }
9603
9604 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9605
9606 static tree
9607 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9608 struct walk_stmt_info *wi)
9609 {
9610 gimple *stmt = gsi_stmt (*gsi_p);
9611
9612 *handled_ops_p = true;
9613 switch (gimple_code (stmt))
9614 {
9615 WALK_SUBSTMTS;
9616
9617 case GIMPLE_OMP_FOR:
9618 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9619 && gimple_omp_for_combined_into_p (stmt))
9620 *handled_ops_p = false;
9621 break;
9622
9623 case GIMPLE_OMP_SCAN:
9624 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9625 return integer_zero_node;
9626 default:
9627 break;
9628 }
9629 return NULL;
9630 }
9631
9632 /* Helper function for lower_omp_for, add transformations for a worksharing
9633 loop with scan directives inside of it.
9634 For worksharing loop not combined with simd, transform:
9635 #pragma omp for reduction(inscan,+:r) private(i)
9636 for (i = 0; i < n; i = i + 1)
9637 {
9638 {
9639 update (r);
9640 }
9641 #pragma omp scan inclusive(r)
9642 {
9643 use (r);
9644 }
9645 }
9646
9647 into two worksharing loops + code to merge results:
9648
9649 num_threads = omp_get_num_threads ();
9650 thread_num = omp_get_thread_num ();
9651 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9652 <D.2099>:
9653 var2 = r;
9654 goto <D.2101>;
9655 <D.2100>:
9656 // For UDRs this is UDR init, or if ctors are needed, copy from
9657 // var3 that has been constructed to contain the neutral element.
9658 var2 = 0;
9659 <D.2101>:
9660 ivar = 0;
9661 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9662 // a shared array with num_threads elements and rprivb to a local array
9663 // number of elements equal to the number of (contiguous) iterations the
9664 // current thread will perform. controlb and controlp variables are
9665 // temporaries to handle deallocation of rprivb at the end of second
9666 // GOMP_FOR.
9667 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9668 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9669 for (i = 0; i < n; i = i + 1)
9670 {
9671 {
9672 // For UDRs this is UDR init or copy from var3.
9673 r = 0;
9674 // This is the input phase from user code.
9675 update (r);
9676 }
9677 {
9678 // For UDRs this is UDR merge.
9679 var2 = var2 + r;
9680 // Rather than handing it over to the user, save to local thread's
9681 // array.
9682 rprivb[ivar] = var2;
9683 // For exclusive scan, the above two statements are swapped.
9684 ivar = ivar + 1;
9685 }
9686 }
9687 // And remember the final value from this thread's into the shared
9688 // rpriva array.
9689 rpriva[(sizetype) thread_num] = var2;
9690 // If more than one thread, compute using Work-Efficient prefix sum
9691 // the inclusive parallel scan of the rpriva array.
9692 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9693 <D.2102>:
9694 GOMP_barrier ();
9695 down = 0;
9696 k = 1;
9697 num_threadsu = (unsigned int) num_threads;
9698 thread_numup1 = (unsigned int) thread_num + 1;
9699 <D.2108>:
9700 twok = k << 1;
9701 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9702 <D.2110>:
9703 down = 4294967295;
9704 k = k >> 1;
9705 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9706 <D.2112>:
9707 k = k >> 1;
9708 <D.2111>:
9709 twok = k << 1;
9710 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9711 mul = REALPART_EXPR <cplx>;
9712 ovf = IMAGPART_EXPR <cplx>;
9713 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9714 <D.2116>:
9715 andv = k & down;
9716 andvm1 = andv + 4294967295;
9717 l = mul + andvm1;
9718 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9719 <D.2120>:
9720 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9721 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9722 rpriva[l] = rpriva[l - k] + rpriva[l];
9723 <D.2117>:
9724 if (down == 0) goto <D.2121>; else goto <D.2122>;
9725 <D.2121>:
9726 k = k << 1;
9727 goto <D.2123>;
9728 <D.2122>:
9729 k = k >> 1;
9730 <D.2123>:
9731 GOMP_barrier ();
9732 if (k != 0) goto <D.2108>; else goto <D.2103>;
9733 <D.2103>:
9734 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9735 <D.2124>:
9736 // For UDRs this is UDR init or copy from var3.
9737 var2 = 0;
9738 goto <D.2126>;
9739 <D.2125>:
9740 var2 = rpriva[thread_num - 1];
9741 <D.2126>:
9742 ivar = 0;
9743 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9744 reduction(inscan,+:r) private(i)
9745 for (i = 0; i < n; i = i + 1)
9746 {
9747 {
9748 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9749 r = var2 + rprivb[ivar];
9750 }
9751 {
9752 // This is the scan phase from user code.
9753 use (r);
9754 // Plus a bump of the iterator.
9755 ivar = ivar + 1;
9756 }
9757 } */
9758
9759 static void
9760 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9761 struct omp_for_data *fd, omp_context *ctx)
9762 {
9763 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9764 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9765
9766 gimple_seq body = gimple_omp_body (stmt);
9767 gimple_stmt_iterator input1_gsi = gsi_none ();
9768 struct walk_stmt_info wi;
9769 memset (&wi, 0, sizeof (wi));
9770 wi.val_only = true;
9771 wi.info = (void *) &input1_gsi;
9772 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9773 gcc_assert (!gsi_end_p (input1_gsi));
9774
9775 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9776 gimple_stmt_iterator gsi = input1_gsi;
9777 gsi_next (&gsi);
9778 gimple_stmt_iterator scan1_gsi = gsi;
9779 gimple *scan_stmt1 = gsi_stmt (gsi);
9780 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9781
9782 gimple_seq input_body = gimple_omp_body (input_stmt1);
9783 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9784 gimple_omp_set_body (input_stmt1, NULL);
9785 gimple_omp_set_body (scan_stmt1, NULL);
9786 gimple_omp_set_body (stmt, NULL);
9787
9788 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9789 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9790 gimple_omp_set_body (stmt, body);
9791 gimple_omp_set_body (input_stmt1, input_body);
9792
9793 gimple_stmt_iterator input2_gsi = gsi_none ();
9794 memset (&wi, 0, sizeof (wi));
9795 wi.val_only = true;
9796 wi.info = (void *) &input2_gsi;
9797 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9798 gcc_assert (!gsi_end_p (input2_gsi));
9799
9800 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9801 gsi = input2_gsi;
9802 gsi_next (&gsi);
9803 gimple_stmt_iterator scan2_gsi = gsi;
9804 gimple *scan_stmt2 = gsi_stmt (gsi);
9805 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9806 gimple_omp_set_body (scan_stmt2, scan_body);
9807
9808 gimple_stmt_iterator input3_gsi = gsi_none ();
9809 gimple_stmt_iterator scan3_gsi = gsi_none ();
9810 gimple_stmt_iterator input4_gsi = gsi_none ();
9811 gimple_stmt_iterator scan4_gsi = gsi_none ();
9812 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9813 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9814 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9815 if (is_for_simd)
9816 {
9817 memset (&wi, 0, sizeof (wi));
9818 wi.val_only = true;
9819 wi.info = (void *) &input3_gsi;
9820 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9821 gcc_assert (!gsi_end_p (input3_gsi));
9822
9823 input_stmt3 = gsi_stmt (input3_gsi);
9824 gsi = input3_gsi;
9825 gsi_next (&gsi);
9826 scan3_gsi = gsi;
9827 scan_stmt3 = gsi_stmt (gsi);
9828 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9829
9830 memset (&wi, 0, sizeof (wi));
9831 wi.val_only = true;
9832 wi.info = (void *) &input4_gsi;
9833 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9834 gcc_assert (!gsi_end_p (input4_gsi));
9835
9836 input_stmt4 = gsi_stmt (input4_gsi);
9837 gsi = input4_gsi;
9838 gsi_next (&gsi);
9839 scan4_gsi = gsi;
9840 scan_stmt4 = gsi_stmt (gsi);
9841 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9842
9843 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9844 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9845 }
9846
9847 tree num_threads = create_tmp_var (integer_type_node);
9848 tree thread_num = create_tmp_var (integer_type_node);
9849 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9850 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9851 gimple *g = gimple_build_call (nthreads_decl, 0);
9852 gimple_call_set_lhs (g, num_threads);
9853 gimple_seq_add_stmt (body_p, g);
9854 g = gimple_build_call (threadnum_decl, 0);
9855 gimple_call_set_lhs (g, thread_num);
9856 gimple_seq_add_stmt (body_p, g);
9857
9858 tree ivar = create_tmp_var (sizetype);
9859 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9860 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9861 tree k = create_tmp_var (unsigned_type_node);
9862 tree l = create_tmp_var (unsigned_type_node);
9863
9864 gimple_seq clist = NULL, mdlist = NULL;
9865 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9866 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9867 gimple_seq scan1_list = NULL, input2_list = NULL;
9868 gimple_seq last_list = NULL, reduc_list = NULL;
9869 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9870 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9871 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9872 {
9873 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9874 tree var = OMP_CLAUSE_DECL (c);
9875 tree new_var = lookup_decl (var, ctx);
9876 tree var3 = NULL_TREE;
9877 tree new_vard = new_var;
9878 if (omp_is_reference (var))
9879 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9880 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9881 {
9882 var3 = maybe_lookup_decl (new_vard, ctx);
9883 if (var3 == new_vard)
9884 var3 = NULL_TREE;
9885 }
9886
9887 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9888 tree rpriva = create_tmp_var (ptype);
9889 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9890 OMP_CLAUSE_DECL (nc) = rpriva;
9891 *cp1 = nc;
9892 cp1 = &OMP_CLAUSE_CHAIN (nc);
9893
9894 tree rprivb = create_tmp_var (ptype);
9895 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9896 OMP_CLAUSE_DECL (nc) = rprivb;
9897 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9898 *cp1 = nc;
9899 cp1 = &OMP_CLAUSE_CHAIN (nc);
9900
9901 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9902 if (new_vard != new_var)
9903 TREE_ADDRESSABLE (var2) = 1;
9904 gimple_add_tmp_var (var2);
9905
9906 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9907 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9908 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9909 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9910 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9911
9912 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9913 thread_num, integer_minus_one_node);
9914 x = fold_convert_loc (clause_loc, sizetype, x);
9915 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9916 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9917 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9918 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9919
9920 x = fold_convert_loc (clause_loc, sizetype, l);
9921 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9922 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9923 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9924 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9925
9926 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9927 x = fold_convert_loc (clause_loc, sizetype, x);
9928 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9929 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9930 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9931 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9932
9933 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9934 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9935 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9936 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9937
9938 tree var4 = is_for_simd ? new_var : var2;
9939 tree var5 = NULL_TREE, var6 = NULL_TREE;
9940 if (is_for_simd)
9941 {
9942 var5 = lookup_decl (var, input_simd_ctx);
9943 var6 = lookup_decl (var, scan_simd_ctx);
9944 if (new_vard != new_var)
9945 {
9946 var5 = build_simple_mem_ref_loc (clause_loc, var5);
9947 var6 = build_simple_mem_ref_loc (clause_loc, var6);
9948 }
9949 }
9950 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9951 {
9952 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9953 tree val = var2;
9954
9955 x = lang_hooks.decls.omp_clause_default_ctor
9956 (c, var2, build_outer_var_ref (var, ctx));
9957 if (x)
9958 gimplify_and_add (x, &clist);
9959
9960 x = build_outer_var_ref (var, ctx);
9961 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
9962 x);
9963 gimplify_and_add (x, &thr01_list);
9964
9965 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9966 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9967 if (var3)
9968 {
9969 x = unshare_expr (var4);
9970 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9971 gimplify_and_add (x, &thrn1_list);
9972 x = unshare_expr (var4);
9973 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9974 gimplify_and_add (x, &thr02_list);
9975 }
9976 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9977 {
9978 /* Otherwise, assign to it the identity element. */
9979 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9980 tseq = copy_gimple_seq_and_replace_locals (tseq);
9981 if (!is_for_simd)
9982 {
9983 if (new_vard != new_var)
9984 val = build_fold_addr_expr_loc (clause_loc, val);
9985 SET_DECL_VALUE_EXPR (new_vard, val);
9986 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9987 }
9988 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9989 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9990 lower_omp (&tseq, ctx);
9991 gimple_seq_add_seq (&thrn1_list, tseq);
9992 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9993 lower_omp (&tseq, ctx);
9994 gimple_seq_add_seq (&thr02_list, tseq);
9995 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9996 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9997 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9998 if (y)
9999 SET_DECL_VALUE_EXPR (new_vard, y);
10000 else
10001 {
10002 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10003 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10004 }
10005 }
10006
10007 x = unshare_expr (var4);
10008 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
10009 gimplify_and_add (x, &thrn2_list);
10010
10011 if (is_for_simd)
10012 {
10013 x = unshare_expr (rprivb_ref);
10014 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
10015 gimplify_and_add (x, &scan1_list);
10016 }
10017 else
10018 {
10019 if (ctx->scan_exclusive)
10020 {
10021 x = unshare_expr (rprivb_ref);
10022 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10023 gimplify_and_add (x, &scan1_list);
10024 }
10025
10026 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10027 tseq = copy_gimple_seq_and_replace_locals (tseq);
10028 SET_DECL_VALUE_EXPR (placeholder, var2);
10029 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10030 lower_omp (&tseq, ctx);
10031 gimple_seq_add_seq (&scan1_list, tseq);
10032
10033 if (ctx->scan_inclusive)
10034 {
10035 x = unshare_expr (rprivb_ref);
10036 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10037 gimplify_and_add (x, &scan1_list);
10038 }
10039 }
10040
10041 x = unshare_expr (rpriva_ref);
10042 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10043 unshare_expr (var4));
10044 gimplify_and_add (x, &mdlist);
10045
10046 x = unshare_expr (is_for_simd ? var6 : new_var);
10047 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10048 gimplify_and_add (x, &input2_list);
10049
10050 val = rprivb_ref;
10051 if (new_vard != new_var)
10052 val = build_fold_addr_expr_loc (clause_loc, val);
10053
10054 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10055 tseq = copy_gimple_seq_and_replace_locals (tseq);
10056 SET_DECL_VALUE_EXPR (new_vard, val);
10057 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10058 if (is_for_simd)
10059 {
10060 SET_DECL_VALUE_EXPR (placeholder, var6);
10061 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10062 }
10063 else
10064 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10065 lower_omp (&tseq, ctx);
10066 if (y)
10067 SET_DECL_VALUE_EXPR (new_vard, y);
10068 else
10069 {
10070 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10071 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10072 }
10073 if (!is_for_simd)
10074 {
10075 SET_DECL_VALUE_EXPR (placeholder, new_var);
10076 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10077 lower_omp (&tseq, ctx);
10078 }
10079 gimple_seq_add_seq (&input2_list, tseq);
10080
10081 x = build_outer_var_ref (var, ctx);
10082 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10083 gimplify_and_add (x, &last_list);
10084
10085 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10086 gimplify_and_add (x, &reduc_list);
10087 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10088 tseq = copy_gimple_seq_and_replace_locals (tseq);
10089 val = rprival_ref;
10090 if (new_vard != new_var)
10091 val = build_fold_addr_expr_loc (clause_loc, val);
10092 SET_DECL_VALUE_EXPR (new_vard, val);
10093 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10094 SET_DECL_VALUE_EXPR (placeholder, var2);
10095 lower_omp (&tseq, ctx);
10096 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10097 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10098 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10099 if (y)
10100 SET_DECL_VALUE_EXPR (new_vard, y);
10101 else
10102 {
10103 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10104 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10105 }
10106 gimple_seq_add_seq (&reduc_list, tseq);
10107 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10108 gimplify_and_add (x, &reduc_list);
10109
10110 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10111 if (x)
10112 gimplify_and_add (x, dlist);
10113 }
10114 else
10115 {
10116 x = build_outer_var_ref (var, ctx);
10117 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10118
10119 x = omp_reduction_init (c, TREE_TYPE (new_var));
10120 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10121 &thrn1_list);
10122 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10123
10124 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10125
10126 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10127 if (code == MINUS_EXPR)
10128 code = PLUS_EXPR;
10129
10130 if (is_for_simd)
10131 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10132 else
10133 {
10134 if (ctx->scan_exclusive)
10135 gimplify_assign (unshare_expr (rprivb_ref), var2,
10136 &scan1_list);
10137 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10138 gimplify_assign (var2, x, &scan1_list);
10139 if (ctx->scan_inclusive)
10140 gimplify_assign (unshare_expr (rprivb_ref), var2,
10141 &scan1_list);
10142 }
10143
10144 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10145 &mdlist);
10146
10147 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10148 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10149
10150 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10151 &last_list);
10152
10153 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10154 unshare_expr (rprival_ref));
10155 gimplify_assign (rprival_ref, x, &reduc_list);
10156 }
10157 }
10158
10159 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10160 gimple_seq_add_stmt (&scan1_list, g);
10161 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10162 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10163 ? scan_stmt4 : scan_stmt2), g);
10164
10165 tree controlb = create_tmp_var (boolean_type_node);
10166 tree controlp = create_tmp_var (ptr_type_node);
10167 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10168 OMP_CLAUSE_DECL (nc) = controlb;
10169 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10170 *cp1 = nc;
10171 cp1 = &OMP_CLAUSE_CHAIN (nc);
10172 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10173 OMP_CLAUSE_DECL (nc) = controlp;
10174 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10175 *cp1 = nc;
10176 cp1 = &OMP_CLAUSE_CHAIN (nc);
10177 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10178 OMP_CLAUSE_DECL (nc) = controlb;
10179 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10180 *cp2 = nc;
10181 cp2 = &OMP_CLAUSE_CHAIN (nc);
10182 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10183 OMP_CLAUSE_DECL (nc) = controlp;
10184 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10185 *cp2 = nc;
10186 cp2 = &OMP_CLAUSE_CHAIN (nc);
10187
10188 *cp1 = gimple_omp_for_clauses (stmt);
10189 gimple_omp_for_set_clauses (stmt, new_clauses1);
10190 *cp2 = gimple_omp_for_clauses (new_stmt);
10191 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10192
10193 if (is_for_simd)
10194 {
10195 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10196 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10197
10198 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10199 GSI_SAME_STMT);
10200 gsi_remove (&input3_gsi, true);
10201 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10202 GSI_SAME_STMT);
10203 gsi_remove (&scan3_gsi, true);
10204 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10205 GSI_SAME_STMT);
10206 gsi_remove (&input4_gsi, true);
10207 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10208 GSI_SAME_STMT);
10209 gsi_remove (&scan4_gsi, true);
10210 }
10211 else
10212 {
10213 gimple_omp_set_body (scan_stmt1, scan1_list);
10214 gimple_omp_set_body (input_stmt2, input2_list);
10215 }
10216
10217 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10218 GSI_SAME_STMT);
10219 gsi_remove (&input1_gsi, true);
10220 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10221 GSI_SAME_STMT);
10222 gsi_remove (&scan1_gsi, true);
10223 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10224 GSI_SAME_STMT);
10225 gsi_remove (&input2_gsi, true);
10226 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10227 GSI_SAME_STMT);
10228 gsi_remove (&scan2_gsi, true);
10229
10230 gimple_seq_add_seq (body_p, clist);
10231
10232 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10233 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10234 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10235 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10236 gimple_seq_add_stmt (body_p, g);
10237 g = gimple_build_label (lab1);
10238 gimple_seq_add_stmt (body_p, g);
10239 gimple_seq_add_seq (body_p, thr01_list);
10240 g = gimple_build_goto (lab3);
10241 gimple_seq_add_stmt (body_p, g);
10242 g = gimple_build_label (lab2);
10243 gimple_seq_add_stmt (body_p, g);
10244 gimple_seq_add_seq (body_p, thrn1_list);
10245 g = gimple_build_label (lab3);
10246 gimple_seq_add_stmt (body_p, g);
10247
10248 g = gimple_build_assign (ivar, size_zero_node);
10249 gimple_seq_add_stmt (body_p, g);
10250
10251 gimple_seq_add_stmt (body_p, stmt);
10252 gimple_seq_add_seq (body_p, body);
10253 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10254 fd->loop.v));
10255
10256 g = gimple_build_omp_return (true);
10257 gimple_seq_add_stmt (body_p, g);
10258 gimple_seq_add_seq (body_p, mdlist);
10259
10260 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10261 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10262 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10263 gimple_seq_add_stmt (body_p, g);
10264 g = gimple_build_label (lab1);
10265 gimple_seq_add_stmt (body_p, g);
10266
10267 g = omp_build_barrier (NULL);
10268 gimple_seq_add_stmt (body_p, g);
10269
10270 tree down = create_tmp_var (unsigned_type_node);
10271 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10272 gimple_seq_add_stmt (body_p, g);
10273
10274 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10275 gimple_seq_add_stmt (body_p, g);
10276
10277 tree num_threadsu = create_tmp_var (unsigned_type_node);
10278 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10279 gimple_seq_add_stmt (body_p, g);
10280
10281 tree thread_numu = create_tmp_var (unsigned_type_node);
10282 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10283 gimple_seq_add_stmt (body_p, g);
10284
10285 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10286 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10287 build_int_cst (unsigned_type_node, 1));
10288 gimple_seq_add_stmt (body_p, g);
10289
10290 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10291 g = gimple_build_label (lab3);
10292 gimple_seq_add_stmt (body_p, g);
10293
10294 tree twok = create_tmp_var (unsigned_type_node);
10295 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10296 gimple_seq_add_stmt (body_p, g);
10297
10298 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10299 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10300 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10301 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10302 gimple_seq_add_stmt (body_p, g);
10303 g = gimple_build_label (lab4);
10304 gimple_seq_add_stmt (body_p, g);
10305 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10306 gimple_seq_add_stmt (body_p, g);
10307 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10308 gimple_seq_add_stmt (body_p, g);
10309
10310 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10311 gimple_seq_add_stmt (body_p, g);
10312 g = gimple_build_label (lab6);
10313 gimple_seq_add_stmt (body_p, g);
10314
10315 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10316 gimple_seq_add_stmt (body_p, g);
10317
10318 g = gimple_build_label (lab5);
10319 gimple_seq_add_stmt (body_p, g);
10320
10321 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10322 gimple_seq_add_stmt (body_p, g);
10323
10324 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10325 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10326 gimple_call_set_lhs (g, cplx);
10327 gimple_seq_add_stmt (body_p, g);
10328 tree mul = create_tmp_var (unsigned_type_node);
10329 g = gimple_build_assign (mul, REALPART_EXPR,
10330 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10331 gimple_seq_add_stmt (body_p, g);
10332 tree ovf = create_tmp_var (unsigned_type_node);
10333 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10334 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10335 gimple_seq_add_stmt (body_p, g);
10336
10337 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10338 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10339 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10340 lab7, lab8);
10341 gimple_seq_add_stmt (body_p, g);
10342 g = gimple_build_label (lab7);
10343 gimple_seq_add_stmt (body_p, g);
10344
10345 tree andv = create_tmp_var (unsigned_type_node);
10346 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10347 gimple_seq_add_stmt (body_p, g);
10348 tree andvm1 = create_tmp_var (unsigned_type_node);
10349 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10350 build_minus_one_cst (unsigned_type_node));
10351 gimple_seq_add_stmt (body_p, g);
10352
10353 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10354 gimple_seq_add_stmt (body_p, g);
10355
10356 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10357 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10358 gimple_seq_add_stmt (body_p, g);
10359 g = gimple_build_label (lab9);
10360 gimple_seq_add_stmt (body_p, g);
10361 gimple_seq_add_seq (body_p, reduc_list);
10362 g = gimple_build_label (lab8);
10363 gimple_seq_add_stmt (body_p, g);
10364
10365 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10366 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10367 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10368 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10369 lab10, lab11);
10370 gimple_seq_add_stmt (body_p, g);
10371 g = gimple_build_label (lab10);
10372 gimple_seq_add_stmt (body_p, g);
10373 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10374 gimple_seq_add_stmt (body_p, g);
10375 g = gimple_build_goto (lab12);
10376 gimple_seq_add_stmt (body_p, g);
10377 g = gimple_build_label (lab11);
10378 gimple_seq_add_stmt (body_p, g);
10379 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10380 gimple_seq_add_stmt (body_p, g);
10381 g = gimple_build_label (lab12);
10382 gimple_seq_add_stmt (body_p, g);
10383
10384 g = omp_build_barrier (NULL);
10385 gimple_seq_add_stmt (body_p, g);
10386
10387 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10388 lab3, lab2);
10389 gimple_seq_add_stmt (body_p, g);
10390
10391 g = gimple_build_label (lab2);
10392 gimple_seq_add_stmt (body_p, g);
10393
10394 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10395 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10396 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10397 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10398 gimple_seq_add_stmt (body_p, g);
10399 g = gimple_build_label (lab1);
10400 gimple_seq_add_stmt (body_p, g);
10401 gimple_seq_add_seq (body_p, thr02_list);
10402 g = gimple_build_goto (lab3);
10403 gimple_seq_add_stmt (body_p, g);
10404 g = gimple_build_label (lab2);
10405 gimple_seq_add_stmt (body_p, g);
10406 gimple_seq_add_seq (body_p, thrn2_list);
10407 g = gimple_build_label (lab3);
10408 gimple_seq_add_stmt (body_p, g);
10409
10410 g = gimple_build_assign (ivar, size_zero_node);
10411 gimple_seq_add_stmt (body_p, g);
10412 gimple_seq_add_stmt (body_p, new_stmt);
10413 gimple_seq_add_seq (body_p, new_body);
10414
10415 gimple_seq new_dlist = NULL;
10416 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10417 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10418 tree num_threadsm1 = create_tmp_var (integer_type_node);
10419 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10420 integer_minus_one_node);
10421 gimple_seq_add_stmt (&new_dlist, g);
10422 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10423 gimple_seq_add_stmt (&new_dlist, g);
10424 g = gimple_build_label (lab1);
10425 gimple_seq_add_stmt (&new_dlist, g);
10426 gimple_seq_add_seq (&new_dlist, last_list);
10427 g = gimple_build_label (lab2);
10428 gimple_seq_add_stmt (&new_dlist, g);
10429 gimple_seq_add_seq (&new_dlist, *dlist);
10430 *dlist = new_dlist;
10431 }
10432
10433 /* Lower code for an OMP loop directive. */
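/* A sketch of the overall shape produced here (not the exact GIMPLE):

     <input clause initialization, pre-body, lowered bound temporaries>
     GIMPLE_OMP_FOR (V = VAL1; V cond VAL2; V = V +- VAL3)
       <lowered loop body>
     GIMPLE_OMP_CONTINUE (V, V)
     <reduction and lastprivate epilogue, copy-out>
     GIMPLE_OMP_RETURN

   The actual scheduling of iterations is generated later by
   pass_expand_omp.  */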
10434
10435 static void
10436 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10437 {
10438 tree *rhs_p, block;
10439 struct omp_for_data fd, *fdp = NULL;
10440 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10441 gbind *new_stmt;
10442 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10443 gimple_seq cnt_list = NULL, clist = NULL;
10444 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10445 size_t i;
10446
10447 push_gimplify_context ();
10448
10449 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10450
10451 block = make_node (BLOCK);
10452 new_stmt = gimple_build_bind (NULL, NULL, block);
10453 /* Replace at gsi right away, so that 'stmt' is no longer a member
10454 of a sequence, as we're going to add it to a different
10455 one below. */
10456 gsi_replace (gsi_p, new_stmt, true);
10457
10458 /* Move declarations of temporaries out of the loop body before we
10459 make it go away. */
10460 omp_for_body = gimple_omp_body (stmt);
10461 if (!gimple_seq_empty_p (omp_for_body)
10462 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10463 {
10464 gbind *inner_bind
10465 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10466 tree vars = gimple_bind_vars (inner_bind);
10467 gimple_bind_append_vars (new_stmt, vars);
10468 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block; don't
10469 keep them on the inner_bind and its block. */
10470 gimple_bind_set_vars (inner_bind, NULL_TREE);
10471 if (gimple_bind_block (inner_bind))
10472 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10473 }
10474
10475 if (gimple_omp_for_combined_into_p (stmt))
10476 {
10477 omp_extract_for_data (stmt, &fd, NULL);
10478 fdp = &fd;
10479
10480 /* We need two temporaries with fd.loop.v type (istart/iend)
10481 and then (fd.collapse - 1) temporaries with the same
10482 type for count2 ... countN-1 vars if not constant. */
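/* E.g., collapse(2) with a non-constant combined bound yields
   COUNT == 3: istart, iend and one extra count temporary.  For
   taskreg constructs the decls come from the enclosing stmt's
   _looptemp_ clauses instead of being freshly created.  */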
10483 size_t count = 2;
10484 tree type = fd.iter_type;
10485 if (fd.collapse > 1
10486 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10487 count += fd.collapse - 1;
10488 bool taskreg_for
10489 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10490 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10491 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10492 tree simtc = NULL;
10493 tree clauses = *pc;
10494 if (taskreg_for)
10495 outerc
10496 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10497 OMP_CLAUSE__LOOPTEMP_);
10498 if (ctx->simt_stmt)
10499 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10500 OMP_CLAUSE__LOOPTEMP_);
10501 for (i = 0; i < count; i++)
10502 {
10503 tree temp;
10504 if (taskreg_for)
10505 {
10506 gcc_assert (outerc);
10507 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10508 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10509 OMP_CLAUSE__LOOPTEMP_);
10510 }
10511 else
10512 {
10513 /* If there are 2 adjacent SIMD stmts, one with _simt_
10514 clause, another without, make sure they have the same
10515 decls in _looptemp_ clauses, because the outer stmt
10516 they are combined into will look up just one inner_stmt. */
10517 if (ctx->simt_stmt)
10518 temp = OMP_CLAUSE_DECL (simtc);
10519 else
10520 temp = create_tmp_var (type);
10521 insert_decl_map (&ctx->outer->cb, temp, temp);
10522 }
10523 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10524 OMP_CLAUSE_DECL (*pc) = temp;
10525 pc = &OMP_CLAUSE_CHAIN (*pc);
10526 if (ctx->simt_stmt)
10527 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10528 OMP_CLAUSE__LOOPTEMP_);
10529 }
10530 *pc = clauses;
10531 }
10532
10533 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10534 dlist = NULL;
10535 body = NULL;
10536 tree rclauses
10537 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10538 OMP_CLAUSE_REDUCTION);
10539 tree rtmp = NULL_TREE;
10540 if (rclauses)
10541 {
10542 tree type = build_pointer_type (pointer_sized_int_node);
10543 tree temp = create_tmp_var (type);
10544 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10545 OMP_CLAUSE_DECL (c) = temp;
10546 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10547 gimple_omp_for_set_clauses (stmt, c);
10548 lower_omp_task_reductions (ctx, OMP_FOR,
10549 gimple_omp_for_clauses (stmt),
10550 &tred_ilist, &tred_dlist);
10551 rclauses = c;
10552 rtmp = make_ssa_name (type);
10553 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10554 }
10555
10556 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10557 ctx);
10558
10559 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10560 fdp);
10561 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10562 gimple_omp_for_pre_body (stmt));
10563
10564 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10565
10566 /* Lower the header expressions. At this point, we can assume that
10567 the header is of the form:
10568
10569 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10570
10571 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10572 using the .omp_data_s mapping, if needed. */
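/* E.g., if VAL2 referenced a variable shared from an enclosing
   construct, the lowering above has already rewritten it into a
   load through the received .omp_data_s record; forcing it into a
   formal temporary below then leaves a simple gimple value in the
   loop header (a sketch of the common case).  */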
10573 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10574 {
10575 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10576 if (!is_gimple_min_invariant (*rhs_p))
10577 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10578 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10579 recompute_tree_invariant_for_addr_expr (*rhs_p);
10580
10581 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10582 if (!is_gimple_min_invariant (*rhs_p))
10583 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10584 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10585 recompute_tree_invariant_for_addr_expr (*rhs_p);
10586
10587 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10588 if (!is_gimple_min_invariant (*rhs_p))
10589 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10590 }
10591 if (rclauses)
10592 gimple_seq_add_seq (&tred_ilist, cnt_list);
10593 else
10594 gimple_seq_add_seq (&body, cnt_list);
10595
10596 /* Once lowered, extract the bounds and clauses. */
10597 omp_extract_for_data (stmt, &fd, NULL);
10598
10599 if (is_gimple_omp_oacc (ctx->stmt)
10600 && !ctx_in_oacc_kernels_region (ctx))
10601 lower_oacc_head_tail (gimple_location (stmt),
10602 gimple_omp_for_clauses (stmt),
10603 &oacc_head, &oacc_tail, ctx);
10604
10605 /* Add OpenACC partitioning and reduction markers just before the loop. */
10606 if (oacc_head)
10607 gimple_seq_add_seq (&body, oacc_head);
10608
10609 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10610
10611 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10612 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10613 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10614 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10615 {
10616 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10617 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10618 OMP_CLAUSE_LINEAR_STEP (c)
10619 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10620 ctx);
10621 }
10622
10623 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
10624 && gimple_omp_for_grid_phony (stmt));
10625 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10626 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10627 {
10628 gcc_assert (!phony_loop);
10629 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10630 }
10631 else
10632 {
10633 if (!phony_loop)
10634 gimple_seq_add_stmt (&body, stmt);
10635 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10636 }
10637
10638 if (!phony_loop)
10639 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10640 fd.loop.v));
10641
10642 /* After the loop, add exit clauses. */
10643 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10644
10645 if (clist)
10646 {
10647 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10648 gcall *g = gimple_build_call (fndecl, 0);
10649 gimple_seq_add_stmt (&body, g);
10650 gimple_seq_add_seq (&body, clist);
10651 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10652 g = gimple_build_call (fndecl, 0);
10653 gimple_seq_add_stmt (&body, g);
10654 }
10655
10656 if (ctx->cancellable)
10657 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10658
10659 gimple_seq_add_seq (&body, dlist);
10660
10661 if (rclauses)
10662 {
10663 gimple_seq_add_seq (&tred_ilist, body);
10664 body = tred_ilist;
10665 }
10666
10667 body = maybe_catch_exception (body);
10668
10669 if (!phony_loop)
10670 {
10671 /* Region exit marker goes at the end of the loop body. */
10672 gimple *g = gimple_build_omp_return (fd.have_nowait);
10673 gimple_seq_add_stmt (&body, g);
10674
10675 gimple_seq_add_seq (&body, tred_dlist);
10676
10677 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10678
10679 if (rclauses)
10680 OMP_CLAUSE_DECL (rclauses) = rtmp;
10681 }
10682
10683 /* Add OpenACC joining and reduction markers just after the loop. */
10684 if (oacc_tail)
10685 gimple_seq_add_seq (&body, oacc_tail);
10686
10687 pop_gimplify_context (new_stmt);
10688
10689 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10690 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10691 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10692 if (BLOCK_VARS (block))
10693 TREE_USED (block) = 1;
10694
10695 gimple_bind_set_body (new_stmt, body);
10696 gimple_omp_set_body (stmt, NULL);
10697 gimple_omp_for_set_pre_body (stmt, NULL);
10698 }
10699
10700 /* Callback for walk_stmts. Check whether the statement sequence contains
10701 nothing but a single GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
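/* E.g., for

     #pragma omp parallel
     #pragma omp for
     for (...) ...

   the walk ends with *INFO == 1 and the parallel can be marked
   combined; any additional statement in the body forces -1.  */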
10702
10703 static tree
10704 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10705 bool *handled_ops_p,
10706 struct walk_stmt_info *wi)
10707 {
10708 int *info = (int *) wi->info;
10709 gimple *stmt = gsi_stmt (*gsi_p);
10710
10711 *handled_ops_p = true;
10712 switch (gimple_code (stmt))
10713 {
10714 WALK_SUBSTMTS;
10715
10716 case GIMPLE_DEBUG:
10717 break;
10718 case GIMPLE_OMP_FOR:
10719 case GIMPLE_OMP_SECTIONS:
10720 *info = *info == 0 ? 1 : -1;
10721 break;
10722 default:
10723 *info = -1;
10724 break;
10725 }
10726 return NULL;
10727 }
10728
10729 struct omp_taskcopy_context
10730 {
10731 /* This field must be at the beginning, as we do "inheritance": Some
10732 callback functions for tree-inline.c (e.g., omp_copy_decl)
10733 receive a copy_body_data pointer that is up-casted to an
10734 omp_context pointer. */
10735 copy_body_data cb;
10736 omp_context *ctx;
10737 };
10738
10739 static tree
10740 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10741 {
10742 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10743
10744 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10745 return create_tmp_var (TREE_TYPE (var));
10746
10747 return var;
10748 }
10749
10750 static tree
10751 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10752 {
10753 tree name, new_fields = NULL, type, f;
10754
10755 type = lang_hooks.types.make_type (RECORD_TYPE);
10756 name = DECL_NAME (TYPE_NAME (orig_type));
10757 name = build_decl (gimple_location (tcctx->ctx->stmt),
10758 TYPE_DECL, name, type);
10759 TYPE_NAME (type) = name;
10760
10761 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10762 {
10763 tree new_f = copy_node (f);
10764 DECL_CONTEXT (new_f) = type;
10765 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10766 TREE_CHAIN (new_f) = new_fields;
10767 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10768 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10769 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10770 &tcctx->cb, NULL);
10771 new_fields = new_f;
10772 tcctx->cb.decl_map->put (f, new_f);
10773 }
10774 TYPE_FIELDS (type) = nreverse (new_fields);
10775 layout_type (type);
10776 return type;
10777 }
10778
10779 /* Create task copyfn. */
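/* The copyfn takes two record pointers and, roughly (a sketch):

     copyfn (struct .omp_data_t *dst, struct .omp_data_s *src)
     {
       dst->t = src->t;        <- firstprivate copy
       dst->ptr = src->ptr;    <- shared var pointer
       ...
     }

   with VLA firstprivates additionally copy-constructed and their
   address fields fixed up, as the passes below implement.  */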
10780
10781 static void
10782 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10783 {
10784 struct function *child_cfun;
10785 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10786 tree record_type, srecord_type, bind, list;
10787 bool record_needs_remap = false, srecord_needs_remap = false;
10788 splay_tree_node n;
10789 struct omp_taskcopy_context tcctx;
10790 location_t loc = gimple_location (task_stmt);
10791 size_t looptempno = 0;
10792
10793 child_fn = gimple_omp_task_copy_fn (task_stmt);
10794 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10795 gcc_assert (child_cfun->cfg == NULL);
10796 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10797
10798 /* Reset DECL_CONTEXT on function arguments. */
10799 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10800 DECL_CONTEXT (t) = child_fn;
10801
10802 /* Populate the function. */
10803 push_gimplify_context ();
10804 push_cfun (child_cfun);
10805
10806 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10807 TREE_SIDE_EFFECTS (bind) = 1;
10808 list = NULL;
10809 DECL_SAVED_TREE (child_fn) = bind;
10810 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10811
10812 /* Remap src and dst argument types if needed. */
10813 record_type = ctx->record_type;
10814 srecord_type = ctx->srecord_type;
10815 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10816 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10817 {
10818 record_needs_remap = true;
10819 break;
10820 }
10821 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10822 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10823 {
10824 srecord_needs_remap = true;
10825 break;
10826 }
10827
10828 if (record_needs_remap || srecord_needs_remap)
10829 {
10830 memset (&tcctx, '\0', sizeof (tcctx));
10831 tcctx.cb.src_fn = ctx->cb.src_fn;
10832 tcctx.cb.dst_fn = child_fn;
10833 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10834 gcc_checking_assert (tcctx.cb.src_node);
10835 tcctx.cb.dst_node = tcctx.cb.src_node;
10836 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10837 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10838 tcctx.cb.eh_lp_nr = 0;
10839 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10840 tcctx.cb.decl_map = new hash_map<tree, tree>;
10841 tcctx.ctx = ctx;
10842
10843 if (record_needs_remap)
10844 record_type = task_copyfn_remap_type (&tcctx, record_type);
10845 if (srecord_needs_remap)
10846 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10847 }
10848 else
10849 tcctx.cb.decl_map = NULL;
10850
10851 arg = DECL_ARGUMENTS (child_fn);
10852 TREE_TYPE (arg) = build_pointer_type (record_type);
10853 sarg = DECL_CHAIN (arg);
10854 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10855
10856 /* First pass: initialize temporaries used in record_type and srecord_type
10857 sizes and field offsets. */
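/* E.g., for a firstprivate VLA 'int a[n]', the remapped field's
   size refers to a temporary holding 'n'; that temporary is loaded
   from the sender record here, before any field size or offset is
   evaluated (a sketch of the common case).  */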
10858 if (tcctx.cb.decl_map)
10859 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10860 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10861 {
10862 tree *p;
10863
10864 decl = OMP_CLAUSE_DECL (c);
10865 p = tcctx.cb.decl_map->get (decl);
10866 if (p == NULL)
10867 continue;
10868 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10869 sf = (tree) n->value;
10870 sf = *tcctx.cb.decl_map->get (sf);
10871 src = build_simple_mem_ref_loc (loc, sarg);
10872 src = omp_build_component_ref (src, sf);
10873 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10874 append_to_statement_list (t, &list);
10875 }
10876
10877 /* Second pass: copy shared var pointers and copy-construct non-VLA
10878 firstprivate vars. */
10879 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10880 switch (OMP_CLAUSE_CODE (c))
10881 {
10882 splay_tree_key key;
10883 case OMP_CLAUSE_SHARED:
10884 decl = OMP_CLAUSE_DECL (c);
10885 key = (splay_tree_key) decl;
10886 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10887 key = (splay_tree_key) &DECL_UID (decl);
10888 n = splay_tree_lookup (ctx->field_map, key);
10889 if (n == NULL)
10890 break;
10891 f = (tree) n->value;
10892 if (tcctx.cb.decl_map)
10893 f = *tcctx.cb.decl_map->get (f);
10894 n = splay_tree_lookup (ctx->sfield_map, key);
10895 sf = (tree) n->value;
10896 if (tcctx.cb.decl_map)
10897 sf = *tcctx.cb.decl_map->get (sf);
10898 src = build_simple_mem_ref_loc (loc, sarg);
10899 src = omp_build_component_ref (src, sf);
10900 dst = build_simple_mem_ref_loc (loc, arg);
10901 dst = omp_build_component_ref (dst, f);
10902 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10903 append_to_statement_list (t, &list);
10904 break;
10905 case OMP_CLAUSE_REDUCTION:
10906 case OMP_CLAUSE_IN_REDUCTION:
10907 decl = OMP_CLAUSE_DECL (c);
10908 if (TREE_CODE (decl) == MEM_REF)
10909 {
10910 decl = TREE_OPERAND (decl, 0);
10911 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10912 decl = TREE_OPERAND (decl, 0);
10913 if (TREE_CODE (decl) == INDIRECT_REF
10914 || TREE_CODE (decl) == ADDR_EXPR)
10915 decl = TREE_OPERAND (decl, 0);
10916 }
10917 key = (splay_tree_key) decl;
10918 n = splay_tree_lookup (ctx->field_map, key);
10919 if (n == NULL)
10920 break;
10921 f = (tree) n->value;
10922 if (tcctx.cb.decl_map)
10923 f = *tcctx.cb.decl_map->get (f);
10924 n = splay_tree_lookup (ctx->sfield_map, key);
10925 sf = (tree) n->value;
10926 if (tcctx.cb.decl_map)
10927 sf = *tcctx.cb.decl_map->get (sf);
10928 src = build_simple_mem_ref_loc (loc, sarg);
10929 src = omp_build_component_ref (src, sf);
10930 if (decl != OMP_CLAUSE_DECL (c)
10931 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10932 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10933 src = build_simple_mem_ref_loc (loc, src);
10934 dst = build_simple_mem_ref_loc (loc, arg);
10935 dst = omp_build_component_ref (dst, f);
10936 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10937 append_to_statement_list (t, &list);
10938 break;
10939 case OMP_CLAUSE__LOOPTEMP_:
10940 /* Fields for first two _looptemp_ clauses are initialized by
10941 GOMP_taskloop*, the rest are handled like firstprivate. */
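/* (Those two fields carry each task's iteration sub-range, known
   only to the runtime, so they must not be overwritten here.)  */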
10942 if (looptempno < 2)
10943 {
10944 looptempno++;
10945 break;
10946 }
10947 /* FALLTHRU */
10948 case OMP_CLAUSE__REDUCTEMP_:
10949 case OMP_CLAUSE_FIRSTPRIVATE:
10950 decl = OMP_CLAUSE_DECL (c);
10951 if (is_variable_sized (decl))
10952 break;
10953 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10954 if (n == NULL)
10955 break;
10956 f = (tree) n->value;
10957 if (tcctx.cb.decl_map)
10958 f = *tcctx.cb.decl_map->get (f);
10959 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10960 if (n != NULL)
10961 {
10962 sf = (tree) n->value;
10963 if (tcctx.cb.decl_map)
10964 sf = *tcctx.cb.decl_map->get (sf);
10965 src = build_simple_mem_ref_loc (loc, sarg);
10966 src = omp_build_component_ref (src, sf);
10967 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10968 src = build_simple_mem_ref_loc (loc, src);
10969 }
10970 else
10971 src = decl;
10972 dst = build_simple_mem_ref_loc (loc, arg);
10973 dst = omp_build_component_ref (dst, f);
10974 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10975 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10976 else
10977 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10978 append_to_statement_list (t, &list);
10979 break;
10980 case OMP_CLAUSE_PRIVATE:
10981 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10982 break;
10983 decl = OMP_CLAUSE_DECL (c);
10984 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10985 f = (tree) n->value;
10986 if (tcctx.cb.decl_map)
10987 f = *tcctx.cb.decl_map->get (f);
10988 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10989 if (n != NULL)
10990 {
10991 sf = (tree) n->value;
10992 if (tcctx.cb.decl_map)
10993 sf = *tcctx.cb.decl_map->get (sf);
10994 src = build_simple_mem_ref_loc (loc, sarg);
10995 src = omp_build_component_ref (src, sf);
10996 if (use_pointer_for_field (decl, NULL))
10997 src = build_simple_mem_ref_loc (loc, src);
10998 }
10999 else
11000 src = decl;
11001 dst = build_simple_mem_ref_loc (loc, arg);
11002 dst = omp_build_component_ref (dst, f);
11003 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11004 append_to_statement_list (t, &list);
11005 break;
11006 default:
11007 break;
11008 }
11009
11010 /* Last pass: handle VLA firstprivates. */
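/* Roughly (a sketch): copy-construct the variable-sized data from
   the location the sender record points to into the task's own
   block, then store the address of the fresh copy back into the
   task record's pointer field.  */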
11011 if (tcctx.cb.decl_map)
11012 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11013 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11014 {
11015 tree ind, ptr, df;
11016
11017 decl = OMP_CLAUSE_DECL (c);
11018 if (!is_variable_sized (decl))
11019 continue;
11020 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11021 if (n == NULL)
11022 continue;
11023 f = (tree) n->value;
11024 f = *tcctx.cb.decl_map->get (f);
11025 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
11026 ind = DECL_VALUE_EXPR (decl);
11027 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
11028 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
11029 n = splay_tree_lookup (ctx->sfield_map,
11030 (splay_tree_key) TREE_OPERAND (ind, 0));
11031 sf = (tree) n->value;
11032 sf = *tcctx.cb.decl_map->get (sf);
11033 src = build_simple_mem_ref_loc (loc, sarg);
11034 src = omp_build_component_ref (src, sf);
11035 src = build_simple_mem_ref_loc (loc, src);
11036 dst = build_simple_mem_ref_loc (loc, arg);
11037 dst = omp_build_component_ref (dst, f);
11038 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11039 append_to_statement_list (t, &list);
11040 n = splay_tree_lookup (ctx->field_map,
11041 (splay_tree_key) TREE_OPERAND (ind, 0));
11042 df = (tree) n->value;
11043 df = *tcctx.cb.decl_map->get (df);
11044 ptr = build_simple_mem_ref_loc (loc, arg);
11045 ptr = omp_build_component_ref (ptr, df);
11046 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
11047 build_fold_addr_expr_loc (loc, dst));
11048 append_to_statement_list (t, &list);
11049 }
11050
11051 t = build1 (RETURN_EXPR, void_type_node, NULL);
11052 append_to_statement_list (t, &list);
11053
11054 if (tcctx.cb.decl_map)
11055 delete tcctx.cb.decl_map;
11056 pop_gimplify_context (NULL);
11057 BIND_EXPR_BODY (bind) = list;
11058 pop_cfun ();
11059 }
11060
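/* Lower OMP_CLAUSE_DEPEND clauses into the flat address array the
   runtime expects.  Roughly (a sketch of the two layouts built
   below):

     legacy:  { total, n_out_inout, addr, addr, ... }
     with mutexinoutset or depobj dependences present:
              { 0, total, n_out_inout, n_mutexinoutset, n_in,
                addr, addr, ... }

   with the addresses grouped by dependence kind in the order the
   counts are emitted.  */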
11061 static void
11062 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
11063 {
11064 tree c, clauses;
11065 gimple *g;
11066 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
11067
11068 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
11069 gcc_assert (clauses);
11070 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11071 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
11072 switch (OMP_CLAUSE_DEPEND_KIND (c))
11073 {
11074 case OMP_CLAUSE_DEPEND_LAST:
11075 /* Lowering already done at gimplification. */
11076 return;
11077 case OMP_CLAUSE_DEPEND_IN:
11078 cnt[2]++;
11079 break;
11080 case OMP_CLAUSE_DEPEND_OUT:
11081 case OMP_CLAUSE_DEPEND_INOUT:
11082 cnt[0]++;
11083 break;
11084 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11085 cnt[1]++;
11086 break;
11087 case OMP_CLAUSE_DEPEND_DEPOBJ:
11088 cnt[3]++;
11089 break;
11090 case OMP_CLAUSE_DEPEND_SOURCE:
11091 case OMP_CLAUSE_DEPEND_SINK:
11092 /* FALLTHRU */
11093 default:
11094 gcc_unreachable ();
11095 }
11096 if (cnt[1] || cnt[3])
11097 idx = 5;
11098 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
11099 tree type = build_array_type_nelts (ptr_type_node, total + idx);
11100 tree array = create_tmp_var (type);
11101 TREE_ADDRESSABLE (array) = 1;
11102 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
11103 NULL_TREE);
11104 if (idx == 5)
11105 {
11106 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
11107 gimple_seq_add_stmt (iseq, g);
11108 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
11109 NULL_TREE);
11110 }
11111 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
11112 gimple_seq_add_stmt (iseq, g);
11113 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
11114 {
11115 r = build4 (ARRAY_REF, ptr_type_node, array,
11116 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
11117 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
11118 gimple_seq_add_stmt (iseq, g);
11119 }
11120 for (i = 0; i < 4; i++)
11121 {
11122 if (cnt[i] == 0)
11123 continue;
11124 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11125 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
11126 continue;
11127 else
11128 {
11129 switch (OMP_CLAUSE_DEPEND_KIND (c))
11130 {
11131 case OMP_CLAUSE_DEPEND_IN:
11132 if (i != 2)
11133 continue;
11134 break;
11135 case OMP_CLAUSE_DEPEND_OUT:
11136 case OMP_CLAUSE_DEPEND_INOUT:
11137 if (i != 0)
11138 continue;
11139 break;
11140 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11141 if (i != 1)
11142 continue;
11143 break;
11144 case OMP_CLAUSE_DEPEND_DEPOBJ:
11145 if (i != 3)
11146 continue;
11147 break;
11148 default:
11149 gcc_unreachable ();
11150 }
11151 tree t = OMP_CLAUSE_DECL (c);
11152 t = fold_convert (ptr_type_node, t);
11153 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11154 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11155 NULL_TREE, NULL_TREE);
11156 g = gimple_build_assign (r, t);
11157 gimple_seq_add_stmt (iseq, g);
11158 }
11159 }
11160 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11161 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11162 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11163 OMP_CLAUSE_CHAIN (c) = *pclauses;
11164 *pclauses = c;
11165 tree clobber = build_clobber (type);
11166 g = gimple_build_assign (array, clobber);
11167 gimple_seq_add_stmt (oseq, g);
11168 }
11169
11170 /* Lower the OpenMP parallel or task directive in the current statement
11171 in GSI_P. CTX holds context information for the directive. */
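/* E.g., for

     #pragma omp parallel shared(a) firstprivate(b)

   this builds the sender record .omp_data_o in the caller, stores
   &a and the value of b into it, and rewrites the body to access
   them through the receiver pointer; outlining into CHILD_FN is
   done later by pass_expand_omp (a sketch of the overall shape).  */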
11172
11173 static void
11174 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11175 {
11176 tree clauses;
11177 tree child_fn, t;
11178 gimple *stmt = gsi_stmt (*gsi_p);
11179 gbind *par_bind, *bind, *dep_bind = NULL;
11180 gimple_seq par_body;
11181 location_t loc = gimple_location (stmt);
11182
11183 clauses = gimple_omp_taskreg_clauses (stmt);
11184 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11185 && gimple_omp_task_taskwait_p (stmt))
11186 {
11187 par_bind = NULL;
11188 par_body = NULL;
11189 }
11190 else
11191 {
11192 par_bind
11193 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11194 par_body = gimple_bind_body (par_bind);
11195 }
11196 child_fn = ctx->cb.dst_fn;
11197 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11198 && !gimple_omp_parallel_combined_p (stmt))
11199 {
11200 struct walk_stmt_info wi;
11201 int ws_num = 0;
11202
11203 memset (&wi, 0, sizeof (wi));
11204 wi.info = &ws_num;
11205 wi.val_only = true;
11206 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11207 if (ws_num == 1)
11208 gimple_omp_parallel_set_combined_p (stmt, true);
11209 }
11210 gimple_seq dep_ilist = NULL;
11211 gimple_seq dep_olist = NULL;
11212 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11213 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11214 {
11215 push_gimplify_context ();
11216 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11217 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11218 &dep_ilist, &dep_olist);
11219 }
11220
11221 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11222 && gimple_omp_task_taskwait_p (stmt))
11223 {
11224 if (dep_bind)
11225 {
11226 gsi_replace (gsi_p, dep_bind, true);
11227 gimple_bind_add_seq (dep_bind, dep_ilist);
11228 gimple_bind_add_stmt (dep_bind, stmt);
11229 gimple_bind_add_seq (dep_bind, dep_olist);
11230 pop_gimplify_context (dep_bind);
11231 }
11232 return;
11233 }
11234
11235 if (ctx->srecord_type)
11236 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11237
11238 gimple_seq tskred_ilist = NULL;
11239 gimple_seq tskred_olist = NULL;
11240 if ((is_task_ctx (ctx)
11241 && gimple_omp_task_taskloop_p (ctx->stmt)
11242 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11243 OMP_CLAUSE_REDUCTION))
11244 || (is_parallel_ctx (ctx)
11245 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11246 OMP_CLAUSE__REDUCTEMP_)))
11247 {
11248 if (dep_bind == NULL)
11249 {
11250 push_gimplify_context ();
11251 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11252 }
11253 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11254 : OMP_PARALLEL,
11255 gimple_omp_taskreg_clauses (ctx->stmt),
11256 &tskred_ilist, &tskred_olist);
11257 }
11258
11259 push_gimplify_context ();
11260
11261 gimple_seq par_olist = NULL;
11262 gimple_seq par_ilist = NULL;
11263 gimple_seq par_rlist = NULL;
11264 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11265 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
11266 if (phony_construct && ctx->record_type)
11267 {
11268 gcc_checking_assert (!ctx->receiver_decl);
11269 ctx->receiver_decl = create_tmp_var
11270 (build_reference_type (ctx->record_type), ".omp_rec");
11271 }
11272 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11273 lower_omp (&par_body, ctx);
11274 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
11275 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11276
11277 /* Declare all the variables created by mapping and the variables
11278 declared in the scope of the parallel body. */
11279 record_vars_into (ctx->block_vars, child_fn);
11280 maybe_remove_omp_member_access_dummy_vars (par_bind);
11281 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11282
11283 if (ctx->record_type)
11284 {
11285 ctx->sender_decl
11286 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11287 : ctx->record_type, ".omp_data_o");
11288 DECL_NAMELESS (ctx->sender_decl) = 1;
11289 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11290 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
11291 }
11292
11293 gimple_seq olist = NULL;
11294 gimple_seq ilist = NULL;
11295 lower_send_clauses (clauses, &ilist, &olist, ctx);
11296 lower_send_shared_vars (&ilist, &olist, ctx);
11297
11298 if (ctx->record_type)
11299 {
11300 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
11301 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11302 clobber));
11303 }
11304
11305 /* Once all the expansions are done, sequence all the different
11306 fragments inside gimple_omp_body. */
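/* I.e., roughly: receiver setup, input-clause initialization, the
   lowered body, reduction epilogue, optional cancellation label,
   copy-out, and finally the region-exit GIMPLE_OMP_RETURN.  */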
11307
11308 gimple_seq new_body = NULL;
11309
11310 if (ctx->record_type)
11311 {
11312 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11313 /* fixup_child_record_type might have changed receiver_decl's type. */
11314 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11315 gimple_seq_add_stmt (&new_body,
11316 gimple_build_assign (ctx->receiver_decl, t));
11317 }
11318
11319 gimple_seq_add_seq (&new_body, par_ilist);
11320 gimple_seq_add_seq (&new_body, par_body);
11321 gimple_seq_add_seq (&new_body, par_rlist);
11322 if (ctx->cancellable)
11323 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11324 gimple_seq_add_seq (&new_body, par_olist);
11325 new_body = maybe_catch_exception (new_body);
11326 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11327 gimple_seq_add_stmt (&new_body,
11328 gimple_build_omp_continue (integer_zero_node,
11329 integer_zero_node));
11330 if (!phony_construct)
11331 {
11332 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11333 gimple_omp_set_body (stmt, new_body);
11334 }
11335
11336 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11337 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11338 else
11339 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11340 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11341 gimple_bind_add_seq (bind, ilist);
11342 if (!phony_construct)
11343 gimple_bind_add_stmt (bind, stmt);
11344 else
11345 gimple_bind_add_seq (bind, new_body);
11346 gimple_bind_add_seq (bind, olist);
11347
11348 pop_gimplify_context (NULL);
11349
11350 if (dep_bind)
11351 {
11352 gimple_bind_add_seq (dep_bind, dep_ilist);
11353 gimple_bind_add_seq (dep_bind, tskred_ilist);
11354 gimple_bind_add_stmt (dep_bind, bind);
11355 gimple_bind_add_seq (dep_bind, tskred_olist);
11356 gimple_bind_add_seq (dep_bind, dep_olist);
11357 pop_gimplify_context (dep_bind);
11358 }
11359 }
11360
11361 /* Lower the GIMPLE_OMP_TARGET in the current statement
11362 in GSI_P. CTX holds context information for the directive. */
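/* E.g., for

     #pragma omp target map(tofrom: a)

   this fills in the three descriptor arrays handed to the runtime:
   .omp_data_arr (addresses), .omp_data_sizes (byte sizes) and
   .omp_data_kinds (map kind plus alignment bits), one element per
   mapped entity as counted by MAP_CNT below (a sketch).  */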
11363
11364 static void
11365 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11366 {
11367 tree clauses;
11368 tree child_fn, t, c;
11369 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11370 gbind *tgt_bind, *bind, *dep_bind = NULL;
11371 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11372 location_t loc = gimple_location (stmt);
11373 bool offloaded, data_region;
11374 unsigned int map_cnt = 0;
11375
11376 offloaded = is_gimple_omp_offloaded (stmt);
11377 switch (gimple_omp_target_kind (stmt))
11378 {
11379 case GF_OMP_TARGET_KIND_REGION:
11380 case GF_OMP_TARGET_KIND_UPDATE:
11381 case GF_OMP_TARGET_KIND_ENTER_DATA:
11382 case GF_OMP_TARGET_KIND_EXIT_DATA:
11383 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11384 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11385 case GF_OMP_TARGET_KIND_OACC_SERIAL:
11386 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11387 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11388 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11389 data_region = false;
11390 break;
11391 case GF_OMP_TARGET_KIND_DATA:
11392 case GF_OMP_TARGET_KIND_OACC_DATA:
11393 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11394 data_region = true;
11395 break;
11396 default:
11397 gcc_unreachable ();
11398 }
11399
11400 clauses = gimple_omp_target_clauses (stmt);
11401
11402 gimple_seq dep_ilist = NULL;
11403 gimple_seq dep_olist = NULL;
11404 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11405 {
11406 push_gimplify_context ();
11407 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11408 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11409 &dep_ilist, &dep_olist);
11410 }
11411
11412 tgt_bind = NULL;
11413 tgt_body = NULL;
11414 if (offloaded)
11415 {
11416 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11417 tgt_body = gimple_bind_body (tgt_bind);
11418 }
11419 else if (data_region)
11420 tgt_body = gimple_omp_body (stmt);
11421 child_fn = ctx->cb.dst_fn;
11422
11423 push_gimplify_context ();
11424 fplist = NULL;
11425
11426 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11427 switch (OMP_CLAUSE_CODE (c))
11428 {
11429 tree var, x;
11430
11431 default:
11432 break;
11433 case OMP_CLAUSE_MAP:
11434 #if CHECKING_P
11435 /* First check what we're prepared to handle in the following. */
11436 switch (OMP_CLAUSE_MAP_KIND (c))
11437 {
11438 case GOMP_MAP_ALLOC:
11439 case GOMP_MAP_TO:
11440 case GOMP_MAP_FROM:
11441 case GOMP_MAP_TOFROM:
11442 case GOMP_MAP_POINTER:
11443 case GOMP_MAP_TO_PSET:
11444 case GOMP_MAP_DELETE:
11445 case GOMP_MAP_RELEASE:
11446 case GOMP_MAP_ALWAYS_TO:
11447 case GOMP_MAP_ALWAYS_FROM:
11448 case GOMP_MAP_ALWAYS_TOFROM:
11449 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11450 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11451 case GOMP_MAP_STRUCT:
11452 case GOMP_MAP_ALWAYS_POINTER:
11453 break;
11454 case GOMP_MAP_IF_PRESENT:
11455 case GOMP_MAP_FORCE_ALLOC:
11456 case GOMP_MAP_FORCE_TO:
11457 case GOMP_MAP_FORCE_FROM:
11458 case GOMP_MAP_FORCE_TOFROM:
11459 case GOMP_MAP_FORCE_PRESENT:
11460 case GOMP_MAP_FORCE_DEVICEPTR:
11461 case GOMP_MAP_DEVICE_RESIDENT:
11462 case GOMP_MAP_LINK:
11463 case GOMP_MAP_ATTACH:
11464 case GOMP_MAP_DETACH:
11465 case GOMP_MAP_FORCE_DETACH:
11466 gcc_assert (is_gimple_omp_oacc (stmt));
11467 break;
11468 default:
11469 gcc_unreachable ();
11470 }
11471 #endif
11472 /* FALLTHRU */
11473 case OMP_CLAUSE_TO:
11474 case OMP_CLAUSE_FROM:
11475 oacc_firstprivate:
11476 var = OMP_CLAUSE_DECL (c);
11477 if (!DECL_P (var))
11478 {
11479 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11480 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11481 && (OMP_CLAUSE_MAP_KIND (c)
11482 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11483 map_cnt++;
11484 continue;
11485 }
11486
11487 if (DECL_SIZE (var)
11488 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11489 {
11490 tree var2 = DECL_VALUE_EXPR (var);
11491 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11492 var2 = TREE_OPERAND (var2, 0);
11493 gcc_assert (DECL_P (var2));
11494 var = var2;
11495 }
11496
11497 if (offloaded
11498 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11499 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11500 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11501 {
11502 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11503 {
11504 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11505 && varpool_node::get_create (var)->offloadable)
11506 continue;
11507
11508 tree type = build_pointer_type (TREE_TYPE (var));
11509 tree new_var = lookup_decl (var, ctx);
11510 x = create_tmp_var_raw (type, get_name (new_var));
11511 gimple_add_tmp_var (x);
11512 x = build_simple_mem_ref (x);
11513 SET_DECL_VALUE_EXPR (new_var, x);
11514 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11515 }
11516 continue;
11517 }
11518
11519 if (!maybe_lookup_field (var, ctx))
11520 continue;
11521
11522 /* Don't remap compute constructs' reduction variables, because the
11523 intermediate result must be local to each gang. */
11524 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11525 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11526 {
11527 x = build_receiver_ref (var, true, ctx);
11528 tree new_var = lookup_decl (var, ctx);
11529
11530 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11531 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11532 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11533 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11534 x = build_simple_mem_ref (x);
11535 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11536 {
11537 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11538 if (omp_is_reference (new_var)
11539 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11540 || DECL_BY_REFERENCE (var)))
11541 {
11542 /* Create a local object to hold the instance
11543 value. */
11544 tree type = TREE_TYPE (TREE_TYPE (new_var));
11545 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11546 tree inst = create_tmp_var (type, id);
11547 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11548 x = build_fold_addr_expr (inst);
11549 }
11550 gimplify_assign (new_var, x, &fplist);
11551 }
11552 else if (DECL_P (new_var))
11553 {
11554 SET_DECL_VALUE_EXPR (new_var, x);
11555 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11556 }
11557 else
11558 gcc_unreachable ();
11559 }
11560 map_cnt++;
11561 break;
11562
11563 case OMP_CLAUSE_FIRSTPRIVATE:
11564 if (is_oacc_parallel_or_serial (ctx))
11565 goto oacc_firstprivate;
11566 map_cnt++;
11567 var = OMP_CLAUSE_DECL (c);
11568 if (!omp_is_reference (var)
11569 && !is_gimple_reg_type (TREE_TYPE (var)))
11570 {
11571 tree new_var = lookup_decl (var, ctx);
11572 if (is_variable_sized (var))
11573 {
11574 tree pvar = DECL_VALUE_EXPR (var);
11575 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11576 pvar = TREE_OPERAND (pvar, 0);
11577 gcc_assert (DECL_P (pvar));
11578 tree new_pvar = lookup_decl (pvar, ctx);
11579 x = build_fold_indirect_ref (new_pvar);
11580 TREE_THIS_NOTRAP (x) = 1;
11581 }
11582 else
11583 x = build_receiver_ref (var, true, ctx);
11584 SET_DECL_VALUE_EXPR (new_var, x);
11585 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11586 }
11587 break;
11588
11589 case OMP_CLAUSE_PRIVATE:
11590 if (is_gimple_omp_oacc (ctx->stmt))
11591 break;
11592 var = OMP_CLAUSE_DECL (c);
11593 if (is_variable_sized (var))
11594 {
11595 tree new_var = lookup_decl (var, ctx);
11596 tree pvar = DECL_VALUE_EXPR (var);
11597 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11598 pvar = TREE_OPERAND (pvar, 0);
11599 gcc_assert (DECL_P (pvar));
11600 tree new_pvar = lookup_decl (pvar, ctx);
11601 x = build_fold_indirect_ref (new_pvar);
11602 TREE_THIS_NOTRAP (x) = 1;
11603 SET_DECL_VALUE_EXPR (new_var, x);
11604 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11605 }
11606 break;
11607
11608 case OMP_CLAUSE_USE_DEVICE_PTR:
11609 case OMP_CLAUSE_USE_DEVICE_ADDR:
11610 case OMP_CLAUSE_IS_DEVICE_PTR:
11611 var = OMP_CLAUSE_DECL (c);
11612 map_cnt++;
11613 if (is_variable_sized (var))
11614 {
11615 tree new_var = lookup_decl (var, ctx);
11616 tree pvar = DECL_VALUE_EXPR (var);
11617 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11618 pvar = TREE_OPERAND (pvar, 0);
11619 gcc_assert (DECL_P (pvar));
11620 tree new_pvar = lookup_decl (pvar, ctx);
11621 x = build_fold_indirect_ref (new_pvar);
11622 TREE_THIS_NOTRAP (x) = 1;
11623 SET_DECL_VALUE_EXPR (new_var, x);
11624 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11625 }
11626 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11627 && !omp_is_reference (var)
11628 && !omp_is_allocatable_or_ptr (var)
11629 && !lang_hooks.decls.omp_array_data (var, true))
11630 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11631 {
11632 tree new_var = lookup_decl (var, ctx);
11633 tree type = build_pointer_type (TREE_TYPE (var));
11634 x = create_tmp_var_raw (type, get_name (new_var));
11635 gimple_add_tmp_var (x);
11636 x = build_simple_mem_ref (x);
11637 SET_DECL_VALUE_EXPR (new_var, x);
11638 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11639 }
11640 else
11641 {
11642 tree new_var = lookup_decl (var, ctx);
11643 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11644 gimple_add_tmp_var (x);
11645 SET_DECL_VALUE_EXPR (new_var, x);
11646 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11647 }
11648 break;
11649 }
11650
11651 if (offloaded)
11652 {
11653 target_nesting_level++;
11654 lower_omp (&tgt_body, ctx);
11655 target_nesting_level--;
11656 }
11657 else if (data_region)
11658 lower_omp (&tgt_body, ctx);
11659
11660 if (offloaded)
11661 {
11662 /* Declare all the variables created by mapping and the variables
11663 declared in the scope of the target body. */
11664 record_vars_into (ctx->block_vars, child_fn);
11665 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11666 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11667 }
11668
11669 olist = NULL;
11670 ilist = NULL;
11671 if (ctx->record_type)
11672 {
11673 ctx->sender_decl
11674 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11675 DECL_NAMELESS (ctx->sender_decl) = 1;
11676 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11677 t = make_tree_vec (3);
11678 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11679 TREE_VEC_ELT (t, 1)
11680 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11681 ".omp_data_sizes");
11682 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11683 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11684 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11685 tree tkind_type = short_unsigned_type_node;
11686 int talign_shift = 8;
11687 TREE_VEC_ELT (t, 2)
11688 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11689 ".omp_data_kinds");
11690 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11691 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11692 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11693 gimple_omp_target_set_data_arg (stmt, t);
11694
11695 vec<constructor_elt, va_gc> *vsize;
11696 vec<constructor_elt, va_gc> *vkind;
11697 vec_alloc (vsize, map_cnt);
11698 vec_alloc (vkind, map_cnt);
11699 unsigned int map_idx = 0;
11700
11701 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11702 switch (OMP_CLAUSE_CODE (c))
11703 {
11704 tree ovar, nc, s, purpose, var, x, type;
11705 unsigned int talign;
11706
11707 default:
11708 break;
11709
11710 case OMP_CLAUSE_MAP:
11711 case OMP_CLAUSE_TO:
11712 case OMP_CLAUSE_FROM:
11713 oacc_firstprivate_map:
11714 nc = c;
11715 ovar = OMP_CLAUSE_DECL (c);
11716 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11717 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11718 || (OMP_CLAUSE_MAP_KIND (c)
11719 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11720 break;
11721 if (!DECL_P (ovar))
11722 {
11723 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11724 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11725 {
11726 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11727 == get_base_address (ovar));
11728 nc = OMP_CLAUSE_CHAIN (c);
11729 ovar = OMP_CLAUSE_DECL (nc);
11730 }
11731 else
11732 {
11733 tree x = build_sender_ref (ovar, ctx);
11734 tree v
11735 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11736 gimplify_assign (x, v, &ilist);
11737 nc = NULL_TREE;
11738 }
11739 }
11740 else
11741 {
11742 if (DECL_SIZE (ovar)
11743 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11744 {
11745 tree ovar2 = DECL_VALUE_EXPR (ovar);
11746 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11747 ovar2 = TREE_OPERAND (ovar2, 0);
11748 gcc_assert (DECL_P (ovar2));
11749 ovar = ovar2;
11750 }
11751 if (!maybe_lookup_field (ovar, ctx))
11752 continue;
11753 }
11754
11755 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11756 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11757 talign = DECL_ALIGN_UNIT (ovar);
11758 if (nc)
11759 {
11760 var = lookup_decl_in_outer_ctx (ovar, ctx);
11761 x = build_sender_ref (ovar, ctx);
11762
11763 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11764 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11765 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11766 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11767 {
11768 gcc_assert (offloaded);
11769 tree avar
11770 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11771 mark_addressable (avar);
11772 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11773 talign = DECL_ALIGN_UNIT (avar);
11774 avar = build_fold_addr_expr (avar);
11775 gimplify_assign (x, avar, &ilist);
11776 }
11777 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11778 {
11779 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11780 if (!omp_is_reference (var))
11781 {
11782 if (is_gimple_reg (var)
11783 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11784 TREE_NO_WARNING (var) = 1;
11785 var = build_fold_addr_expr (var);
11786 }
11787 else
11788 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11789 gimplify_assign (x, var, &ilist);
11790 }
11791 else if (is_gimple_reg (var))
11792 {
11793 gcc_assert (offloaded);
11794 tree avar = create_tmp_var (TREE_TYPE (var));
11795 mark_addressable (avar);
11796 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11797 if (GOMP_MAP_COPY_TO_P (map_kind)
11798 || map_kind == GOMP_MAP_POINTER
11799 || map_kind == GOMP_MAP_TO_PSET
11800 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11801 {
11802 /* If we need to initialize a temporary
11803 with VAR because it is not addressable, and
11804 the variable hasn't been initialized yet, then
11805 we'll get a warning for the store to avar.
11806 Don't warn in that case; the mapping might
11807 be implicit. */
11808 TREE_NO_WARNING (var) = 1;
11809 gimplify_assign (avar, var, &ilist);
11810 }
11811 avar = build_fold_addr_expr (avar);
11812 gimplify_assign (x, avar, &ilist);
11813 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11814 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11815 && !TYPE_READONLY (TREE_TYPE (var)))
11816 {
11817 x = unshare_expr (x);
11818 x = build_simple_mem_ref (x);
11819 gimplify_assign (var, x, &olist);
11820 }
11821 }
11822 else
11823 {
11824 /* While MAP is handled explicitly by the FE,
11825 for 'target update', only the identified variable is passed. */
11826 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
11827 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
11828 && (omp_is_allocatable_or_ptr (var)
11829 && omp_check_optional_argument (var, false)))
11830 var = build_fold_indirect_ref (var);
11831 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
11832 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
11833 || (!omp_is_allocatable_or_ptr (var)
11834 && !omp_check_optional_argument (var, false)))
11835 var = build_fold_addr_expr (var);
11836 gimplify_assign (x, var, &ilist);
11837 }
11838 }
11839 s = NULL_TREE;
11840 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11841 {
11842 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11843 s = TREE_TYPE (ovar);
11844 if (TREE_CODE (s) == REFERENCE_TYPE
11845 || omp_check_optional_argument (ovar, false))
11846 s = TREE_TYPE (s);
11847 s = TYPE_SIZE_UNIT (s);
11848 }
11849 else
11850 s = OMP_CLAUSE_SIZE (c);
11851 if (s == NULL_TREE)
11852 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11853 s = fold_convert (size_type_node, s);
11854 purpose = size_int (map_idx++);
11855 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11856 if (TREE_CODE (s) != INTEGER_CST)
11857 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11858
11859 unsigned HOST_WIDE_INT tkind, tkind_zero;
11860 switch (OMP_CLAUSE_CODE (c))
11861 {
11862 case OMP_CLAUSE_MAP:
11863 tkind = OMP_CLAUSE_MAP_KIND (c);
11864 tkind_zero = tkind;
11865 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11866 switch (tkind)
11867 {
11868 case GOMP_MAP_ALLOC:
11869 case GOMP_MAP_IF_PRESENT:
11870 case GOMP_MAP_TO:
11871 case GOMP_MAP_FROM:
11872 case GOMP_MAP_TOFROM:
11873 case GOMP_MAP_ALWAYS_TO:
11874 case GOMP_MAP_ALWAYS_FROM:
11875 case GOMP_MAP_ALWAYS_TOFROM:
11876 case GOMP_MAP_RELEASE:
11877 case GOMP_MAP_FORCE_TO:
11878 case GOMP_MAP_FORCE_FROM:
11879 case GOMP_MAP_FORCE_TOFROM:
11880 case GOMP_MAP_FORCE_PRESENT:
11881 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11882 break;
11883 case GOMP_MAP_DELETE:
11884 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11885 default:
11886 break;
11887 }
11888 if (tkind_zero != tkind)
11889 {
11890 if (integer_zerop (s))
11891 tkind = tkind_zero;
11892 else if (integer_nonzerop (s))
11893 tkind_zero = tkind;
11894 }
11895 break;
11896 case OMP_CLAUSE_FIRSTPRIVATE:
11897 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11898 tkind = GOMP_MAP_TO;
11899 tkind_zero = tkind;
11900 break;
11901 case OMP_CLAUSE_TO:
11902 tkind = GOMP_MAP_TO;
11903 tkind_zero = tkind;
11904 break;
11905 case OMP_CLAUSE_FROM:
11906 tkind = GOMP_MAP_FROM;
11907 tkind_zero = tkind;
11908 break;
11909 default:
11910 gcc_unreachable ();
11911 }
11912 gcc_checking_assert (tkind
11913 < (HOST_WIDE_INT_C (1U) << talign_shift));
11914 gcc_checking_assert (tkind_zero
11915 < (HOST_WIDE_INT_C (1U) << talign_shift));
11916 talign = ceil_log2 (talign);
11917 tkind |= talign << talign_shift;
11918 tkind_zero |= talign << talign_shift;
11919 gcc_checking_assert (tkind
11920 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11921 gcc_checking_assert (tkind_zero
11922 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11923 if (tkind == tkind_zero)
11924 x = build_int_cstu (tkind_type, tkind);
11925 else
11926 {
11927 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11928 x = build3 (COND_EXPR, tkind_type,
11929 fold_build2 (EQ_EXPR, boolean_type_node,
11930 unshare_expr (s), size_zero_node),
11931 build_int_cstu (tkind_type, tkind_zero),
11932 build_int_cstu (tkind_type, tkind));
11933 }
11934 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11935 if (nc && nc != c)
11936 c = nc;
11937 break;
11938
11939 case OMP_CLAUSE_FIRSTPRIVATE:
11940 if (is_oacc_parallel_or_serial (ctx))
11941 goto oacc_firstprivate_map;
11942 ovar = OMP_CLAUSE_DECL (c);
11943 if (omp_is_reference (ovar))
11944 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11945 else
11946 talign = DECL_ALIGN_UNIT (ovar);
11947 var = lookup_decl_in_outer_ctx (ovar, ctx);
11948 x = build_sender_ref (ovar, ctx);
11949 tkind = GOMP_MAP_FIRSTPRIVATE;
11950 type = TREE_TYPE (ovar);
11951 if (omp_is_reference (ovar))
11952 type = TREE_TYPE (type);
11953 if ((INTEGRAL_TYPE_P (type)
11954 && TYPE_PRECISION (type) <= POINTER_SIZE)
11955 || TREE_CODE (type) == POINTER_TYPE)
11956 {
11957 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11958 tree t = var;
11959 if (omp_is_reference (var))
11960 t = build_simple_mem_ref (var);
11961 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11962 TREE_NO_WARNING (var) = 1;
11963 if (TREE_CODE (type) != POINTER_TYPE)
11964 t = fold_convert (pointer_sized_int_node, t);
11965 t = fold_convert (TREE_TYPE (x), t);
11966 gimplify_assign (x, t, &ilist);
11967 }
11968 else if (omp_is_reference (var))
11969 gimplify_assign (x, var, &ilist);
11970 else if (is_gimple_reg (var))
11971 {
11972 tree avar = create_tmp_var (TREE_TYPE (var));
11973 mark_addressable (avar);
11974 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11975 TREE_NO_WARNING (var) = 1;
11976 gimplify_assign (avar, var, &ilist);
11977 avar = build_fold_addr_expr (avar);
11978 gimplify_assign (x, avar, &ilist);
11979 }
11980 else
11981 {
11982 var = build_fold_addr_expr (var);
11983 gimplify_assign (x, var, &ilist);
11984 }
11985 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11986 s = size_int (0);
11987 else if (omp_is_reference (ovar))
11988 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11989 else
11990 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11991 s = fold_convert (size_type_node, s);
11992 purpose = size_int (map_idx++);
11993 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11994 if (TREE_CODE (s) != INTEGER_CST)
11995 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11996
11997 gcc_checking_assert (tkind
11998 < (HOST_WIDE_INT_C (1U) << talign_shift));
11999 talign = ceil_log2 (talign);
12000 tkind |= talign << talign_shift;
12001 gcc_checking_assert (tkind
12002 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12003 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12004 build_int_cstu (tkind_type, tkind));
12005 break;
12006
12007 case OMP_CLAUSE_USE_DEVICE_PTR:
12008 case OMP_CLAUSE_USE_DEVICE_ADDR:
12009 case OMP_CLAUSE_IS_DEVICE_PTR:
12010 ovar = OMP_CLAUSE_DECL (c);
12011 var = lookup_decl_in_outer_ctx (ovar, ctx);
12012
12013 if (lang_hooks.decls.omp_array_data (ovar, true))
12014 {
12015 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
12016 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
12017 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
12018 }
12019 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12020 {
12021 tkind = GOMP_MAP_USE_DEVICE_PTR;
12022 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
12023 }
12024 else
12025 {
12026 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12027 x = build_sender_ref (ovar, ctx);
12028 }
12029
12030 if (is_gimple_omp_oacc (ctx->stmt))
12031 {
12032 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
12033
12034 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
12035 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
12036 }
12037
12038 type = TREE_TYPE (ovar);
12039 if (lang_hooks.decls.omp_array_data (ovar, true))
12040 var = lang_hooks.decls.omp_array_data (ovar, false);
12041 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12042 && !omp_is_reference (ovar)
12043 && !omp_is_allocatable_or_ptr (ovar))
12044 || TREE_CODE (type) == ARRAY_TYPE)
12045 var = build_fold_addr_expr (var);
12046 else
12047 {
12048 if (omp_is_reference (ovar)
12049 || omp_check_optional_argument (ovar, false)
12050 || omp_is_allocatable_or_ptr (ovar))
12051 {
12052 type = TREE_TYPE (type);
12053 if (TREE_CODE (type) != ARRAY_TYPE
12054 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12055 && !omp_is_allocatable_or_ptr (ovar))
12056 || (omp_is_reference (ovar)
12057 && omp_is_allocatable_or_ptr (ovar))))
12058 var = build_simple_mem_ref (var);
12059 var = fold_convert (TREE_TYPE (x), var);
12060 }
12061 }
12062 tree present;
12063 present = omp_check_optional_argument (ovar, true);
12064 if (present)
12065 {
12066 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12067 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12068 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12069 tree new_x = unshare_expr (x);
12070 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12071 fb_rvalue);
12072 gcond *cond = gimple_build_cond_from_tree (present,
12073 notnull_label,
12074 null_label);
12075 gimple_seq_add_stmt (&ilist, cond);
12076 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12077 gimplify_assign (new_x, null_pointer_node, &ilist);
12078 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12079 gimple_seq_add_stmt (&ilist,
12080 gimple_build_label (notnull_label));
12081 gimplify_assign (x, var, &ilist);
12082 gimple_seq_add_stmt (&ilist,
12083 gimple_build_label (opt_arg_label));
12084 }
12085 else
12086 gimplify_assign (x, var, &ilist);
12087 s = size_int (0);
12088 purpose = size_int (map_idx++);
12089 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12090 gcc_checking_assert (tkind
12091 < (HOST_WIDE_INT_C (1U) << talign_shift));
12092 gcc_checking_assert (tkind
12093 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12094 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12095 build_int_cstu (tkind_type, tkind));
12096 break;
12097 }
12098
12099 gcc_assert (map_idx == map_cnt);
12100
12101 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12102 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12103 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12104 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
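/* If the size/kind arrays could not be given static initializers, emit
   DECL_EXPRs so they are initialized on entry to the region, and clobber
   them afterwards to end their lifetime.  */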
12105 for (int i = 1; i <= 2; i++)
12106 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12107 {
12108 gimple_seq initlist = NULL;
12109 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12110 TREE_VEC_ELT (t, i)),
12111 &initlist, true, NULL_TREE);
12112 gimple_seq_add_seq (&ilist, initlist);
12113
12114 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12115 gimple_seq_add_stmt (&olist,
12116 gimple_build_assign (TREE_VEC_ELT (t, i),
12117 clobber));
12118 }
12119
12120 tree clobber = build_clobber (ctx->record_type);
12121 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12122 clobber));
12123 }
12124
12125 /* Once all the expansions are done, sequence all the different
12126 fragments inside gimple_omp_body. */
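/* Roughly, for an offloaded region the body assembled below is:

       receiver_decl = (cast) &sender_decl;
       <firstprivate-init sequence (fplist)>
       <receiver-side clause initializations>
       <fork_seq>  <offload region body>  <join_seq>
       OMP_RETURN

   (a sketch; the exact contents depend on the clauses present).  */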
12127
12128 new_body = NULL;
12129
12130 if (offloaded
12131 && ctx->record_type)
12132 {
12133 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12134 /* fixup_child_record_type might have changed receiver_decl's type. */
12135 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12136 gimple_seq_add_stmt (&new_body,
12137 gimple_build_assign (ctx->receiver_decl, t));
12138 }
12139 gimple_seq_add_seq (&new_body, fplist);
12140
12141 if (offloaded || data_region)
12142 {
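/* First pass over the clauses: initialize the receiver-side copies of
   firstprivate/private/use_device_* variables from the received
   data record.  */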
12143 tree prev = NULL_TREE;
12144 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12145 switch (OMP_CLAUSE_CODE (c))
12146 {
12147 tree var, x;
12148 default:
12149 break;
12150 case OMP_CLAUSE_FIRSTPRIVATE:
12151 if (is_gimple_omp_oacc (ctx->stmt))
12152 break;
12153 var = OMP_CLAUSE_DECL (c);
12154 if (omp_is_reference (var)
12155 || is_gimple_reg_type (TREE_TYPE (var)))
12156 {
12157 tree new_var = lookup_decl (var, ctx);
12158 tree type;
12159 type = TREE_TYPE (var);
12160 if (omp_is_reference (var))
12161 type = TREE_TYPE (type);
12162 if ((INTEGRAL_TYPE_P (type)
12163 && TYPE_PRECISION (type) <= POINTER_SIZE)
12164 || TREE_CODE (type) == POINTER_TYPE)
12165 {
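/* Small scalars were passed by value in a pointer-sized slot
   (GOMP_MAP_FIRSTPRIVATE_INT); convert the received bits back to the
   expected type.  */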
12166 x = build_receiver_ref (var, false, ctx);
12167 if (TREE_CODE (type) != POINTER_TYPE)
12168 x = fold_convert (pointer_sized_int_node, x);
12169 x = fold_convert (type, x);
12170 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12171 fb_rvalue);
12172 if (omp_is_reference (var))
12173 {
12174 tree v = create_tmp_var_raw (type, get_name (var));
12175 gimple_add_tmp_var (v);
12176 TREE_ADDRESSABLE (v) = 1;
12177 gimple_seq_add_stmt (&new_body,
12178 gimple_build_assign (v, x));
12179 x = build_fold_addr_expr (v);
12180 }
12181 gimple_seq_add_stmt (&new_body,
12182 gimple_build_assign (new_var, x));
12183 }
12184 else
12185 {
12186 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12187 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12188 fb_rvalue);
12189 gimple_seq_add_stmt (&new_body,
12190 gimple_build_assign (new_var, x));
12191 }
12192 }
12193 else if (is_variable_sized (var))
12194 {
12195 tree pvar = DECL_VALUE_EXPR (var);
12196 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12197 pvar = TREE_OPERAND (pvar, 0);
12198 gcc_assert (DECL_P (pvar));
12199 tree new_var = lookup_decl (pvar, ctx);
12200 x = build_receiver_ref (var, false, ctx);
12201 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12202 gimple_seq_add_stmt (&new_body,
12203 gimple_build_assign (new_var, x));
12204 }
12205 break;
12206 case OMP_CLAUSE_PRIVATE:
12207 if (is_gimple_omp_oacc (ctx->stmt))
12208 break;
12209 var = OMP_CLAUSE_DECL (c);
12210 if (omp_is_reference (var))
12211 {
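/* A private reference whose pointee has constant size gets a local
   backing temporary here; non-constant sizes are left to the second
   clause pass below, which allocates with alloca.  */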
12212 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12213 tree new_var = lookup_decl (var, ctx);
12214 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12215 if (TREE_CONSTANT (x))
12216 {
12217 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12218 get_name (var));
12219 gimple_add_tmp_var (x);
12220 TREE_ADDRESSABLE (x) = 1;
12221 x = build_fold_addr_expr_loc (clause_loc, x);
12222 }
12223 else
12224 break;
12225
12226 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12227 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12228 gimple_seq_add_stmt (&new_body,
12229 gimple_build_assign (new_var, x));
12230 }
12231 break;
12232 case OMP_CLAUSE_USE_DEVICE_PTR:
12233 case OMP_CLAUSE_USE_DEVICE_ADDR:
12234 case OMP_CLAUSE_IS_DEVICE_PTR:
12235 tree new_var;
12236 gimple_seq assign_body;
12237 bool is_array_data;
12238 bool do_optional_check;
12239 assign_body = NULL;
12240 do_optional_check = false;
12241 var = OMP_CLAUSE_DECL (c);
12242 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12243
12244 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12245 x = build_sender_ref (is_array_data
12246 ? (splay_tree_key) &DECL_NAME (var)
12247 : (splay_tree_key) &DECL_UID (var), ctx);
12248 else
12249 x = build_receiver_ref (var, false, ctx);
12250
12251 if (is_array_data)
12252 {
12253 bool is_ref = omp_is_reference (var);
12254 do_optional_check = true;
12255 /* First, we copy the descriptor data from the host; then
12256 we update its data to point to the target address. */
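/* E.g. for a Fortran array with a descriptor: NEW_VAR receives a copy
   of the host descriptor, and V2 (the descriptor's data member, via the
   omp_array_data lang hook) is then rewritten to the device address X
   obtained from the runtime.  */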
12257 new_var = lookup_decl (var, ctx);
12258 new_var = DECL_VALUE_EXPR (new_var);
12259 tree v = new_var;
12260
12261 if (is_ref)
12262 {
12263 var = build_fold_indirect_ref (var);
12264 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12265 fb_rvalue);
12266 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12267 gimple_add_tmp_var (v);
12268 TREE_ADDRESSABLE (v) = 1;
12269 gimple_seq_add_stmt (&assign_body,
12270 gimple_build_assign (v, var));
12271 tree rhs = build_fold_addr_expr (v);
12272 gimple_seq_add_stmt (&assign_body,
12273 gimple_build_assign (new_var, rhs));
12274 }
12275 else
12276 gimple_seq_add_stmt (&assign_body,
12277 gimple_build_assign (new_var, var));
12278
12279 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12280 gcc_assert (v2);
12281 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12282 gimple_seq_add_stmt (&assign_body,
12283 gimple_build_assign (v2, x));
12284 }
12285 else if (is_variable_sized (var))
12286 {
12287 tree pvar = DECL_VALUE_EXPR (var);
12288 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12289 pvar = TREE_OPERAND (pvar, 0);
12290 gcc_assert (DECL_P (pvar));
12291 new_var = lookup_decl (pvar, ctx);
12292 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12293 gimple_seq_add_stmt (&assign_body,
12294 gimple_build_assign (new_var, x));
12295 }
12296 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12297 && !omp_is_reference (var)
12298 && !omp_is_allocatable_or_ptr (var))
12299 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12300 {
12301 new_var = lookup_decl (var, ctx);
12302 new_var = DECL_VALUE_EXPR (new_var);
12303 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12304 new_var = TREE_OPERAND (new_var, 0);
12305 gcc_assert (DECL_P (new_var));
12306 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12307 gimple_seq_add_stmt (&assign_body,
12308 gimple_build_assign (new_var, x));
12309 }
12310 else
12311 {
12312 tree type = TREE_TYPE (var);
12313 new_var = lookup_decl (var, ctx);
12314 if (omp_is_reference (var))
12315 {
12316 type = TREE_TYPE (type);
12317 if (TREE_CODE (type) != ARRAY_TYPE
12318 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12319 || (omp_is_reference (var)
12320 && omp_is_allocatable_or_ptr (var))))
12321 {
12322 tree v = create_tmp_var_raw (type, get_name (var));
12323 gimple_add_tmp_var (v);
12324 TREE_ADDRESSABLE (v) = 1;
12325 x = fold_convert (type, x);
12326 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12327 fb_rvalue);
12328 gimple_seq_add_stmt (&assign_body,
12329 gimple_build_assign (v, x));
12330 x = build_fold_addr_expr (v);
12331 do_optional_check = true;
12332 }
12333 }
12334 new_var = DECL_VALUE_EXPR (new_var);
12335 x = fold_convert (TREE_TYPE (new_var), x);
12336 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12337 gimple_seq_add_stmt (&assign_body,
12338 gimple_build_assign (new_var, x));
12339 }
12340 tree present;
12341 present = (do_optional_check
12342 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12343 : NULL_TREE);
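/* As on the sender side: for an absent OPTIONAL argument, skip
   ASSIGN_BODY and store a null pointer into the privatized variable
   instead.  */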
12344 if (present)
12345 {
12346 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12347 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12348 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12349 glabel *null_glabel = gimple_build_label (null_label);
12350 glabel *notnull_glabel = gimple_build_label (notnull_label);
12351 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12352 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12353 fb_rvalue);
12354 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12355 fb_rvalue);
12356 gcond *cond = gimple_build_cond_from_tree (present,
12357 notnull_label,
12358 null_label);
12359 gimple_seq_add_stmt (&new_body, cond);
12360 gimple_seq_add_stmt (&new_body, null_glabel);
12361 gimplify_assign (new_var, null_pointer_node, &new_body);
12362 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12363 gimple_seq_add_stmt (&new_body, notnull_glabel);
12364 gimple_seq_add_seq (&new_body, assign_body);
12365 gimple_seq_add_stmt (&new_body,
12366 gimple_build_label (opt_arg_label));
12367 }
12368 else
12369 gimple_seq_add_seq (&new_body, assign_body);
12370 break;
12371 }
12372 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
12373 so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE have
12374 already been handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12375 or references to VLAs. */
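/* Sketch of what this pass handles, for hypothetical user code

       #pragma omp target map(tofrom: p[0:n])

   where the array-section map is followed by a
   GOMP_MAP_FIRSTPRIVATE_POINTER clause for P: the private P is
   initialized from the receiver reference of the preceding map clause
   (tracked in PREV), adjusted by the recorded bias.  */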
12376 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12377 switch (OMP_CLAUSE_CODE (c))
12378 {
12379 tree var;
12380 default:
12381 break;
12382 case OMP_CLAUSE_MAP:
12383 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12384 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12385 {
12386 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12387 poly_int64 offset = 0;
12388 gcc_assert (prev);
12389 var = OMP_CLAUSE_DECL (c);
12390 if (DECL_P (var)
12391 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12392 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12393 ctx))
12394 && varpool_node::get_create (var)->offloadable)
12395 break;
12396 if (TREE_CODE (var) == INDIRECT_REF
12397 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12398 var = TREE_OPERAND (var, 0);
12399 if (TREE_CODE (var) == COMPONENT_REF)
12400 {
12401 var = get_addr_base_and_unit_offset (var, &offset);
12402 gcc_assert (var != NULL_TREE && DECL_P (var));
12403 }
12404 else if (DECL_SIZE (var)
12405 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12406 {
12407 tree var2 = DECL_VALUE_EXPR (var);
12408 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12409 var2 = TREE_OPERAND (var2, 0);
12410 gcc_assert (DECL_P (var2));
12411 var = var2;
12412 }
12413 tree new_var = lookup_decl (var, ctx), x;
12414 tree type = TREE_TYPE (new_var);
12415 bool is_ref;
12416 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12417 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12418 == COMPONENT_REF))
12419 {
12420 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12421 is_ref = true;
12422 new_var = build2 (MEM_REF, type,
12423 build_fold_addr_expr (new_var),
12424 build_int_cst (build_pointer_type (type),
12425 offset));
12426 }
12427 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12428 {
12429 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12430 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12431 new_var = build2 (MEM_REF, type,
12432 build_fold_addr_expr (new_var),
12433 build_int_cst (build_pointer_type (type),
12434 offset));
12435 }
12436 else
12437 is_ref = omp_is_reference (var);
12438 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12439 is_ref = false;
12440 bool ref_to_array = false;
12441 if (is_ref)
12442 {
12443 type = TREE_TYPE (type);
12444 if (TREE_CODE (type) == ARRAY_TYPE)
12445 {
12446 type = build_pointer_type (type);
12447 ref_to_array = true;
12448 }
12449 }
12450 else if (TREE_CODE (type) == ARRAY_TYPE)
12451 {
12452 tree decl2 = DECL_VALUE_EXPR (new_var);
12453 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12454 decl2 = TREE_OPERAND (decl2, 0);
12455 gcc_assert (DECL_P (decl2));
12456 new_var = decl2;
12457 type = TREE_TYPE (new_var);
12458 }
12459 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12460 x = fold_convert_loc (clause_loc, type, x);
12461 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12462 {
12463 tree bias = OMP_CLAUSE_SIZE (c);
12464 if (DECL_P (bias))
12465 bias = lookup_decl (bias, ctx);
12466 bias = fold_convert_loc (clause_loc, sizetype, bias);
12467 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12468 bias);
12469 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12470 TREE_TYPE (x), x, bias);
12471 }
12472 if (ref_to_array)
12473 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12474 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12475 if (is_ref && !ref_to_array)
12476 {
12477 tree t = create_tmp_var_raw (type, get_name (var));
12478 gimple_add_tmp_var (t);
12479 TREE_ADDRESSABLE (t) = 1;
12480 gimple_seq_add_stmt (&new_body,
12481 gimple_build_assign (t, x));
12482 x = build_fold_addr_expr_loc (clause_loc, t);
12483 }
12484 gimple_seq_add_stmt (&new_body,
12485 gimple_build_assign (new_var, x));
12486 prev = NULL_TREE;
12487 }
12488 else if (OMP_CLAUSE_CHAIN (c)
12489 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12490 == OMP_CLAUSE_MAP
12491 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12492 == GOMP_MAP_FIRSTPRIVATE_POINTER
12493 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12494 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12495 prev = c;
12496 break;
12497 case OMP_CLAUSE_PRIVATE:
12498 var = OMP_CLAUSE_DECL (c);
12499 if (is_variable_sized (var))
12500 {
12501 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12502 tree new_var = lookup_decl (var, ctx);
12503 tree pvar = DECL_VALUE_EXPR (var);
12504 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12505 pvar = TREE_OPERAND (pvar, 0);
12506 gcc_assert (DECL_P (pvar));
12507 tree new_pvar = lookup_decl (pvar, ctx);
12508 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12509 tree al = size_int (DECL_ALIGN (var));
12510 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12511 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12512 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12513 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12514 gimple_seq_add_stmt (&new_body,
12515 gimple_build_assign (new_pvar, x));
12516 }
12517 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12518 {
12519 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12520 tree new_var = lookup_decl (var, ctx);
12521 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12522 if (TREE_CONSTANT (x))
12523 break;
12524 else
12525 {
12526 tree atmp
12527 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12528 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12529 tree al = size_int (TYPE_ALIGN (rtype));
12530 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12531 }
12532
12533 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12534 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12535 gimple_seq_add_stmt (&new_body,
12536 gimple_build_assign (new_var, x));
12537 }
12538 break;
12539 }
12540
12541 gimple_seq fork_seq = NULL;
12542 gimple_seq join_seq = NULL;
12543
12544 if (is_oacc_parallel_or_serial (ctx))
12545 {
12546 /* If there are reductions on the offloaded region itself, treat
12547 them as a dummy GANG loop. */
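/* E.g. the clauses of "#pragma acc parallel reduction (+:sum)" reach
   here; the reduction setup and teardown are emitted into FORK_SEQ and
   JOIN_SEQ as if they came from a gang-partitioned loop.  */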
12548 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12549
12550 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12551 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12552 }
12553
12554 gimple_seq_add_seq (&new_body, fork_seq);
12555 gimple_seq_add_seq (&new_body, tgt_body);
12556 gimple_seq_add_seq (&new_body, join_seq);
12557
12558 if (offloaded)
12559 new_body = maybe_catch_exception (new_body);
12560
12561 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12562 gimple_omp_set_body (stmt, new_body);
12563 }
12564
12565 bind = gimple_build_bind (NULL, NULL,
12566 tgt_bind ? gimple_bind_block (tgt_bind)
12567 : NULL_TREE);
12568 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12569 gimple_bind_add_seq (bind, ilist);
12570 gimple_bind_add_stmt (bind, stmt);
12571 gimple_bind_add_seq (bind, olist);
12572
12573 pop_gimplify_context (NULL);
12574
12575 if (dep_bind)
12576 {
12577 gimple_bind_add_seq (dep_bind, dep_ilist);
12578 gimple_bind_add_stmt (dep_bind, bind);
12579 gimple_bind_add_seq (dep_bind, dep_olist);
12580 pop_gimplify_context (dep_bind);
12581 }
12582 }
12583
12584 /* Expand code for an OpenMP teams directive. */
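/* A rough sketch of the effect, for hypothetical user code:

       #pragma omp teams num_teams (4) thread_limit (8)
       body;

   lowers to (inside the containing bind)

       __builtin_GOMP_teams (4, 8);
       body;
       OMP_RETURN

   with 0 passed for an omitted clause so the runtime may choose.  */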
12585
12586 static void
12587 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12588 {
12589 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12590 push_gimplify_context ();
12591
12592 tree block = make_node (BLOCK);
12593 gbind *bind = gimple_build_bind (NULL, NULL, block);
12594 gsi_replace (gsi_p, bind, true);
12595 gimple_seq bind_body = NULL;
12596 gimple_seq dlist = NULL;
12597 gimple_seq olist = NULL;
12598
12599 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12600 OMP_CLAUSE_NUM_TEAMS);
12601 if (num_teams == NULL_TREE)
12602 num_teams = build_int_cst (unsigned_type_node, 0);
12603 else
12604 {
12605 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12606 num_teams = fold_convert (unsigned_type_node, num_teams);
12607 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
12608 }
12609 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12610 OMP_CLAUSE_THREAD_LIMIT);
12611 if (thread_limit == NULL_TREE)
12612 thread_limit = build_int_cst (unsigned_type_node, 0);
12613 else
12614 {
12615 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12616 thread_limit = fold_convert (unsigned_type_node, thread_limit);
12617 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12618 fb_rvalue);
12619 }
12620
12621 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12622 &bind_body, &dlist, ctx, NULL);
12623 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
12624 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12625 NULL, ctx);
12626 if (!gimple_omp_teams_grid_phony (teams_stmt))
12627 {
12628 gimple_seq_add_stmt (&bind_body, teams_stmt);
12629 location_t loc = gimple_location (teams_stmt);
12630 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12631 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12632 gimple_set_location (call, loc);
12633 gimple_seq_add_stmt (&bind_body, call);
12634 }
12635
12636 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12637 gimple_omp_set_body (teams_stmt, NULL);
12638 gimple_seq_add_seq (&bind_body, olist);
12639 gimple_seq_add_seq (&bind_body, dlist);
12640 if (!gimple_omp_teams_grid_phony (teams_stmt))
12641 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12642 gimple_bind_set_body (bind, bind_body);
12643
12644 pop_gimplify_context (bind);
12645
12646 gimple_bind_append_vars (bind, ctx->block_vars);
12647 BLOCK_VARS (block) = ctx->block_vars;
12648 if (BLOCK_VARS (block))
12649 TREE_USED (block) = 1;
12650 }
12651
12652 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
12653
12654 static void
12655 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12656 {
12657 gimple *stmt = gsi_stmt (*gsi_p);
12658 lower_omp (gimple_omp_body_ptr (stmt), ctx);
12659 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
12660 gimple_build_omp_return (false));
12661 }
12662
12663
12664 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12665 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12666 of an OMP context, but with task_shared_vars set. */
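/* E.g. a variable privatized for an OMP region may have acquired a
   DECL_VALUE_EXPR (say, a dereference of a received pointer); any
   statement still mentioning the decl itself must be regimplified so
   the value expression is expanded.  */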
12667
12668 static tree
12669 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12670 void *data)
12671 {
12672 tree t = *tp;
12673
12674 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12675 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12676 return t;
12677
12678 if (task_shared_vars
12679 && DECL_P (t)
12680 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12681 return t;
12682
12683 /* If a global variable has been privatized, TREE_CONSTANT on
12684 ADDR_EXPR might be wrong. */
12685 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12686 recompute_tree_invariant_for_addr_expr (t);
12687
12688 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12689 return NULL_TREE;
12690 }
12691
12692 /* Data to be communicated between lower_omp_regimplify_operands and
12693 lower_omp_regimplify_operands_p. */
12694
12695 struct lower_omp_regimplify_operands_data
12696 {
12697 omp_context *ctx;
12698 vec<tree> *decls;
12699 };
12700
12701 /* Helper function for lower_omp_regimplify_operands. Find
12702 omp_member_access_dummy_var vars and temporarily adjust their
12703 DECL_VALUE_EXPRs if needed. */
12704
12705 static tree
12706 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12707 void *data)
12708 {
12709 tree t = omp_member_access_dummy_var (*tp);
12710 if (t)
12711 {
12712 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12713 lower_omp_regimplify_operands_data *ldata
12714 = (lower_omp_regimplify_operands_data *) wi->info;
12715 tree o = maybe_lookup_decl (t, ldata->ctx);
12716 if (o != t)
12717 {
12718 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12719 ldata->decls->safe_push (*tp);
12720 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12721 SET_DECL_VALUE_EXPR (*tp, v);
12722 }
12723 }
12724 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12725 return NULL_TREE;
12726 }
12727
12728 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12729 of omp_member_access_dummy_var vars during regimplification. */
12730
12731 static void
12732 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12733 gimple_stmt_iterator *gsi_p)
12734 {
12735 auto_vec<tree, 10> decls;
12736 if (ctx)
12737 {
12738 struct walk_stmt_info wi;
12739 memset (&wi, '\0', sizeof (wi));
12740 struct lower_omp_regimplify_operands_data data;
12741 data.ctx = ctx;
12742 data.decls = &decls;
12743 wi.info = &data;
12744 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12745 }
12746 gimple_regimplify_operands (stmt, gsi_p);
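/* Pairs were pushed as (saved DECL_VALUE_EXPR, decl), so the decl pops
   first and its saved value expression second; restore the original
   mappings.  */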
12747 while (!decls.is_empty ())
12748 {
12749 tree t = decls.pop ();
12750 tree v = decls.pop ();
12751 SET_DECL_VALUE_EXPR (t, v);
12752 }
12753 }
12754
12755 static void
12756 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12757 {
12758 gimple *stmt = gsi_stmt (*gsi_p);
12759 struct walk_stmt_info wi;
12760 gcall *call_stmt;
12761
12762 if (gimple_has_location (stmt))
12763 input_location = gimple_location (stmt);
12764
12765 if (task_shared_vars)
12766 memset (&wi, '\0', sizeof (wi));
12767
12768 /* If we have issued syntax errors, avoid doing any heavy lifting.
12769 Just replace the OMP directives with a NOP to avoid
12770 confusing RTL expansion. */
12771 if (seen_error () && is_gimple_omp (stmt))
12772 {
12773 gsi_replace (gsi_p, gimple_build_nop (), true);
12774 return;
12775 }
12776
12777 switch (gimple_code (stmt))
12778 {
12779 case GIMPLE_COND:
12780 {
12781 gcond *cond_stmt = as_a <gcond *> (stmt);
12782 if ((ctx || task_shared_vars)
12783 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12784 lower_omp_regimplify_p,
12785 ctx ? NULL : &wi, NULL)
12786 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12787 lower_omp_regimplify_p,
12788 ctx ? NULL : &wi, NULL)))
12789 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12790 }
12791 break;
12792 case GIMPLE_CATCH:
12793 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
12794 break;
12795 case GIMPLE_EH_FILTER:
12796 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
12797 break;
12798 case GIMPLE_TRY:
12799 lower_omp (gimple_try_eval_ptr (stmt), ctx);
12800 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
12801 break;
12802 case GIMPLE_TRANSACTION:
12803 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
12804 ctx);
12805 break;
12806 case GIMPLE_BIND:
12807 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
12808 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
12809 break;
12810 case GIMPLE_OMP_PARALLEL:
12811 case GIMPLE_OMP_TASK:
12812 ctx = maybe_lookup_ctx (stmt);
12813 gcc_assert (ctx);
12814 if (ctx->cancellable)
12815 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12816 lower_omp_taskreg (gsi_p, ctx);
12817 break;
12818 case GIMPLE_OMP_FOR:
12819 ctx = maybe_lookup_ctx (stmt);
12820 gcc_assert (ctx);
12821 if (ctx->cancellable)
12822 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12823 lower_omp_for (gsi_p, ctx);
12824 break;
12825 case GIMPLE_OMP_SECTIONS:
12826 ctx = maybe_lookup_ctx (stmt);
12827 gcc_assert (ctx);
12828 if (ctx->cancellable)
12829 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12830 lower_omp_sections (gsi_p, ctx);
12831 break;
12832 case GIMPLE_OMP_SINGLE:
12833 ctx = maybe_lookup_ctx (stmt);
12834 gcc_assert (ctx);
12835 lower_omp_single (gsi_p, ctx);
12836 break;
12837 case GIMPLE_OMP_MASTER:
12838 ctx = maybe_lookup_ctx (stmt);
12839 gcc_assert (ctx);
12840 lower_omp_master (gsi_p, ctx);
12841 break;
12842 case GIMPLE_OMP_TASKGROUP:
12843 ctx = maybe_lookup_ctx (stmt);
12844 gcc_assert (ctx);
12845 lower_omp_taskgroup (gsi_p, ctx);
12846 break;
12847 case GIMPLE_OMP_ORDERED:
12848 ctx = maybe_lookup_ctx (stmt);
12849 gcc_assert (ctx);
12850 lower_omp_ordered (gsi_p, ctx);
12851 break;
12852 case GIMPLE_OMP_SCAN:
12853 ctx = maybe_lookup_ctx (stmt);
12854 gcc_assert (ctx);
12855 lower_omp_scan (gsi_p, ctx);
12856 break;
12857 case GIMPLE_OMP_CRITICAL:
12858 ctx = maybe_lookup_ctx (stmt);
12859 gcc_assert (ctx);
12860 lower_omp_critical (gsi_p, ctx);
12861 break;
12862 case GIMPLE_OMP_ATOMIC_LOAD:
12863 if ((ctx || task_shared_vars)
12864 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12865 as_a <gomp_atomic_load *> (stmt)),
12866 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
12867 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12868 break;
12869 case GIMPLE_OMP_TARGET:
12870 ctx = maybe_lookup_ctx (stmt);
12871 gcc_assert (ctx);
12872 lower_omp_target (gsi_p, ctx);
12873 break;
12874 case GIMPLE_OMP_TEAMS:
12875 ctx = maybe_lookup_ctx (stmt);
12876 gcc_assert (ctx);
12877 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
12878 lower_omp_taskreg (gsi_p, ctx);
12879 else
12880 lower_omp_teams (gsi_p, ctx);
12881 break;
12882 case GIMPLE_OMP_GRID_BODY:
12883 ctx = maybe_lookup_ctx (stmt);
12884 gcc_assert (ctx);
12885 lower_omp_grid_body (gsi_p, ctx);
12886 break;
12887 case GIMPLE_CALL:
12888 tree fndecl;
12889 call_stmt = as_a <gcall *> (stmt);
12890 fndecl = gimple_call_fndecl (call_stmt);
12891 if (fndecl
12892 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12893 switch (DECL_FUNCTION_CODE (fndecl))
12894 {
12895 case BUILT_IN_GOMP_BARRIER:
12896 if (ctx == NULL)
12897 break;
12898 /* FALLTHRU */
12899 case BUILT_IN_GOMP_CANCEL:
12900 case BUILT_IN_GOMP_CANCELLATION_POINT:
12901 omp_context *cctx;
12902 cctx = ctx;
12903 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
12904 cctx = cctx->outer;
12905 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
12906 if (!cctx->cancellable)
12907 {
12908 if (DECL_FUNCTION_CODE (fndecl)
12909 == BUILT_IN_GOMP_CANCELLATION_POINT)
12910 {
12911 stmt = gimple_build_nop ();
12912 gsi_replace (gsi_p, stmt, false);
12913 }
12914 break;
12915 }
12916 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
12917 {
12918 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
12919 gimple_call_set_fndecl (call_stmt, fndecl);
12920 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
12921 }
12922 tree lhs;
12923 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
12924 gimple_call_set_lhs (call_stmt, lhs);
12925 tree fallthru_label;
12926 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
12927 gimple *g;
12928 g = gimple_build_label (fallthru_label);
12929 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12930 g = gimple_build_cond (NE_EXPR, lhs,
12931 fold_convert (TREE_TYPE (lhs),
12932 boolean_false_node),
12933 cctx->cancel_label, fallthru_label);
12934 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
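/* Sketch of the emitted check, e.g. for a barrier in a cancellable
   region (names hypothetical):

       ret_ = __builtin_GOMP_barrier_cancel ();
       if (ret_ != 0) goto <cancel_label>; else goto <fallthru>;
     <fallthru>:

   i.e. a pending cancellation redirects the thread to the region's
   cancel label.  */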
12935 break;
12936 default:
12937 break;
12938 }
12939 goto regimplify;
12940
12941 case GIMPLE_ASSIGN:
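/* For a store to a lastprivate(conditional:) variable, find the
   enclosing worksharing context tracking it and record the current
   iteration into the matching _condtemp_ variable, so the write from
   the "last" iteration wins.  */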
12942 for (omp_context *up = ctx; up; up = up->outer)
12943 {
12944 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
12945 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
12946 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
12947 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
12948 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
12949 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
12950 && (gimple_omp_target_kind (up->stmt)
12951 == GF_OMP_TARGET_KIND_DATA)))
12952 continue;
12953 else if (!up->lastprivate_conditional_map)
12954 break;
12955 tree lhs = get_base_address (gimple_assign_lhs (stmt));
12956 if (TREE_CODE (lhs) == MEM_REF
12957 && DECL_P (TREE_OPERAND (lhs, 0))
12958 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
12959 0))) == REFERENCE_TYPE)
12960 lhs = TREE_OPERAND (lhs, 0);
12961 if (DECL_P (lhs))
12962 if (tree *v = up->lastprivate_conditional_map->get (lhs))
12963 {
12964 tree clauses;
12965 if (up->combined_into_simd_safelen1)
12966 {
12967 up = up->outer;
12968 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
12969 up = up->outer;
12970 }
12971 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
12972 clauses = gimple_omp_for_clauses (up->stmt);
12973 else
12974 clauses = gimple_omp_sections_clauses (up->stmt);
12975 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
12976 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
12977 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12978 OMP_CLAUSE__CONDTEMP_);
12979 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
12980 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
12981 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12982 }
12983 }
12984 /* FALLTHRU */
12985
12986 default:
12987 regimplify:
12988 if ((ctx || task_shared_vars)
12989 && walk_gimple_op (stmt, lower_omp_regimplify_p,
12990 ctx ? NULL : &wi))
12991 {
12992 /* Just remove clobbers, this should happen only if we have
12993 "privatized" local addressable variables in SIMD regions,
12994 the clobber isn't needed in that case and gimplifying address
12995 of the ARRAY_REF into a pointer and creating MEM_REF based
12996 clobber would create worse code than we get with the clobber
12997 dropped. */
12998 if (gimple_clobber_p (stmt))
12999 {
13000 gsi_replace (gsi_p, gimple_build_nop (), true);
13001 break;
13002 }
13003 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
13004 }
13005 break;
13006 }
13007 }
13008
13009 static void
13010 lower_omp (gimple_seq *body, omp_context *ctx)
13011 {
13012 location_t saved_location = input_location;
13013 gimple_stmt_iterator gsi;
13014 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13015 lower_omp_1 (&gsi, ctx);
13016 /* During gimplification, we haven't folded statements inside offloading
13017 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
13018 if (target_nesting_level || taskreg_nesting_level)
13019 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13020 fold_stmt (&gsi);
13021 input_location = saved_location;
13022 }
13023
13024 /* Main entry point. */
13025
13026 static unsigned int
13027 execute_lower_omp (void)
13028 {
13029 gimple_seq body;
13030 int i;
13031 omp_context *ctx;
13032
13033 /* This pass always runs, to provide PROP_gimple_lomp.
13034 But often, there is nothing to do. */
13035 if (flag_openacc == 0 && flag_openmp == 0
13036 && flag_openmp_simd == 0)
13037 return 0;
13038
13039 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
13040 delete_omp_context);
13041
13042 body = gimple_body (current_function_decl);
13043
13044 if (hsa_gen_requested_p ())
13045 omp_grid_gridify_all_targets (&body);
13046
13047 scan_omp (&body, NULL);
13048 gcc_assert (taskreg_nesting_level == 0);
13049 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
13050 finish_taskreg_scan (ctx);
13051 taskreg_contexts.release ();
13052
13053 if (all_contexts->root)
13054 {
13055 if (task_shared_vars)
13056 push_gimplify_context ();
13057 lower_omp (&body, NULL);
13058 if (task_shared_vars)
13059 pop_gimplify_context (NULL);
13060 }
13061
13062 if (all_contexts)
13063 {
13064 splay_tree_delete (all_contexts);
13065 all_contexts = NULL;
13066 }
13067 BITMAP_FREE (task_shared_vars);
13068 BITMAP_FREE (global_nonaddressable_vars);
13069
13070 /* If the current function is a method, remove the artificial dummy VAR_DECLs
13071 created for non-static data member privatization; they aren't needed for
13072 debug info or anything else, have already been replaced everywhere in the
13073 IL, and cause problems with LTO. */
13074 if (DECL_ARGUMENTS (current_function_decl)
13075 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
13076 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
13077 == POINTER_TYPE))
13078 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
13079 return 0;
13080 }
13081
13082 namespace {
13083
13084 const pass_data pass_data_lower_omp =
13085 {
13086 GIMPLE_PASS, /* type */
13087 "omplower", /* name */
13088 OPTGROUP_OMP, /* optinfo_flags */
13089 TV_NONE, /* tv_id */
13090 PROP_gimple_any, /* properties_required */
13091 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
13092 0, /* properties_destroyed */
13093 0, /* todo_flags_start */
13094 0, /* todo_flags_finish */
13095 };
13096
13097 class pass_lower_omp : public gimple_opt_pass
13098 {
13099 public:
13100 pass_lower_omp (gcc::context *ctxt)
13101 : gimple_opt_pass (pass_data_lower_omp, ctxt)
13102 {}
13103
13104 /* opt_pass methods: */
13105 virtual unsigned int execute (function *) { return execute_lower_omp (); }
13106
13107 }; // class pass_lower_omp
13108
13109 } // anon namespace
13110
13111 gimple_opt_pass *
13112 make_pass_lower_omp (gcc::context *ctxt)
13113 {
13114 return new pass_lower_omp (ctxt);
13115 }
13116 \f
13117 /* The following is a utility to diagnose structured block violations.
13118 It is not part of the "omplower" pass, as that's invoked too late. It
13119 should be invoked by the respective front ends after gimplification. */
13120
13121 static splay_tree all_labels;
13122
13123 /* Check for mismatched contexts and generate an error if needed. Return
13124 true if an error is detected. */
13125
13126 static bool
13127 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
13128 gimple *branch_ctx, gimple *label_ctx)
13129 {
13130 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
13131 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
13132
13133 if (label_ctx == branch_ctx)
13134 return false;
13135
13136 const char* kind = NULL;
13137
13138 if (flag_openacc)
13139 {
13140 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
13141 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
13142 {
13143 gcc_checking_assert (kind == NULL);
13144 kind = "OpenACC";
13145 }
13146 }
13147 if (kind == NULL)
13148 {
13149 gcc_checking_assert (flag_openmp || flag_openmp_simd);
13150 kind = "OpenMP";
13151 }
13152
13153 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
13154 so we could traverse it and issue a correct "exit" or "enter" error
13155 message upon a structured block violation.
13156
13157 We built the context by chaining a list together with tree_cons, but there is
13158 no easy counterpart in gimple tuples. It seems like far too much work
13159 for issuing exit/enter error messages. If someone really misses the
13160 distinct error message... patches welcome. */
13161
13162 #if 0
13163 /* Try to avoid confusing the user by producing an error message
13164 with correct "exit" or "enter" verbiage. We prefer "exit"
13165 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
13166 if (branch_ctx == NULL)
13167 exit_p = false;
13168 else
13169 {
13170 while (label_ctx)
13171 {
13172 if (TREE_VALUE (label_ctx) == branch_ctx)
13173 {
13174 exit_p = false;
13175 break;
13176 }
13177 label_ctx = TREE_CHAIN (label_ctx);
13178 }
13179 }
13180
13181 if (exit_p)
13182 error ("invalid exit from %s structured block", kind);
13183 else
13184 error ("invalid entry to %s structured block", kind);
13185 #endif
13186
13187 /* If it's obvious we have an invalid entry, be specific about the error. */
13188 if (branch_ctx == NULL)
13189 error ("invalid entry to %s structured block", kind);
13190 else
13191 {
13192 /* Otherwise, be vague and lazy, but efficient. */
13193 error ("invalid branch to/from %s structured block", kind);
13194 }
13195
13196 gsi_replace (gsi_p, gimple_build_nop (), false);
13197 return true;
13198 }
13199
13200 /* Pass 1: Create a minimal tree of structured blocks, and record
13201 where each label is found. */
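/* Example (hypothetical user code) of what the two passes catch:

       #pragma omp parallel
       { goto l; }      branch context: the parallel statement
     l:;                label context: NULL (function body)

   pass 1 records the context of L, and pass 2, walking the GOTO, sees
   the mismatch and reports it through diagnose_sb_0.  */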
13202
13203 static tree
13204 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13205 struct walk_stmt_info *wi)
13206 {
13207 gimple *context = (gimple *) wi->info;
13208 gimple *inner_context;
13209 gimple *stmt = gsi_stmt (*gsi_p);
13210
13211 *handled_ops_p = true;
13212
13213 switch (gimple_code (stmt))
13214 {
13215 WALK_SUBSTMTS;
13216
13217 case GIMPLE_OMP_PARALLEL:
13218 case GIMPLE_OMP_TASK:
13219 case GIMPLE_OMP_SECTIONS:
13220 case GIMPLE_OMP_SINGLE:
13221 case GIMPLE_OMP_SECTION:
13222 case GIMPLE_OMP_MASTER:
13223 case GIMPLE_OMP_ORDERED:
13224 case GIMPLE_OMP_SCAN:
13225 case GIMPLE_OMP_CRITICAL:
13226 case GIMPLE_OMP_TARGET:
13227 case GIMPLE_OMP_TEAMS:
13228 case GIMPLE_OMP_TASKGROUP:
13229 /* The minimal context here is just the current OMP construct. */
13230 inner_context = stmt;
13231 wi->info = inner_context;
13232 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13233 wi->info = context;
13234 break;
13235
13236 case GIMPLE_OMP_FOR:
13237 inner_context = stmt;
13238 wi->info = inner_context;
13239 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13240 walk them. */
13241 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13242 diagnose_sb_1, NULL, wi);
13243 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13244 wi->info = context;
13245 break;
13246
13247 case GIMPLE_LABEL:
13248 splay_tree_insert (all_labels,
13249 (splay_tree_key) gimple_label_label (
13250 as_a <glabel *> (stmt)),
13251 (splay_tree_value) context);
13252 break;
13253
13254 default:
13255 break;
13256 }
13257
13258 return NULL_TREE;
13259 }
13260
13261 /* Pass 2: Check each branch and see if its context differs from that of
13262 the destination label's context. */
13263
13264 static tree
13265 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13266 struct walk_stmt_info *wi)
13267 {
13268 gimple *context = (gimple *) wi->info;
13269 splay_tree_node n;
13270 gimple *stmt = gsi_stmt (*gsi_p);
13271
13272 *handled_ops_p = true;
13273
13274 switch (gimple_code (stmt))
13275 {
13276 WALK_SUBSTMTS;
13277
13278 case GIMPLE_OMP_PARALLEL:
13279 case GIMPLE_OMP_TASK:
13280 case GIMPLE_OMP_SECTIONS:
13281 case GIMPLE_OMP_SINGLE:
13282 case GIMPLE_OMP_SECTION:
13283 case GIMPLE_OMP_MASTER:
13284 case GIMPLE_OMP_ORDERED:
13285 case GIMPLE_OMP_SCAN:
13286 case GIMPLE_OMP_CRITICAL:
13287 case GIMPLE_OMP_TARGET:
13288 case GIMPLE_OMP_TEAMS:
13289 case GIMPLE_OMP_TASKGROUP:
13290 wi->info = stmt;
13291 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13292 wi->info = context;
13293 break;
13294
13295 case GIMPLE_OMP_FOR:
13296 wi->info = stmt;
13297 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13298 walk them. */
13299 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
13300 diagnose_sb_2, NULL, wi);
13301 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13302 wi->info = context;
13303 break;
13304
13305 case GIMPLE_COND:
13306 {
13307 gcond *cond_stmt = as_a <gcond *> (stmt);
13308 tree lab = gimple_cond_true_label (cond_stmt);
13309 if (lab)
13310 {
13311 n = splay_tree_lookup (all_labels,
13312 (splay_tree_key) lab);
13313 diagnose_sb_0 (gsi_p, context,
13314 n ? (gimple *) n->value : NULL);
13315 }
13316 lab = gimple_cond_false_label (cond_stmt);
13317 if (lab)
13318 {
13319 n = splay_tree_lookup (all_labels,
13320 (splay_tree_key) lab);
13321 diagnose_sb_0 (gsi_p, context,
13322 n ? (gimple *) n->value : NULL);
13323 }
13324 }
13325 break;
13326
13327 case GIMPLE_GOTO:
13328 {
13329 tree lab = gimple_goto_dest (stmt);
13330 if (TREE_CODE (lab) != LABEL_DECL)
13331 break;
13332
13333 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13334 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
13335 }
13336 break;
13337
13338 case GIMPLE_SWITCH:
13339 {
13340 gswitch *switch_stmt = as_a <gswitch *> (stmt);
13341 unsigned int i;
13342 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
13343 {
13344 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
13345 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13346 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
13347 break;
13348 }
13349 }
13350 break;
13351
13352 case GIMPLE_RETURN:
13353 diagnose_sb_0 (gsi_p, context, NULL);
13354 break;
13355
13356 default:
13357 break;
13358 }
13359
13360 return NULL_TREE;
13361 }
13362
13363 static unsigned int
13364 diagnose_omp_structured_block_errors (void)
13365 {
13366 struct walk_stmt_info wi;
13367 gimple_seq body = gimple_body (current_function_decl);
13368
13369 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13370
13371 memset (&wi, 0, sizeof (wi));
13372 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13373
13374 memset (&wi, 0, sizeof (wi));
13375 wi.want_locations = true;
13376 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13377
13378 gimple_set_body (current_function_decl, body);
13379
13380 splay_tree_delete (all_labels);
13381 all_labels = NULL;
13382
13383 return 0;
13384 }
13385
13386 namespace {
13387
13388 const pass_data pass_data_diagnose_omp_blocks =
13389 {
13390 GIMPLE_PASS, /* type */
13391 "*diagnose_omp_blocks", /* name */
13392 OPTGROUP_OMP, /* optinfo_flags */
13393 TV_NONE, /* tv_id */
13394 PROP_gimple_any, /* properties_required */
13395 0, /* properties_provided */
13396 0, /* properties_destroyed */
13397 0, /* todo_flags_start */
13398 0, /* todo_flags_finish */
13399 };
13400
13401 class pass_diagnose_omp_blocks : public gimple_opt_pass
13402 {
13403 public:
13404 pass_diagnose_omp_blocks (gcc::context *ctxt)
13405 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
13406 {}
13407
13408 /* opt_pass methods: */
13409 virtual bool gate (function *)
13410 {
13411 return flag_openacc || flag_openmp || flag_openmp_simd;
13412 }
13413 virtual unsigned int execute (function *)
13414 {
13415 return diagnose_omp_structured_block_errors ();
13416 }
13417
13418 }; // class pass_diagnose_omp_blocks
13419
13420 } // anon namespace
13421
13422 gimple_opt_pass *
13423 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
13424 {
13425 return new pass_diagnose_omp_blocks (ctxt);
13426 }
13427 \f
13428
13429 #include "gt-omp-low.h"