gcc/omp-low.c
/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2019 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
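
/* Illustrative sketch (not part of the original sources): given

     int sum = 0;
     #pragma omp parallel for reduction(+:sum)
     for (int i = 0; i < n; i++)
       sum += a[i];

   the scan phase records how SUM, A and N are shared, the lowering
   phase rewrites references to them to go through the .omp_data_s /
   .omp_data_i communication record, and pass_expand_omp later outlines
   the region into a child function, roughly

     static void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i);

   which libgomp invokes in each thread via GOMP_parallel.  The child
   function name and record layout shown here are only indicative.  */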

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
          && (gimple_omp_target_kind (ctx->stmt)
              == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
        v = TREE_OPERAND (v, 0);
        continue;
      case PARM_DECL:
        if (DECL_CONTEXT (v) == current_function_decl
            && DECL_ARTIFICIAL (v)
            && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
          return v;
        return NULL_TREE;
      default:
        return NULL_TREE;
      }
}
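
/* Illustration (a sketch, not from the GCC sources): in a C++ member
   function such as

     struct S {
       int x;
       void f (int n)
       {
         #pragma omp parallel for private(x)
         for (int i = 0; i < n; i++)
           x = i;
       }
     };

   the front end privatizes the non-static member X through an
   artificial VAR_DECL whose DECL_VALUE_EXPR is this->x; this function
   peels such a COMPONENT_REF down to the underlying "this" PARM_DECL.  */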

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
         && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
         && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
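
/* E.g., for a C99 variable-length array

     void f (int n) { int vla[n]; }

   TYPE_SIZE_UNIT of VLA's type is the non-constant "n * sizeof (int)",
   so is_variable_sized returns true; such decls are later accessed
   through their DECL_VALUE_EXPR instead of being copied by value.  */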

/* Lookup variables.  The "maybe" form allows for the variable not to
   have been entered; otherwise we assert that it must have been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (TREE_ADDRESSABLE (decl))
        return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
         for these.  */
      if (TREE_READONLY (decl)
          || ((TREE_CODE (decl) == RESULT_DECL
               || TREE_CODE (decl) == PARM_DECL)
              && DECL_BY_REFERENCE (decl)))
        return false;

      /* Disallow copy-in/out in nested parallel if
         decl is shared in outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
              break;

          if (up)
            {
              tree c;

              for (c = gimple_omp_taskreg_clauses (up->stmt);
                   c; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                    && OMP_CLAUSE_DECL (c) == decl)
                  break;

              if (c)
                goto maybe_mark_addressable_and_ret;
            }
        }

      /* For tasks avoid using copy-in/out.  As tasks can be
         deferred or executed in a different thread, when GOMP_task
         returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
        {
          tree outer;
        maybe_mark_addressable_and_ret:
          outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}
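
/* Illustration (a sketch, not from the GCC sources): for

     int y = 0;
     int *p = &y;
     #pragma omp parallel shared(y)
     *p = 1;

   Y is TREE_ADDRESSABLE, so its field in the communication record must
   hold &y and this function returns true; with copy-in/copy-out the
   store through P and the threads' private copies could diverge.  For
   a shared scalar whose address is never taken and which is not
   visible outside the region, copy-in/copy-out is safe and false is
   returned.  */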

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable only because the task
     needs to take its address.  But we don't need to take the address
     of privatized copies of it.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
                     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
            && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
           || (code == OMP_CLAUSE_PRIVATE
               && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
                   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
         even private vars in its linear etc. clauses.
         Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
         to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
        x = lookup_decl (var, outer);
      else if (outer)
        x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
        x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
        = splay_tree_lookup (outer->field_map,
                             (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
        {
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
            x = var;
          else
            x = lookup_decl (var, outer);
        }
      else
        {
          tree field = (tree) n->value;
          /* If the receiver record type was remapped in the child function,
             remap the field into the new record type.  */
          x = maybe_lookup_field (field, outer);
          if (x != NULL)
            field = x;

          x = build_simple_mem_ref (outer->receiver_decl);
          x = omp_build_component_ref (x, field);
          if (use_pointer_for_field (var, outer))
            x = build_simple_mem_ref (x);
        }
    }
  else if (outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
        {
          outer = outer->outer;
          gcc_assert (outer
                      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
        }
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is a reference,
       it is possibly shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
        {
          x = DECL_VALUE_EXPR (var);
          tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
          if (o != t)
            x = unshare_and_remap (x, t, o);
          else
            x = unshare_expr (x);
        }
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
              || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear the type itself; restrict
     qualifiers in the pointed-to type will be ignored by points-to
     analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
                      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (DECL_SOURCE_LOCATION (var),
                               FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
            {
              sfield = build_decl (DECL_SOURCE_LOCATION (t),
                                   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
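
/* The MASK bits used above, as can be inferred from the callers in
   scan_sharing_clauses: 1 installs the field into CTX->RECORD_TYPE /
   FIELD_MAP and 2 into CTX->SRECORD_TYPE / SFIELD_MAP (3 = both);
   4 wraps an ARRAY_TYPE in a pointer-to-pointer (mask 7 is used for
   GOMP_MAP_POINTER array mappings); 8 keys the splay tree by
   &DECL_UID (VAR) rather than VAR itself (mask 11 is used for
   firstprivatized shared vars, which build_outer_var_ref's taskloop
   lastprivate handling looks up that way).  */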

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
        return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
                         TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          DECL_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          decl = OMP_CLAUSE_DECL (c);
          /* Ignore shared directives in teams construct inside of
             target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
              if (is_global_var (odecl))
                break;
              insert_decl_map (&ctx->cb, decl, odecl);
              break;
            }
          gcc_assert (is_taskreg_ctx (ctx));
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              use_pointer_for_field (decl, ctx);
              break;
            }
          by_ref = use_pointer_for_field (decl, NULL);
          if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || omp_is_reference (decl))
            {
              by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;

        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) == MEM_REF)
            {
              tree t = TREE_OPERAND (decl, 0);
              if (TREE_CODE (t) == POINTER_PLUS_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == INDIRECT_REF
                  || TREE_CODE (t) == ADDR_EXPR)
                t = TREE_OPERAND (t, 0);
              install_var_local (t, ctx);
              if (is_taskreg_ctx (ctx)
                  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
                      || (is_task_ctx (ctx)
                          && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
                              || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                                  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
                                      == POINTER_TYPE)))))
                  && !is_variable_sized (t)
                  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
                      || (!OMP_CLAUSE_REDUCTION_TASK (c)
                          && !is_task_ctx (ctx))))
                {
                  by_ref = use_pointer_for_field (t, NULL);
                  if (is_task_ctx (ctx)
                      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
                      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
                    {
                      install_var_field (t, false, 1, ctx);
                      install_var_field (t, by_ref, 2, ctx);
                    }
                  else
                    install_var_field (t, by_ref, 3, ctx);
                }
              break;
            }
          if (is_task_ctx (ctx)
              || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
                  && OMP_CLAUSE_REDUCTION_TASK (c)
                  && is_parallel_ctx (ctx)))
            {
              /* Global variables don't need to be copied,
                 the receiver side will use them directly.  */
              if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
                {
                  by_ref = use_pointer_for_field (decl, ctx);
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
                    install_var_field (decl, by_ref, 3, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
              && OMP_CLAUSE_REDUCTION_TASK (c))
            {
              install_var_local (decl, ctx);
              break;
            }
          goto do_private;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
               || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
              && is_gimple_omp_offloaded (ctx->stmt))
            {
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
                install_var_field (decl, !omp_is_reference (decl), 3, ctx);
              else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                install_var_field (decl, true, 3, ctx);
              else
                install_var_field (decl, false, 3, ctx);
            }
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                install_var_field (decl, false, 1, ctx);
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || omp_is_reference (decl)))
                {
                  install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_USE_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_field (decl, true, 3, ctx);
          else
            install_var_field (decl, false, 3, ctx);
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              install_var_local (decl2, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          goto do_private;

        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          install_var_field (decl, false, 3, ctx);
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;

        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_MAP:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
          decl = OMP_CLAUSE_DECL (c);
          /* Global variables with "omp declare target" attribute
             don't need to be copied, the receiver side will use them
             directly.  However, global variables with "omp declare target link"
             attribute need to be copied.  Or when ALWAYS modifier is used.  */
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
              && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable
              && !lookup_attribute ("omp declare target link",
                                    DECL_ATTRIBUTES (decl)))
            break;
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
            {
              /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
                 not offloaded; there is nothing to map for those.  */
              if (!is_gimple_omp_offloaded (ctx->stmt)
                  && !POINTER_TYPE_P (TREE_TYPE (decl))
                  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
                break;
            }
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                  || (OMP_CLAUSE_MAP_KIND (c)
                      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
            {
              if (TREE_CODE (decl) == COMPONENT_REF
                  || (TREE_CODE (decl) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
                      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
                          == REFERENCE_TYPE)))
                break;
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                }
              install_var_local (decl, ctx);
              break;
            }
          if (DECL_P (decl))
            {
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_field (decl2, true, 3, ctx);
                  install_var_local (decl2, ctx);
                  install_var_local (decl, ctx);
                }
              else
                {
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                    install_var_field (decl, true, 7, ctx);
                  else
                    install_var_field (decl, true, 3, ctx);
                  if (is_gimple_omp_offloaded (ctx->stmt)
                      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
                    install_var_local (decl, ctx);
                }
            }
          else
            {
              tree base = get_base_address (decl);
              tree nc = OMP_CLAUSE_CHAIN (c);
              if (DECL_P (base)
                  && nc != NULL_TREE
                  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_DECL (nc) == base
                  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
                  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
                {
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
                }
              else
                {
                  if (ctx->outer)
                    {
                      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
                      decl = OMP_CLAUSE_DECL (c);
                    }
                  gcc_assert (!splay_tree_lookup (ctx->field_map,
                                                  (splay_tree_key) decl));
                  tree field
                    = build_decl (OMP_CLAUSE_LOCATION (c),
                                  FIELD_DECL, NULL_TREE, ptr_type_node);
                  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
                  insert_field_into_struct (ctx->record_type, field);
                  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
                                     (splay_tree_value) field);
                }
            }
          break;

        case OMP_CLAUSE__GRIDDIM_:
          if (ctx->outer)
            {
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
              scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
            }
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
        case OMP_CLAUSE_TASK_REDUCTION:
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (decl)
              && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CONDTEMP_:
          decl = OMP_CLAUSE_DECL (c);
          if (is_parallel_ctx (ctx))
            {
              install_var_field (decl, false, 3, ctx);
              install_var_local (decl, ctx);
            }
          else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
                   && (gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
                   && !OMP_CLAUSE__CONDTEMP__ITER (c))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_LINEAR:
        case OMP_CLAUSE_IS_DEVICE_PTR:
          decl = OMP_CLAUSE_DECL (c);
          if (is_variable_sized (decl))
            {
              if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
                   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
                  && is_gimple_omp_offloaded (ctx->stmt))
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_local (decl2, ctx);
                  fixup_remapped_decl (decl2, ctx, false);
                }
              install_var_local (decl, ctx);
            }
          fixup_remapped_decl (decl, ctx,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                               && OMP_CLAUSE_PRIVATE_DEBUG (c));
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
              && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (TREE_CODE (decl) != MEM_REF)
            {
              if (is_variable_sized (decl))
                install_var_local (decl, ctx);
              fixup_remapped_decl (decl, ctx, false);
            }
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_TASK_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_SHARED:
          /* Ignore shared directives in teams construct inside of
             target construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
              && !is_host_teams_ctx (ctx))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
            {
              if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
                                                                 ctx->outer)))
                break;
              bool by_ref = use_pointer_for_field (decl, ctx);
              install_var_field (decl, by_ref, 11, ctx);
              break;
            }
          fixup_remapped_decl (decl, ctx, false);
          break;

        case OMP_CLAUSE_MAP:
          if (!is_gimple_omp_offloaded (ctx->stmt))
            break;
          decl = OMP_CLAUSE_DECL (c);
          if (DECL_P (decl)
              && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && varpool_node::get_create (decl)->offloadable)
            break;
          if (DECL_P (decl))
            {
              if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
                   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
                  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
                  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
                {
                  tree new_decl = lookup_decl (decl, ctx);
                  TREE_TYPE (new_decl)
                    = remap_type (TREE_TYPE (decl), &ctx->cb);
                }
              else if (DECL_SIZE (decl)
                       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  fixup_remapped_decl (decl2, ctx, false);
                  fixup_remapped_decl (decl, ctx, true);
                }
              else
                fixup_remapped_decl (decl, ctx, false);
            }
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_ALIGNED:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE__LOOPTEMP_:
        case OMP_CLAUSE__REDUCTEMP_:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_NONTEMPORAL:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE__GRIDDIM_:
        case OMP_CLAUSE__SIMT_:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
        case OMP_CLAUSE__CONDTEMP_:
          break;

        case OMP_CLAUSE__CACHE_:
        default:
          gcc_unreachable ();
        }
    }

  gcc_checking_assert (!scan_array_reductions
                       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
             || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
             || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
            && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
            scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
          }
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
          scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
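
/* Worked example (illustrative only): for

     int x;
     struct buf b;
     #pragma omp task firstprivate(x) shared(b)
     { ... }

   the first pass above installs X by value in both record types
   (mask 3) and B as a pointer field, since use_pointer_for_field
   returns true for aggregates and marks B addressable;
   install_var_local creates the privatized decls, and the second pass
   fixes up the remapped decls once all fields of the records are
   known.  */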

/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
                                       task_copy ? "_omp_cpyfn" : "_omp_fn");
}
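
/* For example, outlining the first parallel region of a function "foo"
   yields "foo._omp_fn.0", and a task copy function gets a name such as
   "foo._omp_cpyfn.1" (the exact separator is target-dependent; the
   numbering comes from clone_function_name_numbered).  */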

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
                                     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
                       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
        a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
        if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
          *p = TREE_CHAIN (*p);
        else
          {
            tree chain = TREE_CHAIN (*p);
            *p = copy_node (*p);
            p = &TREE_CHAIN (*p);
            *p = chain;
          }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
        g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
                            DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
                                 ? "omp target entrypoint"
                                 : "omp declare target");
      DECL_ATTRIBUTES (decl)
        = tree_cons (get_identifier (target_attr),
                     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
                  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
                  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
                      PARM_DECL, get_identifier (".omp_data_o"),
                      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}

/* Callback for walk_gimple_seq.  Check whether a combined parallel
   contains an OMP_FOR whose gimple_omp_for_combined_into_p flag is set.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
                       bool *handled_ops_p,
                       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
          && gimple_omp_for_kind (stmt)
             == *(const enum gf_mask *) (wi->info))
        {
          wi->info = stmt;
          return integer_zero_node;
        }
      break;
    default:
      break;
    }
  return NULL;
}

/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
                              omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
         and then (fd.collapse - 1) temporaries with the same
         type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
          && TREE_CODE (fd.loop.n2) != INTEGER_CST)
        {
          count += fd.collapse - 1;
1847 /* If there are lastprivate clauses on the inner
1848 GIMPLE_OMP_FOR, add one more temporary for the total number
1849 of iterations (product of count1 ... countN-1). */
1850 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1851 OMP_CLAUSE_LASTPRIVATE))
1852 count++;
1853 else if (msk == GF_OMP_FOR_KIND_FOR
1854 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1855 OMP_CLAUSE_LASTPRIVATE))
1856 count++;
1857 }
1858 for (i = 0; i < count; i++)
1859 {
1860 tree temp = create_tmp_var (type);
1861 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1862 insert_decl_map (&outer_ctx->cb, temp, temp);
1863 OMP_CLAUSE_DECL (c) = temp;
1864 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1865 gimple_omp_taskreg_set_clauses (stmt, c);
1866 }
1867 }
1868 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1869 && omp_find_clause (gimple_omp_task_clauses (stmt),
1870 OMP_CLAUSE_REDUCTION))
1871 {
1872 tree type = build_pointer_type (pointer_sized_int_node);
1873 tree temp = create_tmp_var (type);
1874 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1875 insert_decl_map (&outer_ctx->cb, temp, temp);
1876 OMP_CLAUSE_DECL (c) = temp;
1877 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1878 gimple_omp_task_set_clauses (stmt, c);
1879 }
1880 }
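
/* A small worked example (a sketch, not part of the original source):
   for a combined construct such as

       #pragma omp parallel for
       for (i = 0; i < n; i++) ...

   the inner loop is gimple_omp_for_combined_into_p, so the code above
   prepends two _LOOPTEMP_ clauses (for istart/iend) to the parallel's
   clause chain; with collapse(3) and a non-constant outer bound it
   would add two more for the intermediate iteration counts, plus one
   extra when lastprivate is present. */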
1881
1882 /* Scan an OpenMP parallel directive. */
1883
1884 static void
1885 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1886 {
1887 omp_context *ctx;
1888 tree name;
1889 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1890
1891 /* Ignore parallel directives with empty bodies, unless there
1892 are copyin clauses. */
1893 if (optimize > 0
1894 && empty_body_p (gimple_omp_body (stmt))
1895 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1896 OMP_CLAUSE_COPYIN) == NULL)
1897 {
1898 gsi_replace (gsi, gimple_build_nop (), false);
1899 return;
1900 }
1901
1902 if (gimple_omp_parallel_combined_p (stmt))
1903 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1904 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1905 OMP_CLAUSE_REDUCTION);
1906 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1907 if (OMP_CLAUSE_REDUCTION_TASK (c))
1908 {
1909 tree type = build_pointer_type (pointer_sized_int_node);
1910 tree temp = create_tmp_var (type);
1911 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1912 if (outer_ctx)
1913 insert_decl_map (&outer_ctx->cb, temp, temp);
1914 OMP_CLAUSE_DECL (c) = temp;
1915 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1916 gimple_omp_parallel_set_clauses (stmt, c);
1917 break;
1918 }
1919 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1920 break;
1921
1922 ctx = new_omp_context (stmt, outer_ctx);
1923 taskreg_contexts.safe_push (ctx);
1924 if (taskreg_nesting_level > 1)
1925 ctx->is_nested = true;
1926 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1927 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1928 name = create_tmp_var_name (".omp_data_s");
1929 name = build_decl (gimple_location (stmt),
1930 TYPE_DECL, name, ctx->record_type);
1931 DECL_ARTIFICIAL (name) = 1;
1932 DECL_NAMELESS (name) = 1;
1933 TYPE_NAME (ctx->record_type) = name;
1934 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1935 if (!gimple_omp_parallel_grid_phony (stmt))
1936 {
1937 create_omp_child_function (ctx, false);
1938 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1939 }
1940
1941 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1942 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1943
1944 if (TYPE_FIELDS (ctx->record_type) == NULL)
1945 ctx->record_type = ctx->receiver_decl = NULL;
1946 }
1947
1948 /* Scan an OpenMP task directive. */
1949
1950 static void
1951 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1952 {
1953 omp_context *ctx;
1954 tree name, t;
1955 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1956
1957 /* Ignore task directives with empty bodies, unless they have a
1958 depend clause. */
1959 if (optimize > 0
1960 && gimple_omp_body (stmt)
1961 && empty_body_p (gimple_omp_body (stmt))
1962 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1963 {
1964 gsi_replace (gsi, gimple_build_nop (), false);
1965 return;
1966 }
1967
1968 if (gimple_omp_task_taskloop_p (stmt))
1969 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1970
1971 ctx = new_omp_context (stmt, outer_ctx);
1972
1973 if (gimple_omp_task_taskwait_p (stmt))
1974 {
1975 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1976 return;
1977 }
1978
1979 taskreg_contexts.safe_push (ctx);
1980 if (taskreg_nesting_level > 1)
1981 ctx->is_nested = true;
1982 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1983 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1984 name = create_tmp_var_name (".omp_data_s");
1985 name = build_decl (gimple_location (stmt),
1986 TYPE_DECL, name, ctx->record_type);
1987 DECL_ARTIFICIAL (name) = 1;
1988 DECL_NAMELESS (name) = 1;
1989 TYPE_NAME (ctx->record_type) = name;
1990 TYPE_ARTIFICIAL (ctx->record_type) = 1;
1991 create_omp_child_function (ctx, false);
1992 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1993
1994 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1995
1996 if (ctx->srecord_type)
1997 {
1998 name = create_tmp_var_name (".omp_data_a");
1999 name = build_decl (gimple_location (stmt),
2000 TYPE_DECL, name, ctx->srecord_type);
2001 DECL_ARTIFICIAL (name) = 1;
2002 DECL_NAMELESS (name) = 1;
2003 TYPE_NAME (ctx->srecord_type) = name;
2004 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2005 create_omp_child_function (ctx, true);
2006 }
2007
2008 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2009
2010 if (TYPE_FIELDS (ctx->record_type) == NULL)
2011 {
2012 ctx->record_type = ctx->receiver_decl = NULL;
2013 t = build_int_cst (long_integer_type_node, 0);
2014 gimple_omp_task_set_arg_size (stmt, t);
2015 t = build_int_cst (long_integer_type_node, 1);
2016 gimple_omp_task_set_arg_align (stmt, t);
2017 }
2018 }
2019
2020 /* Helper function for finish_taskreg_scan, called through walk_tree.
2021 If maybe_lookup_decl_in_outer_ctx returns a different tree for some
2022 variable, replace it in the expression. */
2023
2024 static tree
2025 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2026 {
2027 if (VAR_P (*tp))
2028 {
2029 omp_context *ctx = (omp_context *) data;
2030 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2031 if (t != *tp)
2032 {
2033 if (DECL_HAS_VALUE_EXPR_P (t))
2034 t = unshare_expr (DECL_VALUE_EXPR (t));
2035 *tp = t;
2036 }
2037 *walk_subtrees = 0;
2038 }
2039 else if (IS_TYPE_OR_DECL_P (*tp))
2040 *walk_subtrees = 0;
2041 return NULL_TREE;
2042 }
2043
2044 /* If any decls have been made addressable during scan_omp,
2045 adjust their fields if needed, and lay out the record types
2046 of parallel/task constructs. */
2047
2048 static void
2049 finish_taskreg_scan (omp_context *ctx)
2050 {
2051 if (ctx->record_type == NULL_TREE)
2052 return;
2053
2054 /* If any task_shared_vars were needed, verify for all
2055 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2056 statements whether use_pointer_for_field has changed
2057 because of that, and if it did, update the field types now. */
2058 if (task_shared_vars)
2059 {
2060 tree c;
2061
2062 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2063 c; c = OMP_CLAUSE_CHAIN (c))
2064 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2065 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2066 {
2067 tree decl = OMP_CLAUSE_DECL (c);
2068
2069 /* Global variables don't need to be copied;
2070 the receiver side will use them directly. */
2071 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2072 continue;
2073 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2074 || !use_pointer_for_field (decl, ctx))
2075 continue;
2076 tree field = lookup_field (decl, ctx);
2077 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2078 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2079 continue;
2080 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2081 TREE_THIS_VOLATILE (field) = 0;
2082 DECL_USER_ALIGN (field) = 0;
2083 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2084 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2085 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2086 if (ctx->srecord_type)
2087 {
2088 tree sfield = lookup_sfield (decl, ctx);
2089 TREE_TYPE (sfield) = TREE_TYPE (field);
2090 TREE_THIS_VOLATILE (sfield) = 0;
2091 DECL_USER_ALIGN (sfield) = 0;
2092 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2093 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2094 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2095 }
2096 }
2097 }
2098
2099 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2100 {
2101 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2102 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2103 if (c)
2104 {
2105 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2106 expects to find it at the start of data. */
2107 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2108 tree *p = &TYPE_FIELDS (ctx->record_type);
2109 while (*p)
2110 if (*p == f)
2111 {
2112 *p = DECL_CHAIN (*p);
2113 break;
2114 }
2115 else
2116 p = &DECL_CHAIN (*p);
2117 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2118 TYPE_FIELDS (ctx->record_type) = f;
2119 }
2120 layout_type (ctx->record_type);
2121 fixup_child_record_type (ctx);
2122 }
2123 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2124 {
2125 layout_type (ctx->record_type);
2126 fixup_child_record_type (ctx);
2127 }
2128 else
2129 {
2130 location_t loc = gimple_location (ctx->stmt);
2131 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2132 /* Move VLA fields to the end. */
2133 p = &TYPE_FIELDS (ctx->record_type);
2134 while (*p)
2135 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2136 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2137 {
2138 *q = *p;
2139 *p = TREE_CHAIN (*p);
2140 TREE_CHAIN (*q) = NULL_TREE;
2141 q = &TREE_CHAIN (*q);
2142 }
2143 else
2144 p = &DECL_CHAIN (*p);
2145 *p = vla_fields;
2146 if (gimple_omp_task_taskloop_p (ctx->stmt))
2147 {
2148 /* Move the fields corresponding to the first and second _looptemp_
2149 clauses first. These are filled in by GOMP_taskloop
2150 and thus need to be at specific positions. */
2151 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2152 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2153 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2154 OMP_CLAUSE__LOOPTEMP_);
2155 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2156 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2157 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2158 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2159 p = &TYPE_FIELDS (ctx->record_type);
2160 while (*p)
2161 if (*p == f1 || *p == f2 || *p == f3)
2162 *p = DECL_CHAIN (*p);
2163 else
2164 p = &DECL_CHAIN (*p);
2165 DECL_CHAIN (f1) = f2;
2166 if (c3)
2167 {
2168 DECL_CHAIN (f2) = f3;
2169 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2170 }
2171 else
2172 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2173 TYPE_FIELDS (ctx->record_type) = f1;
2174 if (ctx->srecord_type)
2175 {
2176 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2177 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2178 if (c3)
2179 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2180 p = &TYPE_FIELDS (ctx->srecord_type);
2181 while (*p)
2182 if (*p == f1 || *p == f2 || *p == f3)
2183 *p = DECL_CHAIN (*p);
2184 else
2185 p = &DECL_CHAIN (*p);
2186 DECL_CHAIN (f1) = f2;
2187 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2188 if (c3)
2189 {
2190 DECL_CHAIN (f2) = f3;
2191 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2192 }
2193 else
2194 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2195 TYPE_FIELDS (ctx->srecord_type) = f1;
2196 }
2197 }
2198 layout_type (ctx->record_type);
2199 fixup_child_record_type (ctx);
2200 if (ctx->srecord_type)
2201 layout_type (ctx->srecord_type);
2202 tree t = fold_convert_loc (loc, long_integer_type_node,
2203 TYPE_SIZE_UNIT (ctx->record_type));
2204 if (TREE_CODE (t) != INTEGER_CST)
2205 {
2206 t = unshare_expr (t);
2207 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2208 }
2209 gimple_omp_task_set_arg_size (ctx->stmt, t);
2210 t = build_int_cst (long_integer_type_node,
2211 TYPE_ALIGN_UNIT (ctx->record_type));
2212 gimple_omp_task_set_arg_align (ctx->stmt, t);
2213 }
2214 }
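
/* A layout sketch (for illustration only): for a taskloop with a task
   reduction, the reordering above leaves the task's record type roughly
   as

       struct .omp_data_s {
         <iter type> _looptemp_1;    // start, filled by GOMP_taskloop
         <iter type> _looptemp_2;    // end, filled by GOMP_taskloop
         uintptr_t *_reductemp_;     // if a _REDUCTEMP_ clause is present
         ... fixed-size fields ...
         ... variable-sized fields last ...
       };

   with "uintptr_t" standing in for pointer_sized_int_node. */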
2215
2216 /* Find the enclosing offload context. */
2217
2218 static omp_context *
2219 enclosing_target_ctx (omp_context *ctx)
2220 {
2221 for (; ctx; ctx = ctx->outer)
2222 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2223 break;
2224
2225 return ctx;
2226 }
2227
2228 /* Return true if ctx is part of an oacc kernels region. */
2229
2230 static bool
2231 ctx_in_oacc_kernels_region (omp_context *ctx)
2232 {
2233 for (; ctx != NULL; ctx = ctx->outer)
2234 {
2235 gimple *stmt = ctx->stmt;
2236 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2237 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2238 return true;
2239 }
2240
2241 return false;
2242 }
2243
2244 /* Check the parallelism clauses inside a kernels region.
2245 Until kernels handling moves to use the same loop indirection
2246 scheme as parallel, we need to do this checking early. */
2247
2248 static unsigned
2249 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2250 {
2251 bool checking = true;
2252 unsigned outer_mask = 0;
2253 unsigned this_mask = 0;
2254 bool has_seq = false, has_auto = false;
2255
2256 if (ctx->outer)
2257 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2258 if (!stmt)
2259 {
2260 checking = false;
2261 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2262 return outer_mask;
2263 stmt = as_a <gomp_for *> (ctx->stmt);
2264 }
2265
2266 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2267 {
2268 switch (OMP_CLAUSE_CODE (c))
2269 {
2270 case OMP_CLAUSE_GANG:
2271 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2272 break;
2273 case OMP_CLAUSE_WORKER:
2274 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2275 break;
2276 case OMP_CLAUSE_VECTOR:
2277 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2278 break;
2279 case OMP_CLAUSE_SEQ:
2280 has_seq = true;
2281 break;
2282 case OMP_CLAUSE_AUTO:
2283 has_auto = true;
2284 break;
2285 default:
2286 break;
2287 }
2288 }
2289
2290 if (checking)
2291 {
2292 if (has_seq && (this_mask || has_auto))
2293 error_at (gimple_location (stmt), "%<seq%> overrides other"
2294 " OpenACC loop specifiers");
2295 else if (has_auto && this_mask)
2296 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2297 " OpenACC loop specifiers");
2298
2299 if (this_mask & outer_mask)
2300 error_at (gimple_location (stmt), "inner loop uses same"
2301 " OpenACC parallelism as containing loop");
2302 }
2303
2304 return outer_mask | this_mask;
2305 }
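
/* For instance (a hypothetical fragment inside an OpenACC kernels
   region), this would diagnose

       #pragma acc loop gang
       for (...)
         #pragma acc loop gang vector
         for (...) ...

   with "inner loop uses same OpenACC parallelism as containing loop",
   since GOMP_DIM_MASK (GOMP_DIM_GANG) is set in both this_mask and
   outer_mask. */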
2306
2307 /* Scan a GIMPLE_OMP_FOR. */
2308
2309 static omp_context *
2310 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2311 {
2312 omp_context *ctx;
2313 size_t i;
2314 tree clauses = gimple_omp_for_clauses (stmt);
2315
2316 ctx = new_omp_context (stmt, outer_ctx);
2317
2318 if (is_gimple_omp_oacc (stmt))
2319 {
2320 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2321
2322 if (!tgt || is_oacc_parallel (tgt))
2323 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2324 {
2325 char const *check = NULL;
2326
2327 switch (OMP_CLAUSE_CODE (c))
2328 {
2329 case OMP_CLAUSE_GANG:
2330 check = "gang";
2331 break;
2332
2333 case OMP_CLAUSE_WORKER:
2334 check = "worker";
2335 break;
2336
2337 case OMP_CLAUSE_VECTOR:
2338 check = "vector";
2339 break;
2340
2341 default:
2342 break;
2343 }
2344
2345 if (check && OMP_CLAUSE_OPERAND (c, 0))
2346 error_at (gimple_location (stmt),
2347 "argument not permitted on %qs clause in"
2348 " OpenACC %<parallel%>", check);
2349 }
2350
2351 if (tgt && is_oacc_kernels (tgt))
2352 {
2353 /* Strip out reductions, as they are not handled yet. */
2354 tree *prev_ptr = &clauses;
2355
2356 while (tree probe = *prev_ptr)
2357 {
2358 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2359
2360 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2361 *prev_ptr = *next_ptr;
2362 else
2363 prev_ptr = next_ptr;
2364 }
2365
2366 gimple_omp_for_set_clauses (stmt, clauses);
2367 check_oacc_kernel_gwv (stmt, ctx);
2368 }
2369 }
2370
2371 scan_sharing_clauses (clauses, ctx);
2372
2373 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2374 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2375 {
2376 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2377 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2378 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2379 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2380 }
2381 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2382 return ctx;
2383 }
2384
2385 /* Duplicate #pragma omp simd, creating one copy for SIMT and another for SIMD. */
2386
2387 static void
2388 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2389 omp_context *outer_ctx)
2390 {
2391 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2392 gsi_replace (gsi, bind, false);
2393 gimple_seq seq = NULL;
2394 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2395 tree cond = create_tmp_var_raw (integer_type_node);
2396 DECL_CONTEXT (cond) = current_function_decl;
2397 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2398 gimple_bind_set_vars (bind, cond);
2399 gimple_call_set_lhs (g, cond);
2400 gimple_seq_add_stmt (&seq, g);
2401 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2402 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2403 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2404 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2405 gimple_seq_add_stmt (&seq, g);
2406 g = gimple_build_label (lab1);
2407 gimple_seq_add_stmt (&seq, g);
2408 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2409 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2410 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2411 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2412 gimple_omp_for_set_clauses (new_stmt, clause);
2413 gimple_seq_add_stmt (&seq, new_stmt);
2414 g = gimple_build_goto (lab3);
2415 gimple_seq_add_stmt (&seq, g);
2416 g = gimple_build_label (lab2);
2417 gimple_seq_add_stmt (&seq, g);
2418 gimple_seq_add_stmt (&seq, stmt);
2419 g = gimple_build_label (lab3);
2420 gimple_seq_add_stmt (&seq, g);
2421 gimple_bind_set_body (bind, seq);
2422 update_stmt (bind);
2423 scan_omp_for (new_stmt, outer_ctx);
2424 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2425 }
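
/* The sequence built above corresponds to GIMPLE along these lines
   (a sketch for illustration only):

       cond = IFN_GOMP_USE_SIMT ();
       if (cond != 0) goto lab1; else goto lab2;
       lab1: <copy of the simd loop, carrying a _SIMT_ clause>; goto lab3;
       lab2: <original simd loop>;
       lab3:

   so that later passes can keep the SIMT or the SIMD variant depending
   on the offload target. */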
2426
2427 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2428 struct walk_stmt_info *);
2429 static omp_context *maybe_lookup_ctx (gimple *);
2430
2431 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2432 for the scan phase loop. */
2433
2434 static void
2435 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2436 omp_context *outer_ctx)
2437 {
2438 /* The only change between inclusive and exclusive scan will be
2439 within the first simd loop, so just use inclusive in the
2440 worksharing loop. */
2441 outer_ctx->scan_inclusive = true;
2442 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2443 OMP_CLAUSE_DECL (c) = integer_zero_node;
2444
2445 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2446 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2447 gsi_replace (gsi, input_stmt, false);
2448 gimple_seq input_body = NULL;
2449 gimple_seq_add_stmt (&input_body, stmt);
2450 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2451
2452 gimple_stmt_iterator input1_gsi = gsi_none ();
2453 struct walk_stmt_info wi;
2454 memset (&wi, 0, sizeof (wi));
2455 wi.val_only = true;
2456 wi.info = (void *) &input1_gsi;
2457 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2458 gcc_assert (!gsi_end_p (input1_gsi));
2459
2460 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2461 gsi_next (&input1_gsi);
2462 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2463 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2464 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2465 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2466 std::swap (input_stmt1, scan_stmt1);
2467
2468 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2469 gimple_omp_set_body (input_stmt1, NULL);
2470
2471 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2472 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2473
2474 gimple_omp_set_body (input_stmt1, input_body1);
2475 gimple_omp_set_body (scan_stmt1, NULL);
2476
2477 gimple_stmt_iterator input2_gsi = gsi_none ();
2478 memset (&wi, 0, sizeof (wi));
2479 wi.val_only = true;
2480 wi.info = (void *) &input2_gsi;
2481 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2482 NULL, &wi);
2483 gcc_assert (!gsi_end_p (input2_gsi));
2484
2485 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2486 gsi_next (&input2_gsi);
2487 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2488 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2489 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2490 std::swap (input_stmt2, scan_stmt2);
2491
2492 gimple_omp_set_body (input_stmt2, NULL);
2493
2494 gimple_omp_set_body (input_stmt, input_body);
2495 gimple_omp_set_body (scan_stmt, scan_body);
2496
2497 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2498 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2499
2500 ctx = new_omp_context (scan_stmt, outer_ctx);
2501 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2502
2503 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2504 }
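
/* In outline (an illustrative summary): the simd loop is duplicated;
   the original copy keeps only the statements before the inner scan
   separator (the input phase) and is wrapped in the clause-less
   GIMPLE_OMP_SCAN built above, while the duplicate keeps only the
   statements after the separator (the scan phase) and is wrapped in
   the GIMPLE_OMP_SCAN carrying the inclusive clause. */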
2505
2506 /* Scan an OpenMP sections directive. */
2507
2508 static void
2509 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2510 {
2511 omp_context *ctx;
2512
2513 ctx = new_omp_context (stmt, outer_ctx);
2514 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2515 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2516 }
2517
2518 /* Scan an OpenMP single directive. */
2519
2520 static void
2521 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2522 {
2523 omp_context *ctx;
2524 tree name;
2525
2526 ctx = new_omp_context (stmt, outer_ctx);
2527 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2528 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2529 name = create_tmp_var_name (".omp_copy_s");
2530 name = build_decl (gimple_location (stmt),
2531 TYPE_DECL, name, ctx->record_type);
2532 TYPE_NAME (ctx->record_type) = name;
2533
2534 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2535 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2536
2537 if (TYPE_FIELDS (ctx->record_type) == NULL)
2538 ctx->record_type = NULL;
2539 else
2540 layout_type (ctx->record_type);
2541 }
2542
2543 /* Scan a GIMPLE_OMP_TARGET. */
2544
2545 static void
2546 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2547 {
2548 omp_context *ctx;
2549 tree name;
2550 bool offloaded = is_gimple_omp_offloaded (stmt);
2551 tree clauses = gimple_omp_target_clauses (stmt);
2552
2553 ctx = new_omp_context (stmt, outer_ctx);
2554 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2555 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2556 name = create_tmp_var_name (".omp_data_t");
2557 name = build_decl (gimple_location (stmt),
2558 TYPE_DECL, name, ctx->record_type);
2559 DECL_ARTIFICIAL (name) = 1;
2560 DECL_NAMELESS (name) = 1;
2561 TYPE_NAME (ctx->record_type) = name;
2562 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2563
2564 if (offloaded)
2565 {
2566 create_omp_child_function (ctx, false);
2567 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2568 }
2569
2570 scan_sharing_clauses (clauses, ctx);
2571 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2572
2573 if (TYPE_FIELDS (ctx->record_type) == NULL)
2574 ctx->record_type = ctx->receiver_decl = NULL;
2575 else
2576 {
2577 TYPE_FIELDS (ctx->record_type)
2578 = nreverse (TYPE_FIELDS (ctx->record_type));
2579 if (flag_checking)
2580 {
2581 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2582 for (tree field = TYPE_FIELDS (ctx->record_type);
2583 field;
2584 field = DECL_CHAIN (field))
2585 gcc_assert (DECL_ALIGN (field) == align);
2586 }
2587 layout_type (ctx->record_type);
2588 if (offloaded)
2589 fixup_child_record_type (ctx);
2590 }
2591 }
2592
2593 /* Scan an OpenMP teams directive. */
2594
2595 static void
2596 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2597 {
2598 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2599
2600 if (!gimple_omp_teams_host (stmt))
2601 {
2602 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2603 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2604 return;
2605 }
2606 taskreg_contexts.safe_push (ctx);
2607 gcc_assert (taskreg_nesting_level == 1);
2608 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2609 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2610 tree name = create_tmp_var_name (".omp_data_s");
2611 name = build_decl (gimple_location (stmt),
2612 TYPE_DECL, name, ctx->record_type);
2613 DECL_ARTIFICIAL (name) = 1;
2614 DECL_NAMELESS (name) = 1;
2615 TYPE_NAME (ctx->record_type) = name;
2616 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2617 create_omp_child_function (ctx, false);
2618 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2619
2620 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2621 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2622
2623 if (TYPE_FIELDS (ctx->record_type) == NULL)
2624 ctx->record_type = ctx->receiver_decl = NULL;
2625 }
2626
2627 /* Check nesting restrictions. */
2628 static bool
2629 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2630 {
2631 tree c;
2632
2633 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2634 /* GRID_BODY is an artificial construct; nesting rules will be checked in
2635 the original copy of its contents. */
2636 return true;
2637
2638 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2639 inside an OpenACC CTX. */
2640 if (!(is_gimple_omp (stmt)
2641 && is_gimple_omp_oacc (stmt))
2642 /* Except for atomic codes that we share with OpenMP. */
2643 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2644 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2645 {
2646 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2647 {
2648 error_at (gimple_location (stmt),
2649 "non-OpenACC construct inside of OpenACC routine");
2650 return false;
2651 }
2652 else
2653 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2654 if (is_gimple_omp (octx->stmt)
2655 && is_gimple_omp_oacc (octx->stmt))
2656 {
2657 error_at (gimple_location (stmt),
2658 "non-OpenACC construct inside of OpenACC region");
2659 return false;
2660 }
2661 }
2662
2663 if (ctx != NULL)
2664 {
2665 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2666 && ctx->outer
2667 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2668 ctx = ctx->outer;
2669 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2670 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2671 {
2672 c = NULL_TREE;
2673 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2674 {
2675 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2676 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2677 {
2678 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2679 && (ctx->outer == NULL
2680 || !gimple_omp_for_combined_into_p (ctx->stmt)
2681 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2682 || (gimple_omp_for_kind (ctx->outer->stmt)
2683 != GF_OMP_FOR_KIND_FOR)
2684 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2685 {
2686 error_at (gimple_location (stmt),
2687 "%<ordered simd threads%> must be closely "
2688 "nested inside of %<for simd%> region");
2689 return false;
2690 }
2691 return true;
2692 }
2693 }
2694 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2695 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2696 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2697 return true;
2698 error_at (gimple_location (stmt),
2699 "OpenMP constructs other than %<#pragma omp ordered simd%>"
2700 " or %<#pragma omp atomic%> may not be nested inside"
2701 " %<simd%> region");
2702 return false;
2703 }
2704 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2705 {
2706 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2707 || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2708 && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2709 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2710 {
2711 error_at (gimple_location (stmt),
2712 "only %<distribute%> or %<parallel%> regions are "
2713 "allowed to be strictly nested inside %<teams%> "
2714 "region");
2715 return false;
2716 }
2717 }
2718 }
2719 switch (gimple_code (stmt))
2720 {
2721 case GIMPLE_OMP_FOR:
2722 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2723 return true;
2724 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2725 {
2726 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2727 {
2728 error_at (gimple_location (stmt),
2729 "%<distribute%> region must be strictly nested "
2730 "inside %<teams%> construct");
2731 return false;
2732 }
2733 return true;
2734 }
2735 /* We split a taskloop into a task with a nested taskloop inside it. */
2736 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2737 return true;
2738 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2739 {
2740 bool ok = false;
2741
2742 if (ctx)
2743 switch (gimple_code (ctx->stmt))
2744 {
2745 case GIMPLE_OMP_FOR:
2746 ok = (gimple_omp_for_kind (ctx->stmt)
2747 == GF_OMP_FOR_KIND_OACC_LOOP);
2748 break;
2749
2750 case GIMPLE_OMP_TARGET:
2751 switch (gimple_omp_target_kind (ctx->stmt))
2752 {
2753 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2754 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2755 ok = true;
2756 break;
2757
2758 default:
2759 break;
2760 }
2761
2762 default:
2763 break;
2764 }
2765 else if (oacc_get_fn_attrib (current_function_decl))
2766 ok = true;
2767 if (!ok)
2768 {
2769 error_at (gimple_location (stmt),
2770 "OpenACC loop directive must be associated with"
2771 " an OpenACC compute region");
2772 return false;
2773 }
2774 }
2775 /* FALLTHRU */
2776 case GIMPLE_CALL:
2777 if (is_gimple_call (stmt)
2778 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2779 == BUILT_IN_GOMP_CANCEL
2780 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2781 == BUILT_IN_GOMP_CANCELLATION_POINT))
2782 {
2783 const char *bad = NULL;
2784 const char *kind = NULL;
2785 const char *construct
2786 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2787 == BUILT_IN_GOMP_CANCEL)
2788 ? "#pragma omp cancel"
2789 : "#pragma omp cancellation point";
2790 if (ctx == NULL)
2791 {
2792 error_at (gimple_location (stmt), "orphaned %qs construct",
2793 construct);
2794 return false;
2795 }
2796 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2797 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2798 : 0)
2799 {
2800 case 1:
2801 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2802 bad = "#pragma omp parallel";
2803 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2804 == BUILT_IN_GOMP_CANCEL
2805 && !integer_zerop (gimple_call_arg (stmt, 1)))
2806 ctx->cancellable = true;
2807 kind = "parallel";
2808 break;
2809 case 2:
2810 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2811 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2812 bad = "#pragma omp for";
2813 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2814 == BUILT_IN_GOMP_CANCEL
2815 && !integer_zerop (gimple_call_arg (stmt, 1)))
2816 {
2817 ctx->cancellable = true;
2818 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2819 OMP_CLAUSE_NOWAIT))
2820 warning_at (gimple_location (stmt), 0,
2821 "%<#pragma omp cancel for%> inside "
2822 "%<nowait%> for construct");
2823 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2824 OMP_CLAUSE_ORDERED))
2825 warning_at (gimple_location (stmt), 0,
2826 "%<#pragma omp cancel for%> inside "
2827 "%<ordered%> for construct");
2828 }
2829 kind = "for";
2830 break;
2831 case 4:
2832 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2833 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2834 bad = "#pragma omp sections";
2835 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2836 == BUILT_IN_GOMP_CANCEL
2837 && !integer_zerop (gimple_call_arg (stmt, 1)))
2838 {
2839 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2840 {
2841 ctx->cancellable = true;
2842 if (omp_find_clause (gimple_omp_sections_clauses
2843 (ctx->stmt),
2844 OMP_CLAUSE_NOWAIT))
2845 warning_at (gimple_location (stmt), 0,
2846 "%<#pragma omp cancel sections%> inside "
2847 "%<nowait%> sections construct");
2848 }
2849 else
2850 {
2851 gcc_assert (ctx->outer
2852 && gimple_code (ctx->outer->stmt)
2853 == GIMPLE_OMP_SECTIONS);
2854 ctx->outer->cancellable = true;
2855 if (omp_find_clause (gimple_omp_sections_clauses
2856 (ctx->outer->stmt),
2857 OMP_CLAUSE_NOWAIT))
2858 warning_at (gimple_location (stmt), 0,
2859 "%<#pragma omp cancel sections%> inside "
2860 "%<nowait%> sections construct");
2861 }
2862 }
2863 kind = "sections";
2864 break;
2865 case 8:
2866 if (!is_task_ctx (ctx)
2867 && (!is_taskloop_ctx (ctx)
2868 || ctx->outer == NULL
2869 || !is_task_ctx (ctx->outer)))
2870 bad = "#pragma omp task";
2871 else
2872 {
2873 for (omp_context *octx = ctx->outer;
2874 octx; octx = octx->outer)
2875 {
2876 switch (gimple_code (octx->stmt))
2877 {
2878 case GIMPLE_OMP_TASKGROUP:
2879 break;
2880 case GIMPLE_OMP_TARGET:
2881 if (gimple_omp_target_kind (octx->stmt)
2882 != GF_OMP_TARGET_KIND_REGION)
2883 continue;
2884 /* FALLTHRU */
2885 case GIMPLE_OMP_PARALLEL:
2886 case GIMPLE_OMP_TEAMS:
2887 error_at (gimple_location (stmt),
2888 "%<%s taskgroup%> construct not closely "
2889 "nested inside of %<taskgroup%> region",
2890 construct);
2891 return false;
2892 case GIMPLE_OMP_TASK:
2893 if (gimple_omp_task_taskloop_p (octx->stmt)
2894 && octx->outer
2895 && is_taskloop_ctx (octx->outer))
2896 {
2897 tree clauses
2898 = gimple_omp_for_clauses (octx->outer->stmt);
2899 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2900 break;
2901 }
2902 continue;
2903 default:
2904 continue;
2905 }
2906 break;
2907 }
2908 ctx->cancellable = true;
2909 }
2910 kind = "taskgroup";
2911 break;
2912 default:
2913 error_at (gimple_location (stmt), "invalid arguments");
2914 return false;
2915 }
2916 if (bad)
2917 {
2918 error_at (gimple_location (stmt),
2919 "%<%s %s%> construct not closely nested inside of %qs",
2920 construct, kind, bad);
2921 return false;
2922 }
2923 }
2924 /* FALLTHRU */
2925 case GIMPLE_OMP_SECTIONS:
2926 case GIMPLE_OMP_SINGLE:
2927 for (; ctx != NULL; ctx = ctx->outer)
2928 switch (gimple_code (ctx->stmt))
2929 {
2930 case GIMPLE_OMP_FOR:
2931 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2932 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2933 break;
2934 /* FALLTHRU */
2935 case GIMPLE_OMP_SECTIONS:
2936 case GIMPLE_OMP_SINGLE:
2937 case GIMPLE_OMP_ORDERED:
2938 case GIMPLE_OMP_MASTER:
2939 case GIMPLE_OMP_TASK:
2940 case GIMPLE_OMP_CRITICAL:
2941 if (is_gimple_call (stmt))
2942 {
2943 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2944 != BUILT_IN_GOMP_BARRIER)
2945 return true;
2946 error_at (gimple_location (stmt),
2947 "barrier region may not be closely nested inside "
2948 "of work-sharing, %<critical%>, %<ordered%>, "
2949 "%<master%>, explicit %<task%> or %<taskloop%> "
2950 "region");
2951 return false;
2952 }
2953 error_at (gimple_location (stmt),
2954 "work-sharing region may not be closely nested inside "
2955 "of work-sharing, %<critical%>, %<ordered%>, "
2956 "%<master%>, explicit %<task%> or %<taskloop%> region");
2957 return false;
2958 case GIMPLE_OMP_PARALLEL:
2959 case GIMPLE_OMP_TEAMS:
2960 return true;
2961 case GIMPLE_OMP_TARGET:
2962 if (gimple_omp_target_kind (ctx->stmt)
2963 == GF_OMP_TARGET_KIND_REGION)
2964 return true;
2965 break;
2966 default:
2967 break;
2968 }
2969 break;
2970 case GIMPLE_OMP_MASTER:
2971 for (; ctx != NULL; ctx = ctx->outer)
2972 switch (gimple_code (ctx->stmt))
2973 {
2974 case GIMPLE_OMP_FOR:
2975 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2976 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2977 break;
2978 /* FALLTHRU */
2979 case GIMPLE_OMP_SECTIONS:
2980 case GIMPLE_OMP_SINGLE:
2981 case GIMPLE_OMP_TASK:
2982 error_at (gimple_location (stmt),
2983 "%<master%> region may not be closely nested inside "
2984 "of work-sharing, explicit %<task%> or %<taskloop%> "
2985 "region");
2986 return false;
2987 case GIMPLE_OMP_PARALLEL:
2988 case GIMPLE_OMP_TEAMS:
2989 return true;
2990 case GIMPLE_OMP_TARGET:
2991 if (gimple_omp_target_kind (ctx->stmt)
2992 == GF_OMP_TARGET_KIND_REGION)
2993 return true;
2994 break;
2995 default:
2996 break;
2997 }
2998 break;
2999 case GIMPLE_OMP_TASK:
3000 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3001 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3002 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3003 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3004 {
3005 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3006 error_at (OMP_CLAUSE_LOCATION (c),
3007 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3008 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3009 return false;
3010 }
3011 break;
3012 case GIMPLE_OMP_ORDERED:
3013 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3014 c; c = OMP_CLAUSE_CHAIN (c))
3015 {
3016 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3017 {
3018 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3019 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3020 continue;
3021 }
3022 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3023 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3024 || kind == OMP_CLAUSE_DEPEND_SINK)
3025 {
3026 tree oclause;
3027 /* Look for a containing ordered(N) loop. */
3028 if (ctx == NULL
3029 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3030 || (oclause
3031 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3032 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3033 {
3034 error_at (OMP_CLAUSE_LOCATION (c),
3035 "%<ordered%> construct with %<depend%> clause "
3036 "must be closely nested inside an %<ordered%> "
3037 "loop");
3038 return false;
3039 }
3040 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3041 {
3042 error_at (OMP_CLAUSE_LOCATION (c),
3043 "%<ordered%> construct with %<depend%> clause "
3044 "must be closely nested inside a loop with "
3045 "%<ordered%> clause with a parameter");
3046 return false;
3047 }
3048 }
3049 else
3050 {
3051 error_at (OMP_CLAUSE_LOCATION (c),
3052 "invalid depend kind in omp %<ordered%> %<depend%>");
3053 return false;
3054 }
3055 }
3056 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3057 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3058 {
3059 /* An ordered simd must be closely nested inside a simd region,
3060 and a simd region must not encounter constructs other than
3061 ordered simd; therefore an ordered simd may be either orphaned,
3062 or ctx->stmt must be a simd. The latter case has already been
3063 handled earlier. */
3064 if (ctx != NULL)
3065 {
3066 error_at (gimple_location (stmt),
3067 "%<ordered%> %<simd%> must be closely nested inside "
3068 "%<simd%> region");
3069 return false;
3070 }
3071 }
3072 for (; ctx != NULL; ctx = ctx->outer)
3073 switch (gimple_code (ctx->stmt))
3074 {
3075 case GIMPLE_OMP_CRITICAL:
3076 case GIMPLE_OMP_TASK:
3077 case GIMPLE_OMP_ORDERED:
3078 ordered_in_taskloop:
3079 error_at (gimple_location (stmt),
3080 "%<ordered%> region may not be closely nested inside "
3081 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3082 "%<taskloop%> region");
3083 return false;
3084 case GIMPLE_OMP_FOR:
3085 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3086 goto ordered_in_taskloop;
3087 tree o;
3088 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3089 OMP_CLAUSE_ORDERED);
3090 if (o == NULL)
3091 {
3092 error_at (gimple_location (stmt),
3093 "%<ordered%> region must be closely nested inside "
3094 "a loop region with an %<ordered%> clause");
3095 return false;
3096 }
3097 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3098 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3099 {
3100 error_at (gimple_location (stmt),
3101 "%<ordered%> region without %<depend%> clause may "
3102 "not be closely nested inside a loop region with "
3103 "an %<ordered%> clause with a parameter");
3104 return false;
3105 }
3106 return true;
3107 case GIMPLE_OMP_TARGET:
3108 if (gimple_omp_target_kind (ctx->stmt)
3109 != GF_OMP_TARGET_KIND_REGION)
3110 break;
3111 /* FALLTHRU */
3112 case GIMPLE_OMP_PARALLEL:
3113 case GIMPLE_OMP_TEAMS:
3114 error_at (gimple_location (stmt),
3115 "%<ordered%> region must be closely nested inside "
3116 "a loop region with an %<ordered%> clause");
3117 return false;
3118 default:
3119 break;
3120 }
3121 break;
3122 case GIMPLE_OMP_CRITICAL:
3123 {
3124 tree this_stmt_name
3125 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3126 for (; ctx != NULL; ctx = ctx->outer)
3127 if (gomp_critical *other_crit
3128 = dyn_cast <gomp_critical *> (ctx->stmt))
3129 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3130 {
3131 error_at (gimple_location (stmt),
3132 "%<critical%> region may not be nested inside "
3133 "a %<critical%> region with the same name");
3134 return false;
3135 }
3136 }
3137 break;
3138 case GIMPLE_OMP_TEAMS:
3139 if (ctx == NULL)
3140 break;
3141 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3142 || (gimple_omp_target_kind (ctx->stmt)
3143 != GF_OMP_TARGET_KIND_REGION))
3144 {
3145 /* A teams construct can appear either strictly nested inside of a
3146 target construct with no intervening stmts, or can be encountered
3147 only by the initial task (so it must not appear inside any OpenMP
3148 construct). */
3149 error_at (gimple_location (stmt),
3150 "%<teams%> construct must be closely nested inside of "
3151 "%<target%> construct or not nested in any OpenMP "
3152 "construct");
3153 return false;
3154 }
3155 break;
3156 case GIMPLE_OMP_TARGET:
3157 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3158 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3159 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3160 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3161 {
3162 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3163 error_at (OMP_CLAUSE_LOCATION (c),
3164 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3165 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3166 return false;
3167 }
3168 if (is_gimple_omp_offloaded (stmt)
3169 && oacc_get_fn_attrib (cfun->decl) != NULL)
3170 {
3171 error_at (gimple_location (stmt),
3172 "OpenACC region inside of OpenACC routine, nested "
3173 "parallelism not supported yet");
3174 return false;
3175 }
3176 for (; ctx != NULL; ctx = ctx->outer)
3177 {
3178 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3179 {
3180 if (is_gimple_omp (stmt)
3181 && is_gimple_omp_oacc (stmt)
3182 && is_gimple_omp (ctx->stmt))
3183 {
3184 error_at (gimple_location (stmt),
3185 "OpenACC construct inside of non-OpenACC region");
3186 return false;
3187 }
3188 continue;
3189 }
3190
3191 const char *stmt_name, *ctx_stmt_name;
3192 switch (gimple_omp_target_kind (stmt))
3193 {
3194 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3195 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3196 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3197 case GF_OMP_TARGET_KIND_ENTER_DATA:
3198 stmt_name = "target enter data"; break;
3199 case GF_OMP_TARGET_KIND_EXIT_DATA:
3200 stmt_name = "target exit data"; break;
3201 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3202 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3203 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3204 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3205 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3206 stmt_name = "enter/exit data"; break;
3207 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3208 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3209 break;
3210 default: gcc_unreachable ();
3211 }
3212 switch (gimple_omp_target_kind (ctx->stmt))
3213 {
3214 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3215 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3216 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3217 ctx_stmt_name = "parallel"; break;
3218 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3219 ctx_stmt_name = "kernels"; break;
3220 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3221 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3222 ctx_stmt_name = "host_data"; break;
3223 default: gcc_unreachable ();
3224 }
3225
3226 /* OpenACC/OpenMP mismatch? */
3227 if (is_gimple_omp_oacc (stmt)
3228 != is_gimple_omp_oacc (ctx->stmt))
3229 {
3230 error_at (gimple_location (stmt),
3231 "%s %qs construct inside of %s %qs region",
3232 (is_gimple_omp_oacc (stmt)
3233 ? "OpenACC" : "OpenMP"), stmt_name,
3234 (is_gimple_omp_oacc (ctx->stmt)
3235 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3236 return false;
3237 }
3238 if (is_gimple_omp_offloaded (ctx->stmt))
3239 {
3240 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3241 if (is_gimple_omp_oacc (ctx->stmt))
3242 {
3243 error_at (gimple_location (stmt),
3244 "%qs construct inside of %qs region",
3245 stmt_name, ctx_stmt_name);
3246 return false;
3247 }
3248 else
3249 {
3250 warning_at (gimple_location (stmt), 0,
3251 "%qs construct inside of %qs region",
3252 stmt_name, ctx_stmt_name);
3253 }
3254 }
3255 }
3256 break;
3257 default:
3258 break;
3259 }
3260 return true;
3261 }
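
/* An example of a violation rejected here (a hypothetical fragment,
   for illustration):

       #pragma omp critical
       {
         #pragma omp barrier
       }

   is diagnosed with "barrier region may not be closely nested inside
   of work-sharing, %<critical%>, ..." above, and the offending
   statement is then replaced by a GIMPLE_NOP in scan_omp_1_stmt once
   this function returns false. */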
3262
3263
3264 /* Helper function for scan_omp.
3265
3266 Callback for walk_tree or operators in walk_gimple_stmt used to
3267 scan for OMP directives in TP. */
3268
3269 static tree
3270 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3271 {
3272 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3273 omp_context *ctx = (omp_context *) wi->info;
3274 tree t = *tp;
3275
3276 switch (TREE_CODE (t))
3277 {
3278 case VAR_DECL:
3279 case PARM_DECL:
3280 case LABEL_DECL:
3281 case RESULT_DECL:
3282 if (ctx)
3283 {
3284 tree repl = remap_decl (t, &ctx->cb);
3285 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3286 *tp = repl;
3287 }
3288 break;
3289
3290 default:
3291 if (ctx && TYPE_P (t))
3292 *tp = remap_type (t, &ctx->cb);
3293 else if (!DECL_P (t))
3294 {
3295 *walk_subtrees = 1;
3296 if (ctx)
3297 {
3298 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3299 if (tem != TREE_TYPE (t))
3300 {
3301 if (TREE_CODE (t) == INTEGER_CST)
3302 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3303 else
3304 TREE_TYPE (t) = tem;
3305 }
3306 }
3307 }
3308 break;
3309 }
3310
3311 return NULL_TREE;
3312 }
3313
3314 /* Return true if FNDECL is a setjmp or a longjmp. */
3315
3316 static bool
3317 setjmp_or_longjmp_p (const_tree fndecl)
3318 {
3319 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3320 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3321 return true;
3322
3323 tree declname = DECL_NAME (fndecl);
3324 if (!declname)
3325 return false;
3326 const char *name = IDENTIFIER_POINTER (declname);
3327 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3328 }
3329
3330
3331 /* Helper function for scan_omp.
3332
3333 Callback for walk_gimple_stmt used to scan for OMP directives in
3334 the current statement in GSI. */
3335
3336 static tree
3337 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3338 struct walk_stmt_info *wi)
3339 {
3340 gimple *stmt = gsi_stmt (*gsi);
3341 omp_context *ctx = (omp_context *) wi->info;
3342
3343 if (gimple_has_location (stmt))
3344 input_location = gimple_location (stmt);
3345
3346 /* Check the nesting restrictions. */
3347 bool remove = false;
3348 if (is_gimple_omp (stmt))
3349 remove = !check_omp_nesting_restrictions (stmt, ctx);
3350 else if (is_gimple_call (stmt))
3351 {
3352 tree fndecl = gimple_call_fndecl (stmt);
3353 if (fndecl)
3354 {
3355 if (setjmp_or_longjmp_p (fndecl)
3356 && ctx
3357 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3358 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3359 {
3360 remove = true;
3361 error_at (gimple_location (stmt),
3362 "setjmp/longjmp inside simd construct");
3363 }
3364 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3365 switch (DECL_FUNCTION_CODE (fndecl))
3366 {
3367 case BUILT_IN_GOMP_BARRIER:
3368 case BUILT_IN_GOMP_CANCEL:
3369 case BUILT_IN_GOMP_CANCELLATION_POINT:
3370 case BUILT_IN_GOMP_TASKYIELD:
3371 case BUILT_IN_GOMP_TASKWAIT:
3372 case BUILT_IN_GOMP_TASKGROUP_START:
3373 case BUILT_IN_GOMP_TASKGROUP_END:
3374 remove = !check_omp_nesting_restrictions (stmt, ctx);
3375 break;
3376 default:
3377 break;
3378 }
3379 }
3380 }
3381 if (remove)
3382 {
3383 stmt = gimple_build_nop ();
3384 gsi_replace (gsi, stmt, false);
3385 }
3386
3387 *handled_ops_p = true;
3388
3389 switch (gimple_code (stmt))
3390 {
3391 case GIMPLE_OMP_PARALLEL:
3392 taskreg_nesting_level++;
3393 scan_omp_parallel (gsi, ctx);
3394 taskreg_nesting_level--;
3395 break;
3396
3397 case GIMPLE_OMP_TASK:
3398 taskreg_nesting_level++;
3399 scan_omp_task (gsi, ctx);
3400 taskreg_nesting_level--;
3401 break;
3402
3403 case GIMPLE_OMP_FOR:
3404 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3405 == GF_OMP_FOR_KIND_SIMD)
3406 && gimple_omp_for_combined_into_p (stmt)
3407 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3408 {
3409 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3410 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3411 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3412 {
3413 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3414 break;
3415 }
3416 }
3417 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3418 == GF_OMP_FOR_KIND_SIMD)
3419 && omp_maybe_offloaded_ctx (ctx)
3420 && omp_max_simt_vf ())
3421 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3422 else
3423 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3424 break;
3425
3426 case GIMPLE_OMP_SECTIONS:
3427 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3428 break;
3429
3430 case GIMPLE_OMP_SINGLE:
3431 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3432 break;
3433
3434 case GIMPLE_OMP_SCAN:
3435 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3436 {
3437 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3438 ctx->scan_inclusive = true;
3439 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3440 ctx->scan_exclusive = true;
3441 }
3442 /* FALLTHRU */
3443 case GIMPLE_OMP_SECTION:
3444 case GIMPLE_OMP_MASTER:
3445 case GIMPLE_OMP_ORDERED:
3446 case GIMPLE_OMP_CRITICAL:
3447 case GIMPLE_OMP_GRID_BODY:
3448 ctx = new_omp_context (stmt, ctx);
3449 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3450 break;
3451
3452 case GIMPLE_OMP_TASKGROUP:
3453 ctx = new_omp_context (stmt, ctx);
3454 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3455 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3456 break;
3457
3458 case GIMPLE_OMP_TARGET:
3459 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3460 break;
3461
3462 case GIMPLE_OMP_TEAMS:
3463 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3464 {
3465 taskreg_nesting_level++;
3466 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3467 taskreg_nesting_level--;
3468 }
3469 else
3470 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3471 break;
3472
3473 case GIMPLE_BIND:
3474 {
3475 tree var;
3476
3477 *handled_ops_p = false;
3478 if (ctx)
3479 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3480 var ;
3481 var = DECL_CHAIN (var))
3482 insert_decl_map (&ctx->cb, var, var);
3483 }
3484 break;
3485 default:
3486 *handled_ops_p = false;
3487 break;
3488 }
3489
3490 return NULL_TREE;
3491 }
3492
3493
3494 /* Scan all the statements starting at the current statement. CTX
3495 contains context information about the OMP directives and
3496 clauses found during the scan. */
3497
3498 static void
3499 scan_omp (gimple_seq *body_p, omp_context *ctx)
3500 {
3501 location_t saved_location;
3502 struct walk_stmt_info wi;
3503
3504 memset (&wi, 0, sizeof (wi));
3505 wi.info = ctx;
3506 wi.want_locations = true;
3507
3508 saved_location = input_location;
3509 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3510 input_location = saved_location;
3511 }
3512 \f
3513 /* Re-gimplification and code generation routines. */
3514
3515 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3516 of BIND if in a method. */
3517
3518 static void
3519 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3520 {
3521 if (DECL_ARGUMENTS (current_function_decl)
3522 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3523 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3524 == POINTER_TYPE))
3525 {
3526 tree vars = gimple_bind_vars (bind);
3527 for (tree *pvar = &vars; *pvar; )
3528 if (omp_member_access_dummy_var (*pvar))
3529 *pvar = DECL_CHAIN (*pvar);
3530 else
3531 pvar = &DECL_CHAIN (*pvar);
3532 gimple_bind_set_vars (bind, vars);
3533 }
3534 }
3535
3536 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3537 block and its subblocks. */
3538
3539 static void
3540 remove_member_access_dummy_vars (tree block)
3541 {
3542 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3543 if (omp_member_access_dummy_var (*pvar))
3544 *pvar = DECL_CHAIN (*pvar);
3545 else
3546 pvar = &DECL_CHAIN (*pvar);
3547
3548 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3549 remove_member_access_dummy_vars (block);
3550 }
3551
3552 /* If a context was created for STMT when it was scanned, return it. */
3553
3554 static omp_context *
3555 maybe_lookup_ctx (gimple *stmt)
3556 {
3557 splay_tree_node n;
3558 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3559 return n ? (omp_context *) n->value : NULL;
3560 }
3561
3562
3563 /* Find the mapping for DECL in CTX or the immediately enclosing
3564 context that has a mapping for DECL.
3565
3566 If CTX is a nested parallel directive, we may have to use the decl
3567 mappings created in CTX's parent context. Suppose that we have the
3568 following parallel nesting (variable UIDs shown for clarity):
3569
3570 iD.1562 = 0;
3571 #omp parallel shared(iD.1562) -> outer parallel
3572 iD.1562 = iD.1562 + 1;
3573
3574 #omp parallel shared (iD.1562) -> inner parallel
3575 iD.1562 = iD.1562 - 1;
3576
3577 Each parallel structure will create a distinct .omp_data_s structure
3578 for copying iD.1562 in/out of the directive:
3579
3580 outer parallel .omp_data_s.1.i -> iD.1562
3581 inner parallel .omp_data_s.2.i -> iD.1562
3582
3583 A shared variable mapping will produce a copy-out operation before
3584 the parallel directive and a copy-in operation after it. So, in
3585 this case we would have:
3586
3587 iD.1562 = 0;
3588 .omp_data_o.1.i = iD.1562;
3589 #omp parallel shared(iD.1562) -> outer parallel
3590 .omp_data_i.1 = &.omp_data_o.1
3591 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3592
3593 .omp_data_o.2.i = iD.1562; -> **
3594 #omp parallel shared(iD.1562) -> inner parallel
3595 .omp_data_i.2 = &.omp_data_o.2
3596 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3597
3598
3599 ** This is a problem. The symbol iD.1562 cannot be referenced
3600 inside the body of the outer parallel region. But since we are
3601 emitting this copy operation while expanding the inner parallel
3602 directive, we need to access the CTX structure of the outer
3603 parallel directive to get the correct mapping:
3604
3605 .omp_data_o.2.i = .omp_data_i.1->i
3606
3607 Since there may be other workshare or parallel directives enclosing
3608 the parallel directive, it may be necessary to walk up the context
3609 parent chain. This is not a problem in general because nested
3610 parallelism happens only rarely. */
3611
3612 static tree
3613 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3614 {
3615 tree t;
3616 omp_context *up;
3617
3618 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3619 t = maybe_lookup_decl (decl, up);
3620
3621 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3622
3623 return t ? t : decl;
3624 }
3625
3626
3627 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3628 in outer contexts. */
3629
3630 static tree
3631 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3632 {
3633 tree t = NULL;
3634 omp_context *up;
3635
3636 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3637 t = maybe_lookup_decl (decl, up);
3638
3639 return t ? t : decl;
3640 }
3641
3642
3643 /* Construct the initialization value for reduction operation OP. */
3644
3645 tree
3646 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3647 {
3648 switch (op)
3649 {
3650 case PLUS_EXPR:
3651 case MINUS_EXPR:
3652 case BIT_IOR_EXPR:
3653 case BIT_XOR_EXPR:
3654 case TRUTH_OR_EXPR:
3655 case TRUTH_ORIF_EXPR:
3656 case TRUTH_XOR_EXPR:
3657 case NE_EXPR:
3658 return build_zero_cst (type);
3659
3660 case MULT_EXPR:
3661 case TRUTH_AND_EXPR:
3662 case TRUTH_ANDIF_EXPR:
3663 case EQ_EXPR:
3664 return fold_convert_loc (loc, type, integer_one_node);
3665
3666 case BIT_AND_EXPR:
3667 return fold_convert_loc (loc, type, integer_minus_one_node);
3668
3669 case MAX_EXPR:
3670 if (SCALAR_FLOAT_TYPE_P (type))
3671 {
3672 REAL_VALUE_TYPE max, min;
3673 if (HONOR_INFINITIES (type))
3674 {
3675 real_inf (&max);
3676 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3677 }
3678 else
3679 real_maxval (&min, 1, TYPE_MODE (type));
3680 return build_real (type, min);
3681 }
3682 else if (POINTER_TYPE_P (type))
3683 {
3684 wide_int min
3685 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3686 return wide_int_to_tree (type, min);
3687 }
3688 else
3689 {
3690 gcc_assert (INTEGRAL_TYPE_P (type));
3691 return TYPE_MIN_VALUE (type);
3692 }
3693
3694 case MIN_EXPR:
3695 if (SCALAR_FLOAT_TYPE_P (type))
3696 {
3697 REAL_VALUE_TYPE max;
3698 if (HONOR_INFINITIES (type))
3699 real_inf (&max);
3700 else
3701 real_maxval (&max, 0, TYPE_MODE (type));
3702 return build_real (type, max);
3703 }
3704 else if (POINTER_TYPE_P (type))
3705 {
3706 wide_int max
3707 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3708 return wide_int_to_tree (type, max);
3709 }
3710 else
3711 {
3712 gcc_assert (INTEGRAL_TYPE_P (type));
3713 return TYPE_MAX_VALUE (type);
3714 }
3715
3716 default:
3717 gcc_unreachable ();
3718 }
3719 }
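
/* Illustrative examples of the identity values built above (sketch):

     omp_reduction_init_op (loc, PLUS_EXPR, integer_type_node)    -> 0
     omp_reduction_init_op (loc, MULT_EXPR, integer_type_node)    -> 1
     omp_reduction_init_op (loc, BIT_AND_EXPR, integer_type_node) -> -1
     omp_reduction_init_op (loc, MAX_EXPR, double_type_node)      -> -Inf
       (or -DBL_MAX when infinities are not honored for the mode)
     omp_reduction_init_op (loc, MIN_EXPR, integer_type_node)     -> INT_MAX  */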
3720
3721 /* Construct the initialization value for reduction CLAUSE. */
3722
3723 tree
3724 omp_reduction_init (tree clause, tree type)
3725 {
3726 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3727 OMP_CLAUSE_REDUCTION_CODE (clause), type);
3728 }
3729
3730 /* Return the alignment to be assumed for the variable in CLAUSE, which
3731 should be an OMP_CLAUSE_ALIGNED clause. */
3732
3733 static tree
3734 omp_clause_aligned_alignment (tree clause)
3735 {
3736 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3737 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3738
3739 /* Otherwise return the implementation-defined alignment. */
3740 unsigned int al = 1;
3741 opt_scalar_mode mode_iter;
3742 auto_vector_sizes sizes;
3743 targetm.vectorize.autovectorize_vector_sizes (&sizes, true);
3744 poly_uint64 vs = 0;
3745 for (unsigned int i = 0; i < sizes.length (); ++i)
3746 vs = ordered_max (vs, sizes[i]);
3747 static enum mode_class classes[]
3748 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3749 for (int i = 0; i < 4; i += 2)
3750 /* The for loop above dictates that we only walk through scalar classes. */
3751 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3752 {
3753 scalar_mode mode = mode_iter.require ();
3754 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3755 if (GET_MODE_CLASS (vmode) != classes[i + 1])
3756 continue;
3757 while (maybe_ne (vs, 0U)
3758 && known_lt (GET_MODE_SIZE (vmode), vs)
3759 && GET_MODE_2XWIDER_MODE (vmode).exists ())
3760 vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3761
3762 tree type = lang_hooks.types.type_for_mode (mode, 1);
3763 if (type == NULL_TREE || TYPE_MODE (type) != mode)
3764 continue;
3765 poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3766 GET_MODE_SIZE (mode));
3767 type = build_vector_type (type, nelts);
3768 if (TYPE_MODE (type) != vmode)
3769 continue;
3770 if (TYPE_ALIGN_UNIT (type) > al)
3771 al = TYPE_ALIGN_UNIT (type);
3772 }
3773 return build_int_cst (integer_type_node, al);
3774 }
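
/* E.g. without an explicit alignment argument, on a target whose widest
   preferred SIMD mode is a 32-byte vector, the loop above would likely
   settle on al == 32 (illustrative; the result is target-dependent).  */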
3775
3776
3777 /* This structure is part of the interface between lower_rec_simd_input_clauses
3778 and lower_rec_input_clauses. */
3779
3780 class omplow_simd_context {
3781 public:
3782 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3783 tree idx;
3784 tree lane;
3785 tree lastlane;
3786 vec<tree, va_heap> simt_eargs;
3787 gimple_seq simt_dlist;
3788 poly_uint64_pod max_vf;
3789 bool is_simt;
3790 };
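
/* Field usage, as seen below (summary): IDX and LANE are unsigned
   temporaries created once MAX_VF may exceed 1, LASTLANE is created
   lazily (e.g. for inscan reductions), SIMT_EARGS/SIMT_DLIST collect
   SIMT entry arguments and deferred clobber stmts, and MAX_VF == 1
   disables the SIMD privatization below.  */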
3791
3792 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3793 privatization. */
3794
3795 static bool
3796 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3797 omplow_simd_context *sctx, tree &ivar,
3798 tree &lvar, tree *rvar = NULL,
3799 tree *rvar2 = NULL)
3800 {
3801 if (known_eq (sctx->max_vf, 0U))
3802 {
3803 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3804 if (maybe_gt (sctx->max_vf, 1U))
3805 {
3806 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3807 OMP_CLAUSE_SAFELEN);
3808 if (c)
3809 {
3810 poly_uint64 safe_len;
3811 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3812 || maybe_lt (safe_len, 1U))
3813 sctx->max_vf = 1;
3814 else
3815 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3816 }
3817 }
3818 if (maybe_gt (sctx->max_vf, 1U))
3819 {
3820 sctx->idx = create_tmp_var (unsigned_type_node);
3821 sctx->lane = create_tmp_var (unsigned_type_node);
3822 }
3823 }
3824 if (known_eq (sctx->max_vf, 1U))
3825 return false;
3826
3827 if (sctx->is_simt)
3828 {
3829 if (is_gimple_reg (new_var))
3830 {
3831 ivar = lvar = new_var;
3832 return true;
3833 }
3834 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3835 ivar = lvar = create_tmp_var (type);
3836 TREE_ADDRESSABLE (ivar) = 1;
3837 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3838 NULL, DECL_ATTRIBUTES (ivar));
3839 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3840 tree clobber = build_constructor (type, NULL);
3841 TREE_THIS_VOLATILE (clobber) = 1;
3842 gimple *g = gimple_build_assign (ivar, clobber);
3843 gimple_seq_add_stmt (&sctx->simt_dlist, g);
3844 }
3845 else
3846 {
3847 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3848 tree avar = create_tmp_var_raw (atype);
3849 if (TREE_ADDRESSABLE (new_var))
3850 TREE_ADDRESSABLE (avar) = 1;
3851 DECL_ATTRIBUTES (avar)
3852 = tree_cons (get_identifier ("omp simd array"), NULL,
3853 DECL_ATTRIBUTES (avar));
3854 gimple_add_tmp_var (avar);
3855 tree iavar = avar;
3856 if (rvar && !ctx->for_simd_scan_phase)
3857 {
3858 /* For inscan reductions, create another array temporary,
3859 which will hold the reduced value. */
3860 iavar = create_tmp_var_raw (atype);
3861 if (TREE_ADDRESSABLE (new_var))
3862 TREE_ADDRESSABLE (iavar) = 1;
3863 DECL_ATTRIBUTES (iavar)
3864 = tree_cons (get_identifier ("omp simd array"), NULL,
3865 tree_cons (get_identifier ("omp simd inscan"), NULL,
3866 DECL_ATTRIBUTES (iavar)));
3867 gimple_add_tmp_var (iavar);
3868 ctx->cb.decl_map->put (avar, iavar);
3869 if (sctx->lastlane == NULL_TREE)
3870 sctx->lastlane = create_tmp_var (unsigned_type_node);
3871 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
3872 sctx->lastlane, NULL_TREE, NULL_TREE);
3873 TREE_THIS_NOTRAP (*rvar) = 1;
3874
3875 if (ctx->scan_exclusive)
3876 {
3877 /* And for exclusive scan yet another one, which will
3878 hold the value during the scan phase. */
3879 tree savar = create_tmp_var_raw (atype);
3880 if (TREE_ADDRESSABLE (new_var))
3881 TREE_ADDRESSABLE (savar) = 1;
3882 DECL_ATTRIBUTES (savar)
3883 = tree_cons (get_identifier ("omp simd array"), NULL,
3884 tree_cons (get_identifier ("omp simd inscan "
3885 "exclusive"), NULL,
3886 DECL_ATTRIBUTES (savar)));
3887 gimple_add_tmp_var (savar);
3888 ctx->cb.decl_map->put (iavar, savar);
3889 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
3890 sctx->idx, NULL_TREE, NULL_TREE);
3891 TREE_THIS_NOTRAP (*rvar2) = 1;
3892 }
3893 }
3894 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
3895 NULL_TREE, NULL_TREE);
3896 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3897 NULL_TREE, NULL_TREE);
3898 TREE_THIS_NOTRAP (ivar) = 1;
3899 TREE_THIS_NOTRAP (lvar) = 1;
3900 }
3901 if (DECL_P (new_var))
3902 {
3903 SET_DECL_VALUE_EXPR (new_var, lvar);
3904 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3905 }
3906 return true;
3907 }
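
/* The net effect above, on the non-SIMT path, is roughly the following
   (illustrative sketch):

     T D.simdarr[max_vf];             // gets the "omp simd array" attribute
     ivar = D.simdarr[sctx->idx];     // per-iteration element
     lvar = D.simdarr[sctx->lane];    // per-lane element
     // NEW_VAR's DECL_VALUE_EXPR becomes LVAR

   so each SIMD lane gets its own privatized slot.  */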
3908
3909 /* Helper function of lower_rec_input_clauses. For a reference used
3910 in a simd reduction, allocate an underlying variable for it to reference. */
3911
3912 static void
3913 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3914 {
3915 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3916 if (TREE_CONSTANT (z))
3917 {
3918 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3919 get_name (new_vard));
3920 gimple_add_tmp_var (z);
3921 TREE_ADDRESSABLE (z) = 1;
3922 z = build_fold_addr_expr_loc (loc, z);
3923 gimplify_assign (new_vard, z, ilist);
3924 }
3925 }
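
/* E.g. for a constant-size pointee this emits, roughly:

     T D.tmp;              // addressable temporary of the pointee type
     new_vard = &D.tmp;    // the reference now has private backing

   (illustrative sketch; nothing is emitted for variable-sized types).  */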
3926
3927 /* Helper function for lower_rec_input_clauses. Emit into the ILIST
3928 sequence code that computes (type) (tskred_temp[idx]) and return the result. */
3929
3930 static tree
3931 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3932 unsigned idx)
3933 {
3934 unsigned HOST_WIDE_INT sz
3935 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3936 tree r = build2 (MEM_REF, pointer_sized_int_node,
3937 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3938 idx * sz));
3939 tree v = create_tmp_var (pointer_sized_int_node);
3940 gimple *g = gimple_build_assign (v, r);
3941 gimple_seq_add_stmt (ilist, g);
3942 if (!useless_type_conversion_p (type, pointer_sized_int_node))
3943 {
3944 v = create_tmp_var (type);
3945 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3946 gimple_seq_add_stmt (ilist, g);
3947 }
3948 return v;
3949 }
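
/* E.g. task_reduction_read (ilist, tmp, sizetype, 1) appends, on a
   64-bit target, roughly:

     D.1 = MEM <__intptr_t> [tmp + 8];
     D.2 = (sizetype) D.1;    // only when a conversion is needed

   and returns the last temporary (illustrative sketch).  */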
3950
3951 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3952 from the receiver (aka child) side and initializers for REFERENCE_TYPE
3953 private variables. Initialization statements go in ILIST, while calls
3954 to destructors go in DLIST. */
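
/* E.g. for "#pragma omp parallel firstprivate (x)", ILIST receives the
   statements that copy-construct the private x from the original (via
   the omp_clause_copy_ctor langhook), and DLIST the matching destructor
   call when the type needs one (illustrative sketch).  */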
3955
3956 static void
3957 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3958 omp_context *ctx, struct omp_for_data *fd)
3959 {
3960 tree c, copyin_seq, x, ptr;
3961 bool copyin_by_ref = false;
3962 bool lastprivate_firstprivate = false;
3963 bool reduction_omp_orig_ref = false;
3964 int pass;
3965 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3966 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3967 omplow_simd_context sctx = omplow_simd_context ();
3968 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3969 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3970 gimple_seq llist[4] = { };
3971 tree nonconst_simd_if = NULL_TREE;
3972
3973 copyin_seq = NULL;
3974 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3975
3976 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3977 with data sharing clauses referencing variable sized vars. That
3978 is unnecessarily hard to support and very unlikely to result in
3979 vectorized code anyway. */
3980 if (is_simd)
3981 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3982 switch (OMP_CLAUSE_CODE (c))
3983 {
3984 case OMP_CLAUSE_LINEAR:
3985 if (OMP_CLAUSE_LINEAR_ARRAY (c))
3986 sctx.max_vf = 1;
3987 /* FALLTHRU */
3988 case OMP_CLAUSE_PRIVATE:
3989 case OMP_CLAUSE_FIRSTPRIVATE:
3990 case OMP_CLAUSE_LASTPRIVATE:
3991 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3992 sctx.max_vf = 1;
3993 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
3994 {
3995 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
3996 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
3997 sctx.max_vf = 1;
3998 }
3999 break;
4000 case OMP_CLAUSE_REDUCTION:
4001 case OMP_CLAUSE_IN_REDUCTION:
4002 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4003 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4004 sctx.max_vf = 1;
4005 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4006 {
4007 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4008 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4009 sctx.max_vf = 1;
4010 }
4011 break;
4012 case OMP_CLAUSE_IF:
4013 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4014 sctx.max_vf = 1;
4015 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4016 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4017 break;
4018 case OMP_CLAUSE_SIMDLEN:
4019 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4020 sctx.max_vf = 1;
4021 break;
4022 case OMP_CLAUSE__CONDTEMP_:
4023 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4024 if (sctx.is_simt)
4025 sctx.max_vf = 1;
4026 break;
4027 default:
4028 continue;
4029 }
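
/* E.g. a simd loop with linear (p[0:n]) on an array section, an if (0)
   clause, or simdlen (1) ends up with sctx.max_vf == 1 here, which
   disables the SIMD-array privatization below (illustrative).  */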
4030
4031 /* Add a placeholder for simduid. */
4032 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4033 sctx.simt_eargs.safe_push (NULL_TREE);
4034
4035 unsigned task_reduction_cnt = 0;
4036 unsigned task_reduction_cntorig = 0;
4037 unsigned task_reduction_cnt_full = 0;
4038 unsigned task_reduction_cntorig_full = 0;
4039 unsigned task_reduction_other_cnt = 0;
4040 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4041 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4042 /* Do all the fixed sized types in the first pass, and the variable sized
4043 types in the second pass. This makes sure that the scalar arguments to
4044 the variable sized types are processed before we use them in the
4045 variable sized operations. For task reductions we use 4 passes: in the
4046 first two we ignore them, in the third we gather arguments for the
4047 GOMP_task_reduction_remap call, and in the last pass we actually handle
4048 the task reductions. */
4049 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4050 ? 4 : 2); ++pass)
4051 {
4052 if (pass == 2 && task_reduction_cnt)
4053 {
4054 tskred_atype
4055 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4056 + task_reduction_cntorig);
4057 tskred_avar = create_tmp_var_raw (tskred_atype);
4058 gimple_add_tmp_var (tskred_avar);
4059 TREE_ADDRESSABLE (tskred_avar) = 1;
4060 task_reduction_cnt_full = task_reduction_cnt;
4061 task_reduction_cntorig_full = task_reduction_cntorig;
4062 }
4063 else if (pass == 3 && task_reduction_cnt)
4064 {
4065 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4066 gimple *g
4067 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4068 size_int (task_reduction_cntorig),
4069 build_fold_addr_expr (tskred_avar));
4070 gimple_seq_add_stmt (ilist, g);
4071 }
4072 if (pass == 3 && task_reduction_other_cnt)
4073 {
4074 /* For reduction clauses, build
4075 tskred_base = (void *) tskred_temp[2]
4076 + omp_get_thread_num () * tskred_temp[1]
4077 or, if tskred_temp[1] is known to be constant, use that constant
4078 directly. This is the start of the private reduction copy block
4079 for the current thread. */
4080 tree v = create_tmp_var (integer_type_node);
4081 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4082 gimple *g = gimple_build_call (x, 0);
4083 gimple_call_set_lhs (g, v);
4084 gimple_seq_add_stmt (ilist, g);
4085 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4086 tskred_temp = OMP_CLAUSE_DECL (c);
4087 if (is_taskreg_ctx (ctx))
4088 tskred_temp = lookup_decl (tskred_temp, ctx);
4089 tree v2 = create_tmp_var (sizetype);
4090 g = gimple_build_assign (v2, NOP_EXPR, v);
4091 gimple_seq_add_stmt (ilist, g);
4092 if (ctx->task_reductions[0])
4093 v = fold_convert (sizetype, ctx->task_reductions[0]);
4094 else
4095 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4096 tree v3 = create_tmp_var (sizetype);
4097 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4098 gimple_seq_add_stmt (ilist, g);
4099 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4100 tskred_base = create_tmp_var (ptr_type_node);
4101 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4102 gimple_seq_add_stmt (ilist, g);
4103 }
4104 task_reduction_cnt = 0;
4105 task_reduction_cntorig = 0;
4106 task_reduction_other_cnt = 0;
4107 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4108 {
4109 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4110 tree var, new_var;
4111 bool by_ref;
4112 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4113 bool task_reduction_p = false;
4114 bool task_reduction_needs_orig_p = false;
4115 tree cond = NULL_TREE;
4116
4117 switch (c_kind)
4118 {
4119 case OMP_CLAUSE_PRIVATE:
4120 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4121 continue;
4122 break;
4123 case OMP_CLAUSE_SHARED:
4124 /* Ignore shared directives in teams construct inside
4125 of target construct. */
4126 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4127 && !is_host_teams_ctx (ctx))
4128 continue;
4129 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4130 {
4131 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4132 || is_global_var (OMP_CLAUSE_DECL (c)));
4133 continue;
4134 }
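/* FALLTHRU */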
4135 case OMP_CLAUSE_FIRSTPRIVATE:
4136 case OMP_CLAUSE_COPYIN:
4137 break;
4138 case OMP_CLAUSE_LINEAR:
4139 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4140 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4141 lastprivate_firstprivate = true;
4142 break;
4143 case OMP_CLAUSE_REDUCTION:
4144 case OMP_CLAUSE_IN_REDUCTION:
4145 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4146 {
4147 task_reduction_p = true;
4148 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4149 {
4150 task_reduction_other_cnt++;
4151 if (pass == 2)
4152 continue;
4153 }
4154 else
4155 task_reduction_cnt++;
4156 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4157 {
4158 var = OMP_CLAUSE_DECL (c);
4159 /* If var is a global variable that isn't privatized
4160 in outer contexts, we don't need to look up the
4161 original address, it is always the address of the
4162 global variable itself. */
4163 if (!DECL_P (var)
4164 || omp_is_reference (var)
4165 || !is_global_var
4166 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4167 {
4168 task_reduction_needs_orig_p = true;
4169 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4170 task_reduction_cntorig++;
4171 }
4172 }
4173 }
4174 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4175 reduction_omp_orig_ref = true;
4176 break;
4177 case OMP_CLAUSE__REDUCTEMP_:
4178 if (!is_taskreg_ctx (ctx))
4179 continue;
4180 /* FALLTHRU */
4181 case OMP_CLAUSE__LOOPTEMP_:
4182 /* Handle _looptemp_/_reductemp_ clauses only on
4183 parallel/task. */
4184 if (fd)
4185 continue;
4186 break;
4187 case OMP_CLAUSE_LASTPRIVATE:
4188 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4189 {
4190 lastprivate_firstprivate = true;
4191 if (pass != 0 || is_taskloop_ctx (ctx))
4192 continue;
4193 }
4194 /* Even without corresponding firstprivate, if
4195 decl is Fortran allocatable, it needs outer var
4196 reference. */
4197 else if (pass == 0
4198 && lang_hooks.decls.omp_private_outer_ref
4199 (OMP_CLAUSE_DECL (c)))
4200 lastprivate_firstprivate = true;
4201 break;
4202 case OMP_CLAUSE_ALIGNED:
4203 if (pass != 1)
4204 continue;
4205 var = OMP_CLAUSE_DECL (c);
4206 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4207 && !is_global_var (var))
4208 {
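/* Emits, roughly: new_var = (T *) __builtin_assume_aligned (new_var, al);
   (illustrative).  */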
4209 new_var = maybe_lookup_decl (var, ctx);
4210 if (new_var == NULL_TREE)
4211 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4212 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4213 tree alarg = omp_clause_aligned_alignment (c);
4214 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4215 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4216 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4217 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4218 gimplify_and_add (x, ilist);
4219 }
4220 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4221 && is_global_var (var))
4222 {
4223 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4224 new_var = lookup_decl (var, ctx);
4225 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4226 t = build_fold_addr_expr_loc (clause_loc, t);
4227 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4228 tree alarg = omp_clause_aligned_alignment (c);
4229 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4230 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4231 t = fold_convert_loc (clause_loc, ptype, t);
4232 x = create_tmp_var (ptype);
4233 t = build2 (MODIFY_EXPR, ptype, x, t);
4234 gimplify_and_add (t, ilist);
4235 t = build_simple_mem_ref_loc (clause_loc, x);
4236 SET_DECL_VALUE_EXPR (new_var, t);
4237 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4238 }
4239 continue;
4240 case OMP_CLAUSE__CONDTEMP_:
4241 if (is_parallel_ctx (ctx)
4242 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4243 break;
4244 continue;
4245 default:
4246 continue;
4247 }
4248
4249 if (task_reduction_p != (pass >= 2))
4250 continue;
4251
4252 new_var = var = OMP_CLAUSE_DECL (c);
4253 if ((c_kind == OMP_CLAUSE_REDUCTION
4254 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4255 && TREE_CODE (var) == MEM_REF)
4256 {
4257 var = TREE_OPERAND (var, 0);
4258 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4259 var = TREE_OPERAND (var, 0);
4260 if (TREE_CODE (var) == INDIRECT_REF
4261 || TREE_CODE (var) == ADDR_EXPR)
4262 var = TREE_OPERAND (var, 0);
4263 if (is_variable_sized (var))
4264 {
4265 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4266 var = DECL_VALUE_EXPR (var);
4267 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4268 var = TREE_OPERAND (var, 0);
4269 gcc_assert (DECL_P (var));
4270 }
4271 new_var = var;
4272 }
4273 if (c_kind != OMP_CLAUSE_COPYIN)
4274 new_var = lookup_decl (var, ctx);
4275
4276 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4277 {
4278 if (pass != 0)
4279 continue;
4280 }
4281 /* C/C++ array section reductions. */
4282 else if ((c_kind == OMP_CLAUSE_REDUCTION
4283 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4284 && var != OMP_CLAUSE_DECL (c))
4285 {
4286 if (pass == 0)
4287 continue;
4288
4289 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4290 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4291
4292 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4293 {
4294 tree b = TREE_OPERAND (orig_var, 1);
4295 b = maybe_lookup_decl (b, ctx);
4296 if (b == NULL)
4297 {
4298 b = TREE_OPERAND (orig_var, 1);
4299 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4300 }
4301 if (integer_zerop (bias))
4302 bias = b;
4303 else
4304 {
4305 bias = fold_convert_loc (clause_loc,
4306 TREE_TYPE (b), bias);
4307 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4308 TREE_TYPE (b), b, bias);
4309 }
4310 orig_var = TREE_OPERAND (orig_var, 0);
4311 }
4312 if (pass == 2)
4313 {
4314 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4315 if (is_global_var (out)
4316 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4317 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4318 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4319 != POINTER_TYPE)))
4320 x = var;
4321 else
4322 {
4323 bool by_ref = use_pointer_for_field (var, NULL);
4324 x = build_receiver_ref (var, by_ref, ctx);
4325 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4326 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4327 == POINTER_TYPE))
4328 x = build_fold_addr_expr (x);
4329 }
4330 if (TREE_CODE (orig_var) == INDIRECT_REF)
4331 x = build_simple_mem_ref (x);
4332 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4333 {
4334 if (var == TREE_OPERAND (orig_var, 0))
4335 x = build_fold_addr_expr (x);
4336 }
4337 bias = fold_convert (sizetype, bias);
4338 x = fold_convert (ptr_type_node, x);
4339 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4340 TREE_TYPE (x), x, bias);
4341 unsigned cnt = task_reduction_cnt - 1;
4342 if (!task_reduction_needs_orig_p)
4343 cnt += (task_reduction_cntorig_full
4344 - task_reduction_cntorig);
4345 else
4346 cnt = task_reduction_cntorig - 1;
4347 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4348 size_int (cnt), NULL_TREE, NULL_TREE);
4349 gimplify_assign (r, x, ilist);
4350 continue;
4351 }
4352
4353 if (TREE_CODE (orig_var) == INDIRECT_REF
4354 || TREE_CODE (orig_var) == ADDR_EXPR)
4355 orig_var = TREE_OPERAND (orig_var, 0);
4356 tree d = OMP_CLAUSE_DECL (c);
4357 tree type = TREE_TYPE (d);
4358 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4359 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4360 const char *name = get_name (orig_var);
4361 if (pass == 3)
4362 {
4363 tree xv = create_tmp_var (ptr_type_node);
4364 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4365 {
4366 unsigned cnt = task_reduction_cnt - 1;
4367 if (!task_reduction_needs_orig_p)
4368 cnt += (task_reduction_cntorig_full
4369 - task_reduction_cntorig);
4370 else
4371 cnt = task_reduction_cntorig - 1;
4372 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4373 size_int (cnt), NULL_TREE, NULL_TREE);
4374
4375 gimple *g = gimple_build_assign (xv, x);
4376 gimple_seq_add_stmt (ilist, g);
4377 }
4378 else
4379 {
4380 unsigned int idx = *ctx->task_reduction_map->get (c);
4381 tree off;
4382 if (ctx->task_reductions[1 + idx])
4383 off = fold_convert (sizetype,
4384 ctx->task_reductions[1 + idx]);
4385 else
4386 off = task_reduction_read (ilist, tskred_temp, sizetype,
4387 7 + 3 * idx + 1);
4388 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4389 tskred_base, off);
4390 gimple_seq_add_stmt (ilist, g);
4391 }
4392 x = fold_convert (build_pointer_type (boolean_type_node),
4393 xv);
4394 if (TREE_CONSTANT (v))
4395 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4396 TYPE_SIZE_UNIT (type));
4397 else
4398 {
4399 tree t = maybe_lookup_decl (v, ctx);
4400 if (t)
4401 v = t;
4402 else
4403 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4404 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4405 fb_rvalue);
4406 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4407 TREE_TYPE (v), v,
4408 build_int_cst (TREE_TYPE (v), 1));
4409 t = fold_build2_loc (clause_loc, MULT_EXPR,
4410 TREE_TYPE (v), t,
4411 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4412 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4413 }
4414 cond = create_tmp_var (TREE_TYPE (x));
4415 gimplify_assign (cond, x, ilist);
4416 x = xv;
4417 }
4418 else if (TREE_CONSTANT (v))
4419 {
4420 x = create_tmp_var_raw (type, name);
4421 gimple_add_tmp_var (x);
4422 TREE_ADDRESSABLE (x) = 1;
4423 x = build_fold_addr_expr_loc (clause_loc, x);
4424 }
4425 else
4426 {
4427 tree atmp
4428 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4429 tree t = maybe_lookup_decl (v, ctx);
4430 if (t)
4431 v = t;
4432 else
4433 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4434 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4435 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4436 TREE_TYPE (v), v,
4437 build_int_cst (TREE_TYPE (v), 1));
4438 t = fold_build2_loc (clause_loc, MULT_EXPR,
4439 TREE_TYPE (v), t,
4440 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4441 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4442 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4443 }
4444
4445 tree ptype = build_pointer_type (TREE_TYPE (type));
4446 x = fold_convert_loc (clause_loc, ptype, x);
4447 tree y = create_tmp_var (ptype, name);
4448 gimplify_assign (y, x, ilist);
4449 x = y;
4450 tree yb = y;
4451
4452 if (!integer_zerop (bias))
4453 {
4454 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4455 bias);
4456 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4457 x);
4458 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4459 pointer_sized_int_node, yb, bias);
4460 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4461 yb = create_tmp_var (ptype, name);
4462 gimplify_assign (yb, x, ilist);
4463 x = yb;
4464 }
4465
4466 d = TREE_OPERAND (d, 0);
4467 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4468 d = TREE_OPERAND (d, 0);
4469 if (TREE_CODE (d) == ADDR_EXPR)
4470 {
4471 if (orig_var != var)
4472 {
4473 gcc_assert (is_variable_sized (orig_var));
4474 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4475 x);
4476 gimplify_assign (new_var, x, ilist);
4477 tree new_orig_var = lookup_decl (orig_var, ctx);
4478 tree t = build_fold_indirect_ref (new_var);
4479 DECL_IGNORED_P (new_var) = 0;
4480 TREE_THIS_NOTRAP (t) = 1;
4481 SET_DECL_VALUE_EXPR (new_orig_var, t);
4482 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4483 }
4484 else
4485 {
4486 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4487 build_int_cst (ptype, 0));
4488 SET_DECL_VALUE_EXPR (new_var, x);
4489 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4490 }
4491 }
4492 else
4493 {
4494 gcc_assert (orig_var == var);
4495 if (TREE_CODE (d) == INDIRECT_REF)
4496 {
4497 x = create_tmp_var (ptype, name);
4498 TREE_ADDRESSABLE (x) = 1;
4499 gimplify_assign (x, yb, ilist);
4500 x = build_fold_addr_expr_loc (clause_loc, x);
4501 }
4502 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4503 gimplify_assign (new_var, x, ilist);
4504 }
4505 /* GOMP_taskgroup_reduction_register memsets the whole
4506 array to zero. If the initializer is zero, we don't
4507 need to initialize it again; just mark it as ever
4508 used unconditionally, i.e. set cond = true. */
4509 if (cond
4510 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4511 && initializer_zerop (omp_reduction_init (c,
4512 TREE_TYPE (type))))
4513 {
4514 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4515 boolean_true_node);
4516 gimple_seq_add_stmt (ilist, g);
4517 continue;
4518 }
4519 tree end = create_artificial_label (UNKNOWN_LOCATION);
4520 if (cond)
4521 {
4522 gimple *g;
4523 if (!is_parallel_ctx (ctx))
4524 {
4525 tree condv = create_tmp_var (boolean_type_node);
4526 g = gimple_build_assign (condv,
4527 build_simple_mem_ref (cond));
4528 gimple_seq_add_stmt (ilist, g);
4529 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4530 g = gimple_build_cond (NE_EXPR, condv,
4531 boolean_false_node, end, lab1);
4532 gimple_seq_add_stmt (ilist, g);
4533 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4534 }
4535 g = gimple_build_assign (build_simple_mem_ref (cond),
4536 boolean_true_node);
4537 gimple_seq_add_stmt (ilist, g);
4538 }
4539
4540 tree y1 = create_tmp_var (ptype);
4541 gimplify_assign (y1, y, ilist);
4542 tree i2 = NULL_TREE, y2 = NULL_TREE;
4543 tree body2 = NULL_TREE, end2 = NULL_TREE;
4544 tree y3 = NULL_TREE, y4 = NULL_TREE;
4545 if (task_reduction_needs_orig_p)
4546 {
4547 y3 = create_tmp_var (ptype);
4548 tree ref;
4549 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4550 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4551 size_int (task_reduction_cnt_full
4552 + task_reduction_cntorig - 1),
4553 NULL_TREE, NULL_TREE);
4554 else
4555 {
4556 unsigned int idx = *ctx->task_reduction_map->get (c);
4557 ref = task_reduction_read (ilist, tskred_temp, ptype,
4558 7 + 3 * idx);
4559 }
4560 gimplify_assign (y3, ref, ilist);
4561 }
4562 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4563 {
4564 if (pass != 3)
4565 {
4566 y2 = create_tmp_var (ptype);
4567 gimplify_assign (y2, y, ilist);
4568 }
4569 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4570 {
4571 tree ref = build_outer_var_ref (var, ctx);
4572 /* For references, build_outer_var_ref already performs this. */
4573 if (TREE_CODE (d) == INDIRECT_REF)
4574 gcc_assert (omp_is_reference (var));
4575 else if (TREE_CODE (d) == ADDR_EXPR)
4576 ref = build_fold_addr_expr (ref);
4577 else if (omp_is_reference (var))
4578 ref = build_fold_addr_expr (ref);
4579 ref = fold_convert_loc (clause_loc, ptype, ref);
4580 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4581 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4582 {
4583 y3 = create_tmp_var (ptype);
4584 gimplify_assign (y3, unshare_expr (ref), ilist);
4585 }
4586 if (is_simd)
4587 {
4588 y4 = create_tmp_var (ptype);
4589 gimplify_assign (y4, ref, dlist);
4590 }
4591 }
4592 }
4593 tree i = create_tmp_var (TREE_TYPE (v));
4594 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4595 tree body = create_artificial_label (UNKNOWN_LOCATION);
4596 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4597 if (y2)
4598 {
4599 i2 = create_tmp_var (TREE_TYPE (v));
4600 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4601 body2 = create_artificial_label (UNKNOWN_LOCATION);
4602 end2 = create_artificial_label (UNKNOWN_LOCATION);
4603 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4604 }
4605 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4606 {
4607 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4608 tree decl_placeholder
4609 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4610 SET_DECL_VALUE_EXPR (decl_placeholder,
4611 build_simple_mem_ref (y1));
4612 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4613 SET_DECL_VALUE_EXPR (placeholder,
4614 y3 ? build_simple_mem_ref (y3)
4615 : error_mark_node);
4616 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4617 x = lang_hooks.decls.omp_clause_default_ctor
4618 (c, build_simple_mem_ref (y1),
4619 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4620 if (x)
4621 gimplify_and_add (x, ilist);
4622 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4623 {
4624 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4625 lower_omp (&tseq, ctx);
4626 gimple_seq_add_seq (ilist, tseq);
4627 }
4628 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4629 if (is_simd)
4630 {
4631 SET_DECL_VALUE_EXPR (decl_placeholder,
4632 build_simple_mem_ref (y2));
4633 SET_DECL_VALUE_EXPR (placeholder,
4634 build_simple_mem_ref (y4));
4635 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4636 lower_omp (&tseq, ctx);
4637 gimple_seq_add_seq (dlist, tseq);
4638 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4639 }
4640 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4641 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4642 if (y2)
4643 {
4644 x = lang_hooks.decls.omp_clause_dtor
4645 (c, build_simple_mem_ref (y2));
4646 if (x)
4647 gimplify_and_add (x, dlist);
4648 }
4649 }
4650 else
4651 {
4652 x = omp_reduction_init (c, TREE_TYPE (type));
4653 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4654
4655 /* reduction(-:var) sums up the partial results, so it
4656 acts identically to reduction(+:var). */
4657 if (code == MINUS_EXPR)
4658 code = PLUS_EXPR;
4659
4660 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4661 if (is_simd)
4662 {
4663 x = build2 (code, TREE_TYPE (type),
4664 build_simple_mem_ref (y4),
4665 build_simple_mem_ref (y2));
4666 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4667 }
4668 }
4669 gimple *g
4670 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4671 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4672 gimple_seq_add_stmt (ilist, g);
4673 if (y3)
4674 {
4675 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4676 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4677 gimple_seq_add_stmt (ilist, g);
4678 }
4679 g = gimple_build_assign (i, PLUS_EXPR, i,
4680 build_int_cst (TREE_TYPE (i), 1));
4681 gimple_seq_add_stmt (ilist, g);
4682 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4683 gimple_seq_add_stmt (ilist, g);
4684 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4685 if (y2)
4686 {
4687 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4688 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4689 gimple_seq_add_stmt (dlist, g);
4690 if (y4)
4691 {
4692 g = gimple_build_assign
4693 (y4, POINTER_PLUS_EXPR, y4,
4694 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4695 gimple_seq_add_stmt (dlist, g);
4696 }
4697 g = gimple_build_assign (i2, PLUS_EXPR, i2,
4698 build_int_cst (TREE_TYPE (i2), 1));
4699 gimple_seq_add_stmt (dlist, g);
4700 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4701 gimple_seq_add_stmt (dlist, g);
4702 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4703 }
4704 continue;
4705 }
4706 else if (pass == 2)
4707 {
4708 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4709 x = var;
4710 else
4711 {
4712 bool by_ref = use_pointer_for_field (var, ctx);
4713 x = build_receiver_ref (var, by_ref, ctx);
4714 }
4715 if (!omp_is_reference (var))
4716 x = build_fold_addr_expr (x);
4717 x = fold_convert (ptr_type_node, x);
4718 unsigned cnt = task_reduction_cnt - 1;
4719 if (!task_reduction_needs_orig_p)
4720 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4721 else
4722 cnt = task_reduction_cntorig - 1;
4723 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4724 size_int (cnt), NULL_TREE, NULL_TREE);
4725 gimplify_assign (r, x, ilist);
4726 continue;
4727 }
4728 else if (pass == 3)
4729 {
4730 tree type = TREE_TYPE (new_var);
4731 if (!omp_is_reference (var))
4732 type = build_pointer_type (type);
4733 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4734 {
4735 unsigned cnt = task_reduction_cnt - 1;
4736 if (!task_reduction_needs_orig_p)
4737 cnt += (task_reduction_cntorig_full
4738 - task_reduction_cntorig);
4739 else
4740 cnt = task_reduction_cntorig - 1;
4741 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4742 size_int (cnt), NULL_TREE, NULL_TREE);
4743 }
4744 else
4745 {
4746 unsigned int idx = *ctx->task_reduction_map->get (c);
4747 tree off;
4748 if (ctx->task_reductions[1 + idx])
4749 off = fold_convert (sizetype,
4750 ctx->task_reductions[1 + idx]);
4751 else
4752 off = task_reduction_read (ilist, tskred_temp, sizetype,
4753 7 + 3 * idx + 1);
4754 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4755 tskred_base, off);
4756 }
4757 x = fold_convert (type, x);
4758 tree t;
4759 if (omp_is_reference (var))
4760 {
4761 gimplify_assign (new_var, x, ilist);
4762 t = new_var;
4763 new_var = build_simple_mem_ref (new_var);
4764 }
4765 else
4766 {
4767 t = create_tmp_var (type);
4768 gimplify_assign (t, x, ilist);
4769 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4770 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4771 }
4772 t = fold_convert (build_pointer_type (boolean_type_node), t);
4773 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4774 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4775 cond = create_tmp_var (TREE_TYPE (t));
4776 gimplify_assign (cond, t, ilist);
4777 }
4778 else if (is_variable_sized (var))
4779 {
4780 /* For variable sized types, we need to allocate the
4781 actual storage here. Call alloca and store the
4782 result in the pointer decl that we created elsewhere. */
4783 if (pass == 0)
4784 continue;
4785
4786 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4787 {
4788 gcall *stmt;
4789 tree tmp, atmp;
4790
4791 ptr = DECL_VALUE_EXPR (new_var);
4792 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4793 ptr = TREE_OPERAND (ptr, 0);
4794 gcc_assert (DECL_P (ptr));
4795 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4796
4797 /* void *tmp = __builtin_alloca_with_align (size, align) */
4798 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4799 stmt = gimple_build_call (atmp, 2, x,
4800 size_int (DECL_ALIGN (var)));
4801 tmp = create_tmp_var_raw (ptr_type_node);
4802 gimple_add_tmp_var (tmp);
4803 gimple_call_set_lhs (stmt, tmp);
4804
4805 gimple_seq_add_stmt (ilist, stmt);
4806
4807 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4808 gimplify_assign (ptr, x, ilist);
4809 }
4810 }
4811 else if (omp_is_reference (var)
4812 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4813 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
4814 {
4815 /* For references that are being privatized for Fortran,
4816 allocate new backing storage for the new pointer
4817 variable. This allows us to avoid rewriting all the
4818 code that expects a pointer into code that expects
4819 a direct variable. */
4820 if (pass == 0)
4821 continue;
4822
4823 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4824 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4825 {
4826 x = build_receiver_ref (var, false, ctx);
4827 x = build_fold_addr_expr_loc (clause_loc, x);
4828 }
4829 else if (TREE_CONSTANT (x))
4830 {
4831 /* For reduction in SIMD loop, defer adding the
4832 initialization of the reference, because if we decide
4833 to use a SIMD array for it, the initialization could cause
4834 expansion ICE. Ditto for other privatization clauses. */
4835 if (is_simd)
4836 x = NULL_TREE;
4837 else
4838 {
4839 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4840 get_name (var));
4841 gimple_add_tmp_var (x);
4842 TREE_ADDRESSABLE (x) = 1;
4843 x = build_fold_addr_expr_loc (clause_loc, x);
4844 }
4845 }
4846 else
4847 {
4848 tree atmp
4849 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4850 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4851 tree al = size_int (TYPE_ALIGN (rtype));
4852 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4853 }
4854
4855 if (x)
4856 {
4857 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4858 gimplify_assign (new_var, x, ilist);
4859 }
4860
4861 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4862 }
4863 else if ((c_kind == OMP_CLAUSE_REDUCTION
4864 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4865 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4866 {
4867 if (pass == 0)
4868 continue;
4869 }
4870 else if (pass != 0)
4871 continue;
4872
4873 switch (OMP_CLAUSE_CODE (c))
4874 {
4875 case OMP_CLAUSE_SHARED:
4876 /* Ignore shared directives in teams construct inside
4877 target construct. */
4878 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4879 && !is_host_teams_ctx (ctx))
4880 continue;
4881 /* Shared global vars are just accessed directly. */
4882 if (is_global_var (new_var))
4883 break;
4884 /* For taskloop firstprivate/lastprivate, represented
4885 as firstprivate and shared clause on the task, new_var
4886 is the firstprivate var. */
4887 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4888 break;
4889 /* Set up the DECL_VALUE_EXPR for shared variables now. This
4890 needs to be delayed until after fixup_child_record_type so
4891 that we get the correct type during the dereference. */
4892 by_ref = use_pointer_for_field (var, ctx);
4893 x = build_receiver_ref (var, by_ref, ctx);
4894 SET_DECL_VALUE_EXPR (new_var, x);
4895 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4896
4897 /* ??? If VAR is not passed by reference, and the variable
4898 hasn't been initialized yet, then we'll get a warning for
4899 the store into the omp_data_s structure. Ideally, we'd be
4900 able to notice this and not store anything at all, but
4901 we're generating code too early. Suppress the warning. */
4902 if (!by_ref)
4903 TREE_NO_WARNING (var) = 1;
4904 break;
4905
4906 case OMP_CLAUSE__CONDTEMP_:
4907 if (is_parallel_ctx (ctx))
4908 {
4909 x = build_receiver_ref (var, false, ctx);
4910 SET_DECL_VALUE_EXPR (new_var, x);
4911 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4912 }
4913 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
4914 {
4915 x = build_zero_cst (TREE_TYPE (var));
4916 goto do_private;
4917 }
4918 break;
4919
4920 case OMP_CLAUSE_LASTPRIVATE:
4921 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4922 break;
4923 /* FALLTHRU */
4924
4925 case OMP_CLAUSE_PRIVATE:
4926 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4927 x = build_outer_var_ref (var, ctx);
4928 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4929 {
4930 if (is_task_ctx (ctx))
4931 x = build_receiver_ref (var, false, ctx);
4932 else
4933 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4934 }
4935 else
4936 x = NULL;
4937 do_private:
4938 tree nx;
4939 nx = lang_hooks.decls.omp_clause_default_ctor
4940 (c, unshare_expr (new_var), x);
4941 if (is_simd)
4942 {
4943 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4944 if ((TREE_ADDRESSABLE (new_var) || nx || y
4945 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4946 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
4947 || omp_is_reference (var))
4948 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4949 ivar, lvar))
4950 {
4951 if (omp_is_reference (var))
4952 {
4953 gcc_assert (TREE_CODE (new_var) == MEM_REF);
4954 tree new_vard = TREE_OPERAND (new_var, 0);
4955 gcc_assert (DECL_P (new_vard));
4956 SET_DECL_VALUE_EXPR (new_vard,
4957 build_fold_addr_expr (lvar));
4958 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4959 }
4960
4961 if (nx)
4962 x = lang_hooks.decls.omp_clause_default_ctor
4963 (c, unshare_expr (ivar), x);
4964 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
4965 {
4966 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
4967 unshare_expr (ivar), x);
4968 nx = x;
4969 }
4970 if (nx && x)
4971 gimplify_and_add (x, &llist[0]);
4972 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4973 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
4974 {
4975 tree v = new_var;
4976 if (!DECL_P (v))
4977 {
4978 gcc_assert (TREE_CODE (v) == MEM_REF);
4979 v = TREE_OPERAND (v, 0);
4980 gcc_assert (DECL_P (v));
4981 }
4982 v = *ctx->lastprivate_conditional_map->get (v);
4983 tree t = create_tmp_var (TREE_TYPE (v));
4984 tree z = build_zero_cst (TREE_TYPE (v));
4985 tree orig_v
4986 = build_outer_var_ref (var, ctx,
4987 OMP_CLAUSE_LASTPRIVATE);
4988 gimple_seq_add_stmt (dlist,
4989 gimple_build_assign (t, z));
4990 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
4991 tree civar = DECL_VALUE_EXPR (v);
4992 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
4993 civar = unshare_expr (civar);
4994 TREE_OPERAND (civar, 1) = sctx.idx;
4995 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
4996 unshare_expr (civar));
4997 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
4998 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
4999 orig_v, unshare_expr (ivar)));
5000 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5001 civar);
5002 x = build3 (COND_EXPR, void_type_node, cond, x,
5003 void_node);
5004 gimple_seq tseq = NULL;
5005 gimplify_and_add (x, &tseq);
5006 if (ctx->outer)
5007 lower_omp (&tseq, ctx->outer);
5008 gimple_seq_add_seq (&llist[1], tseq);
5009 }
5010 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5011 && ctx->for_simd_scan_phase)
5012 {
5013 x = unshare_expr (ivar);
5014 tree orig_v
5015 = build_outer_var_ref (var, ctx,
5016 OMP_CLAUSE_LASTPRIVATE);
5017 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5018 orig_v);
5019 gimplify_and_add (x, &llist[0]);
5020 }
5021 if (y)
5022 {
5023 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5024 if (y)
5025 gimplify_and_add (y, &llist[1]);
5026 }
5027 break;
5028 }
5029 if (omp_is_reference (var))
5030 {
5031 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5032 tree new_vard = TREE_OPERAND (new_var, 0);
5033 gcc_assert (DECL_P (new_vard));
5034 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5035 x = TYPE_SIZE_UNIT (type);
5036 if (TREE_CONSTANT (x))
5037 {
5038 x = create_tmp_var_raw (type, get_name (var));
5039 gimple_add_tmp_var (x);
5040 TREE_ADDRESSABLE (x) = 1;
5041 x = build_fold_addr_expr_loc (clause_loc, x);
5042 x = fold_convert_loc (clause_loc,
5043 TREE_TYPE (new_vard), x);
5044 gimplify_assign (new_vard, x, ilist);
5045 }
5046 }
5047 }
5048 if (nx)
5049 gimplify_and_add (nx, ilist);
5050 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5051 && is_simd
5052 && ctx->for_simd_scan_phase)
5053 {
5054 tree orig_v = build_outer_var_ref (var, ctx,
5055 OMP_CLAUSE_LASTPRIVATE);
5056 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5057 orig_v);
5058 gimplify_and_add (x, ilist);
5059 }
5060 /* FALLTHRU */
5061
5062 do_dtor:
5063 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5064 if (x)
5065 gimplify_and_add (x, dlist);
5066 break;
5067
5068 case OMP_CLAUSE_LINEAR:
5069 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5070 goto do_firstprivate;
5071 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5072 x = NULL;
5073 else
5074 x = build_outer_var_ref (var, ctx);
5075 goto do_private;
5076
5077 case OMP_CLAUSE_FIRSTPRIVATE:
5078 if (is_task_ctx (ctx))
5079 {
5080 if ((omp_is_reference (var)
5081 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5082 || is_variable_sized (var))
5083 goto do_dtor;
5084 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5085 ctx))
5086 || use_pointer_for_field (var, NULL))
5087 {
5088 x = build_receiver_ref (var, false, ctx);
5089 SET_DECL_VALUE_EXPR (new_var, x);
5090 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5091 goto do_dtor;
5092 }
5093 }
5094 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5095 && omp_is_reference (var))
5096 {
5097 x = build_outer_var_ref (var, ctx);
5098 gcc_assert (TREE_CODE (x) == MEM_REF
5099 && integer_zerop (TREE_OPERAND (x, 1)));
5100 x = TREE_OPERAND (x, 0);
5101 x = lang_hooks.decls.omp_clause_copy_ctor
5102 (c, unshare_expr (new_var), x);
5103 gimplify_and_add (x, ilist);
5104 goto do_dtor;
5105 }
5106 do_firstprivate:
5107 x = build_outer_var_ref (var, ctx);
5108 if (is_simd)
5109 {
5110 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5111 && gimple_omp_for_combined_into_p (ctx->stmt))
5112 {
5113 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5114 tree stept = TREE_TYPE (t);
5115 tree ct = omp_find_clause (clauses,
5116 OMP_CLAUSE__LOOPTEMP_);
5117 gcc_assert (ct);
5118 tree l = OMP_CLAUSE_DECL (ct);
5119 tree n1 = fd->loop.n1;
5120 tree step = fd->loop.step;
5121 tree itype = TREE_TYPE (l);
5122 if (POINTER_TYPE_P (itype))
5123 itype = signed_type_for (itype);
5124 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5125 if (TYPE_UNSIGNED (itype)
5126 && fd->loop.cond_code == GT_EXPR)
5127 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5128 fold_build1 (NEGATE_EXPR, itype, l),
5129 fold_build1 (NEGATE_EXPR,
5130 itype, step));
5131 else
5132 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5133 t = fold_build2 (MULT_EXPR, stept,
5134 fold_convert (stept, l), t);
5135
5136 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5137 {
5138 if (omp_is_reference (var))
5139 {
5140 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5141 tree new_vard = TREE_OPERAND (new_var, 0);
5142 gcc_assert (DECL_P (new_vard));
5143 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5144 nx = TYPE_SIZE_UNIT (type);
5145 if (TREE_CONSTANT (nx))
5146 {
5147 nx = create_tmp_var_raw (type,
5148 get_name (var));
5149 gimple_add_tmp_var (nx);
5150 TREE_ADDRESSABLE (nx) = 1;
5151 nx = build_fold_addr_expr_loc (clause_loc,
5152 nx);
5153 nx = fold_convert_loc (clause_loc,
5154 TREE_TYPE (new_vard),
5155 nx);
5156 gimplify_assign (new_vard, nx, ilist);
5157 }
5158 }
5159
5160 x = lang_hooks.decls.omp_clause_linear_ctor
5161 (c, new_var, x, t);
5162 gimplify_and_add (x, ilist);
5163 goto do_dtor;
5164 }
5165
5166 if (POINTER_TYPE_P (TREE_TYPE (x)))
5167 x = fold_build2 (POINTER_PLUS_EXPR,
5168 TREE_TYPE (x), x, t);
5169 else
5170 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5171 }
5172
5173 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5174 || TREE_ADDRESSABLE (new_var)
5175 || omp_is_reference (var))
5176 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5177 ivar, lvar))
5178 {
5179 if (omp_is_reference (var))
5180 {
5181 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5182 tree new_vard = TREE_OPERAND (new_var, 0);
5183 gcc_assert (DECL_P (new_vard));
5184 SET_DECL_VALUE_EXPR (new_vard,
5185 build_fold_addr_expr (lvar));
5186 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5187 }
5188 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5189 {
5190 tree iv = create_tmp_var (TREE_TYPE (new_var));
5191 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5192 gimplify_and_add (x, ilist);
5193 gimple_stmt_iterator gsi
5194 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5195 gassign *g
5196 = gimple_build_assign (unshare_expr (lvar), iv);
5197 gsi_insert_before_without_update (&gsi, g,
5198 GSI_SAME_STMT);
5199 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5200 enum tree_code code = PLUS_EXPR;
5201 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5202 code = POINTER_PLUS_EXPR;
5203 g = gimple_build_assign (iv, code, iv, t);
5204 gsi_insert_before_without_update (&gsi, g,
5205 GSI_SAME_STMT);
5206 break;
5207 }
5208 x = lang_hooks.decls.omp_clause_copy_ctor
5209 (c, unshare_expr (ivar), x);
5210 gimplify_and_add (x, &llist[0]);
5211 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5212 if (x)
5213 gimplify_and_add (x, &llist[1]);
5214 break;
5215 }
5216 if (omp_is_reference (var))
5217 {
5218 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5219 tree new_vard = TREE_OPERAND (new_var, 0);
5220 gcc_assert (DECL_P (new_vard));
5221 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5222 nx = TYPE_SIZE_UNIT (type);
5223 if (TREE_CONSTANT (nx))
5224 {
5225 nx = create_tmp_var_raw (type, get_name (var));
5226 gimple_add_tmp_var (nx);
5227 TREE_ADDRESSABLE (nx) = 1;
5228 nx = build_fold_addr_expr_loc (clause_loc, nx);
5229 nx = fold_convert_loc (clause_loc,
5230 TREE_TYPE (new_vard), nx);
5231 gimplify_assign (new_vard, nx, ilist);
5232 }
5233 }
5234 }
5235 x = lang_hooks.decls.omp_clause_copy_ctor
5236 (c, unshare_expr (new_var), x);
5237 gimplify_and_add (x, ilist);
5238 goto do_dtor;
5239
5240 case OMP_CLAUSE__LOOPTEMP_:
5241 case OMP_CLAUSE__REDUCTEMP_:
5242 gcc_assert (is_taskreg_ctx (ctx));
5243 x = build_outer_var_ref (var, ctx);
5244 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5245 gimplify_and_add (x, ilist);
5246 break;
5247
5248 case OMP_CLAUSE_COPYIN:
5249 by_ref = use_pointer_for_field (var, NULL);
5250 x = build_receiver_ref (var, by_ref, ctx);
5251 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5252 append_to_statement_list (x, &copyin_seq);
5253 copyin_by_ref |= by_ref;
5254 break;
5255
5256 case OMP_CLAUSE_REDUCTION:
5257 case OMP_CLAUSE_IN_REDUCTION:
5258 /* OpenACC reductions are initialized using the
5259 GOACC_REDUCTION internal function. */
5260 if (is_gimple_omp_oacc (ctx->stmt))
5261 break;
5262 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5263 {
5264 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5265 gimple *tseq;
5266 tree ptype = TREE_TYPE (placeholder);
5267 if (cond)
5268 {
5269 x = error_mark_node;
5270 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5271 && !task_reduction_needs_orig_p)
5272 x = var;
5273 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5274 {
5275 tree pptype = build_pointer_type (ptype);
5276 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5277 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5278 size_int (task_reduction_cnt_full
5279 + task_reduction_cntorig - 1),
5280 NULL_TREE, NULL_TREE);
5281 else
5282 {
5283 unsigned int idx
5284 = *ctx->task_reduction_map->get (c);
5285 x = task_reduction_read (ilist, tskred_temp,
5286 pptype, 7 + 3 * idx);
5287 }
5288 x = fold_convert (pptype, x);
5289 x = build_simple_mem_ref (x);
5290 }
5291 }
5292 else
5293 {
5294 x = build_outer_var_ref (var, ctx);
5295
5296 if (omp_is_reference (var)
5297 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5298 x = build_fold_addr_expr_loc (clause_loc, x);
5299 }
5300 SET_DECL_VALUE_EXPR (placeholder, x);
5301 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5302 tree new_vard = new_var;
5303 if (omp_is_reference (var))
5304 {
5305 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5306 new_vard = TREE_OPERAND (new_var, 0);
5307 gcc_assert (DECL_P (new_vard));
5308 }
5309 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5310 if (is_simd
5311 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5312 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5313 rvarp = &rvar;
5314 if (is_simd
5315 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5316 ivar, lvar, rvarp,
5317 &rvar2))
5318 {
5319 if (new_vard == new_var)
5320 {
5321 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5322 SET_DECL_VALUE_EXPR (new_var, ivar);
5323 }
5324 else
5325 {
5326 SET_DECL_VALUE_EXPR (new_vard,
5327 build_fold_addr_expr (ivar));
5328 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5329 }
5330 x = lang_hooks.decls.omp_clause_default_ctor
5331 (c, unshare_expr (ivar),
5332 build_outer_var_ref (var, ctx));
5333 if (rvarp && ctx->for_simd_scan_phase)
5334 {
5335 if (x)
5336 gimplify_and_add (x, &llist[0]);
5337 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5338 if (x)
5339 gimplify_and_add (x, &llist[1]);
5340 break;
5341 }
5342 else if (rvarp)
5343 {
5344 if (x)
5345 {
5346 gimplify_and_add (x, &llist[0]);
5347
5348 tree ivar2 = unshare_expr (lvar);
5349 TREE_OPERAND (ivar2, 1) = sctx.idx;
5350 x = lang_hooks.decls.omp_clause_default_ctor
5351 (c, ivar2, build_outer_var_ref (var, ctx));
5352 gimplify_and_add (x, &llist[0]);
5353
5354 if (rvar2)
5355 {
5356 x = lang_hooks.decls.omp_clause_default_ctor
5357 (c, unshare_expr (rvar2),
5358 build_outer_var_ref (var, ctx));
5359 gimplify_and_add (x, &llist[0]);
5360 }
5361
5362 /* For types that need construction, add another
5363 private var which will be default constructed
5364 and optionally initialized with
5365 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, since in the
5366 loop we want to assign this value instead of
5367 constructing and destructing it in each
5368 iteration. */
5369 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5370 gimple_add_tmp_var (nv);
5371 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5372 ? rvar2
5373 : ivar, 0),
5374 nv);
5375 x = lang_hooks.decls.omp_clause_default_ctor
5376 (c, nv, build_outer_var_ref (var, ctx));
5377 gimplify_and_add (x, ilist);
5378
5379 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5380 {
5381 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5382 x = DECL_VALUE_EXPR (new_vard);
5383 tree vexpr = nv;
5384 if (new_vard != new_var)
5385 vexpr = build_fold_addr_expr (nv);
5386 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5387 lower_omp (&tseq, ctx);
5388 SET_DECL_VALUE_EXPR (new_vard, x);
5389 gimple_seq_add_seq (ilist, tseq);
5390 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5391 }
5392
5393 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5394 if (x)
5395 gimplify_and_add (x, dlist);
5396 }
5397
5398 tree ref = build_outer_var_ref (var, ctx);
5399 x = unshare_expr (ivar);
5400 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5401 ref);
5402 gimplify_and_add (x, &llist[0]);
5403
5404 ref = build_outer_var_ref (var, ctx);
5405 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5406 rvar);
5407 gimplify_and_add (x, &llist[3]);
5408
5409 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5410 if (new_vard == new_var)
5411 SET_DECL_VALUE_EXPR (new_var, lvar);
5412 else
5413 SET_DECL_VALUE_EXPR (new_vard,
5414 build_fold_addr_expr (lvar));
5415
5416 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5417 if (x)
5418 gimplify_and_add (x, &llist[1]);
5419
5420 tree ivar2 = unshare_expr (lvar);
5421 TREE_OPERAND (ivar2, 1) = sctx.idx;
5422 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5423 if (x)
5424 gimplify_and_add (x, &llist[1]);
5425
5426 if (rvar2)
5427 {
5428 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5429 if (x)
5430 gimplify_and_add (x, &llist[1]);
5431 }
5432 break;
5433 }
5434 if (x)
5435 gimplify_and_add (x, &llist[0]);
5436 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5437 {
5438 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5439 lower_omp (&tseq, ctx);
5440 gimple_seq_add_seq (&llist[0], tseq);
5441 }
5442 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5443 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5444 lower_omp (&tseq, ctx);
5445 gimple_seq_add_seq (&llist[1], tseq);
5446 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5447 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5448 if (new_vard == new_var)
5449 SET_DECL_VALUE_EXPR (new_var, lvar);
5450 else
5451 SET_DECL_VALUE_EXPR (new_vard,
5452 build_fold_addr_expr (lvar));
5453 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5454 if (x)
5455 gimplify_and_add (x, &llist[1]);
5456 break;
5457 }
5458 /* If this is a reference to a constant-size reduction
5459 var with a placeholder, we haven't emitted the initializer
5460 for it because that is undesirable if SIMD arrays are used.
5461 But if they aren't used, we need to emit the deferred
5462 initialization now. */
5463 else if (omp_is_reference (var) && is_simd)
5464 handle_simd_reference (clause_loc, new_vard, ilist);
5465
5466 tree lab2 = NULL_TREE;
5467 if (cond)
5468 {
5469 gimple *g;
5470 if (!is_parallel_ctx (ctx))
5471 {
5472 tree condv = create_tmp_var (boolean_type_node);
5473 tree m = build_simple_mem_ref (cond);
5474 g = gimple_build_assign (condv, m);
5475 gimple_seq_add_stmt (ilist, g);
5476 tree lab1
5477 = create_artificial_label (UNKNOWN_LOCATION);
5478 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5479 g = gimple_build_cond (NE_EXPR, condv,
5480 boolean_false_node,
5481 lab2, lab1);
5482 gimple_seq_add_stmt (ilist, g);
5483 gimple_seq_add_stmt (ilist,
5484 gimple_build_label (lab1));
5485 }
5486 g = gimple_build_assign (build_simple_mem_ref (cond),
5487 boolean_true_node);
5488 gimple_seq_add_stmt (ilist, g);
5489 }
5490 x = lang_hooks.decls.omp_clause_default_ctor
5491 (c, unshare_expr (new_var),
5492 cond ? NULL_TREE
5493 : build_outer_var_ref (var, ctx));
5494 if (x)
5495 gimplify_and_add (x, ilist);
5496
5497 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5498 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5499 {
5500 if (ctx->for_simd_scan_phase)
5501 goto do_dtor;
5502 if (x || (!is_simd
5503 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5504 {
5505 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5506 gimple_add_tmp_var (nv);
5507 ctx->cb.decl_map->put (new_vard, nv);
5508 x = lang_hooks.decls.omp_clause_default_ctor
5509 (c, nv, build_outer_var_ref (var, ctx));
5510 if (x)
5511 gimplify_and_add (x, ilist);
5512 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5513 {
5514 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5515 tree vexpr = nv;
5516 if (new_vard != new_var)
5517 vexpr = build_fold_addr_expr (nv);
5518 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5519 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5520 lower_omp (&tseq, ctx);
5521 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5522 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5523 gimple_seq_add_seq (ilist, tseq);
5524 }
5525 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5526 if (is_simd && ctx->scan_exclusive)
5527 {
5528 tree nv2
5529 = create_tmp_var_raw (TREE_TYPE (new_var));
5530 gimple_add_tmp_var (nv2);
5531 ctx->cb.decl_map->put (nv, nv2);
5532 x = lang_hooks.decls.omp_clause_default_ctor
5533 (c, nv2, build_outer_var_ref (var, ctx));
5534 gimplify_and_add (x, ilist);
5535 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5536 if (x)
5537 gimplify_and_add (x, dlist);
5538 }
5539 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5540 if (x)
5541 gimplify_and_add (x, dlist);
5542 }
5543 else if (is_simd
5544 && ctx->scan_exclusive
5545 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5546 {
5547 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5548 gimple_add_tmp_var (nv2);
5549 ctx->cb.decl_map->put (new_vard, nv2);
5550 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5551 if (x)
5552 gimplify_and_add (x, dlist);
5553 }
5554 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5555 goto do_dtor;
5556 }
5557
5558 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5559 {
5560 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5561 lower_omp (&tseq, ctx);
5562 gimple_seq_add_seq (ilist, tseq);
5563 }
5564 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5565 if (is_simd)
5566 {
5567 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5568 lower_omp (&tseq, ctx);
5569 gimple_seq_add_seq (dlist, tseq);
5570 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5571 }
5572 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5573 if (cond)
5574 {
5575 if (lab2)
5576 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5577 break;
5578 }
5579 goto do_dtor;
5580 }
5581 else
5582 {
5583 x = omp_reduction_init (c, TREE_TYPE (new_var));
5584 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5585 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5586
5587 if (cond)
5588 {
5589 gimple *g;
5590 tree lab2 = NULL_TREE;
5591 /* GOMP_taskgroup_reduction_register memsets the whole
5592 array to zero. If the initializer is zero, we don't
5593 need to initialize it again, just mark it as ever
5594 used unconditionally, i.e. cond = true. */
5595 if (initializer_zerop (x))
5596 {
5597 g = gimple_build_assign (build_simple_mem_ref (cond),
5598 boolean_true_node);
5599 gimple_seq_add_stmt (ilist, g);
5600 break;
5601 }
5602
5603 /* Otherwise, emit
5604 if (!cond) { cond = true; new_var = x; } */
5605 if (!is_parallel_ctx (ctx))
5606 {
5607 tree condv = create_tmp_var (boolean_type_node);
5608 tree m = build_simple_mem_ref (cond);
5609 g = gimple_build_assign (condv, m);
5610 gimple_seq_add_stmt (ilist, g);
5611 tree lab1
5612 = create_artificial_label (UNKNOWN_LOCATION);
5613 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5614 g = gimple_build_cond (NE_EXPR, condv,
5615 boolean_false_node,
5616 lab2, lab1);
5617 gimple_seq_add_stmt (ilist, g);
5618 gimple_seq_add_stmt (ilist,
5619 gimple_build_label (lab1));
5620 }
5621 g = gimple_build_assign (build_simple_mem_ref (cond),
5622 boolean_true_node);
5623 gimple_seq_add_stmt (ilist, g);
5624 gimplify_assign (new_var, x, ilist);
5625 if (lab2)
5626 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5627 break;
5628 }
5629
5630 /* reduction(-:var) sums up the partial results, so it
5631 acts identically to reduction(+:var). */
5632 if (code == MINUS_EXPR)
5633 code = PLUS_EXPR;
5634
5635 tree new_vard = new_var;
5636 if (is_simd && omp_is_reference (var))
5637 {
5638 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5639 new_vard = TREE_OPERAND (new_var, 0);
5640 gcc_assert (DECL_P (new_vard));
5641 }
5642 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5643 if (is_simd
5644 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5645 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5646 rvarp = &rvar;
5647 if (is_simd
5648 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5649 ivar, lvar, rvarp,
5650 &rvar2))
5651 {
5652 if (new_vard != new_var)
5653 {
5654 SET_DECL_VALUE_EXPR (new_vard,
5655 build_fold_addr_expr (lvar));
5656 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5657 }
5658
5659 tree ref = build_outer_var_ref (var, ctx);
5660
5661 if (rvarp)
5662 {
5663 if (ctx->for_simd_scan_phase)
5664 break;
5665 gimplify_assign (ivar, ref, &llist[0]);
5666 ref = build_outer_var_ref (var, ctx);
5667 gimplify_assign (ref, rvar, &llist[3]);
5668 break;
5669 }
5670
5671 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5672
5673 if (sctx.is_simt)
5674 {
5675 if (!simt_lane)
5676 simt_lane = create_tmp_var (unsigned_type_node);
5677 x = build_call_expr_internal_loc
5678 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5679 TREE_TYPE (ivar), 2, ivar, simt_lane);
5680 x = build2 (code, TREE_TYPE (ivar), ivar, x);
5681 gimplify_assign (ivar, x, &llist[2]);
5682 }
5683 x = build2 (code, TREE_TYPE (ref), ref, ivar);
5684 ref = build_outer_var_ref (var, ctx);
5685 gimplify_assign (ref, x, &llist[1]);
5686
5687 }
5688 else
5689 {
5690 if (omp_is_reference (var) && is_simd)
5691 handle_simd_reference (clause_loc, new_vard, ilist);
5692 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5693 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5694 break;
5695 gimplify_assign (new_var, x, ilist);
5696 if (is_simd)
5697 {
5698 tree ref = build_outer_var_ref (var, ctx);
5699
5700 x = build2 (code, TREE_TYPE (ref), ref, new_var);
5701 ref = build_outer_var_ref (var, ctx);
5702 gimplify_assign (ref, x, dlist);
5703 }
5704 }
5705 }
5706 break;
5707
5708 default:
5709 gcc_unreachable ();
5710 }
5711 }
5712 }
5713 if (tskred_avar)
5714 {
5715 tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5716 TREE_THIS_VOLATILE (clobber) = 1;
5717 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5718 }
5719
5720 if (known_eq (sctx.max_vf, 1U))
5721 {
5722 sctx.is_simt = false;
5723 if (ctx->lastprivate_conditional_map)
5724 {
5725 if (gimple_omp_for_combined_into_p (ctx->stmt))
5726 {
5727 /* Signal to lower_omp_1 that it should use the parent context. */
5728 ctx->combined_into_simd_safelen1 = true;
5729 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5730 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5731 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5732 {
5733 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5734 omp_context *outer = ctx->outer;
5735 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
5736 outer = outer->outer;
5737 tree *v = ctx->lastprivate_conditional_map->get (o);
5738 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
5739 tree *pv = outer->lastprivate_conditional_map->get (po);
5740 *v = *pv;
5741 }
5742 }
5743 else
5744 {
5745 /* When not vectorized, treat lastprivate(conditional:) like
5746 normal lastprivate, as there will be just one simd lane
5747 writing the privatized variable. */
5748 delete ctx->lastprivate_conditional_map;
5749 ctx->lastprivate_conditional_map = NULL;
5750 }
5751 }
5752 }
5753
5754 if (nonconst_simd_if)
5755 {
5756 if (sctx.lane == NULL_TREE)
5757 {
5758 sctx.idx = create_tmp_var (unsigned_type_node);
5759 sctx.lane = create_tmp_var (unsigned_type_node);
5760 }
5761 /* FIXME: For now. */
5762 sctx.is_simt = false;
5763 }
5764
5765 if (sctx.lane || sctx.is_simt)
5766 {
5767 uid = create_tmp_var (ptr_type_node, "simduid");
5768 /* We don't want uninit warnings on simduid; it is always uninitialized,
5769 as we use it not for its value but only for its DECL_UID. */
5770 TREE_NO_WARNING (uid) = 1;
5771 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5772 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5773 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5774 gimple_omp_for_set_clauses (ctx->stmt, c);
5775 }
5776 /* Emit calls denoting privatized variables and initializing a pointer to a
5777 structure that holds private variables as fields, after the ompdevlow pass. */
5778 if (sctx.is_simt)
5779 {
5780 sctx.simt_eargs[0] = uid;
5781 gimple *g
5782 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5783 gimple_call_set_lhs (g, uid);
5784 gimple_seq_add_stmt (ilist, g);
5785 sctx.simt_eargs.release ();
5786
5787 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5788 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5789 gimple_call_set_lhs (g, simtrec);
5790 gimple_seq_add_stmt (ilist, g);
5791 }
5792 if (sctx.lane)
5793 {
5794 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
5795 2 + (nonconst_simd_if != NULL),
5796 uid, integer_zero_node,
5797 nonconst_simd_if);
5798 gimple_call_set_lhs (g, sctx.lane);
5799 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5800 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
5801 g = gimple_build_assign (sctx.lane, INTEGER_CST,
5802 build_int_cst (unsigned_type_node, 0));
5803 gimple_seq_add_stmt (ilist, g);
5804 if (sctx.lastlane)
5805 {
5806 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
5807 2, uid, sctx.lane);
5808 gimple_call_set_lhs (g, sctx.lastlane);
5809 gimple_seq_add_stmt (dlist, g);
5810 gimple_seq_add_seq (dlist, llist[3]);
5811 }
5812 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
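/* Sketch of the loop built below (illustrative, not literal GIMPLE):
     for (simt_lane = 1; simt_lane < GOMP_SIMT_VF (); simt_lane <<= 1)
       x = x OP GOMP_SIMT_XCHG_BFLY (x, simt_lane);
   i.e. a butterfly combining the partial results of all SIMT lanes. */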
5813 if (llist[2])
5814 {
5815 tree simt_vf = create_tmp_var (unsigned_type_node);
5816 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5817 gimple_call_set_lhs (g, simt_vf);
5818 gimple_seq_add_stmt (dlist, g);
5819
5820 tree t = build_int_cst (unsigned_type_node, 1);
5821 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5822 gimple_seq_add_stmt (dlist, g);
5823
5824 t = build_int_cst (unsigned_type_node, 0);
5825 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5826 gimple_seq_add_stmt (dlist, g);
5827
5828 tree body = create_artificial_label (UNKNOWN_LOCATION);
5829 tree header = create_artificial_label (UNKNOWN_LOCATION);
5830 tree end = create_artificial_label (UNKNOWN_LOCATION);
5831 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5832 gimple_seq_add_stmt (dlist, gimple_build_label (body));
5833
5834 gimple_seq_add_seq (dlist, llist[2]);
5835
5836 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5837 gimple_seq_add_stmt (dlist, g);
5838
5839 gimple_seq_add_stmt (dlist, gimple_build_label (header));
5840 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5841 gimple_seq_add_stmt (dlist, g);
5842
5843 gimple_seq_add_stmt (dlist, gimple_build_label (end));
5844 }
5845 for (int i = 0; i < 2; i++)
5846 if (llist[i])
5847 {
5848 tree vf = create_tmp_var (unsigned_type_node);
5849 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5850 gimple_call_set_lhs (g, vf);
5851 gimple_seq *seq = i == 0 ? ilist : dlist;
5852 gimple_seq_add_stmt (seq, g);
5853 tree t = build_int_cst (unsigned_type_node, 0);
5854 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5855 gimple_seq_add_stmt (seq, g);
5856 tree body = create_artificial_label (UNKNOWN_LOCATION);
5857 tree header = create_artificial_label (UNKNOWN_LOCATION);
5858 tree end = create_artificial_label (UNKNOWN_LOCATION);
5859 gimple_seq_add_stmt (seq, gimple_build_goto (header));
5860 gimple_seq_add_stmt (seq, gimple_build_label (body));
5861 gimple_seq_add_seq (seq, llist[i]);
5862 t = build_int_cst (unsigned_type_node, 1);
5863 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
5864 gimple_seq_add_stmt (seq, g);
5865 gimple_seq_add_stmt (seq, gimple_build_label (header));
5866 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
5867 gimple_seq_add_stmt (seq, g);
5868 gimple_seq_add_stmt (seq, gimple_build_label (end));
5869 }
5870 }
5871 if (sctx.is_simt)
5872 {
5873 gimple_seq_add_seq (dlist, sctx.simt_dlist);
5874 gimple *g
5875 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5876 gimple_seq_add_stmt (dlist, g);
5877 }
5878
5879 /* The copyin sequence must not be executed by the main thread, since
5880 that would result in self-copies. The self-copy may be invisible for
5881 scalars, but it certainly is visible to C++ operator=. */
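/* The guard emitted below is, schematically,
     if (omp_get_thread_num () != 0)
       { copyin_seq }
   so only the non-master threads perform the copy-in. */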
5882 if (copyin_seq)
5883 {
5884 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5885 0);
5886 x = build2 (NE_EXPR, boolean_type_node, x,
5887 build_int_cst (TREE_TYPE (x), 0));
5888 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5889 gimplify_and_add (x, ilist);
5890 }
5891
5892 /* If any copyin variable is passed by reference, we must ensure the
5893 master thread doesn't modify it before it is copied over in all
5894 threads. Similarly, for variables in both firstprivate and
5895 lastprivate clauses we need to ensure that the lastprivate copying
5896 happens after the firstprivate copying in all threads. And similarly
5897 for UDRs if the initializer expression refers to omp_orig. */
5898 if (copyin_by_ref || lastprivate_firstprivate
5899 || (reduction_omp_orig_ref
5900 && !ctx->scan_inclusive
5901 && !ctx->scan_exclusive))
5902 {
5903 /* Don't add any barrier for #pragma omp simd or
5904 #pragma omp distribute. */
5905 if (!is_task_ctx (ctx)
5906 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5907 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
5908 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
5909 }
5910
5911 /* If max_vf is non-zero, then we can use only a vectorization factor
5912 up to the max_vf we chose. So stick it into the safelen clause. */
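/* For instance (numbers purely illustrative): if the SIMD arrays were
   sized for max_vf == 16 and the loop had no safelen clause, or one
   larger than 16, a safelen(16) clause is prepended below. */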
5913 if (maybe_ne (sctx.max_vf, 0U))
5914 {
5915 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
5916 OMP_CLAUSE_SAFELEN);
5917 poly_uint64 safe_len;
5918 if (c == NULL_TREE
5919 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5920 && maybe_gt (safe_len, sctx.max_vf)))
5921 {
5922 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5923 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
5924 sctx.max_vf);
5925 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5926 gimple_omp_for_set_clauses (ctx->stmt, c);
5927 }
5928 }
5929 }
5930
5931 /* Create temporary variables for lastprivate(conditional:) implementation
5932 in context CTX with CLAUSES. */
5933
5934 static void
5935 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
5936 {
5937 tree iter_type = NULL_TREE;
5938 tree cond_ptr = NULL_TREE;
5939 tree iter_var = NULL_TREE;
5940 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5941 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
5942 tree next = *clauses;
5943 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
5944 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5945 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5946 {
5947 if (is_simd)
5948 {
5949 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
5950 gcc_assert (cc);
5951 if (iter_type == NULL_TREE)
5952 {
5953 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
5954 iter_var = create_tmp_var_raw (iter_type);
5955 DECL_CONTEXT (iter_var) = current_function_decl;
5956 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
5957 DECL_CHAIN (iter_var) = ctx->block_vars;
5958 ctx->block_vars = iter_var;
5959 tree c3
5960 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
5961 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
5962 OMP_CLAUSE_DECL (c3) = iter_var;
5963 OMP_CLAUSE_CHAIN (c3) = *clauses;
5964 *clauses = c3;
5965 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
5966 }
5967 next = OMP_CLAUSE_CHAIN (cc);
5968 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
5969 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
5970 ctx->lastprivate_conditional_map->put (o, v);
5971 continue;
5972 }
5973 if (iter_type == NULL)
5974 {
5975 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
5976 {
5977 struct omp_for_data fd;
5978 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
5979 NULL);
5980 iter_type = unsigned_type_for (fd.iter_type);
5981 }
5982 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
5983 iter_type = unsigned_type_node;
5984 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
5985 if (c2)
5986 {
5987 cond_ptr
5988 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
5989 OMP_CLAUSE_DECL (c2) = cond_ptr;
5990 }
5991 else
5992 {
5993 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
5994 DECL_CONTEXT (cond_ptr) = current_function_decl;
5995 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
5996 DECL_CHAIN (cond_ptr) = ctx->block_vars;
5997 ctx->block_vars = cond_ptr;
5998 c2 = build_omp_clause (UNKNOWN_LOCATION,
5999 OMP_CLAUSE__CONDTEMP_);
6000 OMP_CLAUSE_DECL (c2) = cond_ptr;
6001 OMP_CLAUSE_CHAIN (c2) = *clauses;
6002 *clauses = c2;
6003 }
6004 iter_var = create_tmp_var_raw (iter_type);
6005 DECL_CONTEXT (iter_var) = current_function_decl;
6006 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6007 DECL_CHAIN (iter_var) = ctx->block_vars;
6008 ctx->block_vars = iter_var;
6009 tree c3
6010 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6011 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6012 OMP_CLAUSE_DECL (c3) = iter_var;
6013 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6014 OMP_CLAUSE_CHAIN (c2) = c3;
6015 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6016 }
6017 tree v = create_tmp_var_raw (iter_type);
6018 DECL_CONTEXT (v) = current_function_decl;
6019 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6020 DECL_CHAIN (v) = ctx->block_vars;
6021 ctx->block_vars = v;
6022 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6023 ctx->lastprivate_conditional_map->put (o, v);
6024 }
6025 }
6026
6027
6028 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6029 both parallel and workshare constructs. PREDICATE may be NULL if it's
6030 always true. BODY_P is the sequence into which to insert early
6031 initialization if needed; STMT_LIST is where the non-conditional
6032 lastprivate handling goes; and CSTMT_LIST is a sequence that needs to
6033 be run in a critical section. */
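/* For example, given "#pragma omp for lastprivate (x)", the thread that
   executes the sequentially last iteration copies its private x back to
   the original x; PREDICATE is the test guarding that copy-out. */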
6034
6035 static void
6036 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6037 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6038 omp_context *ctx)
6039 {
6040 tree x, c, label = NULL, orig_clauses = clauses;
6041 bool par_clauses = false;
6042 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6043 unsigned HOST_WIDE_INT conditional_off = 0;
6044 gimple_seq post_stmt_list = NULL;
6045
6046 /* Early exit if there are no lastprivate or linear clauses. */
6047 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6048 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6049 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6050 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6051 break;
6052 if (clauses == NULL)
6053 {
6054 /* If this was a workshare clause, see if it had been combined
6055 with its parallel. In that case, look for the clauses on the
6056 parallel statement itself. */
6057 if (is_parallel_ctx (ctx))
6058 return;
6059
6060 ctx = ctx->outer;
6061 if (ctx == NULL || !is_parallel_ctx (ctx))
6062 return;
6063
6064 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6065 OMP_CLAUSE_LASTPRIVATE);
6066 if (clauses == NULL)
6067 return;
6068 par_clauses = true;
6069 }
6070
6071 bool maybe_simt = false;
6072 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6073 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
6074 {
6075 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6076 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6077 if (simduid)
6078 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6079 }
6080
6081 if (predicate)
6082 {
6083 gcond *stmt;
6084 tree label_true, arm1, arm2;
6085 enum tree_code pred_code = TREE_CODE (predicate);
6086
6087 label = create_artificial_label (UNKNOWN_LOCATION);
6088 label_true = create_artificial_label (UNKNOWN_LOCATION);
6089 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6090 {
6091 arm1 = TREE_OPERAND (predicate, 0);
6092 arm2 = TREE_OPERAND (predicate, 1);
6093 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6094 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6095 }
6096 else
6097 {
6098 arm1 = predicate;
6099 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6100 arm2 = boolean_false_node;
6101 pred_code = NE_EXPR;
6102 }
6103 if (maybe_simt)
6104 {
6105 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6106 c = fold_convert (integer_type_node, c);
6107 simtcond = create_tmp_var (integer_type_node);
6108 gimplify_assign (simtcond, c, stmt_list);
6109 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6110 1, simtcond);
6111 c = create_tmp_var (integer_type_node);
6112 gimple_call_set_lhs (g, c);
6113 gimple_seq_add_stmt (stmt_list, g);
6114 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6115 label_true, label);
6116 }
6117 else
6118 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6119 gimple_seq_add_stmt (stmt_list, stmt);
6120 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6121 }
6122
6123 tree cond_ptr = NULL_TREE;
6124 for (c = clauses; c ;)
6125 {
6126 tree var, new_var;
6127 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6128 gimple_seq *this_stmt_list = stmt_list;
6129 tree lab2 = NULL_TREE;
6130
6131 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6132 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6133 && ctx->lastprivate_conditional_map
6134 && !ctx->combined_into_simd_safelen1)
6135 {
6136 gcc_assert (body_p);
6137 if (simduid)
6138 goto next;
6139 if (cond_ptr == NULL_TREE)
6140 {
6141 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6142 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6143 }
6144 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6145 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6146 tree v = *ctx->lastprivate_conditional_map->get (o);
6147 gimplify_assign (v, build_zero_cst (type), body_p);
6148 this_stmt_list = cstmt_list;
6149 tree mem;
6150 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6151 {
6152 mem = build2 (MEM_REF, type, cond_ptr,
6153 build_int_cst (TREE_TYPE (cond_ptr),
6154 conditional_off));
6155 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6156 }
6157 else
6158 mem = build4 (ARRAY_REF, type, cond_ptr,
6159 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6160 tree mem2 = copy_node (mem);
6161 gimple_seq seq = NULL;
6162 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6163 gimple_seq_add_seq (this_stmt_list, seq);
6164 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6165 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6166 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6167 gimple_seq_add_stmt (this_stmt_list, g);
6168 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6169 gimplify_assign (mem2, v, this_stmt_list);
6170 }
6171 else if (predicate
6172 && ctx->combined_into_simd_safelen1
6173 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6174 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6175 && ctx->lastprivate_conditional_map)
6176 this_stmt_list = &post_stmt_list;
6177
6178 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6179 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6180 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6181 {
6182 var = OMP_CLAUSE_DECL (c);
6183 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6184 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6185 && is_taskloop_ctx (ctx))
6186 {
6187 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6188 new_var = lookup_decl (var, ctx->outer);
6189 }
6190 else
6191 {
6192 new_var = lookup_decl (var, ctx);
6193 /* Avoid uninitialized warnings for lastprivate and
6194 for linear iterators. */
6195 if (predicate
6196 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6197 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6198 TREE_NO_WARNING (new_var) = 1;
6199 }
6200
6201 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6202 {
6203 tree val = DECL_VALUE_EXPR (new_var);
6204 if (TREE_CODE (val) == ARRAY_REF
6205 && VAR_P (TREE_OPERAND (val, 0))
6206 && lookup_attribute ("omp simd array",
6207 DECL_ATTRIBUTES (TREE_OPERAND (val,
6208 0))))
6209 {
6210 if (lastlane == NULL)
6211 {
6212 lastlane = create_tmp_var (unsigned_type_node);
6213 gcall *g
6214 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6215 2, simduid,
6216 TREE_OPERAND (val, 1));
6217 gimple_call_set_lhs (g, lastlane);
6218 gimple_seq_add_stmt (this_stmt_list, g);
6219 }
6220 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6221 TREE_OPERAND (val, 0), lastlane,
6222 NULL_TREE, NULL_TREE);
6223 TREE_THIS_NOTRAP (new_var) = 1;
6224 }
6225 }
6226 else if (maybe_simt)
6227 {
6228 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6229 ? DECL_VALUE_EXPR (new_var)
6230 : new_var);
6231 if (simtlast == NULL)
6232 {
6233 simtlast = create_tmp_var (unsigned_type_node);
6234 gcall *g = gimple_build_call_internal
6235 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6236 gimple_call_set_lhs (g, simtlast);
6237 gimple_seq_add_stmt (this_stmt_list, g);
6238 }
6239 x = build_call_expr_internal_loc
6240 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6241 TREE_TYPE (val), 2, val, simtlast);
6242 new_var = unshare_expr (new_var);
6243 gimplify_assign (new_var, x, this_stmt_list);
6244 new_var = unshare_expr (new_var);
6245 }
6246
6247 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6248 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6249 {
6250 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6251 gimple_seq_add_seq (this_stmt_list,
6252 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6253 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6254 }
6255 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6256 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6257 {
6258 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6259 gimple_seq_add_seq (this_stmt_list,
6260 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6261 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6262 }
6263
6264 x = NULL_TREE;
6265 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6266 && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
6267 {
6268 gcc_checking_assert (is_taskloop_ctx (ctx));
6269 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6270 ctx->outer->outer);
6271 if (is_global_var (ovar))
6272 x = ovar;
6273 }
6274 if (!x)
6275 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6276 if (omp_is_reference (var))
6277 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6278 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6279 gimplify_and_add (x, this_stmt_list);
6280
6281 if (lab2)
6282 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6283 }
6284
6285 next:
6286 c = OMP_CLAUSE_CHAIN (c);
6287 if (c == NULL && !par_clauses)
6288 {
6289 /* If this was a workshare clause, see if it had been combined
6290 with its parallel. In that case, continue looking for the
6291 clauses also on the parallel statement itself. */
6292 if (is_parallel_ctx (ctx))
6293 break;
6294
6295 ctx = ctx->outer;
6296 if (ctx == NULL || !is_parallel_ctx (ctx))
6297 break;
6298
6299 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6300 OMP_CLAUSE_LASTPRIVATE);
6301 par_clauses = true;
6302 }
6303 }
6304
6305 if (label)
6306 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6307 gimple_seq_add_seq (stmt_list, post_stmt_list);
6308 }
6309
6310 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6311 (which might be a placeholder). INNER is true if this is an inner
6312 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6313 join markers. Generate the before-loop forking sequence in
6314 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
6315 general form of these sequences is
6316
6317 GOACC_REDUCTION_SETUP
6318 GOACC_FORK
6319 GOACC_REDUCTION_INIT
6320 ...
6321 GOACC_REDUCTION_FINI
6322 GOACC_JOIN
6323 GOACC_REDUCTION_TEARDOWN. */
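/* For instance, for "#pragma acc loop reduction (+:sum)" this lowers,
   roughly, to
     v1 = GOACC_REDUCTION_SETUP (...); GOACC_FORK;
     v2 = GOACC_REDUCTION_INIT (...);  ... loop body ...
     v3 = GOACC_REDUCTION_FINI (...);  GOACC_JOIN;
     sum = GOACC_REDUCTION_TEARDOWN (...);
   where v1/v2/v3 are the temporaries created below and later passes
   specialize each call for the partitioning level. */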
6324
6325 static void
6326 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6327 gcall *fork, gcall *join, gimple_seq *fork_seq,
6328 gimple_seq *join_seq, omp_context *ctx)
6329 {
6330 gimple_seq before_fork = NULL;
6331 gimple_seq after_fork = NULL;
6332 gimple_seq before_join = NULL;
6333 gimple_seq after_join = NULL;
6334 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6335 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6336 unsigned offset = 0;
6337
6338 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6339 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6340 {
6341 tree orig = OMP_CLAUSE_DECL (c);
6342 tree var = maybe_lookup_decl (orig, ctx);
6343 tree ref_to_res = NULL_TREE;
6344 tree incoming, outgoing, v1, v2, v3;
6345 bool is_private = false;
6346
6347 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6348 if (rcode == MINUS_EXPR)
6349 rcode = PLUS_EXPR;
6350 else if (rcode == TRUTH_ANDIF_EXPR)
6351 rcode = BIT_AND_EXPR;
6352 else if (rcode == TRUTH_ORIF_EXPR)
6353 rcode = BIT_IOR_EXPR;
6354 tree op = build_int_cst (unsigned_type_node, rcode);
6355
6356 if (!var)
6357 var = orig;
6358
6359 incoming = outgoing = var;
6360
6361 if (!inner)
6362 {
6363 /* See if an outer construct also reduces this variable. */
6364 omp_context *outer = ctx;
6365
6366 while (omp_context *probe = outer->outer)
6367 {
6368 enum gimple_code type = gimple_code (probe->stmt);
6369 tree cls;
6370
6371 switch (type)
6372 {
6373 case GIMPLE_OMP_FOR:
6374 cls = gimple_omp_for_clauses (probe->stmt);
6375 break;
6376
6377 case GIMPLE_OMP_TARGET:
6378 if (gimple_omp_target_kind (probe->stmt)
6379 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6380 goto do_lookup;
6381
6382 cls = gimple_omp_target_clauses (probe->stmt);
6383 break;
6384
6385 default:
6386 goto do_lookup;
6387 }
6388
6389 outer = probe;
6390 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6391 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6392 && orig == OMP_CLAUSE_DECL (cls))
6393 {
6394 incoming = outgoing = lookup_decl (orig, probe);
6395 goto has_outer_reduction;
6396 }
6397 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6398 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6399 && orig == OMP_CLAUSE_DECL (cls))
6400 {
6401 is_private = true;
6402 goto do_lookup;
6403 }
6404 }
6405
6406 do_lookup:
6407 /* This is the outermost construct with this reduction,
6408 see if there's a mapping for it. */
6409 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6410 && maybe_lookup_field (orig, outer) && !is_private)
6411 {
6412 ref_to_res = build_receiver_ref (orig, false, outer);
6413 if (omp_is_reference (orig))
6414 ref_to_res = build_simple_mem_ref (ref_to_res);
6415
6416 tree type = TREE_TYPE (var);
6417 if (POINTER_TYPE_P (type))
6418 type = TREE_TYPE (type);
6419
6420 outgoing = var;
6421 incoming = omp_reduction_init_op (loc, rcode, type);
6422 }
6423 else
6424 {
6425 /* Try to look at enclosing contexts for reduction var,
6426 use original if no mapping found. */
6427 tree t = NULL_TREE;
6428 omp_context *c = ctx->outer;
6429 while (c && !t)
6430 {
6431 t = maybe_lookup_decl (orig, c);
6432 c = c->outer;
6433 }
6434 incoming = outgoing = (t ? t : orig);
6435 }
6436
6437 has_outer_reduction:;
6438 }
6439
6440 if (!ref_to_res)
6441 ref_to_res = integer_zero_node;
6442
6443 if (omp_is_reference (orig))
6444 {
6445 tree type = TREE_TYPE (var);
6446 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6447
6448 if (!inner)
6449 {
6450 tree x = create_tmp_var (TREE_TYPE (type), id);
6451 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6452 }
6453
6454 v1 = create_tmp_var (type, id);
6455 v2 = create_tmp_var (type, id);
6456 v3 = create_tmp_var (type, id);
6457
6458 gimplify_assign (v1, var, fork_seq);
6459 gimplify_assign (v2, var, fork_seq);
6460 gimplify_assign (v3, var, fork_seq);
6461
6462 var = build_simple_mem_ref (var);
6463 v1 = build_simple_mem_ref (v1);
6464 v2 = build_simple_mem_ref (v2);
6465 v3 = build_simple_mem_ref (v3);
6466 outgoing = build_simple_mem_ref (outgoing);
6467
6468 if (!TREE_CONSTANT (incoming))
6469 incoming = build_simple_mem_ref (incoming);
6470 }
6471 else
6472 v1 = v2 = v3 = var;
6473
6474 /* Determine the position in the reduction buffer, which may be used
6475 by the target. The parser has ensured that this is not a
6476 variable-sized type. */
6477 fixed_size_mode mode
6478 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6479 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6480 offset = (offset + align - 1) & ~(align - 1);
6481 tree off = build_int_cst (sizetype, offset);
6482 offset += GET_MODE_SIZE (mode);
6483
6484 if (!init_code)
6485 {
6486 init_code = build_int_cst (integer_type_node,
6487 IFN_GOACC_REDUCTION_INIT);
6488 fini_code = build_int_cst (integer_type_node,
6489 IFN_GOACC_REDUCTION_FINI);
6490 setup_code = build_int_cst (integer_type_node,
6491 IFN_GOACC_REDUCTION_SETUP);
6492 teardown_code = build_int_cst (integer_type_node,
6493 IFN_GOACC_REDUCTION_TEARDOWN);
6494 }
6495
6496 tree setup_call
6497 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6498 TREE_TYPE (var), 6, setup_code,
6499 unshare_expr (ref_to_res),
6500 incoming, level, op, off);
6501 tree init_call
6502 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6503 TREE_TYPE (var), 6, init_code,
6504 unshare_expr (ref_to_res),
6505 v1, level, op, off);
6506 tree fini_call
6507 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6508 TREE_TYPE (var), 6, fini_code,
6509 unshare_expr (ref_to_res),
6510 v2, level, op, off);
6511 tree teardown_call
6512 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6513 TREE_TYPE (var), 6, teardown_code,
6514 ref_to_res, v3, level, op, off);
6515
6516 gimplify_assign (v1, setup_call, &before_fork);
6517 gimplify_assign (v2, init_call, &after_fork);
6518 gimplify_assign (v3, fini_call, &before_join);
6519 gimplify_assign (outgoing, teardown_call, &after_join);
6520 }
6521
6522 /* Now stitch things together. */
6523 gimple_seq_add_seq (fork_seq, before_fork);
6524 if (fork)
6525 gimple_seq_add_stmt (fork_seq, fork);
6526 gimple_seq_add_seq (fork_seq, after_fork);
6527
6528 gimple_seq_add_seq (join_seq, before_join);
6529 if (join)
6530 gimple_seq_add_stmt (join_seq, join);
6531 gimple_seq_add_seq (join_seq, after_join);
6532 }
6533
6534 /* Generate code to implement the REDUCTION clauses and append it
6535 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
6536 that should also be emitted inside the critical section; in that
6537 case clear *CLIST afterwards, otherwise leave it as is
6538 and let the caller emit it itself. */
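/* Illustrative sketch: a single scalar clause such as
   "reduction (+:sum)" is merged via an atomic update, roughly
     #pragma omp atomic relaxed
     sum += sum_priv;
   (sum_priv naming the private copy), whereas several clauses, array
   sections or UDRs are merged between GOMP_atomic_start () and
   GOMP_atomic_end () calls instead. */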
6539
6540 static void
6541 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6542 gimple_seq *clist, omp_context *ctx)
6543 {
6544 gimple_seq sub_seq = NULL;
6545 gimple *stmt;
6546 tree x, c;
6547 int count = 0;
6548
6549 /* OpenACC loop reductions are handled elsewhere. */
6550 if (is_gimple_omp_oacc (ctx->stmt))
6551 return;
6552
6553 /* SIMD reductions are handled in lower_rec_input_clauses. */
6554 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6555 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
6556 return;
6557
6558 /* inscan reductions are handled elsewhere. */
6559 if (ctx->scan_inclusive || ctx->scan_exclusive)
6560 return;
6561
6562 /* First see if there is exactly one reduction clause. Use an OMP_ATOMIC
6563 update in that case, otherwise use a lock. */
6564 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6565 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6566 && !OMP_CLAUSE_REDUCTION_TASK (c))
6567 {
6568 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6569 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6570 {
6571 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6572 count = -1;
6573 break;
6574 }
6575 count++;
6576 }
6577
6578 if (count == 0)
6579 return;
6580
6581 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6582 {
6583 tree var, ref, new_var, orig_var;
6584 enum tree_code code;
6585 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6586
6587 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6588 || OMP_CLAUSE_REDUCTION_TASK (c))
6589 continue;
6590
6591 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6592 orig_var = var = OMP_CLAUSE_DECL (c);
6593 if (TREE_CODE (var) == MEM_REF)
6594 {
6595 var = TREE_OPERAND (var, 0);
6596 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6597 var = TREE_OPERAND (var, 0);
6598 if (TREE_CODE (var) == ADDR_EXPR)
6599 var = TREE_OPERAND (var, 0);
6600 else
6601 {
6602 /* If this is a pointer or reference based array
6603 section, the var could be private in the outer
6604 context, e.g. on an orphaned loop construct. Pretend
6605 this is the private variable's outer reference. */
6606 ccode = OMP_CLAUSE_PRIVATE;
6607 if (TREE_CODE (var) == INDIRECT_REF)
6608 var = TREE_OPERAND (var, 0);
6609 }
6610 orig_var = var;
6611 if (is_variable_sized (var))
6612 {
6613 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6614 var = DECL_VALUE_EXPR (var);
6615 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6616 var = TREE_OPERAND (var, 0);
6617 gcc_assert (DECL_P (var));
6618 }
6619 }
6620 new_var = lookup_decl (var, ctx);
6621 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6622 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6623 ref = build_outer_var_ref (var, ctx, ccode);
6624 code = OMP_CLAUSE_REDUCTION_CODE (c);
6625
6626 /* reduction(-:var) sums up the partial results, so it acts
6627 identically to reduction(+:var). */
6628 if (code == MINUS_EXPR)
6629 code = PLUS_EXPR;
6630
6631 if (count == 1)
6632 {
6633 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6634
6635 addr = save_expr (addr);
6636 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6637 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
6638 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
6639 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
6640 gimplify_and_add (x, stmt_seqp);
6641 return;
6642 }
6643 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6644 {
6645 tree d = OMP_CLAUSE_DECL (c);
6646 tree type = TREE_TYPE (d);
6647 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6648 tree i = create_tmp_var (TREE_TYPE (v));
6649 tree ptype = build_pointer_type (TREE_TYPE (type));
6650 tree bias = TREE_OPERAND (d, 1);
6651 d = TREE_OPERAND (d, 0);
6652 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6653 {
6654 tree b = TREE_OPERAND (d, 1);
6655 b = maybe_lookup_decl (b, ctx);
6656 if (b == NULL)
6657 {
6658 b = TREE_OPERAND (d, 1);
6659 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6660 }
6661 if (integer_zerop (bias))
6662 bias = b;
6663 else
6664 {
6665 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
6666 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
6667 TREE_TYPE (b), b, bias);
6668 }
6669 d = TREE_OPERAND (d, 0);
6670 }
6671 /* For REF, build_outer_var_ref already performs this dereference,
6672 so only NEW_VAR needs one. */
6673 if (TREE_CODE (d) == INDIRECT_REF)
6674 {
6675 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6676 gcc_assert (omp_is_reference (var) && var == orig_var);
6677 }
6678 else if (TREE_CODE (d) == ADDR_EXPR)
6679 {
6680 if (orig_var == var)
6681 {
6682 new_var = build_fold_addr_expr (new_var);
6683 ref = build_fold_addr_expr (ref);
6684 }
6685 }
6686 else
6687 {
6688 gcc_assert (orig_var == var);
6689 if (omp_is_reference (var))
6690 ref = build_fold_addr_expr (ref);
6691 }
6692 if (DECL_P (v))
6693 {
6694 tree t = maybe_lookup_decl (v, ctx);
6695 if (t)
6696 v = t;
6697 else
6698 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
6699 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
6700 }
6701 if (!integer_zerop (bias))
6702 {
6703 bias = fold_convert_loc (clause_loc, sizetype, bias);
6704 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6705 TREE_TYPE (new_var), new_var,
6706 unshare_expr (bias));
6707 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6708 TREE_TYPE (ref), ref, bias);
6709 }
6710 new_var = fold_convert_loc (clause_loc, ptype, new_var);
6711 ref = fold_convert_loc (clause_loc, ptype, ref);
6712 tree m = create_tmp_var (ptype);
6713 gimplify_assign (m, new_var, stmt_seqp);
6714 new_var = m;
6715 m = create_tmp_var (ptype);
6716 gimplify_assign (m, ref, stmt_seqp);
6717 ref = m;
6718 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
6719 tree body = create_artificial_label (UNKNOWN_LOCATION);
6720 tree end = create_artificial_label (UNKNOWN_LOCATION);
6721 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
6722 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
6723 tree out = build_simple_mem_ref_loc (clause_loc, ref);
6724 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6725 {
6726 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6727 tree decl_placeholder
6728 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
6729 SET_DECL_VALUE_EXPR (placeholder, out);
6730 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6731 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
6732 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
6733 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6734 gimple_seq_add_seq (&sub_seq,
6735 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6736 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6737 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6738 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
6739 }
6740 else
6741 {
6742 x = build2 (code, TREE_TYPE (out), out, priv);
6743 out = unshare_expr (out);
6744 gimplify_assign (out, x, &sub_seq);
6745 }
6746 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
6747 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6748 gimple_seq_add_stmt (&sub_seq, g);
6749 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
6750 TYPE_SIZE_UNIT (TREE_TYPE (type)));
6751 gimple_seq_add_stmt (&sub_seq, g);
6752 g = gimple_build_assign (i, PLUS_EXPR, i,
6753 build_int_cst (TREE_TYPE (i), 1));
6754 gimple_seq_add_stmt (&sub_seq, g);
6755 g = gimple_build_cond (LE_EXPR, i, v, body, end);
6756 gimple_seq_add_stmt (&sub_seq, g);
6757 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
6758 }
6759 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6760 {
6761 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6762
6763 if (omp_is_reference (var)
6764 && !useless_type_conversion_p (TREE_TYPE (placeholder),
6765 TREE_TYPE (ref)))
6766 ref = build_fold_addr_expr_loc (clause_loc, ref);
6767 SET_DECL_VALUE_EXPR (placeholder, ref);
6768 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6769 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6770 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6771 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6772 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6773 }
6774 else
6775 {
6776 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6777 ref = build_outer_var_ref (var, ctx);
6778 gimplify_assign (ref, x, &sub_seq);
6779 }
6780 }
6781
6782 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
6783 0);
6784 gimple_seq_add_stmt (stmt_seqp, stmt);
6785
6786 gimple_seq_add_seq (stmt_seqp, sub_seq);
6787
6788 if (clist)
6789 {
6790 gimple_seq_add_seq (stmt_seqp, *clist);
6791 *clist = NULL;
6792 }
6793
6794 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
6795 0);
6796 gimple_seq_add_stmt (stmt_seqp, stmt);
6797 }
6798
6799
6800 /* Generate code to implement the COPYPRIVATE clauses. */
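/* Schematically, for "#pragma omp single copyprivate (x)" the thread
   that executed the single region stores x (or &x, when passed by
   reference) into the communication record (SLIST), and every thread
   then copies the broadcast value back out of it (RLIST). */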
6801
6802 static void
6803 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
6804 omp_context *ctx)
6805 {
6806 tree c;
6807
6808 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6809 {
6810 tree var, new_var, ref, x;
6811 bool by_ref;
6812 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6813
6814 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
6815 continue;
6816
6817 var = OMP_CLAUSE_DECL (c);
6818 by_ref = use_pointer_for_field (var, NULL);
6819
6820 ref = build_sender_ref (var, ctx);
6821 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
6822 if (by_ref)
6823 {
6824 x = build_fold_addr_expr_loc (clause_loc, new_var);
6825 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
6826 }
6827 gimplify_assign (ref, x, slist);
6828
6829 ref = build_receiver_ref (var, false, ctx);
6830 if (by_ref)
6831 {
6832 ref = fold_convert_loc (clause_loc,
6833 build_pointer_type (TREE_TYPE (new_var)),
6834 ref);
6835 ref = build_fold_indirect_ref_loc (clause_loc, ref);
6836 }
6837 if (omp_is_reference (var))
6838 {
6839 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
6840 ref = build_simple_mem_ref_loc (clause_loc, ref);
6841 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6842 }
6843 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
6844 gimplify_and_add (x, rlist);
6845 }
6846 }
6847
6848
6849 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6850 and REDUCTION from the sender (aka parent) side. */
6851
6852 static void
6853 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
6854 omp_context *ctx)
6855 {
6856 tree c, t;
6857 int ignored_looptemp = 0;
6858 bool is_taskloop = false;
6859
6860 /* For taskloop, ignore the first two _looptemp_ clauses; those are
6861 initialized by GOMP_taskloop. */
6862 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
6863 {
6864 ignored_looptemp = 2;
6865 is_taskloop = true;
6866 }
6867
6868 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6869 {
6870 tree val, ref, x, var;
6871 bool by_ref, do_in = false, do_out = false;
6872 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6873
6874 switch (OMP_CLAUSE_CODE (c))
6875 {
6876 case OMP_CLAUSE_PRIVATE:
6877 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
6878 break;
6879 continue;
6880 case OMP_CLAUSE_FIRSTPRIVATE:
6881 case OMP_CLAUSE_COPYIN:
6882 case OMP_CLAUSE_LASTPRIVATE:
6883 case OMP_CLAUSE_IN_REDUCTION:
6884 case OMP_CLAUSE__REDUCTEMP_:
6885 break;
6886 case OMP_CLAUSE_REDUCTION:
6887 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
6888 continue;
6889 break;
6890 case OMP_CLAUSE_SHARED:
6891 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6892 break;
6893 continue;
6894 case OMP_CLAUSE__LOOPTEMP_:
6895 if (ignored_looptemp)
6896 {
6897 ignored_looptemp--;
6898 continue;
6899 }
6900 break;
6901 default:
6902 continue;
6903 }
6904
6905 val = OMP_CLAUSE_DECL (c);
6906 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6907 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
6908 && TREE_CODE (val) == MEM_REF)
6909 {
6910 val = TREE_OPERAND (val, 0);
6911 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
6912 val = TREE_OPERAND (val, 0);
6913 if (TREE_CODE (val) == INDIRECT_REF
6914 || TREE_CODE (val) == ADDR_EXPR)
6915 val = TREE_OPERAND (val, 0);
6916 if (is_variable_sized (val))
6917 continue;
6918 }
6919
6920 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
6921 outer taskloop region. */
6922 omp_context *ctx_for_o = ctx;
6923 if (is_taskloop
6924 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
6925 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6926 ctx_for_o = ctx->outer;
6927
6928 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
6929
6930 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
6931 && is_global_var (var)
6932 && (val == OMP_CLAUSE_DECL (c)
6933 || !is_task_ctx (ctx)
6934 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
6935 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
6936 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
6937 != POINTER_TYPE)))))
6938 continue;
6939
6940 t = omp_member_access_dummy_var (var);
6941 if (t)
6942 {
6943 var = DECL_VALUE_EXPR (var);
6944 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
6945 if (o != t)
6946 var = unshare_and_remap (var, t, o);
6947 else
6948 var = unshare_expr (var);
6949 }
6950
6951 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
6952 {
6953 /* Handle taskloop firstprivate/lastprivate, where the
6954 lastprivate on GIMPLE_OMP_TASK is represented as
6955 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
6956 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
6957 x = omp_build_component_ref (ctx->sender_decl, f);
6958 if (use_pointer_for_field (val, ctx))
6959 var = build_fold_addr_expr (var);
6960 gimplify_assign (x, var, ilist);
6961 DECL_ABSTRACT_ORIGIN (f) = NULL;
6962 continue;
6963 }
6964
6965 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6966 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
6967 || val == OMP_CLAUSE_DECL (c))
6968 && is_variable_sized (val))
6969 continue;
6970 by_ref = use_pointer_for_field (val, NULL);
6971
6972 switch (OMP_CLAUSE_CODE (c))
6973 {
6974 case OMP_CLAUSE_FIRSTPRIVATE:
6975 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
6976 && !by_ref
6977 && is_task_ctx (ctx))
6978 TREE_NO_WARNING (var) = 1;
6979 do_in = true;
6980 break;
6981
6982 case OMP_CLAUSE_PRIVATE:
6983 case OMP_CLAUSE_COPYIN:
6984 case OMP_CLAUSE__LOOPTEMP_:
6985 case OMP_CLAUSE__REDUCTEMP_:
6986 do_in = true;
6987 break;
6988
6989 case OMP_CLAUSE_LASTPRIVATE:
6990 if (by_ref || omp_is_reference (val))
6991 {
6992 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
6993 continue;
6994 do_in = true;
6995 }
6996 else
6997 {
6998 do_out = true;
6999 if (lang_hooks.decls.omp_private_outer_ref (val))
7000 do_in = true;
7001 }
7002 break;
7003
7004 case OMP_CLAUSE_REDUCTION:
7005 case OMP_CLAUSE_IN_REDUCTION:
7006 do_in = true;
7007 if (val == OMP_CLAUSE_DECL (c))
7008 {
7009 if (is_task_ctx (ctx))
7010 by_ref = use_pointer_for_field (val, ctx);
7011 else
7012 do_out = !(by_ref || omp_is_reference (val));
7013 }
7014 else
7015 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7016 break;
7017
7018 default:
7019 gcc_unreachable ();
7020 }
7021
7022 if (do_in)
7023 {
7024 ref = build_sender_ref (val, ctx);
7025 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7026 gimplify_assign (ref, x, ilist);
7027 if (is_task_ctx (ctx))
7028 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7029 }
7030
7031 if (do_out)
7032 {
7033 ref = build_sender_ref (val, ctx);
7034 gimplify_assign (var, ref, olist);
7035 }
7036 }
7037 }
7038
7039 /* Generate code to implement SHARED from the sender (aka parent)
7040 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7041 list things that got automatically shared. */
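/* Schematically (record and field names illustrative): for a shared
   variable x the parent stores "sender.x = x" (or "sender.x = &x") into
   ILIST before the region and, unless x is read-only, loads
   "x = sender.x" back in OLIST afterwards. */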
7042
7043 static void
7044 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7045 {
7046 tree var, ovar, nvar, t, f, x, record_type;
7047
7048 if (ctx->record_type == NULL)
7049 return;
7050
7051 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7052 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7053 {
7054 ovar = DECL_ABSTRACT_ORIGIN (f);
7055 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7056 continue;
7057
7058 nvar = maybe_lookup_decl (ovar, ctx);
7059 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7060 continue;
7061
7062 /* If CTX is a nested parallel directive, find the immediately
7063 enclosing parallel or workshare construct that contains a
7064 mapping for OVAR. */
7065 var = lookup_decl_in_outer_ctx (ovar, ctx);
7066
7067 t = omp_member_access_dummy_var (var);
7068 if (t)
7069 {
7070 var = DECL_VALUE_EXPR (var);
7071 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7072 if (o != t)
7073 var = unshare_and_remap (var, t, o);
7074 else
7075 var = unshare_expr (var);
7076 }
7077
7078 if (use_pointer_for_field (ovar, ctx))
7079 {
7080 x = build_sender_ref (ovar, ctx);
7081 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7082 && TREE_TYPE (f) == TREE_TYPE (ovar))
7083 {
7084 gcc_assert (is_parallel_ctx (ctx)
7085 && DECL_ARTIFICIAL (ovar));
7086 /* _condtemp_ clause. */
7087 var = build_constructor (TREE_TYPE (x), NULL);
7088 }
7089 else
7090 var = build_fold_addr_expr (var);
7091 gimplify_assign (x, var, ilist);
7092 }
7093 else
7094 {
7095 x = build_sender_ref (ovar, ctx);
7096 gimplify_assign (x, var, ilist);
7097
7098 if (!TREE_READONLY (var)
7099 /* We don't need to receive a new reference to a result
7100 or parm decl. In fact we must not store to it, as that would
7101 invalidate any pending RSO and generate wrong gimple
7102 during inlining. */
7103 && !((TREE_CODE (var) == RESULT_DECL
7104 || TREE_CODE (var) == PARM_DECL)
7105 && DECL_BY_REFERENCE (var)))
7106 {
7107 x = build_sender_ref (ovar, ctx);
7108 gimplify_assign (var, x, olist);
7109 }
7110 }
7111 }
7112 }
7113
7114 /* Emit an OpenACC head marker call, encapsulating the partitioning and
7115 other information that must be processed by the target compiler.
7116 Return the maximum number of dimensions the associated loop might
7117 be partitioned over. */
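/* For example, "#pragma acc loop gang vector" yields levels == 2 and a
   tag with OLF_DIM_GANG and OLF_DIM_VECTOR set, emitted roughly as
     ddvar = IFN_UNIQUE (OACC_HEAD_MARK, ddvar, 2, tag);
   for the target compiler to consume. */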
7118
7119 static unsigned
7120 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7121 gimple_seq *seq, omp_context *ctx)
7122 {
7123 unsigned levels = 0;
7124 unsigned tag = 0;
7125 tree gang_static = NULL_TREE;
7126 auto_vec<tree, 5> args;
7127
7128 args.quick_push (build_int_cst
7129 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7130 args.quick_push (ddvar);
7131 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7132 {
7133 switch (OMP_CLAUSE_CODE (c))
7134 {
7135 case OMP_CLAUSE_GANG:
7136 tag |= OLF_DIM_GANG;
7137 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7138 /* static:* is represented by -1, and we can ignore it, as
7139 scheduling is always static. */
7140 if (gang_static && integer_minus_onep (gang_static))
7141 gang_static = NULL_TREE;
7142 levels++;
7143 break;
7144
7145 case OMP_CLAUSE_WORKER:
7146 tag |= OLF_DIM_WORKER;
7147 levels++;
7148 break;
7149
7150 case OMP_CLAUSE_VECTOR:
7151 tag |= OLF_DIM_VECTOR;
7152 levels++;
7153 break;
7154
7155 case OMP_CLAUSE_SEQ:
7156 tag |= OLF_SEQ;
7157 break;
7158
7159 case OMP_CLAUSE_AUTO:
7160 tag |= OLF_AUTO;
7161 break;
7162
7163 case OMP_CLAUSE_INDEPENDENT:
7164 tag |= OLF_INDEPENDENT;
7165 break;
7166
7167 case OMP_CLAUSE_TILE:
7168 tag |= OLF_TILE;
7169 break;
7170
7171 default:
7172 continue;
7173 }
7174 }
7175
7176 if (gang_static)
7177 {
7178 if (DECL_P (gang_static))
7179 gang_static = build_outer_var_ref (gang_static, ctx);
7180 tag |= OLF_GANG_STATIC;
7181 }
7182
7183 /* In a parallel region, loops are implicitly INDEPENDENT. */
7184 omp_context *tgt = enclosing_target_ctx (ctx);
7185 if (!tgt || is_oacc_parallel (tgt))
7186 tag |= OLF_INDEPENDENT;
7187
7188 if (tag & OLF_TILE)
7189 /* Tiling could use all 3 levels. */
7190 levels = 3;
7191 else
7192 {
7193 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7194 Ensure at least one level, or two for possible auto
7195 partitioning. */
7196 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7197 << OLF_DIM_BASE) | OLF_SEQ));
7198
7199 if (levels < 1u + maybe_auto)
7200 levels = 1u + maybe_auto;
7201 }
7202
7203 args.quick_push (build_int_cst (integer_type_node, levels));
7204 args.quick_push (build_int_cst (integer_type_node, tag));
7205 if (gang_static)
7206 args.quick_push (gang_static);
7207
7208 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7209 gimple_set_location (call, loc);
7210 gimple_set_lhs (call, ddvar);
7211 gimple_seq_add_stmt (seq, call);
7212
7213 return levels;
7214 }
7215
7216 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
7217 non-NULL, is the partitioning level of the enclosed region. */
7218
7219 static void
7220 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7221 tree tofollow, gimple_seq *seq)
7222 {
7223 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7224 : IFN_UNIQUE_OACC_TAIL_MARK);
7225 tree marker = build_int_cst (integer_type_node, marker_kind);
7226 int nargs = 2 + (tofollow != NULL_TREE);
7227 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7228 marker, ddvar, tofollow);
7229 gimple_set_location (call, loc);
7230 gimple_set_lhs (call, ddvar);
7231 gimple_seq_add_stmt (seq, call);
7232 }
7233
7234 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7235 the loop clauses, from which we extract reductions. Initialize
7236 HEAD and TAIL. */
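/* As a rough sketch for two partitioning levels (reduction setup and
   teardown from lower_oacc_reductions is interleaved with the fork
   and join calls):

	HEAD:	DDVAR = 0; head-mark
		fork level 1
		inner head-mark; fork level 2
		closing head-mark
	BODY
	TAIL:	tail-mark; join level 2
		tail-mark; join level 1
		closing tail-mark  */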
7237
7238 static void
7239 lower_oacc_head_tail (location_t loc, tree clauses,
7240 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7241 {
7242 bool inner = false;
7243 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7244 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7245
7246 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7247 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7248 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7249
7250 gcc_assert (count);
7251 for (unsigned done = 1; count; count--, done++)
7252 {
7253 gimple_seq fork_seq = NULL;
7254 gimple_seq join_seq = NULL;
7255
7256 tree place = build_int_cst (integer_type_node, -1);
7257 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7258 fork_kind, ddvar, place);
7259 gimple_set_location (fork, loc);
7260 gimple_set_lhs (fork, ddvar);
7261
7262 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7263 join_kind, ddvar, place);
7264 gimple_set_location (join, loc);
7265 gimple_set_lhs (join, ddvar);
7266
7267 /* Mark the beginning of this level sequence. */
7268 if (inner)
7269 lower_oacc_loop_marker (loc, ddvar, true,
7270 build_int_cst (integer_type_node, count),
7271 &fork_seq);
7272 lower_oacc_loop_marker (loc, ddvar, false,
7273 build_int_cst (integer_type_node, done),
7274 &join_seq);
7275
7276 lower_oacc_reductions (loc, clauses, place, inner,
7277 fork, join, &fork_seq, &join_seq, ctx);
7278
7279 /* Append this level to head. */
7280 gimple_seq_add_seq (head, fork_seq);
7281 /* Prepend it to tail. */
7282 gimple_seq_add_seq (&join_seq, *tail);
7283 *tail = join_seq;
7284
7285 inner = true;
7286 }
7287
7288 /* Mark the end of the sequence. */
7289 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7290 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7291 }
7292
7293 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7294 catch handler and return it. This prevents programs from violating the
7295 structured block semantics with throws. */
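/* In effect (a sketch):

	try { BODY } catch { <must-not-throw handler> }

   where the handler is the language's eh_protect_cleanup_actions hook
   if it provides one, and __builtin_trap otherwise.  */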
7296
7297 static gimple_seq
7298 maybe_catch_exception (gimple_seq body)
7299 {
7300 gimple *g;
7301 tree decl;
7302
7303 if (!flag_exceptions)
7304 return body;
7305
7306 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7307 decl = lang_hooks.eh_protect_cleanup_actions ();
7308 else
7309 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7310
7311 g = gimple_build_eh_must_not_throw (decl);
7312 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7313 GIMPLE_TRY_CATCH);
7314
7315 return gimple_seq_alloc_with_stmt (g);
7316 }
7317
7318 \f
7319 /* Routines to lower OMP directives into OMP-GIMPLE. */
7320
7321 /* If CTX is a worksharing context inside of a cancellable parallel
7322 region and it isn't nowait, add a LHS to its GIMPLE_OMP_RETURN
7323 and a conditional branch to the parallel's cancel_label to handle
7324 cancellation in the implicit barrier. */
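/* Schematically, the implicit barrier then expands to (a sketch):

	LHS = GIMPLE_OMP_RETURN;	// barrier yields the cancel flag
	if (LHS != 0) goto CANCEL_LABEL;
	FALLTHRU_LABEL:  */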
7325
7326 static void
7327 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7328 gimple_seq *body)
7329 {
7330 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7331 if (gimple_omp_return_nowait_p (omp_return))
7332 return;
7333 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7334 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7335 && outer->cancellable)
7336 {
7337 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7338 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7339 tree lhs = create_tmp_var (c_bool_type);
7340 gimple_omp_return_set_lhs (omp_return, lhs);
7341 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7342 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7343 fold_convert (c_bool_type,
7344 boolean_false_node),
7345 outer->cancel_label, fallthru_label);
7346 gimple_seq_add_stmt (body, g);
7347 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7348 }
7349 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7350 return;
7351 }
7352
7353 /* Find the first task_reduction or reduction clause, or return NULL_TREE
7354 if there are none. */
7355
7356 static inline tree
7357 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7358 enum omp_clause_code ccode)
7359 {
7360 while (1)
7361 {
7362 clauses = omp_find_clause (clauses, ccode);
7363 if (clauses == NULL_TREE)
7364 return NULL_TREE;
7365 if (ccode != OMP_CLAUSE_REDUCTION
7366 || code == OMP_TASKLOOP
7367 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7368 return clauses;
7369 clauses = OMP_CLAUSE_CHAIN (clauses);
7370 }
7371 }
7372
7373 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7374 gimple_seq *, gimple_seq *);
7375
7376 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7377 CTX is the enclosing OMP context for the current statement. */
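/* The overall shape of the lowered construct is, roughly:

	ILIST				// data-sharing setup
	GIMPLE_OMP_SECTIONS <.section CONTROL>
	GIMPLE_OMP_SECTIONS_SWITCH
	{ section1 body; OMP_RETURN; ... sectionN body; OMP_RETURN; }
	GIMPLE_OMP_CONTINUE <CONTROL, CONTROL>
	OLIST DLIST			// reductions, destructors
	GIMPLE_OMP_RETURN [nowait]

   This is only a sketch; the precise ordering is built up below.  */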
7378
7379 static void
7380 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7381 {
7382 tree block, control;
7383 gimple_stmt_iterator tgsi;
7384 gomp_sections *stmt;
7385 gimple *t;
7386 gbind *new_stmt, *bind;
7387 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7388
7389 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7390
7391 push_gimplify_context ();
7392
7393 dlist = NULL;
7394 ilist = NULL;
7395
7396 tree rclauses
7397 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7398 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7399 tree rtmp = NULL_TREE;
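/* If there are task reduction clauses, prepend an artificial
   _REDUCTEMP_ clause holding a pointer to the registration block
   and emit the register/unregister sequences around the construct.  */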
7400 if (rclauses)
7401 {
7402 tree type = build_pointer_type (pointer_sized_int_node);
7403 tree temp = create_tmp_var (type);
7404 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7405 OMP_CLAUSE_DECL (c) = temp;
7406 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7407 gimple_omp_sections_set_clauses (stmt, c);
7408 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7409 gimple_omp_sections_clauses (stmt),
7410 &ilist, &tred_dlist);
7411 rclauses = c;
7412 rtmp = make_ssa_name (type);
7413 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7414 }
7415
7416 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7417 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7418
7419 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7420 &ilist, &dlist, ctx, NULL);
7421
7422 control = create_tmp_var (unsigned_type_node, ".section");
7423 gimple_omp_sections_set_control (stmt, control);
7424
7425 new_body = gimple_omp_body (stmt);
7426 gimple_omp_set_body (stmt, NULL);
7427 tgsi = gsi_start (new_body);
7428 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7429 {
7430 omp_context *sctx;
7431 gimple *sec_start;
7432
7433 sec_start = gsi_stmt (tgsi);
7434 sctx = maybe_lookup_ctx (sec_start);
7435 gcc_assert (sctx);
7436
7437 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7438 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7439 GSI_CONTINUE_LINKING);
7440 gimple_omp_set_body (sec_start, NULL);
7441
7442 if (gsi_one_before_end_p (tgsi))
7443 {
7444 gimple_seq l = NULL;
7445 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7446 &ilist, &l, &clist, ctx);
7447 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7448 gimple_omp_section_set_last (sec_start);
7449 }
7450
7451 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7452 GSI_CONTINUE_LINKING);
7453 }
7454
7455 block = make_node (BLOCK);
7456 bind = gimple_build_bind (NULL, new_body, block);
7457
7458 olist = NULL;
7459 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7460 &clist, ctx);
7461 if (clist)
7462 {
7463 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7464 gcall *g = gimple_build_call (fndecl, 0);
7465 gimple_seq_add_stmt (&olist, g);
7466 gimple_seq_add_seq (&olist, clist);
7467 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7468 g = gimple_build_call (fndecl, 0);
7469 gimple_seq_add_stmt (&olist, g);
7470 }
7471
7472 block = make_node (BLOCK);
7473 new_stmt = gimple_build_bind (NULL, NULL, block);
7474 gsi_replace (gsi_p, new_stmt, true);
7475
7476 pop_gimplify_context (new_stmt);
7477 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7478 BLOCK_VARS (block) = gimple_bind_vars (bind);
7479 if (BLOCK_VARS (block))
7480 TREE_USED (block) = 1;
7481
7482 new_body = NULL;
7483 gimple_seq_add_seq (&new_body, ilist);
7484 gimple_seq_add_stmt (&new_body, stmt);
7485 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7486 gimple_seq_add_stmt (&new_body, bind);
7487
7488 t = gimple_build_omp_continue (control, control);
7489 gimple_seq_add_stmt (&new_body, t);
7490
7491 gimple_seq_add_seq (&new_body, olist);
7492 if (ctx->cancellable)
7493 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7494 gimple_seq_add_seq (&new_body, dlist);
7495
7496 new_body = maybe_catch_exception (new_body);
7497
7498 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7499 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7500 t = gimple_build_omp_return (nowait);
7501 gimple_seq_add_stmt (&new_body, t);
7502 gimple_seq_add_seq (&new_body, tred_dlist);
7503 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7504
7505 if (rclauses)
7506 OMP_CLAUSE_DECL (rclauses) = rtmp;
7507
7508 gimple_bind_set_body (new_stmt, new_body);
7509 }
7510
7511
7512 /* A subroutine of lower_omp_single. Expand the simple form of
7513 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7514
7515 if (GOMP_single_start ())
7516 BODY;
7517 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7518
7519 FIXME. It may be better to delay expanding the logic of this until
7520 pass_expand_omp. The expanded logic may make the job more difficult
7521 for a synchronization analysis pass. */
7522
7523 static void
7524 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7525 {
7526 location_t loc = gimple_location (single_stmt);
7527 tree tlabel = create_artificial_label (loc);
7528 tree flabel = create_artificial_label (loc);
7529 gimple *call, *cond;
7530 tree lhs, decl;
7531
7532 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7533 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7534 call = gimple_build_call (decl, 0);
7535 gimple_call_set_lhs (call, lhs);
7536 gimple_seq_add_stmt (pre_p, call);
7537
7538 cond = gimple_build_cond (EQ_EXPR, lhs,
7539 fold_convert_loc (loc, TREE_TYPE (lhs),
7540 boolean_true_node),
7541 tlabel, flabel);
7542 gimple_seq_add_stmt (pre_p, cond);
7543 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7544 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7545 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7546 }
7547
7548
7549 /* A subroutine of lower_omp_single. Expand the simple form of
7550 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7551
7552 #pragma omp single copyprivate (a, b, c)
7553
7554 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7555
7556 {
7557 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7558 {
7559 BODY;
7560 copyout.a = a;
7561 copyout.b = b;
7562 copyout.c = c;
7563 GOMP_single_copy_end (&copyout);
7564 }
7565 else
7566 {
7567 a = copyout_p->a;
7568 b = copyout_p->b;
7569 c = copyout_p->c;
7570 }
7571 GOMP_barrier ();
7572 }
7573
7574 FIXME. It may be better to delay expanding the logic of this until
7575 pass_expand_omp. The expanded logic may make the job more difficult
7576 for a synchronization analysis pass. */
7577
7578 static void
7579 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7580 omp_context *ctx)
7581 {
7582 tree ptr_type, t, l0, l1, l2, bfn_decl;
7583 gimple_seq copyin_seq;
7584 location_t loc = gimple_location (single_stmt);
7585
7586 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7587
7588 ptr_type = build_pointer_type (ctx->record_type);
7589 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7590
7591 l0 = create_artificial_label (loc);
7592 l1 = create_artificial_label (loc);
7593 l2 = create_artificial_label (loc);
7594
7595 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7596 t = build_call_expr_loc (loc, bfn_decl, 0);
7597 t = fold_convert_loc (loc, ptr_type, t);
7598 gimplify_assign (ctx->receiver_decl, t, pre_p);
7599
7600 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7601 build_int_cst (ptr_type, 0));
7602 t = build3 (COND_EXPR, void_type_node, t,
7603 build_and_jump (&l0), build_and_jump (&l1));
7604 gimplify_and_add (t, pre_p);
7605
7606 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7607
7608 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7609
7610 copyin_seq = NULL;
7611 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7612 &copyin_seq, ctx);
7613
7614 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7615 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7616 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7617 gimplify_and_add (t, pre_p);
7618
7619 t = build_and_jump (&l2);
7620 gimplify_and_add (t, pre_p);
7621
7622 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7623
7624 gimple_seq_add_seq (pre_p, copyin_seq);
7625
7626 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7627 }
7628
7629
7630 /* Expand code for an OpenMP single directive. */
7631
7632 static void
7633 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7634 {
7635 tree block;
7636 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7637 gbind *bind;
7638 gimple_seq bind_body, bind_body_tail = NULL, dlist;
7639
7640 push_gimplify_context ();
7641
7642 block = make_node (BLOCK);
7643 bind = gimple_build_bind (NULL, NULL, block);
7644 gsi_replace (gsi_p, bind, true);
7645 bind_body = NULL;
7646 dlist = NULL;
7647 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7648 &bind_body, &dlist, ctx, NULL);
7649 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
7650
7651 gimple_seq_add_stmt (&bind_body, single_stmt);
7652
7653 if (ctx->record_type)
7654 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7655 else
7656 lower_omp_single_simple (single_stmt, &bind_body);
7657
7658 gimple_omp_set_body (single_stmt, NULL);
7659
7660 gimple_seq_add_seq (&bind_body, dlist);
7661
7662 bind_body = maybe_catch_exception (bind_body);
7663
7664 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
7665 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7666 gimple *g = gimple_build_omp_return (nowait);
7667 gimple_seq_add_stmt (&bind_body_tail, g);
7668 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
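/* The copyout record is dead once the construct is done; emit a
   clobber so later passes can reuse its storage.  */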
7669 if (ctx->record_type)
7670 {
7671 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
7672 tree clobber = build_constructor (ctx->record_type, NULL);
7673 TREE_THIS_VOLATILE (clobber) = 1;
7674 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
7675 clobber), GSI_SAME_STMT);
7676 }
7677 gimple_seq_add_seq (&bind_body, bind_body_tail);
7678 gimple_bind_set_body (bind, bind_body);
7679
7680 pop_gimplify_context (bind);
7681
7682 gimple_bind_append_vars (bind, ctx->block_vars);
7683 BLOCK_VARS (block) = ctx->block_vars;
7684 if (BLOCK_VARS (block))
7685 TREE_USED (block) = 1;
7686 }
7687
7688
7689 /* Expand code for an OpenMP master directive. */
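/* Schematically (a sketch):

	if (omp_get_thread_num () != 0) goto LAB;
	BODY;
	LAB:
	GIMPLE_OMP_RETURN (nowait);	// master implies no barrier  */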
7690
7691 static void
7692 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7693 {
7694 tree block, lab = NULL, x, bfn_decl;
7695 gimple *stmt = gsi_stmt (*gsi_p);
7696 gbind *bind;
7697 location_t loc = gimple_location (stmt);
7698 gimple_seq tseq;
7699
7700 push_gimplify_context ();
7701
7702 block = make_node (BLOCK);
7703 bind = gimple_build_bind (NULL, NULL, block);
7704 gsi_replace (gsi_p, bind, true);
7705 gimple_bind_add_stmt (bind, stmt);
7706
7707 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7708 x = build_call_expr_loc (loc, bfn_decl, 0);
7709 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
7710 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
7711 tseq = NULL;
7712 gimplify_and_add (x, &tseq);
7713 gimple_bind_add_seq (bind, tseq);
7714
7715 lower_omp (gimple_omp_body_ptr (stmt), ctx);
7716 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7717 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7718 gimple_omp_set_body (stmt, NULL);
7719
7720 gimple_bind_add_stmt (bind, gimple_build_label (lab));
7721
7722 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7723
7724 pop_gimplify_context (bind);
7725
7726 gimple_bind_append_vars (bind, ctx->block_vars);
7727 BLOCK_VARS (block) = ctx->block_vars;
7728 }
7729
7730 /* Helper function for lower_omp_task_reductions. For a specific PASS,
7731 find the next clause that should be processed, or return false
7732 if all clauses have been processed already. */
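/* Pass 0 covers clauses whose privatized copy has a constant size and
   can be laid out directly in the record; pass 1 covers MEM_REF and
   variable-sized ones, which are accessed indirectly.  */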
7733
7734 static inline bool
7735 omp_task_reduction_iterate (int pass, enum tree_code code,
7736 enum omp_clause_code ccode, tree *c, tree *decl,
7737 tree *type, tree *next)
7738 {
7739 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7740 {
7741 if (ccode == OMP_CLAUSE_REDUCTION
7742 && code != OMP_TASKLOOP
7743 && !OMP_CLAUSE_REDUCTION_TASK (*c))
7744 continue;
7745 *decl = OMP_CLAUSE_DECL (*c);
7746 *type = TREE_TYPE (*decl);
7747 if (TREE_CODE (*decl) == MEM_REF)
7748 {
7749 if (pass != 1)
7750 continue;
7751 }
7752 else
7753 {
7754 if (omp_is_reference (*decl))
7755 *type = TREE_TYPE (*type);
7756 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7757 continue;
7758 }
7759 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7760 return true;
7761 }
7762 *decl = NULL_TREE;
7763 *type = NULL_TREE;
7764 *next = NULL_TREE;
7765 return false;
7766 }
7767
7768 /* Lower task_reduction and reduction clauses (the latter only with the
7769 task modifier, and not at all when CODE is OMP_TASKGROUP). Register the
7770 mapping of those in the START sequence; reduce and unregister them in the END sequence. */
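/* The registration block built below is an array of pointer-sized
   integers; as a sketch of its layout (the authoritative description
   is in libgomp/task.c):

	[0]		number of reductions
	[1]		cache-line padded size of the per-thread chunk
	[2]		alignment
	[3]		-1, [4] 0	// consumed by the runtime
	[7 + 3*i]	address of the i-th original variable
	[7 + 3*i + 1]	offset of its private copy within the chunk

   Slots not assigned here are left to the runtime.  */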
7771
7772 static void
7773 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
7774 gimple_seq *start, gimple_seq *end)
7775 {
7776 enum omp_clause_code ccode
7777 = (code == OMP_TASKGROUP
7778 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
7779 tree cancellable = NULL_TREE;
7780 clauses = omp_task_reductions_find_first (clauses, code, ccode);
7781 if (clauses == NULL_TREE)
7782 return;
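/* For worksharing constructs inside a cancellable parallel, remember
   that fact; error_mark_node is just a placeholder here, the actual
   _REDUCTEMP_ decl is looked up later.  */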
7783 if (code == OMP_FOR || code == OMP_SECTIONS)
7784 {
7785 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7786 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7787 && outer->cancellable)
7788 {
7789 cancellable = error_mark_node;
7790 break;
7791 }
7792 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7793 break;
7794 }
7795 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
7796 tree *last = &TYPE_FIELDS (record_type);
7797 unsigned cnt = 0;
7798 if (cancellable)
7799 {
7800 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7801 ptr_type_node);
7802 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7803 integer_type_node);
7804 *last = field;
7805 DECL_CHAIN (field) = ifield;
7806 last = &DECL_CHAIN (ifield);
7807 DECL_CONTEXT (field) = record_type;
7808 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7809 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7810 DECL_CONTEXT (ifield) = record_type;
7811 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
7812 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
7813 }
7814 for (int pass = 0; pass < 2; pass++)
7815 {
7816 tree decl, type, next;
7817 for (tree c = clauses;
7818 omp_task_reduction_iterate (pass, code, ccode,
7819 &c, &decl, &type, &next); c = next)
7820 {
7821 ++cnt;
7822 tree new_type = type;
7823 if (ctx->outer)
7824 new_type = remap_type (type, &ctx->outer->cb);
7825 tree field
7826 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
7827 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
7828 new_type);
7829 if (DECL_P (decl) && type == TREE_TYPE (decl))
7830 {
7831 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
7832 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
7833 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
7834 }
7835 else
7836 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
7837 DECL_CONTEXT (field) = record_type;
7838 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7839 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7840 *last = field;
7841 last = &DECL_CHAIN (field);
7842 tree bfield
7843 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
7844 boolean_type_node);
7845 DECL_CONTEXT (bfield) = record_type;
7846 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
7847 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
7848 *last = bfield;
7849 last = &DECL_CHAIN (bfield);
7850 }
7851 }
7852 *last = NULL_TREE;
7853 layout_type (record_type);
7854
7855 /* Build up an array which registers with the runtime all the reductions
7856 and deregisters them at the end. Format documented in libgomp/task.c. */
7857 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
7858 tree avar = create_tmp_var_raw (atype);
7859 gimple_add_tmp_var (avar);
7860 TREE_ADDRESSABLE (avar) = 1;
7861 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
7862 NULL_TREE, NULL_TREE);
7863 tree t = build_int_cst (pointer_sized_int_node, cnt);
7864 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7865 gimple_seq seq = NULL;
7866 tree sz = fold_convert (pointer_sized_int_node,
7867 TYPE_SIZE_UNIT (record_type));
7868 int cachesz = 64;
7869 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
7870 build_int_cst (pointer_sized_int_node, cachesz - 1));
7871 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
7872 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
7873 ctx->task_reductions.create (1 + cnt);
7874 ctx->task_reduction_map = new hash_map<tree, unsigned>;
7875 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
7876 ? sz : NULL_TREE);
7877 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
7878 gimple_seq_add_seq (start, seq);
7879 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
7880 NULL_TREE, NULL_TREE);
7881 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
7882 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7883 NULL_TREE, NULL_TREE);
7884 t = build_int_cst (pointer_sized_int_node,
7885 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
7886 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7887 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
7888 NULL_TREE, NULL_TREE);
7889 t = build_int_cst (pointer_sized_int_node, -1);
7890 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7891 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
7892 NULL_TREE, NULL_TREE);
7893 t = build_int_cst (pointer_sized_int_node, 0);
7894 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7895
7896 /* In END, build a loop that iterates from 0 up to omp_get_num_threads ()
7897 and for each task reduction checks a bool right after the private variable
7898 within that thread's chunk; if the bool is clear, the variable hasn't been
7899 initialized and thus isn't going to be reduced or destructed; otherwise,
7900 reduce and destruct it. */
7901 tree idx = create_tmp_var (size_type_node);
7902 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
7903 tree num_thr_sz = create_tmp_var (size_type_node);
7904 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7905 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
7906 tree lab3 = NULL_TREE;
7907 gimple *g;
7908 if (code == OMP_FOR || code == OMP_SECTIONS)
7909 {
7910 /* For worksharing constructs, only perform the reduction in the master
7911 thread, with the exception of cancelled implicit barriers, where only
7912 the current thread is handled. */
7913 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7914 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7915 tree thr_num = create_tmp_var (integer_type_node);
7916 g = gimple_build_call (t, 0);
7917 gimple_call_set_lhs (g, thr_num);
7918 gimple_seq_add_stmt (end, g);
7919 if (cancellable)
7920 {
7921 tree c;
7922 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7923 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
7924 lab3 = create_artificial_label (UNKNOWN_LOCATION);
7925 if (code == OMP_FOR)
7926 c = gimple_omp_for_clauses (ctx->stmt);
7927 else /* if (code == OMP_SECTIONS) */
7928 c = gimple_omp_sections_clauses (ctx->stmt);
7929 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
7930 cancellable = c;
7931 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
7932 lab5, lab6);
7933 gimple_seq_add_stmt (end, g);
7934 gimple_seq_add_stmt (end, gimple_build_label (lab5));
7935 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
7936 gimple_seq_add_stmt (end, g);
7937 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
7938 build_one_cst (TREE_TYPE (idx)));
7939 gimple_seq_add_stmt (end, g);
7940 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
7941 gimple_seq_add_stmt (end, gimple_build_label (lab6));
7942 }
7943 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
7944 gimple_seq_add_stmt (end, g);
7945 gimple_seq_add_stmt (end, gimple_build_label (lab4));
7946 }
7947 if (code != OMP_PARALLEL)
7948 {
7949 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
7950 tree num_thr = create_tmp_var (integer_type_node);
7951 g = gimple_build_call (t, 0);
7952 gimple_call_set_lhs (g, num_thr);
7953 gimple_seq_add_stmt (end, g);
7954 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
7955 gimple_seq_add_stmt (end, g);
7956 if (cancellable)
7957 gimple_seq_add_stmt (end, gimple_build_label (lab3));
7958 }
7959 else
7960 {
7961 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7962 OMP_CLAUSE__REDUCTEMP_);
7963 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
7964 t = fold_convert (size_type_node, t);
7965 gimplify_assign (num_thr_sz, t, end);
7966 }
7967 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7968 NULL_TREE, NULL_TREE);
7969 tree data = create_tmp_var (pointer_sized_int_node);
7970 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
7971 gimple_seq_add_stmt (end, gimple_build_label (lab1));
7972 tree ptr;
7973 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
7974 ptr = create_tmp_var (build_pointer_type (record_type));
7975 else
7976 ptr = create_tmp_var (ptr_type_node);
7977 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
7978
7979 tree field = TYPE_FIELDS (record_type);
7980 cnt = 0;
7981 if (cancellable)
7982 field = DECL_CHAIN (DECL_CHAIN (field));
7983 for (int pass = 0; pass < 2; pass++)
7984 {
7985 tree decl, type, next;
7986 for (tree c = clauses;
7987 omp_task_reduction_iterate (pass, code, ccode,
7988 &c, &decl, &type, &next); c = next)
7989 {
7990 tree var = decl, ref;
7991 if (TREE_CODE (decl) == MEM_REF)
7992 {
7993 var = TREE_OPERAND (var, 0);
7994 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7995 var = TREE_OPERAND (var, 0);
7996 tree v = var;
7997 if (TREE_CODE (var) == ADDR_EXPR)
7998 var = TREE_OPERAND (var, 0);
7999 else if (TREE_CODE (var) == INDIRECT_REF)
8000 var = TREE_OPERAND (var, 0);
8001 tree orig_var = var;
8002 if (is_variable_sized (var))
8003 {
8004 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8005 var = DECL_VALUE_EXPR (var);
8006 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8007 var = TREE_OPERAND (var, 0);
8008 gcc_assert (DECL_P (var));
8009 }
8010 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8011 if (orig_var != var)
8012 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8013 else if (TREE_CODE (v) == ADDR_EXPR)
8014 t = build_fold_addr_expr (t);
8015 else if (TREE_CODE (v) == INDIRECT_REF)
8016 t = build_fold_indirect_ref (t);
8017 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8018 {
8019 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8020 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8021 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8022 }
8023 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8024 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8025 fold_convert (size_type_node,
8026 TREE_OPERAND (decl, 1)));
8027 }
8028 else
8029 {
8030 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8031 if (!omp_is_reference (decl))
8032 t = build_fold_addr_expr (t);
8033 }
8034 t = fold_convert (pointer_sized_int_node, t);
8035 seq = NULL;
8036 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8037 gimple_seq_add_seq (start, seq);
8038 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8039 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8040 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8041 t = unshare_expr (byte_position (field));
8042 t = fold_convert (pointer_sized_int_node, t);
8043 ctx->task_reduction_map->put (c, cnt);
8044 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8045 ? t : NULL_TREE);
8046 seq = NULL;
8047 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8048 gimple_seq_add_seq (start, seq);
8049 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8050 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8051 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8052
8053 tree bfield = DECL_CHAIN (field);
8054 tree cond;
8055 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8056 /* In parallel or worksharing all threads unconditionally
8057 initialize all their task reduction private variables. */
8058 cond = boolean_true_node;
8059 else if (TREE_TYPE (ptr) == ptr_type_node)
8060 {
8061 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8062 unshare_expr (byte_position (bfield)));
8063 seq = NULL;
8064 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8065 gimple_seq_add_seq (end, seq);
8066 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8067 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8068 build_int_cst (pbool, 0));
8069 }
8070 else
8071 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8072 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8073 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8074 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8075 tree condv = create_tmp_var (boolean_type_node);
8076 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8077 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8078 lab3, lab4);
8079 gimple_seq_add_stmt (end, g);
8080 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8081 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8082 {
8083 /* If this reduction doesn't need destruction and parallel
8084 has been cancelled, there is nothing to do for this
8085 reduction, so jump around the merge operation. */
8086 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8087 g = gimple_build_cond (NE_EXPR, cancellable,
8088 build_zero_cst (TREE_TYPE (cancellable)),
8089 lab4, lab5);
8090 gimple_seq_add_stmt (end, g);
8091 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8092 }
8093
8094 tree new_var;
8095 if (TREE_TYPE (ptr) == ptr_type_node)
8096 {
8097 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8098 unshare_expr (byte_position (field)));
8099 seq = NULL;
8100 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8101 gimple_seq_add_seq (end, seq);
8102 tree pbool = build_pointer_type (TREE_TYPE (field));
8103 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8104 build_int_cst (pbool, 0));
8105 }
8106 else
8107 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8108 build_simple_mem_ref (ptr), field, NULL_TREE);
8109
8110 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8111 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8112 ref = build_simple_mem_ref (ref);
8113 /* reduction(-:var) sums up the partial results, so it acts
8114 identically to reduction(+:var). */
8115 if (rcode == MINUS_EXPR)
8116 rcode = PLUS_EXPR;
8117 if (TREE_CODE (decl) == MEM_REF)
8118 {
8119 tree type = TREE_TYPE (new_var);
8120 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8121 tree i = create_tmp_var (TREE_TYPE (v));
8122 tree ptype = build_pointer_type (TREE_TYPE (type));
8123 if (DECL_P (v))
8124 {
8125 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8126 tree vv = create_tmp_var (TREE_TYPE (v));
8127 gimplify_assign (vv, v, start);
8128 v = vv;
8129 }
8130 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8131 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8132 new_var = build_fold_addr_expr (new_var);
8133 new_var = fold_convert (ptype, new_var);
8134 ref = fold_convert (ptype, ref);
8135 tree m = create_tmp_var (ptype);
8136 gimplify_assign (m, new_var, end);
8137 new_var = m;
8138 m = create_tmp_var (ptype);
8139 gimplify_assign (m, ref, end);
8140 ref = m;
8141 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8142 tree body = create_artificial_label (UNKNOWN_LOCATION);
8143 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8144 gimple_seq_add_stmt (end, gimple_build_label (body));
8145 tree priv = build_simple_mem_ref (new_var);
8146 tree out = build_simple_mem_ref (ref);
8147 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8148 {
8149 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8150 tree decl_placeholder
8151 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8152 tree lab6 = NULL_TREE;
8153 if (cancellable)
8154 {
8155 /* If this reduction needs destruction and parallel
8156 has been cancelled, jump around the merge operation
8157 to the destruction. */
8158 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8159 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8160 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8161 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8162 lab6, lab5);
8163 gimple_seq_add_stmt (end, g);
8164 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8165 }
8166 SET_DECL_VALUE_EXPR (placeholder, out);
8167 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8168 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8169 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8170 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8171 gimple_seq_add_seq (end,
8172 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8173 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8174 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8175 {
8176 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8177 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8178 }
8179 if (cancellable)
8180 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8181 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8182 if (x)
8183 {
8184 gimple_seq tseq = NULL;
8185 gimplify_stmt (&x, &tseq);
8186 gimple_seq_add_seq (end, tseq);
8187 }
8188 }
8189 else
8190 {
8191 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8192 out = unshare_expr (out);
8193 gimplify_assign (out, x, end);
8194 }
8195 gimple *g
8196 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8197 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8198 gimple_seq_add_stmt (end, g);
8199 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8200 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8201 gimple_seq_add_stmt (end, g);
8202 g = gimple_build_assign (i, PLUS_EXPR, i,
8203 build_int_cst (TREE_TYPE (i), 1));
8204 gimple_seq_add_stmt (end, g);
8205 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8206 gimple_seq_add_stmt (end, g);
8207 gimple_seq_add_stmt (end, gimple_build_label (endl));
8208 }
8209 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8210 {
8211 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8212 tree oldv = NULL_TREE;
8213 tree lab6 = NULL_TREE;
8214 if (cancellable)
8215 {
8216 /* If this reduction needs destruction and parallel
8217 has been cancelled, jump around the merge operation
8218 to the destruction. */
8219 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8220 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8221 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8222 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8223 lab6, lab5);
8224 gimple_seq_add_stmt (end, g);
8225 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8226 }
8227 if (omp_is_reference (decl)
8228 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8229 TREE_TYPE (ref)))
8230 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8231 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8232 tree refv = create_tmp_var (TREE_TYPE (ref));
8233 gimplify_assign (refv, ref, end);
8234 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8235 SET_DECL_VALUE_EXPR (placeholder, ref);
8236 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8237 tree d = maybe_lookup_decl (decl, ctx);
8238 gcc_assert (d);
8239 if (DECL_HAS_VALUE_EXPR_P (d))
8240 oldv = DECL_VALUE_EXPR (d);
8241 if (omp_is_reference (var))
8242 {
8243 tree v = fold_convert (TREE_TYPE (d),
8244 build_fold_addr_expr (new_var));
8245 SET_DECL_VALUE_EXPR (d, v);
8246 }
8247 else
8248 SET_DECL_VALUE_EXPR (d, new_var);
8249 DECL_HAS_VALUE_EXPR_P (d) = 1;
8250 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8251 if (oldv)
8252 SET_DECL_VALUE_EXPR (d, oldv);
8253 else
8254 {
8255 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8256 DECL_HAS_VALUE_EXPR_P (d) = 0;
8257 }
8258 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8259 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8260 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8261 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8262 if (cancellable)
8263 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8264 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8265 if (x)
8266 {
8267 gimple_seq tseq = NULL;
8268 gimplify_stmt (&x, &tseq);
8269 gimple_seq_add_seq (end, tseq);
8270 }
8271 }
8272 else
8273 {
8274 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8275 ref = unshare_expr (ref);
8276 gimplify_assign (ref, x, end);
8277 }
8278 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8279 ++cnt;
8280 field = DECL_CHAIN (bfield);
8281 }
8282 }
8283
8284 if (code == OMP_TASKGROUP)
8285 {
8286 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8287 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8288 gimple_seq_add_stmt (start, g);
8289 }
8290 else
8291 {
8292 tree c;
8293 if (code == OMP_FOR)
8294 c = gimple_omp_for_clauses (ctx->stmt);
8295 else if (code == OMP_SECTIONS)
8296 c = gimple_omp_sections_clauses (ctx->stmt);
8297 else
8298 c = gimple_omp_taskreg_clauses (ctx->stmt);
8299 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8300 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8301 build_fold_addr_expr (avar));
8302 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8303 }
8304
8305 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8306 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8307 size_one_node));
8308 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8309 gimple_seq_add_stmt (end, g);
8310 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8311 if (code == OMP_FOR || code == OMP_SECTIONS)
8312 {
8313 enum built_in_function bfn
8314 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8315 t = builtin_decl_explicit (bfn);
8316 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8317 tree arg;
8318 if (cancellable)
8319 {
8320 arg = create_tmp_var (c_bool_type);
8321 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8322 cancellable));
8323 }
8324 else
8325 arg = build_int_cst (c_bool_type, 0);
8326 g = gimple_build_call (t, 1, arg);
8327 }
8328 else
8329 {
8330 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8331 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8332 }
8333 gimple_seq_add_stmt (end, g);
8334 t = build_constructor (atype, NULL);
8335 TREE_THIS_VOLATILE (t) = 1;
8336 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8337 }
8338
8339 /* Expand code for an OpenMP taskgroup directive. */
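/* Schematically (a sketch):

	GOMP_taskgroup_start ();
	[ register task reductions ]
	BODY;
	GIMPLE_OMP_RETURN;
	[ reduce and unregister task reductions ]  */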
8340
8341 static void
8342 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8343 {
8344 gimple *stmt = gsi_stmt (*gsi_p);
8345 gcall *x;
8346 gbind *bind;
8347 gimple_seq dseq = NULL;
8348 tree block = make_node (BLOCK);
8349
8350 bind = gimple_build_bind (NULL, NULL, block);
8351 gsi_replace (gsi_p, bind, true);
8352 gimple_bind_add_stmt (bind, stmt);
8353
8354 push_gimplify_context ();
8355
8356 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8357 0);
8358 gimple_bind_add_stmt (bind, x);
8359
8360 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8361 gimple_omp_taskgroup_clauses (stmt),
8362 gimple_bind_body_ptr (bind), &dseq);
8363
8364 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8365 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8366 gimple_omp_set_body (stmt, NULL);
8367
8368 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8369 gimple_bind_add_seq (bind, dseq);
8370
8371 pop_gimplify_context (bind);
8372
8373 gimple_bind_append_vars (bind, ctx->block_vars);
8374 BLOCK_VARS (block) = ctx->block_vars;
8375 }
8376
8377
8378 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8379
8380 static void
8381 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8382 omp_context *ctx)
8383 {
8384 struct omp_for_data fd;
8385 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8386 return;
8387
8388 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8389 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8390 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8391 if (!fd.ordered)
8392 return;
8393
8394 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8395 tree c = gimple_omp_ordered_clauses (ord_stmt);
8396 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8397 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8398 {
8399 /* Merge depend clauses from multiple adjacent
8400 #pragma omp ordered depend(sink:...) constructs
8401 into one #pragma omp ordered depend(sink:...), so that
8402 we can optimize them together. */
8403 gimple_stmt_iterator gsi = *gsi_p;
8404 gsi_next (&gsi);
8405 while (!gsi_end_p (gsi))
8406 {
8407 gimple *stmt = gsi_stmt (gsi);
8408 if (is_gimple_debug (stmt)
8409 || gimple_code (stmt) == GIMPLE_NOP)
8410 {
8411 gsi_next (&gsi);
8412 continue;
8413 }
8414 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8415 break;
8416 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8417 c = gimple_omp_ordered_clauses (ord_stmt2);
8418 if (c == NULL_TREE
8419 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8420 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8421 break;
8422 while (*list_p)
8423 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8424 *list_p = c;
8425 gsi_remove (&gsi, true);
8426 }
8427 }
8428
8429 /* Canonicalize sink dependence clauses into one folded clause if
8430 possible.
8431
8432 The basic algorithm is to create a sink vector whose first
8433 element is the GCD of all the first elements, and whose remaining
8434 elements are the minimum of the subsequent columns.
8435
8436 We ignore dependence vectors whose first element is zero because
8437 such dependencies are known to be executed by the same thread.
8438
8439 We take into account the direction of the loop, so a minimum
8440 becomes a maximum if the loop is iterating forwards. We also
8441 ignore sink clauses where the loop direction is unknown, or where
8442 the offsets are clearly invalid because they are not a multiple
8443 of the loop increment.
8444
8445 For example:
8446
8447 #pragma omp for ordered(2)
8448 for (i=0; i < N; ++i)
8449 for (j=0; j < M; ++j)
8450 {
8451 #pragma omp ordered \
8452 depend(sink:i-8,j-2) \
8453 depend(sink:i,j-1) \ // Completely ignored because i+0.
8454 depend(sink:i-4,j-3) \
8455 depend(sink:i-6,j-4)
8456 #pragma omp ordered depend(source)
8457 }
8458
8459 Folded clause is:
8460
8461 depend(sink:-gcd(8,4,6),-min(2,3,4))
8462 -or-
8463 depend(sink:-2,-2)
8464 */
8465
8466 /* FIXME: Computing GCDs where the first element is zero is
8467 non-trivial in the presence of collapsed loops. Do this later. */
8468 if (fd.collapse > 1)
8469 return;
8470
8471 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8472
8473 /* wide_int is not a POD so it must be default-constructed. */
8474 for (unsigned i = 0; i != 2 * len - 1; ++i)
8475 new (static_cast<void*>(folded_deps + i)) wide_int ();
8476
8477 tree folded_dep = NULL_TREE;
8478 /* TRUE if the first dimension's offset is negative. */
8479 bool neg_offset_p = false;
8480
8481 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8482 unsigned int i;
8483 while ((c = *list_p) != NULL)
8484 {
8485 bool remove = false;
8486
8487 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8488 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8489 goto next_ordered_clause;
8490
8491 tree vec;
8492 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8493 vec && TREE_CODE (vec) == TREE_LIST;
8494 vec = TREE_CHAIN (vec), ++i)
8495 {
8496 gcc_assert (i < len);
8497
8498 /* omp_extract_for_data has canonicalized the condition. */
8499 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8500 || fd.loops[i].cond_code == GT_EXPR);
8501 bool forward = fd.loops[i].cond_code == LT_EXPR;
8502 bool maybe_lexically_later = true;
8503
8504 /* While the committee makes up its mind, bail if we have any
8505 non-constant steps. */
8506 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8507 goto lower_omp_ordered_ret;
8508
8509 tree itype = TREE_TYPE (TREE_VALUE (vec));
8510 if (POINTER_TYPE_P (itype))
8511 itype = sizetype;
8512 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8513 TYPE_PRECISION (itype),
8514 TYPE_SIGN (itype));
8515
8516 /* Ignore invalid offsets that are not multiples of the step. */
8517 if (!wi::multiple_of_p (wi::abs (offset),
8518 wi::abs (wi::to_wide (fd.loops[i].step)),
8519 UNSIGNED))
8520 {
8521 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8522 "ignoring sink clause with offset that is not "
8523 "a multiple of the loop step");
8524 remove = true;
8525 goto next_ordered_clause;
8526 }
8527
8528 /* Calculate the first dimension. The first dimension of
8529 the folded dependency vector is the GCD of the first
8530 elements, while ignoring any first elements whose offset
8531 is 0. */
8532 if (i == 0)
8533 {
8534 /* Ignore dependence vectors whose first dimension is 0. */
8535 if (offset == 0)
8536 {
8537 remove = true;
8538 goto next_ordered_clause;
8539 }
8540 else
8541 {
8542 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8543 {
8544 error_at (OMP_CLAUSE_LOCATION (c),
8545 "first offset must be in opposite direction "
8546 "of loop iterations");
8547 goto lower_omp_ordered_ret;
8548 }
8549 if (forward)
8550 offset = -offset;
8551 neg_offset_p = forward;
8552 /* Initialize the first time around. */
8553 if (folded_dep == NULL_TREE)
8554 {
8555 folded_dep = c;
8556 folded_deps[0] = offset;
8557 }
8558 else
8559 folded_deps[0] = wi::gcd (folded_deps[0],
8560 offset, UNSIGNED);
8561 }
8562 }
8563 /* Calculate minimum for the remaining dimensions. */
8564 else
8565 {
8566 folded_deps[len + i - 1] = offset;
8567 if (folded_dep == c)
8568 folded_deps[i] = offset;
8569 else if (maybe_lexically_later
8570 && !wi::eq_p (folded_deps[i], offset))
8571 {
8572 if (forward ^ wi::gts_p (folded_deps[i], offset))
8573 {
8574 unsigned int j;
8575 folded_dep = c;
8576 for (j = 1; j <= i; j++)
8577 folded_deps[j] = folded_deps[len + j - 1];
8578 }
8579 else
8580 maybe_lexically_later = false;
8581 }
8582 }
8583 }
8584 gcc_assert (i == len);
8585
8586 remove = true;
8587
8588 next_ordered_clause:
8589 if (remove)
8590 *list_p = OMP_CLAUSE_CHAIN (c);
8591 else
8592 list_p = &OMP_CLAUSE_CHAIN (c);
8593 }
8594
8595 if (folded_dep)
8596 {
8597 if (neg_offset_p)
8598 folded_deps[0] = -folded_deps[0];
8599
8600 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8601 if (POINTER_TYPE_P (itype))
8602 itype = sizetype;
8603
8604 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8605 = wide_int_to_tree (itype, folded_deps[0]);
8606 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8607 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8608 }
8609
8610 lower_omp_ordered_ret:
8611
8612 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
8613 while we want a nop instead if we remove all clauses. */
8614 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8615 gsi_replace (gsi_p, gimple_build_nop (), true);
8616 }
8617
8618
8619 /* Expand code for an OpenMP ordered directive. */
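/* For the non-depend forms this becomes, schematically:

	GOMP_ordered_start ();		// .GOMP_SIMD_ORDERED_START for simd
	BODY;
	GOMP_ordered_end ();		// .GOMP_SIMD_ORDERED_END for simd
	GIMPLE_OMP_RETURN (nowait);

   The SIMT variant additionally loops over the lanes; see the
   maybe_simt handling below.  */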
8620
8621 static void
8622 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8623 {
8624 tree block;
8625 gimple *stmt = gsi_stmt (*gsi_p), *g;
8626 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8627 gcall *x;
8628 gbind *bind;
8629 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8630 OMP_CLAUSE_SIMD);
8631 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
8632 loop. */
8633 bool maybe_simt
8634 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8635 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8636 OMP_CLAUSE_THREADS);
8637
8638 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8639 OMP_CLAUSE_DEPEND))
8640 {
8641 /* FIXME: This needs to be moved to the expansion to verify various
8642 conditions only testable on a cfg with dominators computed, and also
8643 all the depend clauses to be merged still might need to be available
8644 for the runtime checks. */
8645 if (0)
8646 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8647 return;
8648 }
8649
8650 push_gimplify_context ();
8651
8652 block = make_node (BLOCK);
8653 bind = gimple_build_bind (NULL, NULL, block);
8654 gsi_replace (gsi_p, bind, true);
8655 gimple_bind_add_stmt (bind, stmt);
8656
8657 if (simd)
8658 {
8659 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8660 build_int_cst (NULL_TREE, threads));
8661 cfun->has_simduid_loops = true;
8662 }
8663 else
8664 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8665 0);
8666 gimple_bind_add_stmt (bind, x);
8667
8668 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
8669 if (maybe_simt)
8670 {
8671 counter = create_tmp_var (integer_type_node);
8672 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
8673 gimple_call_set_lhs (g, counter);
8674 gimple_bind_add_stmt (bind, g);
8675
8676 body = create_artificial_label (UNKNOWN_LOCATION);
8677 test = create_artificial_label (UNKNOWN_LOCATION);
8678 gimple_bind_add_stmt (bind, gimple_build_label (body));
8679
8680 tree simt_pred = create_tmp_var (integer_type_node);
8681 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
8682 gimple_call_set_lhs (g, simt_pred);
8683 gimple_bind_add_stmt (bind, g);
8684
8685 tree t = create_artificial_label (UNKNOWN_LOCATION);
8686 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
8687 gimple_bind_add_stmt (bind, g);
8688
8689 gimple_bind_add_stmt (bind, gimple_build_label (t));
8690 }
8691 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8692 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8693 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8694 gimple_omp_set_body (stmt, NULL);
8695
8696 if (maybe_simt)
8697 {
8698 gimple_bind_add_stmt (bind, gimple_build_label (test));
8699 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
8700 gimple_bind_add_stmt (bind, g);
8701
8702 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
8703 tree nonneg = create_tmp_var (integer_type_node);
8704 gimple_seq tseq = NULL;
8705 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
8706 gimple_bind_add_seq (bind, tseq);
8707
8708 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
8709 gimple_call_set_lhs (g, nonneg);
8710 gimple_bind_add_stmt (bind, g);
8711
8712 tree end = create_artificial_label (UNKNOWN_LOCATION);
8713 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
8714 gimple_bind_add_stmt (bind, g);
8715
8716 gimple_bind_add_stmt (bind, gimple_build_label (end));
8717 }
8718 if (simd)
8719 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
8720 build_int_cst (NULL_TREE, threads));
8721 else
8722 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
8723 0);
8724 gimple_bind_add_stmt (bind, x);
8725
8726 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8727
8728 pop_gimplify_context (bind);
8729
8730 gimple_bind_append_vars (bind, ctx->block_vars);
8731 BLOCK_VARS (block) = gimple_bind_vars (bind);
8732 }
8733
8734
8735 /* Expand code for an OpenMP scan directive and the structured block
8736 before the scan directive. */
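/* An illustrative user-level input that reaches this function:

	#pragma omp simd reduction (inscan, +:sum)
	for (i = 0; i < n; i++)
	  {
	    sum += a[i];		// input phase
	    #pragma omp scan inclusive (sum)
	    b[i] = sum;			// scan phase
	  }  */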
8737
8738 static void
8739 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8740 {
8741 gimple *stmt = gsi_stmt (*gsi_p);
8742 bool has_clauses
8743 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
8744 tree lane = NULL_TREE;
8745 gimple_seq before = NULL;
8746 omp_context *octx = ctx->outer;
8747 gcc_assert (octx);
8748 if (octx->scan_exclusive && !has_clauses)
8749 {
8750 gimple_stmt_iterator gsi2 = *gsi_p;
8751 gsi_next (&gsi2);
8752 gimple *stmt2 = gsi_stmt (gsi2);
8753 /* For exclusive scan, swap the GIMPLE_OMP_SCAN without clauses
8754 with the following GIMPLE_OMP_SCAN with clauses, so that input_phase,
8755 the one with the exclusive clause(s), comes first. */
8756 if (stmt2
8757 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
8758 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
8759 {
8760 gsi_remove (gsi_p, false);
8761 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
8762 ctx = maybe_lookup_ctx (stmt2);
8763 gcc_assert (ctx);
8764 lower_omp_scan (gsi_p, ctx);
8765 return;
8766 }
8767 }
8768
8769 bool input_phase = has_clauses ^ octx->scan_inclusive;
8770 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8771 && (gimple_omp_for_kind (octx->stmt) & GF_OMP_FOR_SIMD));
8772 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
8773 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
8774 && !gimple_omp_for_combined_p (octx->stmt));
8775 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
8776 if (is_for_simd && octx->for_simd_scan_phase)
8777 is_simd = false;
8778 if (is_simd)
8779 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
8780 OMP_CLAUSE__SIMDUID_))
8781 {
8782 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
8783 lane = create_tmp_var (unsigned_type_node);
8784 tree t = build_int_cst (integer_type_node,
8785 input_phase ? 1
8786 : octx->scan_inclusive ? 2 : 3);
8787 gimple *g
8788 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
8789 gimple_call_set_lhs (g, lane);
8790 gimple_seq_add_stmt (&before, g);
8791 }
8792
8793 if (is_simd || is_for)
8794 {
8795 for (tree c = gimple_omp_for_clauses (octx->stmt);
8796 c; c = OMP_CLAUSE_CHAIN (c))
8797 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8798 && OMP_CLAUSE_REDUCTION_INSCAN (c))
8799 {
8800 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8801 tree var = OMP_CLAUSE_DECL (c);
8802 tree new_var = lookup_decl (var, octx);
8803 tree val = new_var;
8804 tree var2 = NULL_TREE;
8805 tree var3 = NULL_TREE;
8806 tree var4 = NULL_TREE;
8807 tree lane0 = NULL_TREE;
8808 tree new_vard = new_var;
8809 if (omp_is_reference (var))
8810 {
8811 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
8812 val = new_var;
8813 }
8814 if (DECL_HAS_VALUE_EXPR_P (new_vard))
8815 {
8816 val = DECL_VALUE_EXPR (new_vard);
8817 if (new_vard != new_var)
8818 {
8819 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
8820 val = TREE_OPERAND (val, 0);
8821 }
8822 if (TREE_CODE (val) == ARRAY_REF
8823 && VAR_P (TREE_OPERAND (val, 0)))
8824 {
8825 tree v = TREE_OPERAND (val, 0);
8826 if (lookup_attribute ("omp simd array",
8827 DECL_ATTRIBUTES (v)))
8828 {
8829 val = unshare_expr (val);
8830 lane0 = TREE_OPERAND (val, 1);
8831 TREE_OPERAND (val, 1) = lane;
8832 var2 = lookup_decl (v, octx);
8833 if (octx->scan_exclusive)
8834 var4 = lookup_decl (var2, octx);
8835 if (input_phase
8836 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8837 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
8838 if (!input_phase)
8839 {
8840 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
8841 var2, lane, NULL_TREE, NULL_TREE);
8842 TREE_THIS_NOTRAP (var2) = 1;
8843 if (octx->scan_exclusive)
8844 {
8845 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
8846 var4, lane, NULL_TREE,
8847 NULL_TREE);
8848 TREE_THIS_NOTRAP (var4) = 1;
8849 }
8850 }
8851 else
8852 var2 = val;
8853 }
8854 }
8855 gcc_assert (var2);
8856 }
8857 else
8858 {
8859 var2 = build_outer_var_ref (var, octx);
8860 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8861 {
8862 var3 = maybe_lookup_decl (new_vard, octx);
8863 if (var3 == new_vard || var3 == NULL_TREE)
8864 var3 = NULL_TREE;
8865 else if (is_simd && octx->scan_exclusive && !input_phase)
8866 {
8867 var4 = maybe_lookup_decl (var3, octx);
8868 if (var4 == var3 || var4 == NULL_TREE)
8869 {
8870 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
8871 {
8872 var4 = var3;
8873 var3 = NULL_TREE;
8874 }
8875 else
8876 var4 = NULL_TREE;
8877 }
8878 }
8879 }
8880 if (is_simd
8881 && octx->scan_exclusive
8882 && !input_phase
8883 && var4 == NULL_TREE)
8884 var4 = create_tmp_var (TREE_TYPE (val));
8885 }
8886 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8887 {
8888 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8889 if (input_phase)
8890 {
8891 if (var3)
8892 {
8893 /* If we've added a separate identity element
8894 variable, copy it over into val. */
8895 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
8896 var3);
8897 gimplify_and_add (x, &before);
8898 }
8899 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
8900 {
8901 /* Otherwise, assign to it the identity element. */
8902 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
8903 if (is_for)
8904 tseq = copy_gimple_seq_and_replace_locals (tseq);
8905 tree ref = build_outer_var_ref (var, octx);
8906 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
8907 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
8908 if (x)
8909 {
8910 if (new_vard != new_var)
8911 val = build_fold_addr_expr_loc (clause_loc, val);
8912 SET_DECL_VALUE_EXPR (new_vard, val);
8913 }
8914 SET_DECL_VALUE_EXPR (placeholder, ref);
8915 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8916 lower_omp (&tseq, octx);
8917 if (x)
8918 SET_DECL_VALUE_EXPR (new_vard, x);
8919 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
8920 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
8921 gimple_seq_add_seq (&before, tseq);
8922 if (is_simd)
8923 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8924 }
8925 }
8926 else if (is_simd)
8927 {
8928 tree x;
8929 if (octx->scan_exclusive)
8930 {
8931 tree v4 = unshare_expr (var4);
8932 tree v2 = unshare_expr (var2);
8933 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
8934 gimplify_and_add (x, &before);
8935 }
8936 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
8937 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
8938 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
8939 tree vexpr = val;
8940 if (x && new_vard != new_var)
8941 vexpr = build_fold_addr_expr_loc (clause_loc, val);
8942 if (x)
8943 SET_DECL_VALUE_EXPR (new_vard, vexpr);
8944 SET_DECL_VALUE_EXPR (placeholder, var2);
8945 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8946 lower_omp (&tseq, octx);
8947 gimple_seq_add_seq (&before, tseq);
8948 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8949 if (x)
8950 SET_DECL_VALUE_EXPR (new_vard, x);
8951 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
8952 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
8953 if (octx->scan_inclusive)
8954 {
8955 x = lang_hooks.decls.omp_clause_assign_op (c, val,
8956 var2);
8957 gimplify_and_add (x, &before);
8958 }
8959 else if (lane0 == NULL_TREE)
8960 {
8961 x = lang_hooks.decls.omp_clause_assign_op (c, val,
8962 var4);
8963 gimplify_and_add (x, &before);
8964 }
8965 }
8966 }
8967 else
8968 {
8969 if (input_phase)
8970 {
8971 /* Input phase. Set val to the initializer before
8972 the body. */
8973 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
8974 gimplify_assign (val, x, &before);
8975 }
8976 else if (is_simd)
8977 {
8978 /* scan phase. */
8979 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
8980 if (code == MINUS_EXPR)
8981 code = PLUS_EXPR;
8982
8983 tree x = build2 (code, TREE_TYPE (var2),
8984 unshare_expr (var2), unshare_expr (val));
8985 if (octx->scan_inclusive)
8986 {
8987 gimplify_assign (unshare_expr (var2), x, &before);
8988 gimplify_assign (val, var2, &before);
8989 }
8990 else
8991 {
8992 gimplify_assign (unshare_expr (var4),
8993 unshare_expr (var2), &before);
8994 gimplify_assign (var2, x, &before);
8995 if (lane0 == NULL_TREE)
8996 gimplify_assign (val, var4, &before);
8997 }
8998 }
8999 }
9000 if (octx->scan_exclusive && !input_phase && lane0)
9001 {
9002 tree vexpr = unshare_expr (var4);
9003 TREE_OPERAND (vexpr, 1) = lane0;
9004 if (new_vard != new_var)
9005 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9006 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9007 }
9008 }
9009 }
9010 if (is_simd && !is_for_simd)
9011 {
9012 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9013 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9014 gsi_replace (gsi_p, gimple_build_nop (), true);
9015 return;
9016 }
9017 lower_omp (gimple_omp_body_ptr (stmt), octx);
9018 if (before)
9019 {
9020 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9021 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9022 }
9023 }
9024
9025
9026 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9027 substitution of a couple of function calls. But in the NAMED case, it
9028 requires that the languages coordinate a symbol name. It is therefore
9029 best put here in common code. */
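/* Roughly (illustrative only), an unnamed

     #pragma omp critical
     x++;

   becomes

     GOMP_critical_start ();
     x++;
     GOMP_critical_end ();

   while "#pragma omp critical (foo)" instead calls
   GOMP_critical_name_start/end on the address of a shared
   .gomp_critical_user_foo variable created below.  */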
9030
9031 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9032
9033 static void
9034 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9035 {
9036 tree block;
9037 tree name, lock, unlock;
9038 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9039 gbind *bind;
9040 location_t loc = gimple_location (stmt);
9041 gimple_seq tbody;
9042
9043 name = gimple_omp_critical_name (stmt);
9044 if (name)
9045 {
9046 tree decl;
9047
9048 if (!critical_name_mutexes)
9049 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9050
9051 tree *n = critical_name_mutexes->get (name);
9052 if (n == NULL)
9053 {
9054 char *new_str;
9055
9056 decl = create_tmp_var_raw (ptr_type_node);
9057
9058 new_str = ACONCAT ((".gomp_critical_user_",
9059 IDENTIFIER_POINTER (name), NULL));
9060 DECL_NAME (decl) = get_identifier (new_str);
9061 TREE_PUBLIC (decl) = 1;
9062 TREE_STATIC (decl) = 1;
9063 DECL_COMMON (decl) = 1;
9064 DECL_ARTIFICIAL (decl) = 1;
9065 DECL_IGNORED_P (decl) = 1;
9066
9067 varpool_node::finalize_decl (decl);
9068
9069 critical_name_mutexes->put (name, decl);
9070 }
9071 else
9072 decl = *n;
9073
9074 /* If '#pragma omp critical' is inside an offloaded region or inside a
9075 function marked as offloadable, the symbol must be
9076 marked as offloadable too. */
9077 omp_context *octx;
9078 if (cgraph_node::get (current_function_decl)->offloadable)
9079 varpool_node::get_create (decl)->offloadable = 1;
9080 else
9081 for (octx = ctx->outer; octx; octx = octx->outer)
9082 if (is_gimple_omp_offloaded (octx->stmt))
9083 {
9084 varpool_node::get_create (decl)->offloadable = 1;
9085 break;
9086 }
9087
9088 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9089 lock = build_call_expr_loc (loc, lock, 1,
9090 build_fold_addr_expr_loc (loc, decl));
9091
9092 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9093 unlock = build_call_expr_loc (loc, unlock, 1,
9094 build_fold_addr_expr_loc (loc, decl));
9095 }
9096 else
9097 {
9098 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9099 lock = build_call_expr_loc (loc, lock, 0);
9100
9101 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9102 unlock = build_call_expr_loc (loc, unlock, 0);
9103 }
9104
9105 push_gimplify_context ();
9106
9107 block = make_node (BLOCK);
9108 bind = gimple_build_bind (NULL, NULL, block);
9109 gsi_replace (gsi_p, bind, true);
9110 gimple_bind_add_stmt (bind, stmt);
9111
9112 tbody = gimple_bind_body (bind);
9113 gimplify_and_add (lock, &tbody);
9114 gimple_bind_set_body (bind, tbody);
9115
9116 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9117 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9118 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9119 gimple_omp_set_body (stmt, NULL);
9120
9121 tbody = gimple_bind_body (bind);
9122 gimplify_and_add (unlock, &tbody);
9123 gimple_bind_set_body (bind, tbody);
9124
9125 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9126
9127 pop_gimplify_context (bind);
9128 gimple_bind_append_vars (bind, ctx->block_vars);
9129 BLOCK_VARS (block) = gimple_bind_vars (bind);
9130 }
9131
9132 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9133 for a lastprivate clause. Given a loop control predicate of (V
9134 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9135 is appended to *DLIST, iterator initialization is appended to
9136 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9137 to be emitted in a critical section. */
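/* A concrete (illustrative) example: for

     #pragma omp for lastprivate (x)
     for (i = 0; i < n; i++)
       x = a[i];

   the step is 1, so the gate computed below is the strict test i == n;
   only the thread whose iterator ends up equal to the global end value
   executes the lastprivate assignments.  */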
9138
9139 static void
9140 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9141 gimple_seq *dlist, gimple_seq *clist,
9142 struct omp_context *ctx)
9143 {
9144 tree clauses, cond, vinit;
9145 enum tree_code cond_code;
9146 gimple_seq stmts;
9147
9148 cond_code = fd->loop.cond_code;
9149 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9150
9151 /* When possible, use a strict equality expression. This can let
9152 VRP-style optimizations deduce the value and remove a copy. */
9153 if (tree_fits_shwi_p (fd->loop.step))
9154 {
9155 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9156 if (step == 1 || step == -1)
9157 cond_code = EQ_EXPR;
9158 }
9159
9160 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
9161 || gimple_omp_for_grid_phony (fd->for_stmt))
9162 cond = omp_grid_lastprivate_predicate (fd);
9163 else
9164 {
9165 tree n2 = fd->loop.n2;
9166 if (fd->collapse > 1
9167 && TREE_CODE (n2) != INTEGER_CST
9168 && gimple_omp_for_combined_into_p (fd->for_stmt))
9169 {
9170 struct omp_context *taskreg_ctx = NULL;
9171 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9172 {
9173 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9174 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9175 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9176 {
9177 if (gimple_omp_for_combined_into_p (gfor))
9178 {
9179 gcc_assert (ctx->outer->outer
9180 && is_parallel_ctx (ctx->outer->outer));
9181 taskreg_ctx = ctx->outer->outer;
9182 }
9183 else
9184 {
9185 struct omp_for_data outer_fd;
9186 omp_extract_for_data (gfor, &outer_fd, NULL);
9187 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9188 }
9189 }
9190 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9191 taskreg_ctx = ctx->outer->outer;
9192 }
9193 else if (is_taskreg_ctx (ctx->outer))
9194 taskreg_ctx = ctx->outer;
9195 if (taskreg_ctx)
9196 {
9197 int i;
9198 tree taskreg_clauses
9199 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9200 tree innerc = omp_find_clause (taskreg_clauses,
9201 OMP_CLAUSE__LOOPTEMP_);
9202 gcc_assert (innerc);
9203 for (i = 0; i < fd->collapse; i++)
9204 {
9205 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9206 OMP_CLAUSE__LOOPTEMP_);
9207 gcc_assert (innerc);
9208 }
9209 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9210 OMP_CLAUSE__LOOPTEMP_);
9211 if (innerc)
9212 n2 = fold_convert (TREE_TYPE (n2),
9213 lookup_decl (OMP_CLAUSE_DECL (innerc),
9214 taskreg_ctx));
9215 }
9216 }
9217 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9218 }
9219
9220 clauses = gimple_omp_for_clauses (fd->for_stmt);
9221 stmts = NULL;
9222 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9223 if (!gimple_seq_empty_p (stmts))
9224 {
9225 gimple_seq_add_seq (&stmts, *dlist);
9226 *dlist = stmts;
9227
9228 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9229 vinit = fd->loop.n1;
9230 if (cond_code == EQ_EXPR
9231 && tree_fits_shwi_p (fd->loop.n2)
9232 && ! integer_zerop (fd->loop.n2))
9233 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9234 else
9235 vinit = unshare_expr (vinit);
9236
9237 /* Initialize the iterator variable, so that threads that don't execute
9238 any iterations don't execute the lastprivate clauses by accident. */
9239 gimplify_assign (fd->loop.v, vinit, body_p);
9240 }
9241 }
9242
9243 /* Callback for walk_gimple_seq. Find the #pragma omp scan statement. */
9244
9245 static tree
9246 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9247 struct walk_stmt_info *wi)
9248 {
9249 gimple *stmt = gsi_stmt (*gsi_p);
9250
9251 *handled_ops_p = true;
9252 switch (gimple_code (stmt))
9253 {
9254 WALK_SUBSTMTS;
9255
9256 case GIMPLE_OMP_FOR:
9257 if ((gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
9258 && gimple_omp_for_combined_into_p (stmt))
9259 *handled_ops_p = false;
9260 break;
9261
9262 case GIMPLE_OMP_SCAN:
9263 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9264 return integer_zero_node;
9265 default:
9266 break;
9267 }
9268 return NULL;
9269 }
9270
9271 /* Helper function for lower_omp_for; adds transformations for a
9272 worksharing loop with scan directives inside of it.
9273 For a worksharing loop not combined with simd, transform:
9274 #pragma omp for reduction(inscan,+:r) private(i)
9275 for (i = 0; i < n; i = i + 1)
9276 {
9277 {
9278 update (r);
9279 }
9280 #pragma omp scan inclusive(r)
9281 {
9282 use (r);
9283 }
9284 }
9285
9286 into two worksharing loops + code to merge results:
9287
9288 num_threads = omp_get_num_threads ();
9289 thread_num = omp_get_thread_num ();
9290 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9291 <D.2099>:
9292 var2 = r;
9293 goto <D.2101>;
9294 <D.2100>:
9295 // For UDRs this is UDR init, or if ctors are needed, copy from
9296 // var3 that has been constructed to contain the neutral element.
9297 var2 = 0;
9298 <D.2101>:
9299 ivar = 0;
9300 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9301 // a shared array with num_threads elements and rprivb to a local array
9302 // with a number of elements equal to the number of (contiguous)
9303 // iterations the current thread will perform. The controlb and controlp
9304 // variables are temporaries used to handle deallocation of rprivb at
9305 // the end of the second GOMP_FOR.
9306 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9307 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9308 for (i = 0; i < n; i = i + 1)
9309 {
9310 {
9311 // For UDRs this is UDR init or copy from var3.
9312 r = 0;
9313 // This is the input phase from user code.
9314 update (r);
9315 }
9316 {
9317 // For UDRs this is UDR merge.
9318 var2 = var2 + r;
9319 // Rather than handing it over to the user, save it to the local
9320 // thread's array.
9321 rprivb[ivar] = var2;
9322 // For exclusive scan, the above two statements are swapped.
9323 ivar = ivar + 1;
9324 }
9325 }
9326 // And remember this thread's final value in the shared
9327 // rpriva array.
9328 rpriva[(sizetype) thread_num] = var2;
9329 // If there is more than one thread, compute the inclusive parallel scan
9330 // of the rpriva array using a work-efficient prefix sum.
9331 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9332 <D.2102>:
9333 GOMP_barrier ();
9334 down = 0;
9335 k = 1;
9336 num_threadsu = (unsigned int) num_threads;
9337 thread_nump1 = (unsigned int) thread_num + 1;
9338 <D.2108>:
9339 twok = k << 1;
9340 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9341 <D.2110>:
9342 down = 4294967295;
9343 k = k >> 1;
9344 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9345 <D.2112>:
9346 k = k >> 1;
9347 <D.2111>:
9348 twok = k << 1;
9349 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9350 mul = REALPART_EXPR <cplx>;
9351 ovf = IMAGPART_EXPR <cplx>;
9352 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9353 <D.2116>:
9354 andv = k & down;
9355 andvm1 = andv + 4294967295;
9356 l = mul + andvm1;
9357 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9358 <D.2120>:
9359 // For UDRs this is UDR merge, performed using the var2 variable as a
9360 // temporary: var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9361 rpriva[l] = rpriva[l - k] + rpriva[l];
9362 <D.2117>:
9363 if (down == 0) goto <D.2121>; else goto <D.2122>;
9364 <D.2121>:
9365 k = k << 1;
9366 goto <D.2123>;
9367 <D.2122>:
9368 k = k >> 1;
9369 <D.2123>:
9370 GOMP_barrier ();
9371 if (k != 0) goto <D.2108>; else goto <D.2103>;
9372 <D.2103>:
9373 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9374 <D.2124>:
9375 // For UDRs this is UDR init or copy from var3.
9376 var2 = 0;
9377 goto <D.2126>;
9378 <D.2125>:
9379 var2 = rpriva[thread_num - 1];
9380 <D.2126>:
9381 ivar = 0;
9382 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9383 reduction(inscan,+:r) private(i)
9384 for (i = 0; i < n; i = i + 1)
9385 {
9386 {
9387 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9388 r = var2 + rprivb[ivar];
9389 }
9390 {
9391 // This is the scan phase from user code.
9392 use (r);
9393 // Plus a bump of the iterator.
9394 ivar = ivar + 1;
9395 }
9396 } */
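/* A small worked (illustrative) example of the merge: with four threads
   whose per-thread sums in rpriva are {3, 1, 4, 1}, the work-efficient
   scan turns rpriva into {3, 4, 8, 9}; in the second loop thread 0 then
   restarts from the identity 0 and threads 1..3 from rpriva[0..2],
   i.e. from 3, 4 and 8, before replaying their saved rprivb values.  */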
9397
9398 static void
9399 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9400 struct omp_for_data *fd, omp_context *ctx)
9401 {
9402 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9403 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9404
9405 gimple_seq body = gimple_omp_body (stmt);
9406 gimple_stmt_iterator input1_gsi = gsi_none ();
9407 struct walk_stmt_info wi;
9408 memset (&wi, 0, sizeof (wi));
9409 wi.val_only = true;
9410 wi.info = (void *) &input1_gsi;
9411 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9412 gcc_assert (!gsi_end_p (input1_gsi));
9413
9414 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9415 gimple_stmt_iterator gsi = input1_gsi;
9416 gsi_next (&gsi);
9417 gimple_stmt_iterator scan1_gsi = gsi;
9418 gimple *scan_stmt1 = gsi_stmt (gsi);
9419 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9420
9421 gimple_seq input_body = gimple_omp_body (input_stmt1);
9422 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9423 gimple_omp_set_body (input_stmt1, NULL);
9424 gimple_omp_set_body (scan_stmt1, NULL);
9425 gimple_omp_set_body (stmt, NULL);
9426
9427 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9428 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9429 gimple_omp_set_body (stmt, body);
9430 gimple_omp_set_body (input_stmt1, input_body);
9431
9432 gimple_stmt_iterator input2_gsi = gsi_none ();
9433 memset (&wi, 0, sizeof (wi));
9434 wi.val_only = true;
9435 wi.info = (void *) &input2_gsi;
9436 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9437 gcc_assert (!gsi_end_p (input2_gsi));
9438
9439 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9440 gsi = input2_gsi;
9441 gsi_next (&gsi);
9442 gimple_stmt_iterator scan2_gsi = gsi;
9443 gimple *scan_stmt2 = gsi_stmt (gsi);
9444 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9445 gimple_omp_set_body (scan_stmt2, scan_body);
9446
9447 gimple_stmt_iterator input3_gsi = gsi_none ();
9448 gimple_stmt_iterator scan3_gsi = gsi_none ();
9449 gimple_stmt_iterator input4_gsi = gsi_none ();
9450 gimple_stmt_iterator scan4_gsi = gsi_none ();
9451 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9452 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9453 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9454 if (is_for_simd)
9455 {
9456 memset (&wi, 0, sizeof (wi));
9457 wi.val_only = true;
9458 wi.info = (void *) &input3_gsi;
9459 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9460 gcc_assert (!gsi_end_p (input3_gsi));
9461
9462 input_stmt3 = gsi_stmt (input3_gsi);
9463 gsi = input3_gsi;
9464 gsi_next (&gsi);
9465 scan3_gsi = gsi;
9466 scan_stmt3 = gsi_stmt (gsi);
9467 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9468
9469 memset (&wi, 0, sizeof (wi));
9470 wi.val_only = true;
9471 wi.info = (void *) &input4_gsi;
9472 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9473 gcc_assert (!gsi_end_p (input4_gsi));
9474
9475 input_stmt4 = gsi_stmt (input4_gsi);
9476 gsi = input4_gsi;
9477 gsi_next (&gsi);
9478 scan4_gsi = gsi;
9479 scan_stmt4 = gsi_stmt (gsi);
9480 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9481
9482 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9483 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9484 }
9485
9486 tree num_threads = create_tmp_var (integer_type_node);
9487 tree thread_num = create_tmp_var (integer_type_node);
9488 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9489 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9490 gimple *g = gimple_build_call (nthreads_decl, 0);
9491 gimple_call_set_lhs (g, num_threads);
9492 gimple_seq_add_stmt (body_p, g);
9493 g = gimple_build_call (threadnum_decl, 0);
9494 gimple_call_set_lhs (g, thread_num);
9495 gimple_seq_add_stmt (body_p, g);
9496
9497 tree ivar = create_tmp_var (sizetype);
9498 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9499 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9500 tree k = create_tmp_var (unsigned_type_node);
9501 tree l = create_tmp_var (unsigned_type_node);
9502
9503 gimple_seq clist = NULL, mdlist = NULL;
9504 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9505 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9506 gimple_seq scan1_list = NULL, input2_list = NULL;
9507 gimple_seq last_list = NULL, reduc_list = NULL;
9508 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9509 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9510 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9511 {
9512 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9513 tree var = OMP_CLAUSE_DECL (c);
9514 tree new_var = lookup_decl (var, ctx);
9515 tree var3 = NULL_TREE;
9516 tree new_vard = new_var;
9517 if (omp_is_reference (var))
9518 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9519 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9520 {
9521 var3 = maybe_lookup_decl (new_vard, ctx);
9522 if (var3 == new_vard)
9523 var3 = NULL_TREE;
9524 }
9525
9526 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9527 tree rpriva = create_tmp_var (ptype);
9528 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9529 OMP_CLAUSE_DECL (nc) = rpriva;
9530 *cp1 = nc;
9531 cp1 = &OMP_CLAUSE_CHAIN (nc);
9532
9533 tree rprivb = create_tmp_var (ptype);
9534 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9535 OMP_CLAUSE_DECL (nc) = rprivb;
9536 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9537 *cp1 = nc;
9538 cp1 = &OMP_CLAUSE_CHAIN (nc);
9539
9540 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9541 if (new_vard != new_var)
9542 TREE_ADDRESSABLE (var2) = 1;
9543 gimple_add_tmp_var (var2);
9544
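/* Precompute the array references used by the sequences built below:
   rpriva_ref is rpriva[thread_num], rprivam1_ref is
   rpriva[thread_num - 1], rprival_ref is rpriva[l], rprivalmk_ref is
   rpriva[l - k] and rprivb_ref is rprivb[ivar], all expressed as
   pointer arithmetic because rpriva/rprivb are pointer-typed
   temporaries.  */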
9545 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9546 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9547 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9548 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9549 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9550
9551 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9552 thread_num, integer_minus_one_node);
9553 x = fold_convert_loc (clause_loc, sizetype, x);
9554 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9555 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9556 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9557 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9558
9559 x = fold_convert_loc (clause_loc, sizetype, l);
9560 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9561 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9562 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9563 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9564
9565 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9566 x = fold_convert_loc (clause_loc, sizetype, x);
9567 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9568 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9569 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9570 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9571
9572 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9573 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9574 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9575 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9576
9577 tree var4 = is_for_simd ? new_var : var2;
9578 tree var5 = NULL_TREE, var6 = NULL_TREE;
9579 if (is_for_simd)
9580 {
9581 var5 = lookup_decl (var, input_simd_ctx);
9582 var6 = lookup_decl (var, scan_simd_ctx);
9583 if (new_vard != new_var)
9584 {
9585 var5 = build_simple_mem_ref_loc (clause_loc, var5);
9586 var6 = build_simple_mem_ref_loc (clause_loc, var6);
9587 }
9588 }
9589 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9590 {
9591 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9592 tree val = var2;
9593
9594 x = lang_hooks.decls.omp_clause_default_ctor
9595 (c, var2, build_outer_var_ref (var, ctx));
9596 if (x)
9597 gimplify_and_add (x, &clist);
9598
9599 x = build_outer_var_ref (var, ctx);
9600 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
9601 x);
9602 gimplify_and_add (x, &thr01_list);
9603
9604 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9605 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9606 if (var3)
9607 {
9608 x = unshare_expr (var4);
9609 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9610 gimplify_and_add (x, &thrn1_list);
9611 x = unshare_expr (var4);
9612 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9613 gimplify_and_add (x, &thr02_list);
9614 }
9615 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9616 {
9617 /* Otherwise, assign to it the identity element. */
9618 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9619 tseq = copy_gimple_seq_and_replace_locals (tseq);
9620 if (!is_for_simd)
9621 {
9622 if (new_vard != new_var)
9623 val = build_fold_addr_expr_loc (clause_loc, val);
9624 SET_DECL_VALUE_EXPR (new_vard, val);
9625 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9626 }
9627 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9628 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9629 lower_omp (&tseq, ctx);
9630 gimple_seq_add_seq (&thrn1_list, tseq);
9631 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9632 lower_omp (&tseq, ctx);
9633 gimple_seq_add_seq (&thr02_list, tseq);
9634 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9635 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9636 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9637 if (y)
9638 SET_DECL_VALUE_EXPR (new_vard, y);
9639 else
9640 {
9641 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9642 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9643 }
9644 }
9645
9646 x = unshare_expr (var4);
9647 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
9648 gimplify_and_add (x, &thrn2_list);
9649
9650 if (is_for_simd)
9651 {
9652 x = unshare_expr (rprivb_ref);
9653 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
9654 gimplify_and_add (x, &scan1_list);
9655 }
9656 else
9657 {
9658 if (ctx->scan_exclusive)
9659 {
9660 x = unshare_expr (rprivb_ref);
9661 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9662 gimplify_and_add (x, &scan1_list);
9663 }
9664
9665 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9666 tseq = copy_gimple_seq_and_replace_locals (tseq);
9667 SET_DECL_VALUE_EXPR (placeholder, var2);
9668 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9669 lower_omp (&tseq, ctx);
9670 gimple_seq_add_seq (&scan1_list, tseq);
9671
9672 if (ctx->scan_inclusive)
9673 {
9674 x = unshare_expr (rprivb_ref);
9675 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9676 gimplify_and_add (x, &scan1_list);
9677 }
9678 }
9679
9680 x = unshare_expr (rpriva_ref);
9681 x = lang_hooks.decls.omp_clause_assign_op (c, x,
9682 unshare_expr (var4));
9683 gimplify_and_add (x, &mdlist);
9684
9685 x = unshare_expr (is_for_simd ? var6 : new_var);
9686 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
9687 gimplify_and_add (x, &input2_list);
9688
9689 val = rprivb_ref;
9690 if (new_vard != new_var)
9691 val = build_fold_addr_expr_loc (clause_loc, val);
9692
9693 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9694 tseq = copy_gimple_seq_and_replace_locals (tseq);
9695 SET_DECL_VALUE_EXPR (new_vard, val);
9696 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9697 if (is_for_simd)
9698 {
9699 SET_DECL_VALUE_EXPR (placeholder, var6);
9700 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9701 }
9702 else
9703 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9704 lower_omp (&tseq, ctx);
9705 if (y)
9706 SET_DECL_VALUE_EXPR (new_vard, y);
9707 else
9708 {
9709 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9710 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9711 }
9712 if (!is_for_simd)
9713 {
9714 SET_DECL_VALUE_EXPR (placeholder, new_var);
9715 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9716 lower_omp (&tseq, ctx);
9717 }
9718 gimple_seq_add_seq (&input2_list, tseq);
9719
9720 x = build_outer_var_ref (var, ctx);
9721 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
9722 gimplify_and_add (x, &last_list);
9723
9724 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
9725 gimplify_and_add (x, &reduc_list);
9726 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9727 tseq = copy_gimple_seq_and_replace_locals (tseq);
9728 val = rprival_ref;
9729 if (new_vard != new_var)
9730 val = build_fold_addr_expr_loc (clause_loc, val);
9731 SET_DECL_VALUE_EXPR (new_vard, val);
9732 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9733 SET_DECL_VALUE_EXPR (placeholder, var2);
9734 lower_omp (&tseq, ctx);
9735 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9736 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9737 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9738 if (y)
9739 SET_DECL_VALUE_EXPR (new_vard, y);
9740 else
9741 {
9742 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9743 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9744 }
9745 gimple_seq_add_seq (&reduc_list, tseq);
9746 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
9747 gimplify_and_add (x, &reduc_list);
9748
9749 x = lang_hooks.decls.omp_clause_dtor (c, var2);
9750 if (x)
9751 gimplify_and_add (x, dlist);
9752 }
9753 else
9754 {
9755 x = build_outer_var_ref (var, ctx);
9756 gimplify_assign (unshare_expr (var4), x, &thr01_list);
9757
9758 x = omp_reduction_init (c, TREE_TYPE (new_var));
9759 gimplify_assign (unshare_expr (var4), unshare_expr (x),
9760 &thrn1_list);
9761 gimplify_assign (unshare_expr (var4), x, &thr02_list);
9762
9763 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
9764
9765 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9766 if (code == MINUS_EXPR)
9767 code = PLUS_EXPR;
9768
9769 if (is_for_simd)
9770 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
9771 else
9772 {
9773 if (ctx->scan_exclusive)
9774 gimplify_assign (unshare_expr (rprivb_ref), var2,
9775 &scan1_list);
9776 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
9777 gimplify_assign (var2, x, &scan1_list);
9778 if (ctx->scan_inclusive)
9779 gimplify_assign (unshare_expr (rprivb_ref), var2,
9780 &scan1_list);
9781 }
9782
9783 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
9784 &mdlist);
9785
9786 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
9787 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
9788
9789 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
9790 &last_list);
9791
9792 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
9793 unshare_expr (rprival_ref));
9794 gimplify_assign (rprival_ref, x, &reduc_list);
9795 }
9796 }
9797
9798 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
9799 gimple_seq_add_stmt (&scan1_list, g);
9800 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
9801 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
9802 ? scan_stmt4 : scan_stmt2), g);
9803
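/* Create the controlb/controlp temporaries and attach them as _scantemp_
   control clauses to both GOMP_FORs; as the comment before this function
   explains, they coordinate deallocation of the rprivb buffer at the end
   of the second GOMP_FOR.  */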
9804 tree controlb = create_tmp_var (boolean_type_node);
9805 tree controlp = create_tmp_var (ptr_type_node);
9806 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
9807 OMP_CLAUSE_DECL (nc) = controlb;
9808 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
9809 *cp1 = nc;
9810 cp1 = &OMP_CLAUSE_CHAIN (nc);
9811 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
9812 OMP_CLAUSE_DECL (nc) = controlp;
9813 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
9814 *cp1 = nc;
9815 cp1 = &OMP_CLAUSE_CHAIN (nc);
9816 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
9817 OMP_CLAUSE_DECL (nc) = controlb;
9818 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
9819 *cp2 = nc;
9820 cp2 = &OMP_CLAUSE_CHAIN (nc);
9821 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
9822 OMP_CLAUSE_DECL (nc) = controlp;
9823 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
9824 *cp2 = nc;
9825 cp2 = &OMP_CLAUSE_CHAIN (nc);
9826
9827 *cp1 = gimple_omp_for_clauses (stmt);
9828 gimple_omp_for_set_clauses (stmt, new_clauses1);
9829 *cp2 = gimple_omp_for_clauses (new_stmt);
9830 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
9831
9832 if (is_for_simd)
9833 {
9834 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
9835 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
9836
9837 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
9838 GSI_SAME_STMT);
9839 gsi_remove (&input3_gsi, true);
9840 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
9841 GSI_SAME_STMT);
9842 gsi_remove (&scan3_gsi, true);
9843 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
9844 GSI_SAME_STMT);
9845 gsi_remove (&input4_gsi, true);
9846 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
9847 GSI_SAME_STMT);
9848 gsi_remove (&scan4_gsi, true);
9849 }
9850 else
9851 {
9852 gimple_omp_set_body (scan_stmt1, scan1_list);
9853 gimple_omp_set_body (input_stmt2, input2_list);
9854 }
9855
9856 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
9857 GSI_SAME_STMT);
9858 gsi_remove (&input1_gsi, true);
9859 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
9860 GSI_SAME_STMT);
9861 gsi_remove (&scan1_gsi, true);
9862 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
9863 GSI_SAME_STMT);
9864 gsi_remove (&input2_gsi, true);
9865 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
9866 GSI_SAME_STMT);
9867 gsi_remove (&scan2_gsi, true);
9868
9869 gimple_seq_add_seq (body_p, clist);
9870
9871 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9872 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9873 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9874 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
9875 gimple_seq_add_stmt (body_p, g);
9876 g = gimple_build_label (lab1);
9877 gimple_seq_add_stmt (body_p, g);
9878 gimple_seq_add_seq (body_p, thr01_list);
9879 g = gimple_build_goto (lab3);
9880 gimple_seq_add_stmt (body_p, g);
9881 g = gimple_build_label (lab2);
9882 gimple_seq_add_stmt (body_p, g);
9883 gimple_seq_add_seq (body_p, thrn1_list);
9884 g = gimple_build_label (lab3);
9885 gimple_seq_add_stmt (body_p, g);
9886
9887 g = gimple_build_assign (ivar, size_zero_node);
9888 gimple_seq_add_stmt (body_p, g);
9889
9890 gimple_seq_add_stmt (body_p, stmt);
9891 gimple_seq_add_seq (body_p, body);
9892 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
9893 fd->loop.v));
9894
9895 g = gimple_build_omp_return (true);
9896 gimple_seq_add_stmt (body_p, g);
9897 gimple_seq_add_seq (body_p, mdlist);
9898
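/* Emit the inter-thread scan of rpriva sketched in the <D.2102> block of
   the comment before this function: k doubles while the up-sweep combines
   pairs of partial sums, then down is set to all-ones and k halves while
   the down-sweep fills in the remaining prefixes, with a barrier between
   steps. The .MUL_OVERFLOW and l < num_threadsu checks merely skip lanes
   whose target index would fall outside the rpriva array.  */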
9899 lab1 = create_artificial_label (UNKNOWN_LOCATION);
9900 lab2 = create_artificial_label (UNKNOWN_LOCATION);
9901 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
9902 gimple_seq_add_stmt (body_p, g);
9903 g = gimple_build_label (lab1);
9904 gimple_seq_add_stmt (body_p, g);
9905
9906 g = omp_build_barrier (NULL);
9907 gimple_seq_add_stmt (body_p, g);
9908
9909 tree down = create_tmp_var (unsigned_type_node);
9910 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
9911 gimple_seq_add_stmt (body_p, g);
9912
9913 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
9914 gimple_seq_add_stmt (body_p, g);
9915
9916 tree num_threadsu = create_tmp_var (unsigned_type_node);
9917 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
9918 gimple_seq_add_stmt (body_p, g);
9919
9920 tree thread_numu = create_tmp_var (unsigned_type_node);
9921 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
9922 gimple_seq_add_stmt (body_p, g);
9923
9924 tree thread_nump1 = create_tmp_var (unsigned_type_node);
9925 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
9926 build_int_cst (unsigned_type_node, 1));
9927 gimple_seq_add_stmt (body_p, g);
9928
9929 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9930 g = gimple_build_label (lab3);
9931 gimple_seq_add_stmt (body_p, g);
9932
9933 tree twok = create_tmp_var (unsigned_type_node);
9934 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
9935 gimple_seq_add_stmt (body_p, g);
9936
9937 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9938 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9939 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9940 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
9941 gimple_seq_add_stmt (body_p, g);
9942 g = gimple_build_label (lab4);
9943 gimple_seq_add_stmt (body_p, g);
9944 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
9945 gimple_seq_add_stmt (body_p, g);
9946 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
9947 gimple_seq_add_stmt (body_p, g);
9948
9949 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
9950 gimple_seq_add_stmt (body_p, g);
9951 g = gimple_build_label (lab6);
9952 gimple_seq_add_stmt (body_p, g);
9953
9954 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
9955 gimple_seq_add_stmt (body_p, g);
9956
9957 g = gimple_build_label (lab5);
9958 gimple_seq_add_stmt (body_p, g);
9959
9960 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
9961 gimple_seq_add_stmt (body_p, g);
9962
9963 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
9964 DECL_GIMPLE_REG_P (cplx) = 1;
9965 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
9966 gimple_call_set_lhs (g, cplx);
9967 gimple_seq_add_stmt (body_p, g);
9968 tree mul = create_tmp_var (unsigned_type_node);
9969 g = gimple_build_assign (mul, REALPART_EXPR,
9970 build1 (REALPART_EXPR, unsigned_type_node, cplx));
9971 gimple_seq_add_stmt (body_p, g);
9972 tree ovf = create_tmp_var (unsigned_type_node);
9973 g = gimple_build_assign (ovf, IMAGPART_EXPR,
9974 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
9975 gimple_seq_add_stmt (body_p, g);
9976
9977 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
9978 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
9979 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
9980 lab7, lab8);
9981 gimple_seq_add_stmt (body_p, g);
9982 g = gimple_build_label (lab7);
9983 gimple_seq_add_stmt (body_p, g);
9984
9985 tree andv = create_tmp_var (unsigned_type_node);
9986 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
9987 gimple_seq_add_stmt (body_p, g);
9988 tree andvm1 = create_tmp_var (unsigned_type_node);
9989 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
9990 build_minus_one_cst (unsigned_type_node));
9991 gimple_seq_add_stmt (body_p, g);
9992
9993 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
9994 gimple_seq_add_stmt (body_p, g);
9995
9996 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
9997 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
9998 gimple_seq_add_stmt (body_p, g);
9999 g = gimple_build_label (lab9);
10000 gimple_seq_add_stmt (body_p, g);
10001 gimple_seq_add_seq (body_p, reduc_list);
10002 g = gimple_build_label (lab8);
10003 gimple_seq_add_stmt (body_p, g);
10004
10005 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10006 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10007 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10008 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10009 lab10, lab11);
10010 gimple_seq_add_stmt (body_p, g);
10011 g = gimple_build_label (lab10);
10012 gimple_seq_add_stmt (body_p, g);
10013 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10014 gimple_seq_add_stmt (body_p, g);
10015 g = gimple_build_goto (lab12);
10016 gimple_seq_add_stmt (body_p, g);
10017 g = gimple_build_label (lab11);
10018 gimple_seq_add_stmt (body_p, g);
10019 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10020 gimple_seq_add_stmt (body_p, g);
10021 g = gimple_build_label (lab12);
10022 gimple_seq_add_stmt (body_p, g);
10023
10024 g = omp_build_barrier (NULL);
10025 gimple_seq_add_stmt (body_p, g);
10026
10027 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10028 lab3, lab2);
10029 gimple_seq_add_stmt (body_p, g);
10030
10031 g = gimple_build_label (lab2);
10032 gimple_seq_add_stmt (body_p, g);
10033
10034 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10035 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10036 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10037 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10038 gimple_seq_add_stmt (body_p, g);
10039 g = gimple_build_label (lab1);
10040 gimple_seq_add_stmt (body_p, g);
10041 gimple_seq_add_seq (body_p, thr02_list);
10042 g = gimple_build_goto (lab3);
10043 gimple_seq_add_stmt (body_p, g);
10044 g = gimple_build_label (lab2);
10045 gimple_seq_add_stmt (body_p, g);
10046 gimple_seq_add_seq (body_p, thrn2_list);
10047 g = gimple_build_label (lab3);
10048 gimple_seq_add_stmt (body_p, g);
10049
10050 g = gimple_build_assign (ivar, size_zero_node);
10051 gimple_seq_add_stmt (body_p, g);
10052 gimple_seq_add_stmt (body_p, new_stmt);
10053 gimple_seq_add_seq (body_p, new_body);
10054
10055 gimple_seq new_dlist = NULL;
10056 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10057 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10058 tree num_threadsm1 = create_tmp_var (integer_type_node);
10059 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10060 integer_minus_one_node);
10061 gimple_seq_add_stmt (&new_dlist, g);
10062 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10063 gimple_seq_add_stmt (&new_dlist, g);
10064 g = gimple_build_label (lab1);
10065 gimple_seq_add_stmt (&new_dlist, g);
10066 gimple_seq_add_seq (&new_dlist, last_list);
10067 g = gimple_build_label (lab2);
10068 gimple_seq_add_stmt (&new_dlist, g);
10069 gimple_seq_add_seq (&new_dlist, *dlist);
10070 *dlist = new_dlist;
10071 }
10072
10073 /* Lower code for an OMP loop directive. */
10074
10075 static void
10076 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10077 {
10078 tree *rhs_p, block;
10079 struct omp_for_data fd, *fdp = NULL;
10080 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10081 gbind *new_stmt;
10082 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10083 gimple_seq cnt_list = NULL, clist = NULL;
10084 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10085 size_t i;
10086
10087 push_gimplify_context ();
10088
10089 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10090
10091 block = make_node (BLOCK);
10092 new_stmt = gimple_build_bind (NULL, NULL, block);
10093 /* Replace at gsi right away, so that 'stmt' is no longer a member
10094 of a sequence, as we're going to add it to a different
10095 one below. */
10096 gsi_replace (gsi_p, new_stmt, true);
10097
10098 /* Move declarations of temporaries in the loop body before we make
10099 it go away. */
10100 omp_for_body = gimple_omp_body (stmt);
10101 if (!gimple_seq_empty_p (omp_for_body)
10102 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10103 {
10104 gbind *inner_bind
10105 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10106 tree vars = gimple_bind_vars (inner_bind);
10107 gimple_bind_append_vars (new_stmt, vars);
10108 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block; don't
10109 keep them on the inner_bind and its block. */
10110 gimple_bind_set_vars (inner_bind, NULL_TREE);
10111 if (gimple_bind_block (inner_bind))
10112 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10113 }
10114
10115 if (gimple_omp_for_combined_into_p (stmt))
10116 {
10117 omp_extract_for_data (stmt, &fd, NULL);
10118 fdp = &fd;
10119
10120 /* We need two temporaries with fd.loop.v type (istart/iend)
10121 and then (fd.collapse - 1) temporaries with the same
10122 type for count2 ... countN-1 vars if not constant. */
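/* E.g. (illustrative) a collapse(3) loop whose iteration count is not a
   compile-time constant needs count = 2 + (3 - 1) = 4 _looptemp_
   clauses.  */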
10123 size_t count = 2;
10124 tree type = fd.iter_type;
10125 if (fd.collapse > 1
10126 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10127 count += fd.collapse - 1;
10128 bool taskreg_for
10129 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10130 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10131 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10132 tree simtc = NULL;
10133 tree clauses = *pc;
10134 if (taskreg_for)
10135 outerc
10136 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10137 OMP_CLAUSE__LOOPTEMP_);
10138 if (ctx->simt_stmt)
10139 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10140 OMP_CLAUSE__LOOPTEMP_);
10141 for (i = 0; i < count; i++)
10142 {
10143 tree temp;
10144 if (taskreg_for)
10145 {
10146 gcc_assert (outerc);
10147 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10148 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10149 OMP_CLAUSE__LOOPTEMP_);
10150 }
10151 else
10152 {
10153 /* If there are two adjacent SIMD stmts, one with a _simt_
10154 clause and another without, make sure they have the same
10155 decls in their _looptemp_ clauses, because the outer stmt
10156 they are combined into will look up just one inner_stmt. */
10157 if (ctx->simt_stmt)
10158 temp = OMP_CLAUSE_DECL (simtc);
10159 else
10160 temp = create_tmp_var (type);
10161 insert_decl_map (&ctx->outer->cb, temp, temp);
10162 }
10163 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10164 OMP_CLAUSE_DECL (*pc) = temp;
10165 pc = &OMP_CLAUSE_CHAIN (*pc);
10166 if (ctx->simt_stmt)
10167 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10168 OMP_CLAUSE__LOOPTEMP_);
10169 }
10170 *pc = clauses;
10171 }
10172
10173 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10174 dlist = NULL;
10175 body = NULL;
10176 tree rclauses
10177 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10178 OMP_CLAUSE_REDUCTION);
10179 tree rtmp = NULL_TREE;
10180 if (rclauses)
10181 {
10182 tree type = build_pointer_type (pointer_sized_int_node);
10183 tree temp = create_tmp_var (type);
10184 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10185 OMP_CLAUSE_DECL (c) = temp;
10186 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10187 gimple_omp_for_set_clauses (stmt, c);
10188 lower_omp_task_reductions (ctx, OMP_FOR,
10189 gimple_omp_for_clauses (stmt),
10190 &tred_ilist, &tred_dlist);
10191 rclauses = c;
10192 rtmp = make_ssa_name (type);
10193 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10194 }
10195
10196 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10197 ctx);
10198
10199 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10200 fdp);
10201 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10202 gimple_omp_for_pre_body (stmt));
10203
10204 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10205
10206 /* Lower the header expressions. At this point, we can assume that
10207 the header is of the form:
10208
10209 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10210
10211 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10212 using the .omp_data_s mapping, if needed. */
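/* For instance (with invented names), a non-invariant bound such as

     #pragma omp for (V = 0; V < n * m; V = V + 1)

   is rewritten to use a formal temporary,

     D.tmp = n * m;
     #pragma omp for (V = 0; V < D.tmp; V = V + 1)

   with the computation of D.tmp accumulated in cnt_list and emitted
   before the construct.  */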
10213 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10214 {
10215 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10216 if (!is_gimple_min_invariant (*rhs_p))
10217 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10218 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10219 recompute_tree_invariant_for_addr_expr (*rhs_p);
10220
10221 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10222 if (!is_gimple_min_invariant (*rhs_p))
10223 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10224 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10225 recompute_tree_invariant_for_addr_expr (*rhs_p);
10226
10227 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10228 if (!is_gimple_min_invariant (*rhs_p))
10229 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10230 }
10231 if (rclauses)
10232 gimple_seq_add_seq (&tred_ilist, cnt_list);
10233 else
10234 gimple_seq_add_seq (&body, cnt_list);
10235
10236 /* Once lowered, extract the bounds and clauses. */
10237 omp_extract_for_data (stmt, &fd, NULL);
10238
10239 if (is_gimple_omp_oacc (ctx->stmt)
10240 && !ctx_in_oacc_kernels_region (ctx))
10241 lower_oacc_head_tail (gimple_location (stmt),
10242 gimple_omp_for_clauses (stmt),
10243 &oacc_head, &oacc_tail, ctx);
10244
10245 /* Add OpenACC partitioning and reduction markers just before the loop. */
10246 if (oacc_head)
10247 gimple_seq_add_seq (&body, oacc_head);
10248
10249 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10250
10251 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10252 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10253 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10254 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10255 {
10256 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10257 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10258 OMP_CLAUSE_LINEAR_STEP (c)
10259 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10260 ctx);
10261 }
10262
10263 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
10264 && gimple_omp_for_grid_phony (stmt));
10265 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10266 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10267 {
10268 gcc_assert (!phony_loop);
10269 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10270 }
10271 else
10272 {
10273 if (!phony_loop)
10274 gimple_seq_add_stmt (&body, stmt);
10275 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10276 }
10277
10278 if (!phony_loop)
10279 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10280 fd.loop.v));
10281
10282 /* After the loop, add exit clauses. */
10283 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10284
10285 if (clist)
10286 {
10287 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10288 gcall *g = gimple_build_call (fndecl, 0);
10289 gimple_seq_add_stmt (&body, g);
10290 gimple_seq_add_seq (&body, clist);
10291 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10292 g = gimple_build_call (fndecl, 0);
10293 gimple_seq_add_stmt (&body, g);
10294 }
10295
10296 if (ctx->cancellable)
10297 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10298
10299 gimple_seq_add_seq (&body, dlist);
10300
10301 if (rclauses)
10302 {
10303 gimple_seq_add_seq (&tred_ilist, body);
10304 body = tred_ilist;
10305 }
10306
10307 body = maybe_catch_exception (body);
10308
10309 if (!phony_loop)
10310 {
10311 /* Region exit marker goes at the end of the loop body. */
10312 gimple *g = gimple_build_omp_return (fd.have_nowait);
10313 gimple_seq_add_stmt (&body, g);
10314
10315 gimple_seq_add_seq (&body, tred_dlist);
10316
10317 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10318
10319 if (rclauses)
10320 OMP_CLAUSE_DECL (rclauses) = rtmp;
10321 }
10322
10323 /* Add OpenACC joining and reduction markers just after the loop. */
10324 if (oacc_tail)
10325 gimple_seq_add_seq (&body, oacc_tail);
10326
10327 pop_gimplify_context (new_stmt);
10328
10329 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10330 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10331 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10332 if (BLOCK_VARS (block))
10333 TREE_USED (block) = 1;
10334
10335 gimple_bind_set_body (new_stmt, body);
10336 gimple_omp_set_body (stmt, NULL);
10337 gimple_omp_for_set_pre_body (stmt, NULL);
10338 }
10339
10340 /* Callback for walk_stmts, used to check whether a parallel body consists
10341 of exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS statement. */
10342
10343 static tree
10344 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10345 bool *handled_ops_p,
10346 struct walk_stmt_info *wi)
10347 {
10348 int *info = (int *) wi->info;
10349 gimple *stmt = gsi_stmt (*gsi_p);
10350
10351 *handled_ops_p = true;
10352 switch (gimple_code (stmt))
10353 {
10354 WALK_SUBSTMTS;
10355
10356 case GIMPLE_DEBUG:
10357 break;
10358 case GIMPLE_OMP_FOR:
10359 case GIMPLE_OMP_SECTIONS:
10360 *info = *info == 0 ? 1 : -1;
10361 break;
10362 default:
10363 *info = -1;
10364 break;
10365 }
10366 return NULL;
10367 }
10368
10369 struct omp_taskcopy_context
10370 {
10371 /* This field must be at the beginning, as we do "inheritance": Some
10372 callback functions for tree-inline.c (e.g., omp_copy_decl)
10373 receive a copy_body_data pointer that is up-casted to an
10374 omp_context pointer. */
10375 copy_body_data cb;
10376 omp_context *ctx;
10377 };
10378
10379 static tree
10380 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10381 {
10382 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10383
10384 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10385 return create_tmp_var (TREE_TYPE (var));
10386
10387 return var;
10388 }
10389
10390 static tree
10391 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10392 {
10393 tree name, new_fields = NULL, type, f;
10394
10395 type = lang_hooks.types.make_type (RECORD_TYPE);
10396 name = DECL_NAME (TYPE_NAME (orig_type));
10397 name = build_decl (gimple_location (tcctx->ctx->stmt),
10398 TYPE_DECL, name, type);
10399 TYPE_NAME (type) = name;
10400
10401 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10402 {
10403 tree new_f = copy_node (f);
10404 DECL_CONTEXT (new_f) = type;
10405 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10406 TREE_CHAIN (new_f) = new_fields;
10407 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10408 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10409 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10410 &tcctx->cb, NULL);
10411 new_fields = new_f;
10412 tcctx->cb.decl_map->put (f, new_f);
10413 }
10414 TYPE_FIELDS (type) = nreverse (new_fields);
10415 layout_type (type);
10416 return type;
10417 }
10418
10419 /* Create task copyfn. */
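/* Rough shape of the generated function (a sketch under assumed record and
field names, not verbatim compiler output): for

#pragma omp task firstprivate(x) shared(s)

the copyfn built below is essentially

void .omp_task_copyfn (struct .omp_data_t *dst, struct .omp_data_s *src)
{
dst->x = src->x; <- or the copy constructor for C++ types
dst->s = src->s; <- pointer to the shared S
}

preceded by a pass that initializes temporaries used in VLA sizes and
field offsets, and followed by a pass that copy-constructs VLA
firstprivates. */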
10420
10421 static void
10422 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10423 {
10424 struct function *child_cfun;
10425 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10426 tree record_type, srecord_type, bind, list;
10427 bool record_needs_remap = false, srecord_needs_remap = false;
10428 splay_tree_node n;
10429 struct omp_taskcopy_context tcctx;
10430 location_t loc = gimple_location (task_stmt);
10431 size_t looptempno = 0;
10432
10433 child_fn = gimple_omp_task_copy_fn (task_stmt);
10434 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10435 gcc_assert (child_cfun->cfg == NULL);
10436 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10437
10438 /* Reset DECL_CONTEXT on function arguments. */
10439 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10440 DECL_CONTEXT (t) = child_fn;
10441
10442 /* Populate the function. */
10443 push_gimplify_context ();
10444 push_cfun (child_cfun);
10445
10446 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10447 TREE_SIDE_EFFECTS (bind) = 1;
10448 list = NULL;
10449 DECL_SAVED_TREE (child_fn) = bind;
10450 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10451
10452 /* Remap src and dst argument types if needed. */
10453 record_type = ctx->record_type;
10454 srecord_type = ctx->srecord_type;
10455 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10456 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10457 {
10458 record_needs_remap = true;
10459 break;
10460 }
10461 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10462 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10463 {
10464 srecord_needs_remap = true;
10465 break;
10466 }
10467
10468 if (record_needs_remap || srecord_needs_remap)
10469 {
10470 memset (&tcctx, '\0', sizeof (tcctx));
10471 tcctx.cb.src_fn = ctx->cb.src_fn;
10472 tcctx.cb.dst_fn = child_fn;
10473 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10474 gcc_checking_assert (tcctx.cb.src_node);
10475 tcctx.cb.dst_node = tcctx.cb.src_node;
10476 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10477 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10478 tcctx.cb.eh_lp_nr = 0;
10479 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10480 tcctx.cb.decl_map = new hash_map<tree, tree>;
10481 tcctx.ctx = ctx;
10482
10483 if (record_needs_remap)
10484 record_type = task_copyfn_remap_type (&tcctx, record_type);
10485 if (srecord_needs_remap)
10486 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10487 }
10488 else
10489 tcctx.cb.decl_map = NULL;
10490
10491 arg = DECL_ARGUMENTS (child_fn);
10492 TREE_TYPE (arg) = build_pointer_type (record_type);
10493 sarg = DECL_CHAIN (arg);
10494 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10495
10496 /* First pass: initialize temporaries used in record_type and srecord_type
10497 sizes and field offsets. */
10498 if (tcctx.cb.decl_map)
10499 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10500 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10501 {
10502 tree *p;
10503
10504 decl = OMP_CLAUSE_DECL (c);
10505 p = tcctx.cb.decl_map->get (decl);
10506 if (p == NULL)
10507 continue;
10508 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10509 sf = (tree) n->value;
10510 sf = *tcctx.cb.decl_map->get (sf);
10511 src = build_simple_mem_ref_loc (loc, sarg);
10512 src = omp_build_component_ref (src, sf);
10513 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10514 append_to_statement_list (t, &list);
10515 }
10516
10517 /* Second pass: copy shared var pointers and copy-construct non-VLA
10518 firstprivate vars. */
10519 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10520 switch (OMP_CLAUSE_CODE (c))
10521 {
10522 splay_tree_key key;
10523 case OMP_CLAUSE_SHARED:
10524 decl = OMP_CLAUSE_DECL (c);
10525 key = (splay_tree_key) decl;
10526 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10527 key = (splay_tree_key) &DECL_UID (decl);
10528 n = splay_tree_lookup (ctx->field_map, key);
10529 if (n == NULL)
10530 break;
10531 f = (tree) n->value;
10532 if (tcctx.cb.decl_map)
10533 f = *tcctx.cb.decl_map->get (f);
10534 n = splay_tree_lookup (ctx->sfield_map, key);
10535 sf = (tree) n->value;
10536 if (tcctx.cb.decl_map)
10537 sf = *tcctx.cb.decl_map->get (sf);
10538 src = build_simple_mem_ref_loc (loc, sarg);
10539 src = omp_build_component_ref (src, sf);
10540 dst = build_simple_mem_ref_loc (loc, arg);
10541 dst = omp_build_component_ref (dst, f);
10542 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10543 append_to_statement_list (t, &list);
10544 break;
10545 case OMP_CLAUSE_REDUCTION:
10546 case OMP_CLAUSE_IN_REDUCTION:
10547 decl = OMP_CLAUSE_DECL (c);
10548 if (TREE_CODE (decl) == MEM_REF)
10549 {
10550 decl = TREE_OPERAND (decl, 0);
10551 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10552 decl = TREE_OPERAND (decl, 0);
10553 if (TREE_CODE (decl) == INDIRECT_REF
10554 || TREE_CODE (decl) == ADDR_EXPR)
10555 decl = TREE_OPERAND (decl, 0);
10556 }
10557 key = (splay_tree_key) decl;
10558 n = splay_tree_lookup (ctx->field_map, key);
10559 if (n == NULL)
10560 break;
10561 f = (tree) n->value;
10562 if (tcctx.cb.decl_map)
10563 f = *tcctx.cb.decl_map->get (f);
10564 n = splay_tree_lookup (ctx->sfield_map, key);
10565 sf = (tree) n->value;
10566 if (tcctx.cb.decl_map)
10567 sf = *tcctx.cb.decl_map->get (sf);
10568 src = build_simple_mem_ref_loc (loc, sarg);
10569 src = omp_build_component_ref (src, sf);
10570 if (decl != OMP_CLAUSE_DECL (c)
10571 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10572 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10573 src = build_simple_mem_ref_loc (loc, src);
10574 dst = build_simple_mem_ref_loc (loc, arg);
10575 dst = omp_build_component_ref (dst, f);
10576 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10577 append_to_statement_list (t, &list);
10578 break;
10579 case OMP_CLAUSE__LOOPTEMP_:
10580 /* Fields for the first two _looptemp_ clauses are initialized by
10581 GOMP_taskloop*; the rest are handled like firstprivate. */
10582 if (looptempno < 2)
10583 {
10584 looptempno++;
10585 break;
10586 }
10587 /* FALLTHRU */
10588 case OMP_CLAUSE__REDUCTEMP_:
10589 case OMP_CLAUSE_FIRSTPRIVATE:
10590 decl = OMP_CLAUSE_DECL (c);
10591 if (is_variable_sized (decl))
10592 break;
10593 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10594 if (n == NULL)
10595 break;
10596 f = (tree) n->value;
10597 if (tcctx.cb.decl_map)
10598 f = *tcctx.cb.decl_map->get (f);
10599 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10600 if (n != NULL)
10601 {
10602 sf = (tree) n->value;
10603 if (tcctx.cb.decl_map)
10604 sf = *tcctx.cb.decl_map->get (sf);
10605 src = build_simple_mem_ref_loc (loc, sarg);
10606 src = omp_build_component_ref (src, sf);
10607 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10608 src = build_simple_mem_ref_loc (loc, src);
10609 }
10610 else
10611 src = decl;
10612 dst = build_simple_mem_ref_loc (loc, arg);
10613 dst = omp_build_component_ref (dst, f);
10614 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10615 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10616 else
10617 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10618 append_to_statement_list (t, &list);
10619 break;
10620 case OMP_CLAUSE_PRIVATE:
10621 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10622 break;
10623 decl = OMP_CLAUSE_DECL (c);
10624 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10625 f = (tree) n->value;
10626 if (tcctx.cb.decl_map)
10627 f = *tcctx.cb.decl_map->get (f);
10628 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10629 if (n != NULL)
10630 {
10631 sf = (tree) n->value;
10632 if (tcctx.cb.decl_map)
10633 sf = *tcctx.cb.decl_map->get (sf);
10634 src = build_simple_mem_ref_loc (loc, sarg);
10635 src = omp_build_component_ref (src, sf);
10636 if (use_pointer_for_field (decl, NULL))
10637 src = build_simple_mem_ref_loc (loc, src);
10638 }
10639 else
10640 src = decl;
10641 dst = build_simple_mem_ref_loc (loc, arg);
10642 dst = omp_build_component_ref (dst, f);
10643 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10644 append_to_statement_list (t, &list);
10645 break;
10646 default:
10647 break;
10648 }
10649
10650 /* Last pass: handle VLA firstprivates. */
10651 if (tcctx.cb.decl_map)
10652 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10653 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10654 {
10655 tree ind, ptr, df;
10656
10657 decl = OMP_CLAUSE_DECL (c);
10658 if (!is_variable_sized (decl))
10659 continue;
10660 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10661 if (n == NULL)
10662 continue;
10663 f = (tree) n->value;
10664 f = *tcctx.cb.decl_map->get (f);
10665 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
10666 ind = DECL_VALUE_EXPR (decl);
10667 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
10668 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
10669 n = splay_tree_lookup (ctx->sfield_map,
10670 (splay_tree_key) TREE_OPERAND (ind, 0));
10671 sf = (tree) n->value;
10672 sf = *tcctx.cb.decl_map->get (sf);
10673 src = build_simple_mem_ref_loc (loc, sarg);
10674 src = omp_build_component_ref (src, sf);
10675 src = build_simple_mem_ref_loc (loc, src);
10676 dst = build_simple_mem_ref_loc (loc, arg);
10677 dst = omp_build_component_ref (dst, f);
10678 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10679 append_to_statement_list (t, &list);
10680 n = splay_tree_lookup (ctx->field_map,
10681 (splay_tree_key) TREE_OPERAND (ind, 0));
10682 df = (tree) n->value;
10683 df = *tcctx.cb.decl_map->get (df);
10684 ptr = build_simple_mem_ref_loc (loc, arg);
10685 ptr = omp_build_component_ref (ptr, df);
10686 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
10687 build_fold_addr_expr_loc (loc, dst));
10688 append_to_statement_list (t, &list);
10689 }
10690
10691 t = build1 (RETURN_EXPR, void_type_node, NULL);
10692 append_to_statement_list (t, &list);
10693
10694 if (tcctx.cb.decl_map)
10695 delete tcctx.cb.decl_map;
10696 pop_gimplify_context (NULL);
10697 BIND_EXPR_BODY (bind) = list;
10698 pop_cfun ();
10699 }
10700
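/* Lower the OMP_CLAUSE_DEPEND clauses in *PCLAUSES into the flat pointer
array handed to the task runtime entry points (GOMP_task etc.). The
layout sketched here is read directly off the code that follows rather
than quoted from the libgomp interface:

with only in/out/inout dependences (idx == 2), TOTAL + 2 pointers:
array[0] = TOTAL total number of dependences
array[1] = number of out/inout dependences
array[2..] addresses, out/inout entries first

with mutexinoutset or depobj dependences present (idx == 5):
array[0] = 0 marker for the extended format
array[1] = TOTAL
array[2] = number of out/inout
array[3] = number of mutexinoutset
array[4] = number of in (the depobj count is implied)
array[5..] addresses, grouped out/inout, mutexinoutset, in, depobj

The array's address is prepended to *PCLAUSES as an OMP_CLAUSE_DEPEND_LAST
clause; the setup code goes to *ISEQ and a clobber of the array to *OSEQ. */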
10701 static void
10702 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
10703 {
10704 tree c, clauses;
10705 gimple *g;
10706 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
10707
10708 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
10709 gcc_assert (clauses);
10710 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10711 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
10712 switch (OMP_CLAUSE_DEPEND_KIND (c))
10713 {
10714 case OMP_CLAUSE_DEPEND_LAST:
10715 /* Lowering already done at gimplification. */
10716 return;
10717 case OMP_CLAUSE_DEPEND_IN:
10718 cnt[2]++;
10719 break;
10720 case OMP_CLAUSE_DEPEND_OUT:
10721 case OMP_CLAUSE_DEPEND_INOUT:
10722 cnt[0]++;
10723 break;
10724 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
10725 cnt[1]++;
10726 break;
10727 case OMP_CLAUSE_DEPEND_DEPOBJ:
10728 cnt[3]++;
10729 break;
10730 case OMP_CLAUSE_DEPEND_SOURCE:
10731 case OMP_CLAUSE_DEPEND_SINK:
10732 /* FALLTHRU */
10733 default:
10734 gcc_unreachable ();
10735 }
10736 if (cnt[1] || cnt[3])
10737 idx = 5;
10738 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
10739 tree type = build_array_type_nelts (ptr_type_node, total + idx);
10740 tree array = create_tmp_var (type);
10741 TREE_ADDRESSABLE (array) = 1;
10742 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
10743 NULL_TREE);
10744 if (idx == 5)
10745 {
10746 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
10747 gimple_seq_add_stmt (iseq, g);
10748 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
10749 NULL_TREE);
10750 }
10751 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
10752 gimple_seq_add_stmt (iseq, g);
10753 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
10754 {
10755 r = build4 (ARRAY_REF, ptr_type_node, array,
10756 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
10757 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
10758 gimple_seq_add_stmt (iseq, g);
10759 }
10760 for (i = 0; i < 4; i++)
10761 {
10762 if (cnt[i] == 0)
10763 continue;
10764 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10765 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
10766 continue;
10767 else
10768 {
10769 switch (OMP_CLAUSE_DEPEND_KIND (c))
10770 {
10771 case OMP_CLAUSE_DEPEND_IN:
10772 if (i != 2)
10773 continue;
10774 break;
10775 case OMP_CLAUSE_DEPEND_OUT:
10776 case OMP_CLAUSE_DEPEND_INOUT:
10777 if (i != 0)
10778 continue;
10779 break;
10780 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
10781 if (i != 1)
10782 continue;
10783 break;
10784 case OMP_CLAUSE_DEPEND_DEPOBJ:
10785 if (i != 3)
10786 continue;
10787 break;
10788 default:
10789 gcc_unreachable ();
10790 }
10791 tree t = OMP_CLAUSE_DECL (c);
10792 t = fold_convert (ptr_type_node, t);
10793 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
10794 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
10795 NULL_TREE, NULL_TREE);
10796 g = gimple_build_assign (r, t);
10797 gimple_seq_add_stmt (iseq, g);
10798 }
10799 }
10800 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
10801 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
10802 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
10803 OMP_CLAUSE_CHAIN (c) = *pclauses;
10804 *pclauses = c;
10805 tree clobber = build_constructor (type, NULL);
10806 TREE_THIS_VOLATILE (clobber) = 1;
10807 g = gimple_build_assign (array, clobber);
10808 gimple_seq_add_stmt (oseq, g);
10809 }
10810
10811 /* Lower the OpenMP parallel or task directive in the current statement
10812 in GSI_P. CTX holds context information for the directive. */
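/* Illustrative sketch (assumed names, not verbatim output): after this
function,

#pragma omp parallel shared(a)
use (a);

becomes roughly

{
.omp_data_o.a = &a; <- ilist: send shared vars
#pragma omp parallel [child fn, data_arg .omp_data_o]
{
.omp_data_i = &.omp_data_o; <- receiver setup
use (*.omp_data_i->a);
#pragma omp return
}
.omp_data_o = {CLOBBER}; <- olist
}
*/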
10813
10814 static void
10815 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10816 {
10817 tree clauses;
10818 tree child_fn, t;
10819 gimple *stmt = gsi_stmt (*gsi_p);
10820 gbind *par_bind, *bind, *dep_bind = NULL;
10821 gimple_seq par_body;
10822 location_t loc = gimple_location (stmt);
10823
10824 clauses = gimple_omp_taskreg_clauses (stmt);
10825 if (gimple_code (stmt) == GIMPLE_OMP_TASK
10826 && gimple_omp_task_taskwait_p (stmt))
10827 {
10828 par_bind = NULL;
10829 par_body = NULL;
10830 }
10831 else
10832 {
10833 par_bind
10834 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
10835 par_body = gimple_bind_body (par_bind);
10836 }
10837 child_fn = ctx->cb.dst_fn;
10838 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
10839 && !gimple_omp_parallel_combined_p (stmt))
10840 {
10841 struct walk_stmt_info wi;
10842 int ws_num = 0;
10843
10844 memset (&wi, 0, sizeof (wi));
10845 wi.info = &ws_num;
10846 wi.val_only = true;
10847 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
10848 if (ws_num == 1)
10849 gimple_omp_parallel_set_combined_p (stmt, true);
10850 }
10851 gimple_seq dep_ilist = NULL;
10852 gimple_seq dep_olist = NULL;
10853 if (gimple_code (stmt) == GIMPLE_OMP_TASK
10854 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
10855 {
10856 push_gimplify_context ();
10857 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
10858 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
10859 &dep_ilist, &dep_olist);
10860 }
10861
10862 if (gimple_code (stmt) == GIMPLE_OMP_TASK
10863 && gimple_omp_task_taskwait_p (stmt))
10864 {
10865 if (dep_bind)
10866 {
10867 gsi_replace (gsi_p, dep_bind, true);
10868 gimple_bind_add_seq (dep_bind, dep_ilist);
10869 gimple_bind_add_stmt (dep_bind, stmt);
10870 gimple_bind_add_seq (dep_bind, dep_olist);
10871 pop_gimplify_context (dep_bind);
10872 }
10873 return;
10874 }
10875
10876 if (ctx->srecord_type)
10877 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
10878
10879 gimple_seq tskred_ilist = NULL;
10880 gimple_seq tskred_olist = NULL;
10881 if ((is_task_ctx (ctx)
10882 && gimple_omp_task_taskloop_p (ctx->stmt)
10883 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
10884 OMP_CLAUSE_REDUCTION))
10885 || (is_parallel_ctx (ctx)
10886 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
10887 OMP_CLAUSE__REDUCTEMP_)))
10888 {
10889 if (dep_bind == NULL)
10890 {
10891 push_gimplify_context ();
10892 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
10893 }
10894 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
10895 : OMP_PARALLEL,
10896 gimple_omp_taskreg_clauses (ctx->stmt),
10897 &tskred_ilist, &tskred_olist);
10898 }
10899
10900 push_gimplify_context ();
10901
10902 gimple_seq par_olist = NULL;
10903 gimple_seq par_ilist = NULL;
10904 gimple_seq par_rlist = NULL;
10905 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
10906 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
10907 if (phony_construct && ctx->record_type)
10908 {
10909 gcc_checking_assert (!ctx->receiver_decl);
10910 ctx->receiver_decl = create_tmp_var
10911 (build_reference_type (ctx->record_type), ".omp_rec");
10912 }
10913 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
10914 lower_omp (&par_body, ctx);
10915 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
10916 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
10917
10918 /* Declare all the variables created by mapping and the variables
10919 declared in the scope of the parallel body. */
10920 record_vars_into (ctx->block_vars, child_fn);
10921 maybe_remove_omp_member_access_dummy_vars (par_bind);
10922 record_vars_into (gimple_bind_vars (par_bind), child_fn);
10923
10924 if (ctx->record_type)
10925 {
10926 ctx->sender_decl
10927 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
10928 : ctx->record_type, ".omp_data_o");
10929 DECL_NAMELESS (ctx->sender_decl) = 1;
10930 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
10931 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
10932 }
10933
10934 gimple_seq olist = NULL;
10935 gimple_seq ilist = NULL;
10936 lower_send_clauses (clauses, &ilist, &olist, ctx);
10937 lower_send_shared_vars (&ilist, &olist, ctx);
10938
10939 if (ctx->record_type)
10940 {
10941 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
10942 TREE_THIS_VOLATILE (clobber) = 1;
10943 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
10944 clobber));
10945 }
10946
10947 /* Once all the expansions are done, sequence all the different
10948 fragments inside gimple_omp_body. */
10949
10950 gimple_seq new_body = NULL;
10951
10952 if (ctx->record_type)
10953 {
10954 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
10955 /* fixup_child_record_type might have changed receiver_decl's type. */
10956 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
10957 gimple_seq_add_stmt (&new_body,
10958 gimple_build_assign (ctx->receiver_decl, t));
10959 }
10960
10961 gimple_seq_add_seq (&new_body, par_ilist);
10962 gimple_seq_add_seq (&new_body, par_body);
10963 gimple_seq_add_seq (&new_body, par_rlist);
10964 if (ctx->cancellable)
10965 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
10966 gimple_seq_add_seq (&new_body, par_olist);
10967 new_body = maybe_catch_exception (new_body);
10968 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
10969 gimple_seq_add_stmt (&new_body,
10970 gimple_build_omp_continue (integer_zero_node,
10971 integer_zero_node));
10972 if (!phony_construct)
10973 {
10974 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
10975 gimple_omp_set_body (stmt, new_body);
10976 }
10977
10978 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
10979 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
10980 else
10981 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
10982 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
10983 gimple_bind_add_seq (bind, ilist);
10984 if (!phony_construct)
10985 gimple_bind_add_stmt (bind, stmt);
10986 else
10987 gimple_bind_add_seq (bind, new_body);
10988 gimple_bind_add_seq (bind, olist);
10989
10990 pop_gimplify_context (NULL);
10991
10992 if (dep_bind)
10993 {
10994 gimple_bind_add_seq (dep_bind, dep_ilist);
10995 gimple_bind_add_seq (dep_bind, tskred_ilist);
10996 gimple_bind_add_stmt (dep_bind, bind);
10997 gimple_bind_add_seq (dep_bind, tskred_olist);
10998 gimple_bind_add_seq (dep_bind, dep_olist);
10999 pop_gimplify_context (dep_bind);
11000 }
11001 }
11002
11003 /* Lower the GIMPLE_OMP_TARGET in the current statement
11004 in GSI_P. CTX holds context information for the directive. */
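/* Illustrative sketch (assumed names): for

#pragma omp target map(tofrom: a)

the ctx->record_type block below builds three parallel arrays describing
the mappings:

.omp_data_arr = { &a }; <- addresses (the sender record)
.omp_data_sizes = { sizeof (a) }; <- size of each mapping
.omp_data_kinds = { GOMP_MAP_TOFROM | align_enc << 8 }; <- kind + alignment

where align_enc is ceil_log2 of the alignment in units, shifted by
talign_shift (8) into the upper bits of a short unsigned entry. */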
11005
11006 static void
11007 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11008 {
11009 tree clauses;
11010 tree child_fn, t, c;
11011 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11012 gbind *tgt_bind, *bind, *dep_bind = NULL;
11013 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11014 location_t loc = gimple_location (stmt);
11015 bool offloaded, data_region;
11016 unsigned int map_cnt = 0;
11017
11018 offloaded = is_gimple_omp_offloaded (stmt);
11019 switch (gimple_omp_target_kind (stmt))
11020 {
11021 case GF_OMP_TARGET_KIND_REGION:
11022 case GF_OMP_TARGET_KIND_UPDATE:
11023 case GF_OMP_TARGET_KIND_ENTER_DATA:
11024 case GF_OMP_TARGET_KIND_EXIT_DATA:
11025 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11026 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11027 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11028 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11029 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11030 data_region = false;
11031 break;
11032 case GF_OMP_TARGET_KIND_DATA:
11033 case GF_OMP_TARGET_KIND_OACC_DATA:
11034 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11035 data_region = true;
11036 break;
11037 default:
11038 gcc_unreachable ();
11039 }
11040
11041 clauses = gimple_omp_target_clauses (stmt);
11042
11043 gimple_seq dep_ilist = NULL;
11044 gimple_seq dep_olist = NULL;
11045 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11046 {
11047 push_gimplify_context ();
11048 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11049 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11050 &dep_ilist, &dep_olist);
11051 }
11052
11053 tgt_bind = NULL;
11054 tgt_body = NULL;
11055 if (offloaded)
11056 {
11057 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11058 tgt_body = gimple_bind_body (tgt_bind);
11059 }
11060 else if (data_region)
11061 tgt_body = gimple_omp_body (stmt);
11062 child_fn = ctx->cb.dst_fn;
11063
11064 push_gimplify_context ();
11065 fplist = NULL;
11066
11067 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11068 switch (OMP_CLAUSE_CODE (c))
11069 {
11070 tree var, x;
11071
11072 default:
11073 break;
11074 case OMP_CLAUSE_MAP:
11075 #if CHECKING_P
11076 /* First check what we're prepared to handle in the following. */
11077 switch (OMP_CLAUSE_MAP_KIND (c))
11078 {
11079 case GOMP_MAP_ALLOC:
11080 case GOMP_MAP_TO:
11081 case GOMP_MAP_FROM:
11082 case GOMP_MAP_TOFROM:
11083 case GOMP_MAP_POINTER:
11084 case GOMP_MAP_TO_PSET:
11085 case GOMP_MAP_DELETE:
11086 case GOMP_MAP_RELEASE:
11087 case GOMP_MAP_ALWAYS_TO:
11088 case GOMP_MAP_ALWAYS_FROM:
11089 case GOMP_MAP_ALWAYS_TOFROM:
11090 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11091 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11092 case GOMP_MAP_STRUCT:
11093 case GOMP_MAP_ALWAYS_POINTER:
11094 break;
11095 case GOMP_MAP_FORCE_ALLOC:
11096 case GOMP_MAP_FORCE_TO:
11097 case GOMP_MAP_FORCE_FROM:
11098 case GOMP_MAP_FORCE_TOFROM:
11099 case GOMP_MAP_FORCE_PRESENT:
11100 case GOMP_MAP_FORCE_DEVICEPTR:
11101 case GOMP_MAP_DEVICE_RESIDENT:
11102 case GOMP_MAP_LINK:
11103 gcc_assert (is_gimple_omp_oacc (stmt));
11104 break;
11105 default:
11106 gcc_unreachable ();
11107 }
11108 #endif
11109 /* FALLTHRU */
11110 case OMP_CLAUSE_TO:
11111 case OMP_CLAUSE_FROM:
11112 oacc_firstprivate:
11113 var = OMP_CLAUSE_DECL (c);
11114 if (!DECL_P (var))
11115 {
11116 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11117 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11118 && (OMP_CLAUSE_MAP_KIND (c)
11119 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11120 map_cnt++;
11121 continue;
11122 }
11123
11124 if (DECL_SIZE (var)
11125 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11126 {
11127 tree var2 = DECL_VALUE_EXPR (var);
11128 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11129 var2 = TREE_OPERAND (var2, 0);
11130 gcc_assert (DECL_P (var2));
11131 var = var2;
11132 }
11133
11134 if (offloaded
11135 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11136 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11137 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11138 {
11139 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11140 {
11141 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11142 && varpool_node::get_create (var)->offloadable)
11143 continue;
11144
11145 tree type = build_pointer_type (TREE_TYPE (var));
11146 tree new_var = lookup_decl (var, ctx);
11147 x = create_tmp_var_raw (type, get_name (new_var));
11148 gimple_add_tmp_var (x);
11149 x = build_simple_mem_ref (x);
11150 SET_DECL_VALUE_EXPR (new_var, x);
11151 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11152 }
11153 continue;
11154 }
11155
11156 if (!maybe_lookup_field (var, ctx))
11157 continue;
11158
11159 /* Don't remap oacc parallel reduction variables, because the
11160 intermediate result must be local to each gang. */
11161 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11162 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11163 {
11164 x = build_receiver_ref (var, true, ctx);
11165 tree new_var = lookup_decl (var, ctx);
11166
11167 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11168 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11169 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11170 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11171 x = build_simple_mem_ref (x);
11172 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11173 {
11174 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11175 if (omp_is_reference (new_var)
11176 && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
11177 {
11178 /* Create a local object to hold the instance
11179 value. */
11180 tree type = TREE_TYPE (TREE_TYPE (new_var));
11181 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11182 tree inst = create_tmp_var (type, id);
11183 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11184 x = build_fold_addr_expr (inst);
11185 }
11186 gimplify_assign (new_var, x, &fplist);
11187 }
11188 else if (DECL_P (new_var))
11189 {
11190 SET_DECL_VALUE_EXPR (new_var, x);
11191 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11192 }
11193 else
11194 gcc_unreachable ();
11195 }
11196 map_cnt++;
11197 break;
11198
11199 case OMP_CLAUSE_FIRSTPRIVATE:
11200 if (is_oacc_parallel (ctx))
11201 goto oacc_firstprivate;
11202 map_cnt++;
11203 var = OMP_CLAUSE_DECL (c);
11204 if (!omp_is_reference (var)
11205 && !is_gimple_reg_type (TREE_TYPE (var)))
11206 {
11207 tree new_var = lookup_decl (var, ctx);
11208 if (is_variable_sized (var))
11209 {
11210 tree pvar = DECL_VALUE_EXPR (var);
11211 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11212 pvar = TREE_OPERAND (pvar, 0);
11213 gcc_assert (DECL_P (pvar));
11214 tree new_pvar = lookup_decl (pvar, ctx);
11215 x = build_fold_indirect_ref (new_pvar);
11216 TREE_THIS_NOTRAP (x) = 1;
11217 }
11218 else
11219 x = build_receiver_ref (var, true, ctx);
11220 SET_DECL_VALUE_EXPR (new_var, x);
11221 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11222 }
11223 break;
11224
11225 case OMP_CLAUSE_PRIVATE:
11226 if (is_gimple_omp_oacc (ctx->stmt))
11227 break;
11228 var = OMP_CLAUSE_DECL (c);
11229 if (is_variable_sized (var))
11230 {
11231 tree new_var = lookup_decl (var, ctx);
11232 tree pvar = DECL_VALUE_EXPR (var);
11233 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11234 pvar = TREE_OPERAND (pvar, 0);
11235 gcc_assert (DECL_P (pvar));
11236 tree new_pvar = lookup_decl (pvar, ctx);
11237 x = build_fold_indirect_ref (new_pvar);
11238 TREE_THIS_NOTRAP (x) = 1;
11239 SET_DECL_VALUE_EXPR (new_var, x);
11240 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11241 }
11242 break;
11243
11244 case OMP_CLAUSE_USE_DEVICE_PTR:
11245 case OMP_CLAUSE_IS_DEVICE_PTR:
11246 var = OMP_CLAUSE_DECL (c);
11247 map_cnt++;
11248 if (is_variable_sized (var))
11249 {
11250 tree new_var = lookup_decl (var, ctx);
11251 tree pvar = DECL_VALUE_EXPR (var);
11252 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11253 pvar = TREE_OPERAND (pvar, 0);
11254 gcc_assert (DECL_P (pvar));
11255 tree new_pvar = lookup_decl (pvar, ctx);
11256 x = build_fold_indirect_ref (new_pvar);
11257 TREE_THIS_NOTRAP (x) = 1;
11258 SET_DECL_VALUE_EXPR (new_var, x);
11259 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11260 }
11261 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11262 {
11263 tree new_var = lookup_decl (var, ctx);
11264 tree type = build_pointer_type (TREE_TYPE (var));
11265 x = create_tmp_var_raw (type, get_name (new_var));
11266 gimple_add_tmp_var (x);
11267 x = build_simple_mem_ref (x);
11268 SET_DECL_VALUE_EXPR (new_var, x);
11269 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11270 }
11271 else
11272 {
11273 tree new_var = lookup_decl (var, ctx);
11274 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11275 gimple_add_tmp_var (x);
11276 SET_DECL_VALUE_EXPR (new_var, x);
11277 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11278 }
11279 break;
11280 }
11281
11282 if (offloaded)
11283 {
11284 target_nesting_level++;
11285 lower_omp (&tgt_body, ctx);
11286 target_nesting_level--;
11287 }
11288 else if (data_region)
11289 lower_omp (&tgt_body, ctx);
11290
11291 if (offloaded)
11292 {
11293 /* Declare all the variables created by mapping and the variables
11294 declared in the scope of the target body. */
11295 record_vars_into (ctx->block_vars, child_fn);
11296 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11297 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11298 }
11299
11300 olist = NULL;
11301 ilist = NULL;
11302 if (ctx->record_type)
11303 {
11304 ctx->sender_decl
11305 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11306 DECL_NAMELESS (ctx->sender_decl) = 1;
11307 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11308 t = make_tree_vec (3);
11309 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11310 TREE_VEC_ELT (t, 1)
11311 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11312 ".omp_data_sizes");
11313 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11314 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11315 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11316 tree tkind_type = short_unsigned_type_node;
11317 int talign_shift = 8;
11318 TREE_VEC_ELT (t, 2)
11319 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11320 ".omp_data_kinds");
11321 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11322 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11323 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11324 gimple_omp_target_set_data_arg (stmt, t);
11325
11326 vec<constructor_elt, va_gc> *vsize;
11327 vec<constructor_elt, va_gc> *vkind;
11328 vec_alloc (vsize, map_cnt);
11329 vec_alloc (vkind, map_cnt);
11330 unsigned int map_idx = 0;
11331
11332 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11333 switch (OMP_CLAUSE_CODE (c))
11334 {
11335 tree ovar, nc, s, purpose, var, x, type;
11336 unsigned int talign;
11337
11338 default:
11339 break;
11340
11341 case OMP_CLAUSE_MAP:
11342 case OMP_CLAUSE_TO:
11343 case OMP_CLAUSE_FROM:
11344 oacc_firstprivate_map:
11345 nc = c;
11346 ovar = OMP_CLAUSE_DECL (c);
11347 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11348 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11349 || (OMP_CLAUSE_MAP_KIND (c)
11350 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11351 break;
11352 if (!DECL_P (ovar))
11353 {
11354 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11355 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11356 {
11357 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11358 == get_base_address (ovar));
11359 nc = OMP_CLAUSE_CHAIN (c);
11360 ovar = OMP_CLAUSE_DECL (nc);
11361 }
11362 else
11363 {
11364 tree x = build_sender_ref (ovar, ctx);
11365 tree v
11366 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11367 gimplify_assign (x, v, &ilist);
11368 nc = NULL_TREE;
11369 }
11370 }
11371 else
11372 {
11373 if (DECL_SIZE (ovar)
11374 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11375 {
11376 tree ovar2 = DECL_VALUE_EXPR (ovar);
11377 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11378 ovar2 = TREE_OPERAND (ovar2, 0);
11379 gcc_assert (DECL_P (ovar2));
11380 ovar = ovar2;
11381 }
11382 if (!maybe_lookup_field (ovar, ctx))
11383 continue;
11384 }
11385
11386 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11387 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11388 talign = DECL_ALIGN_UNIT (ovar);
11389 if (nc)
11390 {
11391 var = lookup_decl_in_outer_ctx (ovar, ctx);
11392 x = build_sender_ref (ovar, ctx);
11393
11394 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11395 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11396 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11397 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11398 {
11399 gcc_assert (offloaded);
11400 tree avar
11401 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11402 mark_addressable (avar);
11403 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11404 talign = DECL_ALIGN_UNIT (avar);
11405 avar = build_fold_addr_expr (avar);
11406 gimplify_assign (x, avar, &ilist);
11407 }
11408 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11409 {
11410 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11411 if (!omp_is_reference (var))
11412 {
11413 if (is_gimple_reg (var)
11414 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11415 TREE_NO_WARNING (var) = 1;
11416 var = build_fold_addr_expr (var);
11417 }
11418 else
11419 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11420 gimplify_assign (x, var, &ilist);
11421 }
11422 else if (is_gimple_reg (var))
11423 {
11424 gcc_assert (offloaded);
11425 tree avar = create_tmp_var (TREE_TYPE (var));
11426 mark_addressable (avar);
11427 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11428 if (GOMP_MAP_COPY_TO_P (map_kind)
11429 || map_kind == GOMP_MAP_POINTER
11430 || map_kind == GOMP_MAP_TO_PSET
11431 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11432 {
11433 /* If we need to initialize a temporary
11434 with VAR because it is not addressable, and
11435 the variable hasn't been initialized yet, then
11436 we'll get a warning for the store to avar.
11437 Don't warn in that case; the mapping might
11438 be implicit. */
11439 TREE_NO_WARNING (var) = 1;
11440 gimplify_assign (avar, var, &ilist);
11441 }
11442 avar = build_fold_addr_expr (avar);
11443 gimplify_assign (x, avar, &ilist);
11444 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11445 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11446 && !TYPE_READONLY (TREE_TYPE (var)))
11447 {
11448 x = unshare_expr (x);
11449 x = build_simple_mem_ref (x);
11450 gimplify_assign (var, x, &olist);
11451 }
11452 }
11453 else
11454 {
11455 var = build_fold_addr_expr (var);
11456 gimplify_assign (x, var, &ilist);
11457 }
11458 }
11459 s = NULL_TREE;
11460 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11461 {
11462 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11463 s = TREE_TYPE (ovar);
11464 if (TREE_CODE (s) == REFERENCE_TYPE)
11465 s = TREE_TYPE (s);
11466 s = TYPE_SIZE_UNIT (s);
11467 }
11468 else
11469 s = OMP_CLAUSE_SIZE (c);
11470 if (s == NULL_TREE)
11471 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11472 s = fold_convert (size_type_node, s);
11473 purpose = size_int (map_idx++);
11474 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11475 if (TREE_CODE (s) != INTEGER_CST)
11476 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11477
11478 unsigned HOST_WIDE_INT tkind, tkind_zero;
11479 switch (OMP_CLAUSE_CODE (c))
11480 {
11481 case OMP_CLAUSE_MAP:
11482 tkind = OMP_CLAUSE_MAP_KIND (c);
11483 tkind_zero = tkind;
11484 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11485 switch (tkind)
11486 {
11487 case GOMP_MAP_ALLOC:
11488 case GOMP_MAP_TO:
11489 case GOMP_MAP_FROM:
11490 case GOMP_MAP_TOFROM:
11491 case GOMP_MAP_ALWAYS_TO:
11492 case GOMP_MAP_ALWAYS_FROM:
11493 case GOMP_MAP_ALWAYS_TOFROM:
11494 case GOMP_MAP_RELEASE:
11495 case GOMP_MAP_FORCE_TO:
11496 case GOMP_MAP_FORCE_FROM:
11497 case GOMP_MAP_FORCE_TOFROM:
11498 case GOMP_MAP_FORCE_PRESENT:
11499 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11500 break;
11501 case GOMP_MAP_DELETE:
11502 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11503 default:
11504 break;
11505 }
11506 if (tkind_zero != tkind)
11507 {
11508 if (integer_zerop (s))
11509 tkind = tkind_zero;
11510 else if (integer_nonzerop (s))
11511 tkind_zero = tkind;
11512 }
11513 break;
11514 case OMP_CLAUSE_FIRSTPRIVATE:
11515 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11516 tkind = GOMP_MAP_TO;
11517 tkind_zero = tkind;
11518 break;
11519 case OMP_CLAUSE_TO:
11520 tkind = GOMP_MAP_TO;
11521 tkind_zero = tkind;
11522 break;
11523 case OMP_CLAUSE_FROM:
11524 tkind = GOMP_MAP_FROM;
11525 tkind_zero = tkind;
11526 break;
11527 default:
11528 gcc_unreachable ();
11529 }
11530 gcc_checking_assert (tkind
11531 < (HOST_WIDE_INT_C (1U) << talign_shift));
11532 gcc_checking_assert (tkind_zero
11533 < (HOST_WIDE_INT_C (1U) << talign_shift));
11534 talign = ceil_log2 (talign);
11535 tkind |= talign << talign_shift;
11536 tkind_zero |= talign << talign_shift;
11537 gcc_checking_assert (tkind
11538 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11539 gcc_checking_assert (tkind_zero
11540 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11541 if (tkind == tkind_zero)
11542 x = build_int_cstu (tkind_type, tkind);
11543 else
11544 {
11545 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11546 x = build3 (COND_EXPR, tkind_type,
11547 fold_build2 (EQ_EXPR, boolean_type_node,
11548 unshare_expr (s), size_zero_node),
11549 build_int_cstu (tkind_type, tkind_zero),
11550 build_int_cstu (tkind_type, tkind));
11551 }
11552 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11553 if (nc && nc != c)
11554 c = nc;
11555 break;
11556
11557 case OMP_CLAUSE_FIRSTPRIVATE:
11558 if (is_oacc_parallel (ctx))
11559 goto oacc_firstprivate_map;
11560 ovar = OMP_CLAUSE_DECL (c);
11561 if (omp_is_reference (ovar))
11562 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11563 else
11564 talign = DECL_ALIGN_UNIT (ovar);
11565 var = lookup_decl_in_outer_ctx (ovar, ctx);
11566 x = build_sender_ref (ovar, ctx);
11567 tkind = GOMP_MAP_FIRSTPRIVATE;
11568 type = TREE_TYPE (ovar);
11569 if (omp_is_reference (ovar))
11570 type = TREE_TYPE (type);
11571 if ((INTEGRAL_TYPE_P (type)
11572 && TYPE_PRECISION (type) <= POINTER_SIZE)
11573 || TREE_CODE (type) == POINTER_TYPE)
11574 {
11575 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11576 tree t = var;
11577 if (omp_is_reference (var))
11578 t = build_simple_mem_ref (var);
11579 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11580 TREE_NO_WARNING (var) = 1;
11581 if (TREE_CODE (type) != POINTER_TYPE)
11582 t = fold_convert (pointer_sized_int_node, t);
11583 t = fold_convert (TREE_TYPE (x), t);
11584 gimplify_assign (x, t, &ilist);
11585 }
11586 else if (omp_is_reference (var))
11587 gimplify_assign (x, var, &ilist);
11588 else if (is_gimple_reg (var))
11589 {
11590 tree avar = create_tmp_var (TREE_TYPE (var));
11591 mark_addressable (avar);
11592 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11593 TREE_NO_WARNING (var) = 1;
11594 gimplify_assign (avar, var, &ilist);
11595 avar = build_fold_addr_expr (avar);
11596 gimplify_assign (x, avar, &ilist);
11597 }
11598 else
11599 {
11600 var = build_fold_addr_expr (var);
11601 gimplify_assign (x, var, &ilist);
11602 }
11603 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11604 s = size_int (0);
11605 else if (omp_is_reference (ovar))
11606 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11607 else
11608 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11609 s = fold_convert (size_type_node, s);
11610 purpose = size_int (map_idx++);
11611 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11612 if (TREE_CODE (s) != INTEGER_CST)
11613 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11614
11615 gcc_checking_assert (tkind
11616 < (HOST_WIDE_INT_C (1U) << talign_shift));
11617 talign = ceil_log2 (talign);
11618 tkind |= talign << talign_shift;
11619 gcc_checking_assert (tkind
11620 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11621 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11622 build_int_cstu (tkind_type, tkind));
11623 break;
11624
11625 case OMP_CLAUSE_USE_DEVICE_PTR:
11626 case OMP_CLAUSE_IS_DEVICE_PTR:
11627 ovar = OMP_CLAUSE_DECL (c);
11628 var = lookup_decl_in_outer_ctx (ovar, ctx);
11629 x = build_sender_ref (ovar, ctx);
11630 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
11631 tkind = GOMP_MAP_USE_DEVICE_PTR;
11632 else
11633 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11634 type = TREE_TYPE (ovar);
11635 if (TREE_CODE (type) == ARRAY_TYPE)
11636 var = build_fold_addr_expr (var);
11637 else
11638 {
11639 if (omp_is_reference (ovar))
11640 {
11641 type = TREE_TYPE (type);
11642 if (TREE_CODE (type) != ARRAY_TYPE)
11643 var = build_simple_mem_ref (var);
11644 var = fold_convert (TREE_TYPE (x), var);
11645 }
11646 }
11647 gimplify_assign (x, var, &ilist);
11648 s = size_int (0);
11649 purpose = size_int (map_idx++);
11650 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11651 gcc_checking_assert (tkind
11652 < (HOST_WIDE_INT_C (1U) << talign_shift));
11653 gcc_checking_assert (tkind
11654 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11655 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11656 build_int_cstu (tkind_type, tkind));
11657 break;
11658 }
11659
11660 gcc_assert (map_idx == map_cnt);
11661
11662 DECL_INITIAL (TREE_VEC_ELT (t, 1))
11663 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
11664 DECL_INITIAL (TREE_VEC_ELT (t, 2))
11665 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
11666 for (int i = 1; i <= 2; i++)
11667 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
11668 {
11669 gimple_seq initlist = NULL;
11670 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
11671 TREE_VEC_ELT (t, i)),
11672 &initlist, true, NULL_TREE);
11673 gimple_seq_add_seq (&ilist, initlist);
11674
11675 tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
11676 NULL);
11677 TREE_THIS_VOLATILE (clobber) = 1;
11678 gimple_seq_add_stmt (&olist,
11679 gimple_build_assign (TREE_VEC_ELT (t, i),
11680 clobber));
11681 }
11682
11683 tree clobber = build_constructor (ctx->record_type, NULL);
11684 TREE_THIS_VOLATILE (clobber) = 1;
11685 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11686 clobber));
11687 }
11688
11689 /* Once all the expansions are done, sequence all the different
11690 fragments inside gimple_omp_body. */
11691
11692 new_body = NULL;
11693
11694 if (offloaded
11695 && ctx->record_type)
11696 {
11697 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11698 /* fixup_child_record_type might have changed receiver_decl's type. */
11699 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11700 gimple_seq_add_stmt (&new_body,
11701 gimple_build_assign (ctx->receiver_decl, t));
11702 }
11703 gimple_seq_add_seq (&new_body, fplist);
11704
11705 if (offloaded || data_region)
11706 {
11707 tree prev = NULL_TREE;
11708 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11709 switch (OMP_CLAUSE_CODE (c))
11710 {
11711 tree var, x;
11712 default:
11713 break;
11714 case OMP_CLAUSE_FIRSTPRIVATE:
11715 if (is_gimple_omp_oacc (ctx->stmt))
11716 break;
11717 var = OMP_CLAUSE_DECL (c);
11718 if (omp_is_reference (var)
11719 || is_gimple_reg_type (TREE_TYPE (var)))
11720 {
11721 tree new_var = lookup_decl (var, ctx);
11722 tree type;
11723 type = TREE_TYPE (var);
11724 if (omp_is_reference (var))
11725 type = TREE_TYPE (type);
11726 if ((INTEGRAL_TYPE_P (type)
11727 && TYPE_PRECISION (type) <= POINTER_SIZE)
11728 || TREE_CODE (type) == POINTER_TYPE)
11729 {
11730 x = build_receiver_ref (var, false, ctx);
11731 if (TREE_CODE (type) != POINTER_TYPE)
11732 x = fold_convert (pointer_sized_int_node, x);
11733 x = fold_convert (type, x);
11734 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11735 fb_rvalue);
11736 if (omp_is_reference (var))
11737 {
11738 tree v = create_tmp_var_raw (type, get_name (var));
11739 gimple_add_tmp_var (v);
11740 TREE_ADDRESSABLE (v) = 1;
11741 gimple_seq_add_stmt (&new_body,
11742 gimple_build_assign (v, x));
11743 x = build_fold_addr_expr (v);
11744 }
11745 gimple_seq_add_stmt (&new_body,
11746 gimple_build_assign (new_var, x));
11747 }
11748 else
11749 {
11750 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
11751 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11752 fb_rvalue);
11753 gimple_seq_add_stmt (&new_body,
11754 gimple_build_assign (new_var, x));
11755 }
11756 }
11757 else if (is_variable_sized (var))
11758 {
11759 tree pvar = DECL_VALUE_EXPR (var);
11760 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11761 pvar = TREE_OPERAND (pvar, 0);
11762 gcc_assert (DECL_P (pvar));
11763 tree new_var = lookup_decl (pvar, ctx);
11764 x = build_receiver_ref (var, false, ctx);
11765 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11766 gimple_seq_add_stmt (&new_body,
11767 gimple_build_assign (new_var, x));
11768 }
11769 break;
11770 case OMP_CLAUSE_PRIVATE:
11771 if (is_gimple_omp_oacc (ctx->stmt))
11772 break;
11773 var = OMP_CLAUSE_DECL (c);
11774 if (omp_is_reference (var))
11775 {
11776 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11777 tree new_var = lookup_decl (var, ctx);
11778 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
11779 if (TREE_CONSTANT (x))
11780 {
11781 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
11782 get_name (var));
11783 gimple_add_tmp_var (x);
11784 TREE_ADDRESSABLE (x) = 1;
11785 x = build_fold_addr_expr_loc (clause_loc, x);
11786 }
11787 else
11788 break;
11789
11790 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11791 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11792 gimple_seq_add_stmt (&new_body,
11793 gimple_build_assign (new_var, x));
11794 }
11795 break;
11796 case OMP_CLAUSE_USE_DEVICE_PTR:
11797 case OMP_CLAUSE_IS_DEVICE_PTR:
11798 var = OMP_CLAUSE_DECL (c);
11799 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
11800 x = build_sender_ref (var, ctx);
11801 else
11802 x = build_receiver_ref (var, false, ctx);
11803 if (is_variable_sized (var))
11804 {
11805 tree pvar = DECL_VALUE_EXPR (var);
11806 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11807 pvar = TREE_OPERAND (pvar, 0);
11808 gcc_assert (DECL_P (pvar));
11809 tree new_var = lookup_decl (pvar, ctx);
11810 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11811 gimple_seq_add_stmt (&new_body,
11812 gimple_build_assign (new_var, x));
11813 }
11814 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11815 {
11816 tree new_var = lookup_decl (var, ctx);
11817 new_var = DECL_VALUE_EXPR (new_var);
11818 gcc_assert (TREE_CODE (new_var) == MEM_REF);
11819 new_var = TREE_OPERAND (new_var, 0);
11820 gcc_assert (DECL_P (new_var));
11821 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11822 gimple_seq_add_stmt (&new_body,
11823 gimple_build_assign (new_var, x));
11824 }
11825 else
11826 {
11827 tree type = TREE_TYPE (var);
11828 tree new_var = lookup_decl (var, ctx);
11829 if (omp_is_reference (var))
11830 {
11831 type = TREE_TYPE (type);
11832 if (TREE_CODE (type) != ARRAY_TYPE)
11833 {
11834 tree v = create_tmp_var_raw (type, get_name (var));
11835 gimple_add_tmp_var (v);
11836 TREE_ADDRESSABLE (v) = 1;
11837 x = fold_convert (type, x);
11838 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
11839 fb_rvalue);
11840 gimple_seq_add_stmt (&new_body,
11841 gimple_build_assign (v, x));
11842 x = build_fold_addr_expr (v);
11843 }
11844 }
11845 new_var = DECL_VALUE_EXPR (new_var);
11846 x = fold_convert (TREE_TYPE (new_var), x);
11847 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11848 gimple_seq_add_stmt (&new_body,
11849 gimple_build_assign (new_var, x));
11850 }
11851 break;
11852 }
11853 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
11854 so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE have
11855 already been handled, and similarly OMP_CLAUSE_PRIVATE for VLAs
11856 or references to VLAs. */
11857 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11858 switch (OMP_CLAUSE_CODE (c))
11859 {
11860 tree var;
11861 default:
11862 break;
11863 case OMP_CLAUSE_MAP:
11864 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11865 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11866 {
11867 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11868 poly_int64 offset = 0;
11869 gcc_assert (prev);
11870 var = OMP_CLAUSE_DECL (c);
11871 if (DECL_P (var)
11872 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
11873 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
11874 ctx))
11875 && varpool_node::get_create (var)->offloadable)
11876 break;
11877 if (TREE_CODE (var) == INDIRECT_REF
11878 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
11879 var = TREE_OPERAND (var, 0);
11880 if (TREE_CODE (var) == COMPONENT_REF)
11881 {
11882 var = get_addr_base_and_unit_offset (var, &offset);
11883 gcc_assert (var != NULL_TREE && DECL_P (var));
11884 }
11885 else if (DECL_SIZE (var)
11886 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11887 {
11888 tree var2 = DECL_VALUE_EXPR (var);
11889 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11890 var2 = TREE_OPERAND (var2, 0);
11891 gcc_assert (DECL_P (var2));
11892 var = var2;
11893 }
11894 tree new_var = lookup_decl (var, ctx), x;
11895 tree type = TREE_TYPE (new_var);
11896 bool is_ref;
11897 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
11898 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11899 == COMPONENT_REF))
11900 {
11901 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
11902 is_ref = true;
11903 new_var = build2 (MEM_REF, type,
11904 build_fold_addr_expr (new_var),
11905 build_int_cst (build_pointer_type (type),
11906 offset));
11907 }
11908 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
11909 {
11910 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
11911 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
11912 new_var = build2 (MEM_REF, type,
11913 build_fold_addr_expr (new_var),
11914 build_int_cst (build_pointer_type (type),
11915 offset));
11916 }
11917 else
11918 is_ref = omp_is_reference (var);
11919 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11920 is_ref = false;
11921 bool ref_to_array = false;
11922 if (is_ref)
11923 {
11924 type = TREE_TYPE (type);
11925 if (TREE_CODE (type) == ARRAY_TYPE)
11926 {
11927 type = build_pointer_type (type);
11928 ref_to_array = true;
11929 }
11930 }
11931 else if (TREE_CODE (type) == ARRAY_TYPE)
11932 {
11933 tree decl2 = DECL_VALUE_EXPR (new_var);
11934 gcc_assert (TREE_CODE (decl2) == MEM_REF);
11935 decl2 = TREE_OPERAND (decl2, 0);
11936 gcc_assert (DECL_P (decl2));
11937 new_var = decl2;
11938 type = TREE_TYPE (new_var);
11939 }
11940 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
11941 x = fold_convert_loc (clause_loc, type, x);
11942 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
11943 {
11944 tree bias = OMP_CLAUSE_SIZE (c);
11945 if (DECL_P (bias))
11946 bias = lookup_decl (bias, ctx);
11947 bias = fold_convert_loc (clause_loc, sizetype, bias);
11948 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
11949 bias);
11950 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
11951 TREE_TYPE (x), x, bias);
11952 }
11953 if (ref_to_array)
11954 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
11955 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11956 if (is_ref && !ref_to_array)
11957 {
11958 tree t = create_tmp_var_raw (type, get_name (var));
11959 gimple_add_tmp_var (t);
11960 TREE_ADDRESSABLE (t) = 1;
11961 gimple_seq_add_stmt (&new_body,
11962 gimple_build_assign (t, x));
11963 x = build_fold_addr_expr_loc (clause_loc, t);
11964 }
11965 gimple_seq_add_stmt (&new_body,
11966 gimple_build_assign (new_var, x));
11967 prev = NULL_TREE;
11968 }
11969 else if (OMP_CLAUSE_CHAIN (c)
11970 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
11971 == OMP_CLAUSE_MAP
11972 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11973 == GOMP_MAP_FIRSTPRIVATE_POINTER
11974 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11975 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11976 prev = c;
11977 break;
11978 case OMP_CLAUSE_PRIVATE:
11979 var = OMP_CLAUSE_DECL (c);
11980 if (is_variable_sized (var))
11981 {
11982 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11983 tree new_var = lookup_decl (var, ctx);
11984 tree pvar = DECL_VALUE_EXPR (var);
11985 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11986 pvar = TREE_OPERAND (pvar, 0);
11987 gcc_assert (DECL_P (pvar));
11988 tree new_pvar = lookup_decl (pvar, ctx);
11989 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11990 tree al = size_int (DECL_ALIGN (var));
11991 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
11992 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
11993 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
11994 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
11995 gimple_seq_add_stmt (&new_body,
11996 gimple_build_assign (new_pvar, x));
11997 }
11998 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
11999 {
12000 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12001 tree new_var = lookup_decl (var, ctx);
12002 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12003 if (TREE_CONSTANT (x))
12004 break;
12005 else
12006 {
12007 tree atmp
12008 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12009 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12010 tree al = size_int (TYPE_ALIGN (rtype));
12011 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12012 }
12013
12014 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12015 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12016 gimple_seq_add_stmt (&new_body,
12017 gimple_build_assign (new_var, x));
12018 }
12019 break;
12020 }
12021
12022 gimple_seq fork_seq = NULL;
12023 gimple_seq join_seq = NULL;
12024
12025 if (is_oacc_parallel (ctx))
12026 {
12027 /* If there are reductions on the offloaded region itself, treat
12028 them as a dummy GANG loop. */
12029 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12030
12031 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12032 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12033 }
12034
12035 gimple_seq_add_seq (&new_body, fork_seq);
12036 gimple_seq_add_seq (&new_body, tgt_body);
12037 gimple_seq_add_seq (&new_body, join_seq);
12038
12039 if (offloaded)
12040 new_body = maybe_catch_exception (new_body);
12041
12042 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12043 gimple_omp_set_body (stmt, new_body);
12044 }
12045
12046 bind = gimple_build_bind (NULL, NULL,
12047 tgt_bind ? gimple_bind_block (tgt_bind)
12048 : NULL_TREE);
12049 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12050 gimple_bind_add_seq (bind, ilist);
12051 gimple_bind_add_stmt (bind, stmt);
12052 gimple_bind_add_seq (bind, olist);
12053
12054 pop_gimplify_context (NULL);
12055
12056 if (dep_bind)
12057 {
12058 gimple_bind_add_seq (dep_bind, dep_ilist);
12059 gimple_bind_add_stmt (dep_bind, bind);
12060 gimple_bind_add_seq (dep_bind, dep_olist);
12061 pop_gimplify_context (dep_bind);
12062 }
12063 }
12064
12065 /* Lower code for an OpenMP teams directive. */
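/* Illustrative sketch: for

#pragma omp teams num_teams(4) thread_limit(8)

the lowering below evaluates the two clause expressions and emits, roughly,

GOMP_teams (4, 8);

ahead of the teams body; an omitted clause is passed as 0 so the runtime
picks its default. */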
12066
12067 static void
12068 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12069 {
12070 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12071 push_gimplify_context ();
12072
12073 tree block = make_node (BLOCK);
12074 gbind *bind = gimple_build_bind (NULL, NULL, block);
12075 gsi_replace (gsi_p, bind, true);
12076 gimple_seq bind_body = NULL;
12077 gimple_seq dlist = NULL;
12078 gimple_seq olist = NULL;
12079
12080 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12081 OMP_CLAUSE_NUM_TEAMS);
12082 if (num_teams == NULL_TREE)
12083 num_teams = build_int_cst (unsigned_type_node, 0);
12084 else
12085 {
12086 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12087 num_teams = fold_convert (unsigned_type_node, num_teams);
12088 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
12089 }
12090 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12091 OMP_CLAUSE_THREAD_LIMIT);
12092 if (thread_limit == NULL_TREE)
12093 thread_limit = build_int_cst (unsigned_type_node, 0);
12094 else
12095 {
12096 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12097 thread_limit = fold_convert (unsigned_type_node, thread_limit);
12098 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12099 fb_rvalue);
12100 }
12101
12102 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12103 &bind_body, &dlist, ctx, NULL);
12104 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
12105 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12106 NULL, ctx);
12107 if (!gimple_omp_teams_grid_phony (teams_stmt))
12108 {
12109 gimple_seq_add_stmt (&bind_body, teams_stmt);
12110 location_t loc = gimple_location (teams_stmt);
12111 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12112 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12113 gimple_set_location (call, loc);
12114 gimple_seq_add_stmt (&bind_body, call);
12115 }
12116
12117 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12118 gimple_omp_set_body (teams_stmt, NULL);
12119 gimple_seq_add_seq (&bind_body, olist);
12120 gimple_seq_add_seq (&bind_body, dlist);
12121 if (!gimple_omp_teams_grid_phony (teams_stmt))
12122 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12123 gimple_bind_set_body (bind, bind_body);
12124
12125 pop_gimplify_context (bind);
12126
12127 gimple_bind_append_vars (bind, ctx->block_vars);
12128 BLOCK_VARS (block) = ctx->block_vars;
12129 if (BLOCK_VARS (block))
12130 TREE_USED (block) = 1;
12131 }
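
/* As an illustration of the lowering above (a sketch; the temporary
   names are invented, not taken from a real dump):

       #pragma omp teams num_teams (8) thread_limit (16)
       body;

   becomes roughly

       num_teams.0 = 8;
       thread_limit.1 = 16;
       GOMP_teams (num_teams.0, thread_limit.1);
       body;
       OMP_RETURN

   with both GOMP_teams arguments defaulting to 0 when the respective
   clause is absent.  */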

/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */

static void
lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
		       gimple_build_omp_return (false));
}


/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is being invoked
   outside of an OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}

/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and temporarily adjust their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}

/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
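
/* Note for readers: DECLS above acts as a save stack.  For each adjusted
   dummy var the walk pushes the original DECL_VALUE_EXPR and then the
   decl itself, so the restore loop pops them back in (decl, value-expr)
   order and undoes the temporary remapping after regimplification.  */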

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers; this should happen only if we have
	     "privatized" local addressable variables in SIMD regions.
	     The clobber isn't needed in that case, and gimplifying the
	     address of the ARRAY_REF into a pointer and creating a
	     MEM_REF based clobber would create worse code than we get
	     with the clobber dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
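
/* A sketch of the cancellation lowering performed in the GIMPLE_CALL
   case above (names such as D.1 and the labels are invented for
   illustration): within a cancellable region,

       GOMP_cancel (...);

   becomes

       D.1 = GOMP_cancel (...);
       if (D.1 != 0) goto <cancel_label>; else goto <fallthru>;
       <fallthru>:

   and GOMP_barrier is first replaced by GOMP_barrier_cancel so the
   barrier itself can report a pending cancellation.  */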

static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}

/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);

  /* If the current function is a method, remove the artificial dummy
     VAR_DECLs created for non-static data member privatization; they
     aren't needed for debug info or anything else, have already been
     replaced everywhere in the IL, and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}

namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
\f
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */
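
/* For instance, given (a hypothetical test case)

       goto l;
       #pragma omp parallel
       { l:; }

   the branch enters the structured block, and diagnose_sb_0 below
   reports "invalid entry to OpenMP structured block".  */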

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by tree_cons'ing up a list, but there is no easy
     counterpart in gimple tuples.  It seems like far too much work for
     issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}

/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
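
/* After pass 1, ALL_LABELS maps each label to the innermost OMP
   construct containing it, or to NULL when the label is outside any
   construct; pass 2 compares that recorded context with the context of
   every branch it encounters.  */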

/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
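
/* As a concrete example of the GIMPLE_RETURN case (hypothetical user
   code):

       #pragma omp parallel
       { return; }

   reaches diagnose_sb_0 with a non-NULL branch context and a NULL label
   context, yielding "invalid branch to/from OpenMP structured block".  */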

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}

namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
\f

#include "gt-omp-low.h"