1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2020 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62
63 /* Lowering of OMP parallel and workshare constructs proceeds in two
64 phases. The first phase scans the function looking for OMP statements
65 and then for variables that must be replaced to satisfy data sharing
66 clauses. The second phase expands code for the constructs, as well as
67 re-gimplifying things when variables have been replaced with complex
68 expressions.
69
70 Final code generation is done by pass_expand_omp. The flowgraph is
71 scanned for regions which are then moved to a new
72 function, to be invoked by the thread library, or offloaded. */
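/* As an illustrative sketch of the end result (not the exact GIMPLE this
   pass emits; names such as .omp_data_s follow the conventions used later
   in this file), a construct such as:

       int x = 0;
       #pragma omp parallel shared(x)
       x += foo ();

   is outlined into roughly:

       struct .omp_data_s { int x; };

       void main._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       {
         .omp_data_i->x += foo ();
       }

   with the directive itself replaced by data marshalling into a local
   .omp_data_o structure and a call to the libgomp entry point
   GOMP_parallel (main._omp_fn.0, &.omp_data_o, 0, 0).  */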
73
74 /* Context structure. Used to store information about each parallel
75 directive in the code. */
76
77 struct omp_context
78 {
79 /* This field must be at the beginning, as we do "inheritance": Some
80 callback functions for tree-inline.c (e.g., omp_copy_decl)
     81        callback functions for tree-inline.c (e.g., omp_copy_decl)
82 omp_context pointer. */
83 copy_body_data cb;
84
85 /* The tree of contexts corresponding to the encountered constructs. */
86 struct omp_context *outer;
87 gimple *stmt;
88
89 /* Map variables to fields in a structure that allows communication
90 between sending and receiving threads. */
91 splay_tree field_map;
92 tree record_type;
93 tree sender_decl;
94 tree receiver_decl;
95
     96   /* These are used just by task contexts, if the task firstprivate fn is
     97      needed.  srecord_type is used to communicate from the thread
     98      that encountered the task construct to the task firstprivate fn;
     99      record_type is allocated by GOMP_task, initialized by the task
    100      firstprivate fn and passed to the task body fn.  */
101 splay_tree sfield_map;
102 tree srecord_type;
103
104 /* A chain of variables to add to the top-level block surrounding the
105 construct. In the case of a parallel, this is in the child function. */
106 tree block_vars;
107
    108   /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
    109      barriers should jump during the omplower pass.  */
110 tree cancel_label;
111
112 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
113 otherwise. */
114 gimple *simt_stmt;
115
116 /* For task reductions registered in this context, a vector containing
117 the length of the private copies block (if constant, otherwise NULL)
118 and then offsets (if constant, otherwise NULL) for each entry. */
119 vec<tree> task_reductions;
120
121 /* A hash map from the reduction clauses to the registered array
122 elts. */
123 hash_map<tree, unsigned> *task_reduction_map;
124
125 /* And a hash map from the lastprivate(conditional:) variables to their
126 corresponding tracking loop iteration variables. */
127 hash_map<tree, tree> *lastprivate_conditional_map;
128
129 /* A tree_list of the reduction clauses in this context. This is
130 only used for checking the consistency of OpenACC reduction
131 clauses in scan_omp_for and is not guaranteed to contain a valid
132 value outside of this function. */
133 tree local_reduction_clauses;
134
135 /* A tree_list of the reduction clauses in outer contexts. This is
136 only used for checking the consistency of OpenACC reduction
137 clauses in scan_omp_for and is not guaranteed to contain a valid
138 value outside of this function. */
139 tree outer_reduction_clauses;
140
    141   /* Nesting depth of this context.  Used to beautify error messages
    142      regarding invalid gotos.  The outermost ctx is depth 1, with depth 0 being
143 reserved for the main body of the function. */
144 int depth;
145
146 /* True if this parallel directive is nested within another. */
147 bool is_nested;
148
149 /* True if this construct can be cancelled. */
150 bool cancellable;
151
152 /* True if lower_omp_1 should look up lastprivate conditional in parent
153 context. */
154 bool combined_into_simd_safelen1;
155
156 /* True if there is nested scan context with inclusive clause. */
157 bool scan_inclusive;
158
159 /* True if there is nested scan context with exclusive clause. */
160 bool scan_exclusive;
161
162 /* True in the second simd loop of for simd with inscan reductions. */
163 bool for_simd_scan_phase;
164
165 /* True if there is order(concurrent) clause on the construct. */
166 bool order_concurrent;
167
168 /* True if there is bind clause on the construct (i.e. a loop construct). */
169 bool loop_p;
170 };
171
172 static splay_tree all_contexts;
173 static int taskreg_nesting_level;
174 static int target_nesting_level;
175 static bitmap task_shared_vars;
176 static bitmap global_nonaddressable_vars;
177 static vec<omp_context *> taskreg_contexts;
178
179 static void scan_omp (gimple_seq *, omp_context *);
180 static tree scan_omp_1_op (tree *, int *, void *);
181
182 #define WALK_SUBSTMTS \
183 case GIMPLE_BIND: \
184 case GIMPLE_TRY: \
185 case GIMPLE_CATCH: \
186 case GIMPLE_EH_FILTER: \
187 case GIMPLE_TRANSACTION: \
188 /* The sub-statements for these should be walked. */ \
189 *handled_ops_p = false; \
190 break;
191
192 /* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
193 region. */
194
195 static bool
196 is_oacc_parallel_or_serial (omp_context *ctx)
197 {
198 enum gimple_code outer_type = gimple_code (ctx->stmt);
199 return ((outer_type == GIMPLE_OMP_TARGET)
200 && ((gimple_omp_target_kind (ctx->stmt)
201 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
202 || (gimple_omp_target_kind (ctx->stmt)
203 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
204 }
205
206 /* Return true if CTX corresponds to an oacc kernels region. */
207
208 static bool
209 is_oacc_kernels (omp_context *ctx)
210 {
211 enum gimple_code outer_type = gimple_code (ctx->stmt);
212 return ((outer_type == GIMPLE_OMP_TARGET)
213 && (gimple_omp_target_kind (ctx->stmt)
214 == GF_OMP_TARGET_KIND_OACC_KERNELS));
215 }
216
217 /* If DECL is the artificial dummy VAR_DECL created for non-static
218 data member privatization, return the underlying "this" parameter,
219 otherwise return NULL. */
220
221 tree
222 omp_member_access_dummy_var (tree decl)
223 {
224 if (!VAR_P (decl)
225 || !DECL_ARTIFICIAL (decl)
226 || !DECL_IGNORED_P (decl)
227 || !DECL_HAS_VALUE_EXPR_P (decl)
228 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
229 return NULL_TREE;
230
231 tree v = DECL_VALUE_EXPR (decl);
232 if (TREE_CODE (v) != COMPONENT_REF)
233 return NULL_TREE;
234
235 while (1)
236 switch (TREE_CODE (v))
237 {
238 case COMPONENT_REF:
239 case MEM_REF:
240 case INDIRECT_REF:
241 CASE_CONVERT:
242 case POINTER_PLUS_EXPR:
243 v = TREE_OPERAND (v, 0);
244 continue;
245 case PARM_DECL:
246 if (DECL_CONTEXT (v) == current_function_decl
247 && DECL_ARTIFICIAL (v)
248 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
249 return v;
250 return NULL_TREE;
251 default:
252 return NULL_TREE;
253 }
254 }
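/* For example (an illustrative sketch): given a C++ member function

       struct S {
         int m;
         void f ()
         {
       #pragma omp parallel private(m)
           m = 0;
         }
       };

   the front end privatizes the non-static data member M via an artificial
   VAR_DECL whose DECL_VALUE_EXPR is this->m; handed that dummy decl, the
   function above returns the underlying "this" PARM_DECL.  */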
255
256 /* Helper for unshare_and_remap, called through walk_tree. */
257
258 static tree
259 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
260 {
261 tree *pair = (tree *) data;
262 if (*tp == pair[0])
263 {
264 *tp = unshare_expr (pair[1]);
265 *walk_subtrees = 0;
266 }
267 else if (IS_TYPE_OR_DECL_P (*tp))
268 *walk_subtrees = 0;
269 return NULL_TREE;
270 }
271
272 /* Return unshare_expr (X) with all occurrences of FROM
273 replaced with TO. */
274
275 static tree
276 unshare_and_remap (tree x, tree from, tree to)
277 {
278 tree pair[2] = { from, to };
279 x = unshare_expr (x);
280 walk_tree (&x, unshare_and_remap_1, pair, NULL);
281 return x;
282 }
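/* For example, build_outer_var_ref below uses unshare_and_remap to rewrite
   a member-access value expression such as this->m so that it refers to
   the corresponding "this" parameter of an outer context instead of the
   inner context's copy.  */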
283
284 /* Convenience function for calling scan_omp_1_op on tree operands. */
285
286 static inline tree
287 scan_omp_op (tree *tp, omp_context *ctx)
288 {
289 struct walk_stmt_info wi;
290
291 memset (&wi, 0, sizeof (wi));
292 wi.info = ctx;
293 wi.want_locations = true;
294
295 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
296 }
297
298 static void lower_omp (gimple_seq *, omp_context *);
299 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
300 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
301
302 /* Return true if CTX is for an omp parallel. */
303
304 static inline bool
305 is_parallel_ctx (omp_context *ctx)
306 {
307 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
308 }
309
310
311 /* Return true if CTX is for an omp task. */
312
313 static inline bool
314 is_task_ctx (omp_context *ctx)
315 {
316 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
317 }
318
319
320 /* Return true if CTX is for an omp taskloop. */
321
322 static inline bool
323 is_taskloop_ctx (omp_context *ctx)
324 {
325 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
326 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
327 }
328
329
330 /* Return true if CTX is for a host omp teams. */
331
332 static inline bool
333 is_host_teams_ctx (omp_context *ctx)
334 {
335 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
336 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
337 }
338
339 /* Return true if CTX is for an omp parallel or omp task or host omp teams
340 (the last one is strictly not a task region in OpenMP speak, but we
341 need to treat it similarly). */
342
343 static inline bool
344 is_taskreg_ctx (omp_context *ctx)
345 {
346 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
347 }
348
349 /* Return true if EXPR is variable sized. */
350
351 static inline bool
352 is_variable_sized (const_tree expr)
353 {
354 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
355 }
356
357 /* Look up variables.  The "maybe" form allows the variable not to
358    have been entered; otherwise we assert that the variable must have
359    been entered.  */
360
361 static inline tree
362 lookup_decl (tree var, omp_context *ctx)
363 {
364 tree *n = ctx->cb.decl_map->get (var);
365 return *n;
366 }
367
368 static inline tree
369 maybe_lookup_decl (const_tree var, omp_context *ctx)
370 {
371 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
372 return n ? *n : NULL_TREE;
373 }
374
375 static inline tree
376 lookup_field (tree var, omp_context *ctx)
377 {
378 splay_tree_node n;
379 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
380 return (tree) n->value;
381 }
382
383 static inline tree
384 lookup_sfield (splay_tree_key key, omp_context *ctx)
385 {
386 splay_tree_node n;
387 n = splay_tree_lookup (ctx->sfield_map
388 ? ctx->sfield_map : ctx->field_map, key);
389 return (tree) n->value;
390 }
391
392 static inline tree
393 lookup_sfield (tree var, omp_context *ctx)
394 {
395 return lookup_sfield ((splay_tree_key) var, ctx);
396 }
397
398 static inline tree
399 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
400 {
401 splay_tree_node n;
402 n = splay_tree_lookup (ctx->field_map, key);
403 return n ? (tree) n->value : NULL_TREE;
404 }
405
406 static inline tree
407 maybe_lookup_field (tree var, omp_context *ctx)
408 {
409 return maybe_lookup_field ((splay_tree_key) var, ctx);
410 }
411
412 /* Return true if DECL should be copied by pointer. SHARED_CTX is
413 the parallel context if DECL is to be shared. */
414
415 static bool
416 use_pointer_for_field (tree decl, omp_context *shared_ctx)
417 {
418 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
419 || TYPE_ATOMIC (TREE_TYPE (decl)))
420 return true;
421
422 /* We can only use copy-in/copy-out semantics for shared variables
423 when we know the value is not accessible from an outer scope. */
424 if (shared_ctx)
425 {
426 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
427
428 /* ??? Trivially accessible from anywhere. But why would we even
429 be passing an address in this case? Should we simply assert
430 this to be false, or should we have a cleanup pass that removes
431 these from the list of mappings? */
432 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
433 return true;
434
435 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
436 without analyzing the expression whether or not its location
437 is accessible to anyone else. In the case of nested parallel
438 regions it certainly may be. */
439 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
440 return true;
441
442 /* Do not use copy-in/copy-out for variables that have their
443 address taken. */
444 if (is_global_var (decl))
445 {
446 /* For file scope vars, track whether we've seen them as
447 non-addressable initially and in that case, keep the same
448 answer for the duration of the pass, even when they are made
449 addressable later on e.g. through reduction expansion. Global
450 variables which weren't addressable before the pass will not
451 have their privatized copies address taken. See PR91216. */
452 if (!TREE_ADDRESSABLE (decl))
453 {
454 if (!global_nonaddressable_vars)
455 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
456 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
457 }
458 else if (!global_nonaddressable_vars
459 || !bitmap_bit_p (global_nonaddressable_vars,
460 DECL_UID (decl)))
461 return true;
462 }
463 else if (TREE_ADDRESSABLE (decl))
464 return true;
465
466 /* lower_send_shared_vars only uses copy-in, but not copy-out
467 for these. */
468 if (TREE_READONLY (decl)
469 || ((TREE_CODE (decl) == RESULT_DECL
470 || TREE_CODE (decl) == PARM_DECL)
471 && DECL_BY_REFERENCE (decl)))
472 return false;
473
474 /* Disallow copy-in/out in nested parallel if
475 decl is shared in outer parallel, otherwise
476 each thread could store the shared variable
477 in its own copy-in location, making the
478 variable no longer really shared. */
479 if (shared_ctx->is_nested)
480 {
481 omp_context *up;
482
483 for (up = shared_ctx->outer; up; up = up->outer)
484 if ((is_taskreg_ctx (up)
485 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
486 && is_gimple_omp_offloaded (up->stmt)))
487 && maybe_lookup_decl (decl, up))
488 break;
489
490 if (up)
491 {
492 tree c;
493
494 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
495 {
496 for (c = gimple_omp_target_clauses (up->stmt);
497 c; c = OMP_CLAUSE_CHAIN (c))
498 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
499 && OMP_CLAUSE_DECL (c) == decl)
500 break;
501 }
502 else
503 for (c = gimple_omp_taskreg_clauses (up->stmt);
504 c; c = OMP_CLAUSE_CHAIN (c))
505 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
506 && OMP_CLAUSE_DECL (c) == decl)
507 break;
508
509 if (c)
510 goto maybe_mark_addressable_and_ret;
511 }
512 }
513
    514       /* For tasks avoid using copy-in/out.  As tasks can be
    515 	 deferred or executed in a different thread, the task hasn't
    516 	 necessarily terminated when GOMP_task returns.  */
517 if (is_task_ctx (shared_ctx))
518 {
519 tree outer;
520 maybe_mark_addressable_and_ret:
521 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
522 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
523 {
524 /* Taking address of OUTER in lower_send_shared_vars
525 might need regimplification of everything that uses the
526 variable. */
527 if (!task_shared_vars)
528 task_shared_vars = BITMAP_ALLOC (NULL);
529 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
530 TREE_ADDRESSABLE (outer) = 1;
531 }
532 return true;
533 }
534 }
535
536 return false;
537 }
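/* A few illustrative consequences of the rules above (a sketch, not an
   exhaustive list):

       int x;               // local scalar, address never taken:
                            //   shared(x) uses copy-in/copy-out by value
       int y, *p = &y;      // TREE_ADDRESSABLE:
                            //   shared(y) passes y by pointer
       struct S s;          // aggregate type:
                            //   shared(s) always passes s by pointer  */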
538
539 /* Construct a new automatic decl similar to VAR. */
540
541 static tree
542 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
543 {
544 tree copy = copy_var_decl (var, name, type);
545
546 DECL_CONTEXT (copy) = current_function_decl;
547 DECL_CHAIN (copy) = ctx->block_vars;
    548   /* If VAR is listed in task_shared_vars, it means it wasn't
    549      originally addressable and is only so because a task needs to
    550      take its address.  But we don't need to take the address of
    551      privatized copies of that var.  */
552 if (TREE_ADDRESSABLE (var)
553 && ((task_shared_vars
554 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
555 || (global_nonaddressable_vars
556 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
557 TREE_ADDRESSABLE (copy) = 0;
558 ctx->block_vars = copy;
559
560 return copy;
561 }
562
563 static tree
564 omp_copy_decl_1 (tree var, omp_context *ctx)
565 {
566 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
567 }
568
569 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
570 as appropriate. */
571 static tree
572 omp_build_component_ref (tree obj, tree field)
573 {
574 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
575 if (TREE_THIS_VOLATILE (field))
576 TREE_THIS_VOLATILE (ret) |= 1;
577 if (TREE_READONLY (field))
578 TREE_READONLY (ret) |= 1;
579 return ret;
580 }
581
582 /* Build tree nodes to access the field for VAR on the receiver side. */
583
584 static tree
585 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
586 {
587 tree x, field = lookup_field (var, ctx);
588
589 /* If the receiver record type was remapped in the child function,
590 remap the field into the new record type. */
591 x = maybe_lookup_field (field, ctx);
592 if (x != NULL)
593 field = x;
594
595 x = build_simple_mem_ref (ctx->receiver_decl);
596 TREE_THIS_NOTRAP (x) = 1;
597 x = omp_build_component_ref (x, field);
598 if (by_ref)
599 {
600 x = build_simple_mem_ref (x);
601 TREE_THIS_NOTRAP (x) = 1;
602 }
603
604 return x;
605 }
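/* I.e. the reference built here has the form .omp_data_i->field, or
   *.omp_data_i->field when BY_REF, where .omp_data_i is the receiver
   parameter of the child function.  */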
606
607 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
608 of a parallel, this is a component reference; for workshare constructs
609 this is some variable. */
610
611 static tree
612 build_outer_var_ref (tree var, omp_context *ctx,
613 enum omp_clause_code code = OMP_CLAUSE_ERROR)
614 {
615 tree x;
616 omp_context *outer = ctx->outer;
617 while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
618 outer = outer->outer;
619
620 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
621 x = var;
622 else if (is_variable_sized (var))
623 {
624 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
625 x = build_outer_var_ref (x, ctx, code);
626 x = build_simple_mem_ref (x);
627 }
628 else if (is_taskreg_ctx (ctx))
629 {
630 bool by_ref = use_pointer_for_field (var, NULL);
631 x = build_receiver_ref (var, by_ref, ctx);
632 }
633 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
634 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
635 || ctx->loop_p
636 || (code == OMP_CLAUSE_PRIVATE
637 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
638 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
639 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
640 {
641 /* #pragma omp simd isn't a worksharing construct, and can reference
642 even private vars in its linear etc. clauses.
643 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
644 to private vars in all worksharing constructs. */
645 x = NULL_TREE;
646 if (outer && is_taskreg_ctx (outer))
647 x = lookup_decl (var, outer);
648 else if (outer)
649 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
650 if (x == NULL_TREE)
651 x = var;
652 }
653 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
654 {
655 gcc_assert (outer);
656 splay_tree_node n
657 = splay_tree_lookup (outer->field_map,
658 (splay_tree_key) &DECL_UID (var));
659 if (n == NULL)
660 {
661 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
662 x = var;
663 else
664 x = lookup_decl (var, outer);
665 }
666 else
667 {
668 tree field = (tree) n->value;
669 /* If the receiver record type was remapped in the child function,
670 remap the field into the new record type. */
671 x = maybe_lookup_field (field, outer);
672 if (x != NULL)
673 field = x;
674
675 x = build_simple_mem_ref (outer->receiver_decl);
676 x = omp_build_component_ref (x, field);
677 if (use_pointer_for_field (var, outer))
678 x = build_simple_mem_ref (x);
679 }
680 }
681 else if (outer)
682 x = lookup_decl (var, outer);
683 else if (omp_is_reference (var))
    684     /* This can happen with orphaned constructs.  If VAR is a reference,
    685        it may be shared and as such valid.  */
686 x = var;
687 else if (omp_member_access_dummy_var (var))
688 x = var;
689 else
690 gcc_unreachable ();
691
692 if (x == var)
693 {
694 tree t = omp_member_access_dummy_var (var);
695 if (t)
696 {
697 x = DECL_VALUE_EXPR (var);
698 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
699 if (o != t)
700 x = unshare_and_remap (x, t, o);
701 else
702 x = unshare_expr (x);
703 }
704 }
705
706 if (omp_is_reference (var))
707 x = build_simple_mem_ref (x);
708
709 return x;
710 }
711
712 /* Build tree nodes to access the field for VAR on the sender side. */
713
714 static tree
715 build_sender_ref (splay_tree_key key, omp_context *ctx)
716 {
717 tree field = lookup_sfield (key, ctx);
718 return omp_build_component_ref (ctx->sender_decl, field);
719 }
720
721 static tree
722 build_sender_ref (tree var, omp_context *ctx)
723 {
724 return build_sender_ref ((splay_tree_key) var, ctx);
725 }
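
/* I.e. a sender reference has the form .omp_data_o.field, where
   .omp_data_o is the local structure that the encountering thread fills
   in before invoking the runtime (an illustrative description).  */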
726
727 /* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */
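/* The MASK argument packs several flags (as inferred from the code below):
   bit 0 (1) installs the field in ctx->record_type / ctx->field_map;
   bit 1 (2) installs it in ctx->srecord_type / ctx->sfield_map;
   bit 2 (4) gives an array VAR a pointer-to-pointer type;
   bit 3 (8) keys the maps off &DECL_UID (VAR) instead of VAR itself;
   bit 4 (16) keys them off &DECL_NAME (VAR) and uses the type of the
   array descriptor's data member (Fortran).  */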
729
730 static void
731 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
732 {
733 tree field, type, sfield = NULL_TREE;
734 splay_tree_key key = (splay_tree_key) var;
735
736 if ((mask & 16) != 0)
737 {
738 key = (splay_tree_key) &DECL_NAME (var);
739 gcc_checking_assert (key != (splay_tree_key) var);
740 }
741 if ((mask & 8) != 0)
742 {
743 key = (splay_tree_key) &DECL_UID (var);
744 gcc_checking_assert (key != (splay_tree_key) var);
745 }
746 gcc_assert ((mask & 1) == 0
747 || !splay_tree_lookup (ctx->field_map, key));
748 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
749 || !splay_tree_lookup (ctx->sfield_map, key));
750 gcc_assert ((mask & 3) == 3
751 || !is_gimple_omp_oacc (ctx->stmt));
752
753 type = TREE_TYPE (var);
754 if ((mask & 16) != 0)
755 type = lang_hooks.decls.omp_array_data (var, true);
756
757 /* Prevent redeclaring the var in the split-off function with a restrict
758 pointer type. Note that we only clear type itself, restrict qualifiers in
759 the pointed-to type will be ignored by points-to analysis. */
760 if (POINTER_TYPE_P (type)
761 && TYPE_RESTRICT (type))
762 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
763
764 if (mask & 4)
765 {
766 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
767 type = build_pointer_type (build_pointer_type (type));
768 }
769 else if (by_ref)
770 type = build_pointer_type (type);
771 else if ((mask & 3) == 1 && omp_is_reference (var))
772 type = TREE_TYPE (type);
773
774 field = build_decl (DECL_SOURCE_LOCATION (var),
775 FIELD_DECL, DECL_NAME (var), type);
776
    777   /* Remember what variable this field was created for.  This has the
    778      side effect of making dwarf2out ignore this member, so for helpful
    779      debugging we clear it later in delete_omp_context.  */
780 DECL_ABSTRACT_ORIGIN (field) = var;
781 if ((mask & 16) == 0 && type == TREE_TYPE (var))
782 {
783 SET_DECL_ALIGN (field, DECL_ALIGN (var));
784 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
785 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
786 }
787 else
788 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
789
790 if ((mask & 3) == 3)
791 {
792 insert_field_into_struct (ctx->record_type, field);
793 if (ctx->srecord_type)
794 {
795 sfield = build_decl (DECL_SOURCE_LOCATION (var),
796 FIELD_DECL, DECL_NAME (var), type);
797 DECL_ABSTRACT_ORIGIN (sfield) = var;
798 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
799 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
800 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
801 insert_field_into_struct (ctx->srecord_type, sfield);
802 }
803 }
804 else
805 {
806 if (ctx->srecord_type == NULL_TREE)
807 {
808 tree t;
809
810 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
811 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
812 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
813 {
814 sfield = build_decl (DECL_SOURCE_LOCATION (t),
815 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
816 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
817 insert_field_into_struct (ctx->srecord_type, sfield);
818 splay_tree_insert (ctx->sfield_map,
819 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
820 (splay_tree_value) sfield);
821 }
822 }
823 sfield = field;
824 insert_field_into_struct ((mask & 1) ? ctx->record_type
825 : ctx->srecord_type, field);
826 }
827
828 if (mask & 1)
829 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
830 if ((mask & 2) && ctx->sfield_map)
831 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
832 }
833
834 static tree
835 install_var_local (tree var, omp_context *ctx)
836 {
837 tree new_var = omp_copy_decl_1 (var, ctx);
838 insert_decl_map (&ctx->cb, var, new_var);
839 return new_var;
840 }
841
842 /* Adjust the replacement for DECL in CTX for the new context. This means
843 copying the DECL_VALUE_EXPR, and fixing up the type. */
844
845 static void
846 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
847 {
848 tree new_decl, size;
849
850 new_decl = lookup_decl (decl, ctx);
851
852 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
853
854 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
855 && DECL_HAS_VALUE_EXPR_P (decl))
856 {
857 tree ve = DECL_VALUE_EXPR (decl);
858 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
859 SET_DECL_VALUE_EXPR (new_decl, ve);
860 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
861 }
862
863 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
864 {
865 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
866 if (size == error_mark_node)
867 size = TYPE_SIZE (TREE_TYPE (new_decl));
868 DECL_SIZE (new_decl) = size;
869
870 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
871 if (size == error_mark_node)
872 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
873 DECL_SIZE_UNIT (new_decl) = size;
874 }
875 }
876
877 /* The callback for remap_decl. Search all containing contexts for a
878 mapping of the variable; this avoids having to duplicate the splay
879 tree ahead of time. We know a mapping doesn't already exist in the
880 given context. Create new mappings to implement default semantics. */
881
882 static tree
883 omp_copy_decl (tree var, copy_body_data *cb)
884 {
885 omp_context *ctx = (omp_context *) cb;
886 tree new_var;
887
888 if (TREE_CODE (var) == LABEL_DECL)
889 {
890 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
891 return var;
892 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
893 DECL_CONTEXT (new_var) = current_function_decl;
894 insert_decl_map (&ctx->cb, var, new_var);
895 return new_var;
896 }
897
898 while (!is_taskreg_ctx (ctx))
899 {
900 ctx = ctx->outer;
901 if (ctx == NULL)
902 return var;
903 new_var = maybe_lookup_decl (var, ctx);
904 if (new_var)
905 return new_var;
906 }
907
908 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
909 return var;
910
911 return error_mark_node;
912 }
913
914 /* Create a new context, with OUTER_CTX being the surrounding context. */
915
916 static omp_context *
917 new_omp_context (gimple *stmt, omp_context *outer_ctx)
918 {
919 omp_context *ctx = XCNEW (omp_context);
920
921 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
922 (splay_tree_value) ctx);
923 ctx->stmt = stmt;
924
925 if (outer_ctx)
926 {
927 ctx->outer = outer_ctx;
928 ctx->cb = outer_ctx->cb;
929 ctx->cb.block = NULL;
930 ctx->depth = outer_ctx->depth + 1;
931 }
932 else
933 {
934 ctx->cb.src_fn = current_function_decl;
935 ctx->cb.dst_fn = current_function_decl;
936 ctx->cb.src_node = cgraph_node::get (current_function_decl);
937 gcc_checking_assert (ctx->cb.src_node);
938 ctx->cb.dst_node = ctx->cb.src_node;
939 ctx->cb.src_cfun = cfun;
940 ctx->cb.copy_decl = omp_copy_decl;
941 ctx->cb.eh_lp_nr = 0;
942 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
943 ctx->cb.adjust_array_error_bounds = true;
944 ctx->cb.dont_remap_vla_if_no_change = true;
945 ctx->depth = 1;
946 }
947
948 ctx->cb.decl_map = new hash_map<tree, tree>;
949
950 return ctx;
951 }
952
953 static gimple_seq maybe_catch_exception (gimple_seq);
954
955 /* Finalize task copyfn. */
956
957 static void
958 finalize_task_copyfn (gomp_task *task_stmt)
959 {
960 struct function *child_cfun;
961 tree child_fn;
962 gimple_seq seq = NULL, new_seq;
963 gbind *bind;
964
965 child_fn = gimple_omp_task_copy_fn (task_stmt);
966 if (child_fn == NULL_TREE)
967 return;
968
969 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
970 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
971
972 push_cfun (child_cfun);
973 bind = gimplify_body (child_fn, false);
974 gimple_seq_add_stmt (&seq, bind);
975 new_seq = maybe_catch_exception (seq);
976 if (new_seq != seq)
977 {
978 bind = gimple_build_bind (NULL, new_seq, NULL);
979 seq = NULL;
980 gimple_seq_add_stmt (&seq, bind);
981 }
982 gimple_set_body (child_fn, seq);
983 pop_cfun ();
984
985 /* Inform the callgraph about the new function. */
986 cgraph_node *node = cgraph_node::get_create (child_fn);
987 node->parallelized_function = 1;
988 cgraph_node::add_new_function (child_fn, false);
989 }
990
991 /* Destroy an omp_context data structure.  Called through the splay tree
992 value delete callback. */
993
994 static void
995 delete_omp_context (splay_tree_value value)
996 {
997 omp_context *ctx = (omp_context *) value;
998
999 delete ctx->cb.decl_map;
1000
1001 if (ctx->field_map)
1002 splay_tree_delete (ctx->field_map);
1003 if (ctx->sfield_map)
1004 splay_tree_delete (ctx->sfield_map);
1005
   1006   /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it, as
   1007      it would otherwise produce corrupt debug information.  */
1008 if (ctx->record_type)
1009 {
1010 tree t;
1011 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1012 DECL_ABSTRACT_ORIGIN (t) = NULL;
1013 }
1014 if (ctx->srecord_type)
1015 {
1016 tree t;
1017 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1018 DECL_ABSTRACT_ORIGIN (t) = NULL;
1019 }
1020
1021 if (is_task_ctx (ctx))
1022 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1023
1024 if (ctx->task_reduction_map)
1025 {
1026 ctx->task_reductions.release ();
1027 delete ctx->task_reduction_map;
1028 }
1029
1030 delete ctx->lastprivate_conditional_map;
1031
1032 XDELETE (ctx);
1033 }
1034
1035 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1036 context. */
1037
1038 static void
1039 fixup_child_record_type (omp_context *ctx)
1040 {
1041 tree f, type = ctx->record_type;
1042
1043 if (!ctx->receiver_decl)
1044 return;
1045 /* ??? It isn't sufficient to just call remap_type here, because
1046 variably_modified_type_p doesn't work the way we expect for
1047 record types. Testing each field for whether it needs remapping
1048 and creating a new record by hand works, however. */
1049 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1050 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1051 break;
1052 if (f)
1053 {
1054 tree name, new_fields = NULL;
1055
1056 type = lang_hooks.types.make_type (RECORD_TYPE);
1057 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1058 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1059 TYPE_DECL, name, type);
1060 TYPE_NAME (type) = name;
1061
1062 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1063 {
1064 tree new_f = copy_node (f);
1065 DECL_CONTEXT (new_f) = type;
1066 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1067 DECL_CHAIN (new_f) = new_fields;
1068 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1069 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1070 &ctx->cb, NULL);
1071 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1072 &ctx->cb, NULL);
1073 new_fields = new_f;
1074
1075 /* Arrange to be able to look up the receiver field
1076 given the sender field. */
1077 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1078 (splay_tree_value) new_f);
1079 }
1080 TYPE_FIELDS (type) = nreverse (new_fields);
1081 layout_type (type);
1082 }
1083
1084 /* In a target region we never modify any of the pointers in *.omp_data_i,
1085 so attempt to help the optimizers. */
1086 if (is_gimple_omp_offloaded (ctx->stmt))
1087 type = build_qualified_type (type, TYPE_QUAL_CONST);
1088
1089 TREE_TYPE (ctx->receiver_decl)
1090 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1091 }
1092
1093 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1094 specified by CLAUSES. */
1095
1096 static void
1097 scan_sharing_clauses (tree clauses, omp_context *ctx)
1098 {
1099 tree c, decl;
1100 bool scan_array_reductions = false;
1101
1102 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1103 {
1104 bool by_ref;
1105
1106 switch (OMP_CLAUSE_CODE (c))
1107 {
1108 case OMP_CLAUSE_PRIVATE:
1109 decl = OMP_CLAUSE_DECL (c);
1110 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1111 goto do_private;
1112 else if (!is_variable_sized (decl))
1113 install_var_local (decl, ctx);
1114 break;
1115
1116 case OMP_CLAUSE_SHARED:
1117 decl = OMP_CLAUSE_DECL (c);
   1118 	  /* Ignore shared directives in a teams construct nested inside
   1119 	     a target construct.  */
1120 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1121 && !is_host_teams_ctx (ctx))
1122 {
1123 /* Global variables don't need to be copied,
1124 the receiver side will use them directly. */
1125 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1126 if (is_global_var (odecl))
1127 break;
1128 insert_decl_map (&ctx->cb, decl, odecl);
1129 break;
1130 }
1131 gcc_assert (is_taskreg_ctx (ctx));
1132 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1133 || !is_variable_sized (decl));
1134 /* Global variables don't need to be copied,
1135 the receiver side will use them directly. */
1136 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1137 break;
1138 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1139 {
1140 use_pointer_for_field (decl, ctx);
1141 break;
1142 }
1143 by_ref = use_pointer_for_field (decl, NULL);
1144 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1145 || TREE_ADDRESSABLE (decl)
1146 || by_ref
1147 || omp_is_reference (decl))
1148 {
1149 by_ref = use_pointer_for_field (decl, ctx);
1150 install_var_field (decl, by_ref, 3, ctx);
1151 install_var_local (decl, ctx);
1152 break;
1153 }
1154 /* We don't need to copy const scalar vars back. */
1155 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1156 goto do_private;
1157
1158 case OMP_CLAUSE_REDUCTION:
1159 if (is_oacc_parallel_or_serial (ctx) || is_oacc_kernels (ctx))
1160 ctx->local_reduction_clauses
1161 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1162 /* FALLTHRU */
1163
1164 case OMP_CLAUSE_IN_REDUCTION:
1165 decl = OMP_CLAUSE_DECL (c);
1166 if (TREE_CODE (decl) == MEM_REF)
1167 {
1168 tree t = TREE_OPERAND (decl, 0);
1169 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1170 t = TREE_OPERAND (t, 0);
1171 if (TREE_CODE (t) == INDIRECT_REF
1172 || TREE_CODE (t) == ADDR_EXPR)
1173 t = TREE_OPERAND (t, 0);
1174 install_var_local (t, ctx);
1175 if (is_taskreg_ctx (ctx)
1176 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1177 || (is_task_ctx (ctx)
1178 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1179 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1180 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1181 == POINTER_TYPE)))))
1182 && !is_variable_sized (t)
1183 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1184 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1185 && !is_task_ctx (ctx))))
1186 {
1187 by_ref = use_pointer_for_field (t, NULL);
1188 if (is_task_ctx (ctx)
1189 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1190 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1191 {
1192 install_var_field (t, false, 1, ctx);
1193 install_var_field (t, by_ref, 2, ctx);
1194 }
1195 else
1196 install_var_field (t, by_ref, 3, ctx);
1197 }
1198 break;
1199 }
1200 if (is_task_ctx (ctx)
1201 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1202 && OMP_CLAUSE_REDUCTION_TASK (c)
1203 && is_parallel_ctx (ctx)))
1204 {
1205 /* Global variables don't need to be copied,
1206 the receiver side will use them directly. */
1207 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1208 {
1209 by_ref = use_pointer_for_field (decl, ctx);
1210 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1211 install_var_field (decl, by_ref, 3, ctx);
1212 }
1213 install_var_local (decl, ctx);
1214 break;
1215 }
1216 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1217 && OMP_CLAUSE_REDUCTION_TASK (c))
1218 {
1219 install_var_local (decl, ctx);
1220 break;
1221 }
1222 goto do_private;
1223
1224 case OMP_CLAUSE_LASTPRIVATE:
1225 /* Let the corresponding firstprivate clause create
1226 the variable. */
1227 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1228 break;
1229 /* FALLTHRU */
1230
1231 case OMP_CLAUSE_FIRSTPRIVATE:
1232 case OMP_CLAUSE_LINEAR:
1233 decl = OMP_CLAUSE_DECL (c);
1234 do_private:
1235 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1236 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1237 && is_gimple_omp_offloaded (ctx->stmt))
1238 {
1239 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1240 install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1241 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1242 install_var_field (decl, true, 3, ctx);
1243 else
1244 install_var_field (decl, false, 3, ctx);
1245 }
1246 if (is_variable_sized (decl))
1247 {
1248 if (is_task_ctx (ctx))
1249 install_var_field (decl, false, 1, ctx);
1250 break;
1251 }
1252 else if (is_taskreg_ctx (ctx))
1253 {
1254 bool global
1255 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1256 by_ref = use_pointer_for_field (decl, NULL);
1257
1258 if (is_task_ctx (ctx)
1259 && (global || by_ref || omp_is_reference (decl)))
1260 {
1261 install_var_field (decl, false, 1, ctx);
1262 if (!global)
1263 install_var_field (decl, by_ref, 2, ctx);
1264 }
1265 else if (!global)
1266 install_var_field (decl, by_ref, 3, ctx);
1267 }
1268 install_var_local (decl, ctx);
1269 break;
1270
1271 case OMP_CLAUSE_USE_DEVICE_PTR:
1272 case OMP_CLAUSE_USE_DEVICE_ADDR:
1273 decl = OMP_CLAUSE_DECL (c);
1274
1275 /* Fortran array descriptors. */
1276 if (lang_hooks.decls.omp_array_data (decl, true))
1277 install_var_field (decl, false, 19, ctx);
1278 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1279 && !omp_is_reference (decl)
1280 && !omp_is_allocatable_or_ptr (decl))
1281 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1282 install_var_field (decl, true, 11, ctx);
1283 else
1284 install_var_field (decl, false, 11, ctx);
1285 if (DECL_SIZE (decl)
1286 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1287 {
1288 tree decl2 = DECL_VALUE_EXPR (decl);
1289 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1290 decl2 = TREE_OPERAND (decl2, 0);
1291 gcc_assert (DECL_P (decl2));
1292 install_var_local (decl2, ctx);
1293 }
1294 install_var_local (decl, ctx);
1295 break;
1296
1297 case OMP_CLAUSE_IS_DEVICE_PTR:
1298 decl = OMP_CLAUSE_DECL (c);
1299 goto do_private;
1300
1301 case OMP_CLAUSE__LOOPTEMP_:
1302 case OMP_CLAUSE__REDUCTEMP_:
1303 gcc_assert (is_taskreg_ctx (ctx));
1304 decl = OMP_CLAUSE_DECL (c);
1305 install_var_field (decl, false, 3, ctx);
1306 install_var_local (decl, ctx);
1307 break;
1308
1309 case OMP_CLAUSE_COPYPRIVATE:
1310 case OMP_CLAUSE_COPYIN:
1311 decl = OMP_CLAUSE_DECL (c);
1312 by_ref = use_pointer_for_field (decl, NULL);
1313 install_var_field (decl, by_ref, 3, ctx);
1314 break;
1315
1316 case OMP_CLAUSE_FINAL:
1317 case OMP_CLAUSE_IF:
1318 case OMP_CLAUSE_NUM_THREADS:
1319 case OMP_CLAUSE_NUM_TEAMS:
1320 case OMP_CLAUSE_THREAD_LIMIT:
1321 case OMP_CLAUSE_DEVICE:
1322 case OMP_CLAUSE_SCHEDULE:
1323 case OMP_CLAUSE_DIST_SCHEDULE:
1324 case OMP_CLAUSE_DEPEND:
1325 case OMP_CLAUSE_PRIORITY:
1326 case OMP_CLAUSE_GRAINSIZE:
1327 case OMP_CLAUSE_NUM_TASKS:
1328 case OMP_CLAUSE_NUM_GANGS:
1329 case OMP_CLAUSE_NUM_WORKERS:
1330 case OMP_CLAUSE_VECTOR_LENGTH:
1331 if (ctx->outer)
1332 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1333 break;
1334
1335 case OMP_CLAUSE_TO:
1336 case OMP_CLAUSE_FROM:
1337 case OMP_CLAUSE_MAP:
1338 if (ctx->outer)
1339 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1340 decl = OMP_CLAUSE_DECL (c);
   1341 	  /* Global variables with the "omp declare target" attribute don't
   1342 	     need to be copied, the receiver side will use them directly.
   1343 	     However, global variables with the "omp declare target link"
   1344 	     attribute need to be copied, as do maps with the ALWAYS modifier.  */
1345 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1346 && DECL_P (decl)
1347 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1348 && (OMP_CLAUSE_MAP_KIND (c)
1349 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1350 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1351 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1352 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1353 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1354 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1355 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1356 && varpool_node::get_create (decl)->offloadable
1357 && !lookup_attribute ("omp declare target link",
1358 DECL_ATTRIBUTES (decl)))
1359 break;
1360 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1361 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1362 {
1363 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1364 not offloaded; there is nothing to map for those. */
1365 if (!is_gimple_omp_offloaded (ctx->stmt)
1366 && !POINTER_TYPE_P (TREE_TYPE (decl))
1367 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1368 break;
1369 }
1370 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1371 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1372 || (OMP_CLAUSE_MAP_KIND (c)
1373 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1374 {
1375 if (TREE_CODE (decl) == COMPONENT_REF
1376 || (TREE_CODE (decl) == INDIRECT_REF
1377 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1378 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1379 == REFERENCE_TYPE)))
1380 break;
1381 if (DECL_SIZE (decl)
1382 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1383 {
1384 tree decl2 = DECL_VALUE_EXPR (decl);
1385 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1386 decl2 = TREE_OPERAND (decl2, 0);
1387 gcc_assert (DECL_P (decl2));
1388 install_var_local (decl2, ctx);
1389 }
1390 install_var_local (decl, ctx);
1391 break;
1392 }
1393 if (DECL_P (decl))
1394 {
1395 if (DECL_SIZE (decl)
1396 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1397 {
1398 tree decl2 = DECL_VALUE_EXPR (decl);
1399 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1400 decl2 = TREE_OPERAND (decl2, 0);
1401 gcc_assert (DECL_P (decl2));
1402 install_var_field (decl2, true, 3, ctx);
1403 install_var_local (decl2, ctx);
1404 install_var_local (decl, ctx);
1405 }
1406 else
1407 {
1408 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1409 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1410 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1411 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1412 install_var_field (decl, true, 7, ctx);
1413 else
1414 install_var_field (decl, true, 3, ctx);
1415 if (is_gimple_omp_offloaded (ctx->stmt)
1416 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1417 install_var_local (decl, ctx);
1418 }
1419 }
1420 else
1421 {
1422 tree base = get_base_address (decl);
1423 tree nc = OMP_CLAUSE_CHAIN (c);
1424 if (DECL_P (base)
1425 && nc != NULL_TREE
1426 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1427 && OMP_CLAUSE_DECL (nc) == base
1428 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1429 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1430 {
1431 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1432 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1433 }
1434 else
1435 {
1436 if (ctx->outer)
1437 {
1438 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1439 decl = OMP_CLAUSE_DECL (c);
1440 }
1441 gcc_assert (!splay_tree_lookup (ctx->field_map,
1442 (splay_tree_key) decl));
1443 tree field
1444 = build_decl (OMP_CLAUSE_LOCATION (c),
1445 FIELD_DECL, NULL_TREE, ptr_type_node);
1446 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1447 insert_field_into_struct (ctx->record_type, field);
1448 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1449 (splay_tree_value) field);
1450 }
1451 }
1452 break;
1453
1454 case OMP_CLAUSE_ORDER:
1455 ctx->order_concurrent = true;
1456 break;
1457
1458 case OMP_CLAUSE_BIND:
1459 ctx->loop_p = true;
1460 break;
1461
1462 case OMP_CLAUSE_NOWAIT:
1463 case OMP_CLAUSE_ORDERED:
1464 case OMP_CLAUSE_COLLAPSE:
1465 case OMP_CLAUSE_UNTIED:
1466 case OMP_CLAUSE_MERGEABLE:
1467 case OMP_CLAUSE_PROC_BIND:
1468 case OMP_CLAUSE_SAFELEN:
1469 case OMP_CLAUSE_SIMDLEN:
1470 case OMP_CLAUSE_THREADS:
1471 case OMP_CLAUSE_SIMD:
1472 case OMP_CLAUSE_NOGROUP:
1473 case OMP_CLAUSE_DEFAULTMAP:
1474 case OMP_CLAUSE_ASYNC:
1475 case OMP_CLAUSE_WAIT:
1476 case OMP_CLAUSE_GANG:
1477 case OMP_CLAUSE_WORKER:
1478 case OMP_CLAUSE_VECTOR:
1479 case OMP_CLAUSE_INDEPENDENT:
1480 case OMP_CLAUSE_AUTO:
1481 case OMP_CLAUSE_SEQ:
1482 case OMP_CLAUSE_TILE:
1483 case OMP_CLAUSE__SIMT_:
1484 case OMP_CLAUSE_DEFAULT:
1485 case OMP_CLAUSE_NONTEMPORAL:
1486 case OMP_CLAUSE_IF_PRESENT:
1487 case OMP_CLAUSE_FINALIZE:
1488 case OMP_CLAUSE_TASK_REDUCTION:
1489 break;
1490
1491 case OMP_CLAUSE_ALIGNED:
1492 decl = OMP_CLAUSE_DECL (c);
1493 if (is_global_var (decl)
1494 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1495 install_var_local (decl, ctx);
1496 break;
1497
1498 case OMP_CLAUSE__CONDTEMP_:
1499 decl = OMP_CLAUSE_DECL (c);
1500 if (is_parallel_ctx (ctx))
1501 {
1502 install_var_field (decl, false, 3, ctx);
1503 install_var_local (decl, ctx);
1504 }
1505 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1506 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1507 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1508 install_var_local (decl, ctx);
1509 break;
1510
1511 case OMP_CLAUSE__CACHE_:
1512 default:
1513 gcc_unreachable ();
1514 }
1515 }
1516
1517 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1518 {
1519 switch (OMP_CLAUSE_CODE (c))
1520 {
1521 case OMP_CLAUSE_LASTPRIVATE:
1522 /* Let the corresponding firstprivate clause create
1523 the variable. */
1524 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1525 scan_array_reductions = true;
1526 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1527 break;
1528 /* FALLTHRU */
1529
1530 case OMP_CLAUSE_FIRSTPRIVATE:
1531 case OMP_CLAUSE_PRIVATE:
1532 case OMP_CLAUSE_LINEAR:
1533 case OMP_CLAUSE_IS_DEVICE_PTR:
1534 decl = OMP_CLAUSE_DECL (c);
1535 if (is_variable_sized (decl))
1536 {
1537 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1538 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1539 && is_gimple_omp_offloaded (ctx->stmt))
1540 {
1541 tree decl2 = DECL_VALUE_EXPR (decl);
1542 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1543 decl2 = TREE_OPERAND (decl2, 0);
1544 gcc_assert (DECL_P (decl2));
1545 install_var_local (decl2, ctx);
1546 fixup_remapped_decl (decl2, ctx, false);
1547 }
1548 install_var_local (decl, ctx);
1549 }
1550 fixup_remapped_decl (decl, ctx,
1551 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1552 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1553 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1554 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1555 scan_array_reductions = true;
1556 break;
1557
1558 case OMP_CLAUSE_REDUCTION:
1559 case OMP_CLAUSE_IN_REDUCTION:
1560 decl = OMP_CLAUSE_DECL (c);
1561 if (TREE_CODE (decl) != MEM_REF)
1562 {
1563 if (is_variable_sized (decl))
1564 install_var_local (decl, ctx);
1565 fixup_remapped_decl (decl, ctx, false);
1566 }
1567 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1568 scan_array_reductions = true;
1569 break;
1570
1571 case OMP_CLAUSE_TASK_REDUCTION:
1572 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1573 scan_array_reductions = true;
1574 break;
1575
1576 case OMP_CLAUSE_SHARED:
   1577 	  /* Ignore shared directives in a teams construct nested inside
   1578 	     a target construct.  */
1579 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1580 && !is_host_teams_ctx (ctx))
1581 break;
1582 decl = OMP_CLAUSE_DECL (c);
1583 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1584 break;
1585 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1586 {
1587 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1588 ctx->outer)))
1589 break;
1590 bool by_ref = use_pointer_for_field (decl, ctx);
1591 install_var_field (decl, by_ref, 11, ctx);
1592 break;
1593 }
1594 fixup_remapped_decl (decl, ctx, false);
1595 break;
1596
1597 case OMP_CLAUSE_MAP:
1598 if (!is_gimple_omp_offloaded (ctx->stmt))
1599 break;
1600 decl = OMP_CLAUSE_DECL (c);
1601 if (DECL_P (decl)
1602 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1603 && (OMP_CLAUSE_MAP_KIND (c)
1604 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1605 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1606 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1607 && varpool_node::get_create (decl)->offloadable)
1608 break;
1609 if (DECL_P (decl))
1610 {
1611 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1612 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1613 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1614 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1615 {
1616 tree new_decl = lookup_decl (decl, ctx);
1617 TREE_TYPE (new_decl)
1618 = remap_type (TREE_TYPE (decl), &ctx->cb);
1619 }
1620 else if (DECL_SIZE (decl)
1621 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1622 {
1623 tree decl2 = DECL_VALUE_EXPR (decl);
1624 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1625 decl2 = TREE_OPERAND (decl2, 0);
1626 gcc_assert (DECL_P (decl2));
1627 fixup_remapped_decl (decl2, ctx, false);
1628 fixup_remapped_decl (decl, ctx, true);
1629 }
1630 else
1631 fixup_remapped_decl (decl, ctx, false);
1632 }
1633 break;
1634
1635 case OMP_CLAUSE_COPYPRIVATE:
1636 case OMP_CLAUSE_COPYIN:
1637 case OMP_CLAUSE_DEFAULT:
1638 case OMP_CLAUSE_IF:
1639 case OMP_CLAUSE_NUM_THREADS:
1640 case OMP_CLAUSE_NUM_TEAMS:
1641 case OMP_CLAUSE_THREAD_LIMIT:
1642 case OMP_CLAUSE_DEVICE:
1643 case OMP_CLAUSE_SCHEDULE:
1644 case OMP_CLAUSE_DIST_SCHEDULE:
1645 case OMP_CLAUSE_NOWAIT:
1646 case OMP_CLAUSE_ORDERED:
1647 case OMP_CLAUSE_COLLAPSE:
1648 case OMP_CLAUSE_UNTIED:
1649 case OMP_CLAUSE_FINAL:
1650 case OMP_CLAUSE_MERGEABLE:
1651 case OMP_CLAUSE_PROC_BIND:
1652 case OMP_CLAUSE_SAFELEN:
1653 case OMP_CLAUSE_SIMDLEN:
1654 case OMP_CLAUSE_ALIGNED:
1655 case OMP_CLAUSE_DEPEND:
1656 case OMP_CLAUSE__LOOPTEMP_:
1657 case OMP_CLAUSE__REDUCTEMP_:
1658 case OMP_CLAUSE_TO:
1659 case OMP_CLAUSE_FROM:
1660 case OMP_CLAUSE_PRIORITY:
1661 case OMP_CLAUSE_GRAINSIZE:
1662 case OMP_CLAUSE_NUM_TASKS:
1663 case OMP_CLAUSE_THREADS:
1664 case OMP_CLAUSE_SIMD:
1665 case OMP_CLAUSE_NOGROUP:
1666 case OMP_CLAUSE_DEFAULTMAP:
1667 case OMP_CLAUSE_ORDER:
1668 case OMP_CLAUSE_BIND:
1669 case OMP_CLAUSE_USE_DEVICE_PTR:
1670 case OMP_CLAUSE_USE_DEVICE_ADDR:
1671 case OMP_CLAUSE_NONTEMPORAL:
1672 case OMP_CLAUSE_ASYNC:
1673 case OMP_CLAUSE_WAIT:
1674 case OMP_CLAUSE_NUM_GANGS:
1675 case OMP_CLAUSE_NUM_WORKERS:
1676 case OMP_CLAUSE_VECTOR_LENGTH:
1677 case OMP_CLAUSE_GANG:
1678 case OMP_CLAUSE_WORKER:
1679 case OMP_CLAUSE_VECTOR:
1680 case OMP_CLAUSE_INDEPENDENT:
1681 case OMP_CLAUSE_AUTO:
1682 case OMP_CLAUSE_SEQ:
1683 case OMP_CLAUSE_TILE:
1684 case OMP_CLAUSE__SIMT_:
1685 case OMP_CLAUSE_IF_PRESENT:
1686 case OMP_CLAUSE_FINALIZE:
1687 case OMP_CLAUSE__CONDTEMP_:
1688 break;
1689
1690 case OMP_CLAUSE__CACHE_:
1691 default:
1692 gcc_unreachable ();
1693 }
1694 }
1695
1696 gcc_checking_assert (!scan_array_reductions
1697 || !is_gimple_omp_oacc (ctx->stmt));
1698 if (scan_array_reductions)
1699 {
1700 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1701 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1702 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1703 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1704 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1705 {
1706 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1707 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1708 }
1709 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1710 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1711 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1712 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1713 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1714 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1715 }
1716 }
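/* For instance (a sketch), for

       #pragma omp parallel shared(a) firstprivate(b)

   the loops above install fields for A and B in the context's record type
   (the future .omp_data_s) and local replacement decls for them in the
   child function.  */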
1717
1718 /* Create a new name for the omp child function.  Returns an identifier.  */
1719
1720 static tree
1721 create_omp_child_function_name (bool task_copy)
1722 {
1723 return clone_function_name_numbered (current_function_decl,
1724 task_copy ? "_omp_cpyfn" : "_omp_fn");
1725 }
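/* E.g. for a function FOO this yields identifiers such as foo._omp_fn.0,
   foo._omp_fn.1, ...; the numbering is handled by
   clone_function_name_numbered.  */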
1726
1727 /* Return true if CTX may belong to offloaded code: either if the current
1728    function is offloaded, or any enclosing context corresponds to a target region.  */
1729
1730 static bool
1731 omp_maybe_offloaded_ctx (omp_context *ctx)
1732 {
1733 if (cgraph_node::get (current_function_decl)->offloadable)
1734 return true;
1735 for (; ctx; ctx = ctx->outer)
1736 if (is_gimple_omp_offloaded (ctx->stmt))
1737 return true;
1738 return false;
1739 }
1740
1741 /* Build a decl for the omp child function.  It will not contain a
1742    body yet, just the bare decl.  */
1743
1744 static void
1745 create_omp_child_function (omp_context *ctx, bool task_copy)
1746 {
1747 tree decl, type, name, t;
1748
1749 name = create_omp_child_function_name (task_copy);
1750 if (task_copy)
1751 type = build_function_type_list (void_type_node, ptr_type_node,
1752 ptr_type_node, NULL_TREE);
1753 else
1754 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1755
1756 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1757
1758 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1759 || !task_copy);
1760 if (!task_copy)
1761 ctx->cb.dst_fn = decl;
1762 else
1763 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1764
1765 TREE_STATIC (decl) = 1;
1766 TREE_USED (decl) = 1;
1767 DECL_ARTIFICIAL (decl) = 1;
1768 DECL_IGNORED_P (decl) = 0;
1769 TREE_PUBLIC (decl) = 0;
1770 DECL_UNINLINABLE (decl) = 1;
1771 DECL_EXTERNAL (decl) = 0;
1772 DECL_CONTEXT (decl) = NULL_TREE;
1773 DECL_INITIAL (decl) = make_node (BLOCK);
1774 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1775 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1776 /* Remove omp declare simd attribute from the new attributes. */
1777 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1778 {
1779 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1780 a = a2;
1781 a = TREE_CHAIN (a);
1782 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1783 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1784 *p = TREE_CHAIN (*p);
1785 else
1786 {
1787 tree chain = TREE_CHAIN (*p);
1788 *p = copy_node (*p);
1789 p = &TREE_CHAIN (*p);
1790 *p = chain;
1791 }
1792 }
1793 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1794 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1795 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1796 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1797 DECL_FUNCTION_VERSIONED (decl)
1798 = DECL_FUNCTION_VERSIONED (current_function_decl);
1799
1800 if (omp_maybe_offloaded_ctx (ctx))
1801 {
1802 cgraph_node::get_create (decl)->offloadable = 1;
1803 if (ENABLE_OFFLOADING)
1804 g->have_offload = true;
1805 }
1806
1807 if (cgraph_node::get_create (decl)->offloadable
1808 && !lookup_attribute ("omp declare target",
1809 DECL_ATTRIBUTES (current_function_decl)))
1810 {
1811 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1812 ? "omp target entrypoint"
1813 : "omp declare target");
1814 DECL_ATTRIBUTES (decl)
1815 = tree_cons (get_identifier (target_attr),
1816 NULL_TREE, DECL_ATTRIBUTES (decl));
1817 }
1818
1819 t = build_decl (DECL_SOURCE_LOCATION (decl),
1820 RESULT_DECL, NULL_TREE, void_type_node);
1821 DECL_ARTIFICIAL (t) = 1;
1822 DECL_IGNORED_P (t) = 1;
1823 DECL_CONTEXT (t) = decl;
1824 DECL_RESULT (decl) = t;
1825
1826 tree data_name = get_identifier (".omp_data_i");
1827 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1828 ptr_type_node);
1829 DECL_ARTIFICIAL (t) = 1;
1830 DECL_NAMELESS (t) = 1;
1831 DECL_ARG_TYPE (t) = ptr_type_node;
1832 DECL_CONTEXT (t) = current_function_decl;
1833 TREE_USED (t) = 1;
1834 TREE_READONLY (t) = 1;
1835 DECL_ARGUMENTS (decl) = t;
1836 if (!task_copy)
1837 ctx->receiver_decl = t;
1838 else
1839 {
1840 t = build_decl (DECL_SOURCE_LOCATION (decl),
1841 PARM_DECL, get_identifier (".omp_data_o"),
1842 ptr_type_node);
1843 DECL_ARTIFICIAL (t) = 1;
1844 DECL_NAMELESS (t) = 1;
1845 DECL_ARG_TYPE (t) = ptr_type_node;
1846 DECL_CONTEXT (t) = current_function_decl;
1847 TREE_USED (t) = 1;
1848 TREE_ADDRESSABLE (t) = 1;
1849 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1850 DECL_ARGUMENTS (decl) = t;
1851 }
1852
1853 /* Allocate memory for the function structure. The call to
1854 push_struct_function clobbers CFUN, so we need to restore
1855 it afterward with pop_cfun. */
1856 push_struct_function (decl);
1857 cfun->function_end_locus = gimple_location (ctx->stmt);
1858 init_tree_ssa (cfun);
1859 pop_cfun ();
1860 }
1861
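/* The attribute stripping above uses the usual pointer-to-pointer idiom for
   deleting matching nodes from a singly linked list, combined with
   copy-on-write for the kept nodes, since the list is shared with
   current_function_decl and must not be modified in place.  A minimal
   standalone sketch of the bare idiom, using a hypothetical node type
   instead of GCC trees:

     struct node { int key; struct node *next; };

     static void
     remove_matching (struct node **p, int key)
     {
       while (*p)
         if ((*p)->key == key)
           *p = (*p)->next;   // unlink: the link now points past the match
         else
           p = &(*p)->next;   // keep: advance to the next link field
     }
*/
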
1862 /* Callback for walk_gimple_seq. Check whether a combined parallel
1863 contains a gimple_omp_for_combined_into_p OMP_FOR. */
1864
1865 tree
1866 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1867 bool *handled_ops_p,
1868 struct walk_stmt_info *wi)
1869 {
1870 gimple *stmt = gsi_stmt (*gsi_p);
1871
1872 *handled_ops_p = true;
1873 switch (gimple_code (stmt))
1874 {
1875 WALK_SUBSTMTS;
1876
1877 case GIMPLE_OMP_FOR:
1878 if (gimple_omp_for_combined_into_p (stmt)
1879 && gimple_omp_for_kind (stmt)
1880 == *(const enum gf_mask *) (wi->info))
1881 {
1882 wi->info = stmt;
1883 return integer_zero_node;
1884 }
1885 break;
1886 default:
1887 break;
1888 }
1889 return NULL;
1890 }
1891
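/* For example (illustrative source), in

     #pragma omp parallel for
     for (i = 0; i < n; i++)
       a[i] = i;

   the GIMPLE_OMP_FOR is marked gimple_omp_for_combined_into_p, and the walk
   above locates it inside the GIMPLE_OMP_PARALLEL body so that the enclosing
   construct can hand it precomputed bounds through _LOOPTEMP_ clauses (see
   add_taskreg_looptemp_clauses below).  */
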
1892 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1893
1894 static void
1895 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1896 omp_context *outer_ctx)
1897 {
1898 struct walk_stmt_info wi;
1899
1900 memset (&wi, 0, sizeof (wi));
1901 wi.val_only = true;
1902 wi.info = (void *) &msk;
1903 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1904 if (wi.info != (void *) &msk)
1905 {
1906 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1907 struct omp_for_data fd;
1908 omp_extract_for_data (for_stmt, &fd, NULL);
1909 /* We need two temporaries with fd.iter_type (istart/iend)
1910 and then (fd.collapse - 1) temporaries with the same
1911 type for count2 ... countN-1 vars if not constant. */
1912 size_t count = 2, i;
1913 tree type = fd.iter_type;
1914 if (fd.collapse > 1
1915 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1916 {
1917 count += fd.collapse - 1;
1918 /* If there are lastprivate clauses on the inner
1919 GIMPLE_OMP_FOR, add one more temporary for the total number
1920 of iterations (product of count1 ... countN-1). */
1921 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1922 OMP_CLAUSE_LASTPRIVATE))
1923 count++;
1924 else if (msk == GF_OMP_FOR_KIND_FOR
1925 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1926 OMP_CLAUSE_LASTPRIVATE))
1927 count++;
1928 }
1929 for (i = 0; i < count; i++)
1930 {
1931 tree temp = create_tmp_var (type);
1932 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1933 insert_decl_map (&outer_ctx->cb, temp, temp);
1934 OMP_CLAUSE_DECL (c) = temp;
1935 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1936 gimple_omp_taskreg_set_clauses (stmt, c);
1937 }
1938 }
1939 if (msk == GF_OMP_FOR_KIND_TASKLOOP
1940 && omp_find_clause (gimple_omp_task_clauses (stmt),
1941 OMP_CLAUSE_REDUCTION))
1942 {
1943 tree type = build_pointer_type (pointer_sized_int_node);
1944 tree temp = create_tmp_var (type);
1945 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1946 insert_decl_map (&outer_ctx->cb, temp, temp);
1947 OMP_CLAUSE_DECL (c) = temp;
1948 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1949 gimple_omp_task_set_clauses (stmt, c);
1950 }
1951 }
1952
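/* A worked example of the temporary count above (illustrative):

     #pragma omp parallel for collapse(2) lastprivate(k)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
         k = f (i, j);

   With a non-constant total iteration count this needs 2 (istart/iend)
   + 1 (the fd.collapse - 1 count temporaries) + 1 (the total iteration
   count needed for lastprivate) = 4 _LOOPTEMP_ clauses on the parallel.  */
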
1953 /* Scan an OpenMP parallel directive. */
1954
1955 static void
1956 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1957 {
1958 omp_context *ctx;
1959 tree name;
1960 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1961
1962 /* Ignore parallel directives with empty bodies, unless there
1963 are copyin clauses. */
1964 if (optimize > 0
1965 && empty_body_p (gimple_omp_body (stmt))
1966 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1967 OMP_CLAUSE_COPYIN) == NULL)
1968 {
1969 gsi_replace (gsi, gimple_build_nop (), false);
1970 return;
1971 }
1972
1973 if (gimple_omp_parallel_combined_p (stmt))
1974 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1975 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1976 OMP_CLAUSE_REDUCTION);
1977 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1978 if (OMP_CLAUSE_REDUCTION_TASK (c))
1979 {
1980 tree type = build_pointer_type (pointer_sized_int_node);
1981 tree temp = create_tmp_var (type);
1982 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1983 if (outer_ctx)
1984 insert_decl_map (&outer_ctx->cb, temp, temp);
1985 OMP_CLAUSE_DECL (c) = temp;
1986 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1987 gimple_omp_parallel_set_clauses (stmt, c);
1988 break;
1989 }
1990 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1991 break;
1992
1993 ctx = new_omp_context (stmt, outer_ctx);
1994 taskreg_contexts.safe_push (ctx);
1995 if (taskreg_nesting_level > 1)
1996 ctx->is_nested = true;
1997 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1998 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1999 name = create_tmp_var_name (".omp_data_s");
2000 name = build_decl (gimple_location (stmt),
2001 TYPE_DECL, name, ctx->record_type);
2002 DECL_ARTIFICIAL (name) = 1;
2003 DECL_NAMELESS (name) = 1;
2004 TYPE_NAME (ctx->record_type) = name;
2005 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2006 create_omp_child_function (ctx, false);
2007 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2008
2009 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2010 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2011
2012 if (TYPE_FIELDS (ctx->record_type) == NULL)
2013 ctx->record_type = ctx->receiver_decl = NULL;
2014 }
2015
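/* For instance (illustrative), when optimizing,

     #pragma omp parallel
     ;

   is replaced by a nop above, whereas the same directive with copyin(t)
   for some threadprivate t is kept: the copyin alone has the side effect
   of copying the master thread's value of t into the other threads.  */
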
2016 /* Scan an OpenMP task directive. */
2017
2018 static void
2019 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2020 {
2021 omp_context *ctx;
2022 tree name, t;
2023 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2024
2025 /* Ignore task directives with empty bodies, unless they have a depend
2026 clause. */
2027 if (optimize > 0
2028 && gimple_omp_body (stmt)
2029 && empty_body_p (gimple_omp_body (stmt))
2030 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2031 {
2032 gsi_replace (gsi, gimple_build_nop (), false);
2033 return;
2034 }
2035
2036 if (gimple_omp_task_taskloop_p (stmt))
2037 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2038
2039 ctx = new_omp_context (stmt, outer_ctx);
2040
2041 if (gimple_omp_task_taskwait_p (stmt))
2042 {
2043 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2044 return;
2045 }
2046
2047 taskreg_contexts.safe_push (ctx);
2048 if (taskreg_nesting_level > 1)
2049 ctx->is_nested = true;
2050 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2051 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2052 name = create_tmp_var_name (".omp_data_s");
2053 name = build_decl (gimple_location (stmt),
2054 TYPE_DECL, name, ctx->record_type);
2055 DECL_ARTIFICIAL (name) = 1;
2056 DECL_NAMELESS (name) = 1;
2057 TYPE_NAME (ctx->record_type) = name;
2058 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2059 create_omp_child_function (ctx, false);
2060 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2061
2062 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2063
2064 if (ctx->srecord_type)
2065 {
2066 name = create_tmp_var_name (".omp_data_a");
2067 name = build_decl (gimple_location (stmt),
2068 TYPE_DECL, name, ctx->srecord_type);
2069 DECL_ARTIFICIAL (name) = 1;
2070 DECL_NAMELESS (name) = 1;
2071 TYPE_NAME (ctx->srecord_type) = name;
2072 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2073 create_omp_child_function (ctx, true);
2074 }
2075
2076 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2077
2078 if (TYPE_FIELDS (ctx->record_type) == NULL)
2079 {
2080 ctx->record_type = ctx->receiver_decl = NULL;
2081 t = build_int_cst (long_integer_type_node, 0);
2082 gimple_omp_task_set_arg_size (stmt, t);
2083 t = build_int_cst (long_integer_type_node, 1);
2084 gimple_omp_task_set_arg_align (stmt, t);
2085 }
2086 }
2087
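/* When the record turns out empty, the arg size/align set above let the
   runtime skip the data copy entirely; conceptually (a much simplified
   sketch of the libgomp interface, which takes further arguments)

     GOMP_task (foo._omp_fn.1, NULL, NULL, 0, 1, ...);   // arg_size 0, arg_align 1
*/
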
2088 /* Helper function for finish_taskreg_scan, called through walk_tree.
2089 If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
2090 tree, replace it in the expression. */
2091
2092 static tree
2093 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2094 {
2095 if (VAR_P (*tp))
2096 {
2097 omp_context *ctx = (omp_context *) data;
2098 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2099 if (t != *tp)
2100 {
2101 if (DECL_HAS_VALUE_EXPR_P (t))
2102 t = unshare_expr (DECL_VALUE_EXPR (t));
2103 *tp = t;
2104 }
2105 *walk_subtrees = 0;
2106 }
2107 else if (IS_TYPE_OR_DECL_P (*tp))
2108 *walk_subtrees = 0;
2109 return NULL_TREE;
2110 }
2111
2112 /* If any decls have been made addressable during scan_omp,
2113 adjust their fields if needed, and layout record types
2114 of parallel/task constructs. */
2115
2116 static void
2117 finish_taskreg_scan (omp_context *ctx)
2118 {
2119 if (ctx->record_type == NULL_TREE)
2120 return;
2121
2122 /* If any task_shared_vars were needed, verify for all
2123 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2124 statements whether use_pointer_for_field has changed
2125 because of that. If it did, update the field types now. */
2126 if (task_shared_vars)
2127 {
2128 tree c;
2129
2130 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2131 c; c = OMP_CLAUSE_CHAIN (c))
2132 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2133 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2134 {
2135 tree decl = OMP_CLAUSE_DECL (c);
2136
2137 /* Global variables don't need to be copied;
2138 the receiver side will use them directly. */
2139 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2140 continue;
2141 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2142 || !use_pointer_for_field (decl, ctx))
2143 continue;
2144 tree field = lookup_field (decl, ctx);
2145 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2146 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2147 continue;
2148 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2149 TREE_THIS_VOLATILE (field) = 0;
2150 DECL_USER_ALIGN (field) = 0;
2151 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2152 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2153 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2154 if (ctx->srecord_type)
2155 {
2156 tree sfield = lookup_sfield (decl, ctx);
2157 TREE_TYPE (sfield) = TREE_TYPE (field);
2158 TREE_THIS_VOLATILE (sfield) = 0;
2159 DECL_USER_ALIGN (sfield) = 0;
2160 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2161 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2162 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2163 }
2164 }
2165 }
2166
2167 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2168 {
2169 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2170 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2171 if (c)
2172 {
2173 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2174 expects to find it at the start of data. */
2175 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2176 tree *p = &TYPE_FIELDS (ctx->record_type);
2177 while (*p)
2178 if (*p == f)
2179 {
2180 *p = DECL_CHAIN (*p);
2181 break;
2182 }
2183 else
2184 p = &DECL_CHAIN (*p);
2185 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2186 TYPE_FIELDS (ctx->record_type) = f;
2187 }
2188 layout_type (ctx->record_type);
2189 fixup_child_record_type (ctx);
2190 }
2191 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2192 {
2193 layout_type (ctx->record_type);
2194 fixup_child_record_type (ctx);
2195 }
2196 else
2197 {
2198 location_t loc = gimple_location (ctx->stmt);
2199 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2200 /* Move VLA fields to the end. */
2201 p = &TYPE_FIELDS (ctx->record_type);
2202 while (*p)
2203 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2204 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2205 {
2206 *q = *p;
2207 *p = TREE_CHAIN (*p);
2208 TREE_CHAIN (*q) = NULL_TREE;
2209 q = &TREE_CHAIN (*q);
2210 }
2211 else
2212 p = &DECL_CHAIN (*p);
2213 *p = vla_fields;
2214 if (gimple_omp_task_taskloop_p (ctx->stmt))
2215 {
2216 /* Move the fields corresponding to the first and second _looptemp_
2217 clauses to the front. These are filled in by GOMP_taskloop
2218 and thus need to be in specific positions. */
2219 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2220 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2221 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2222 OMP_CLAUSE__LOOPTEMP_);
2223 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2224 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2225 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2226 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2227 p = &TYPE_FIELDS (ctx->record_type);
2228 while (*p)
2229 if (*p == f1 || *p == f2 || *p == f3)
2230 *p = DECL_CHAIN (*p);
2231 else
2232 p = &DECL_CHAIN (*p);
2233 DECL_CHAIN (f1) = f2;
2234 if (c3)
2235 {
2236 DECL_CHAIN (f2) = f3;
2237 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2238 }
2239 else
2240 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2241 TYPE_FIELDS (ctx->record_type) = f1;
2242 if (ctx->srecord_type)
2243 {
2244 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2245 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2246 if (c3)
2247 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2248 p = &TYPE_FIELDS (ctx->srecord_type);
2249 while (*p)
2250 if (*p == f1 || *p == f2 || *p == f3)
2251 *p = DECL_CHAIN (*p);
2252 else
2253 p = &DECL_CHAIN (*p);
2254 DECL_CHAIN (f1) = f2;
2255 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2256 if (c3)
2257 {
2258 DECL_CHAIN (f2) = f3;
2259 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2260 }
2261 else
2262 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2263 TYPE_FIELDS (ctx->srecord_type) = f1;
2264 }
2265 }
2266 layout_type (ctx->record_type);
2267 fixup_child_record_type (ctx);
2268 if (ctx->srecord_type)
2269 layout_type (ctx->srecord_type);
2270 tree t = fold_convert_loc (loc, long_integer_type_node,
2271 TYPE_SIZE_UNIT (ctx->record_type));
2272 if (TREE_CODE (t) != INTEGER_CST)
2273 {
2274 t = unshare_expr (t);
2275 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2276 }
2277 gimple_omp_task_set_arg_size (ctx->stmt, t);
2278 t = build_int_cst (long_integer_type_node,
2279 TYPE_ALIGN_UNIT (ctx->record_type));
2280 gimple_omp_task_set_arg_align (ctx->stmt, t);
2281 }
2282 }
2283
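/* The motivation for moving variable-sized fields last (illustrative):
   given

     void f (int n)
     {
       int vla[n], x = 0;
       #pragma omp task
         use (vla, x);   // use () is a placeholder
     }

   the capture record is laid out roughly as

     struct .omp_data_s { int x; int vla[n]; };

   Fixed-size fields keep constant offsets and only the trailing
   variable-sized part needs runtime offset computation; the resulting
   non-constant arg_size is then rewritten by finish_taskreg_remap.  */
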
2284 /* Find the enclosing offload context. */
2285
2286 static omp_context *
2287 enclosing_target_ctx (omp_context *ctx)
2288 {
2289 for (; ctx; ctx = ctx->outer)
2290 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2291 break;
2292
2293 return ctx;
2294 }
2295
2296 /* Return true if ctx is part of an oacc kernels region. */
2297
2298 static bool
2299 ctx_in_oacc_kernels_region (omp_context *ctx)
2300 {
2301 for (; ctx != NULL; ctx = ctx->outer)
2302 {
2303 gimple *stmt = ctx->stmt;
2304 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2305 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2306 return true;
2307 }
2308
2309 return false;
2310 }
2311
2312 /* Check the parallelism clauses inside a kernels region.
2313 Until kernels handling moves to use the same loop indirection
2314 scheme as parallel, we need to do this checking early. */
2315
2316 static unsigned
2317 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2318 {
2319 bool checking = true;
2320 unsigned outer_mask = 0;
2321 unsigned this_mask = 0;
2322 bool has_seq = false, has_auto = false;
2323
2324 if (ctx->outer)
2325 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2326 if (!stmt)
2327 {
2328 checking = false;
2329 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2330 return outer_mask;
2331 stmt = as_a <gomp_for *> (ctx->stmt);
2332 }
2333
2334 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2335 {
2336 switch (OMP_CLAUSE_CODE (c))
2337 {
2338 case OMP_CLAUSE_GANG:
2339 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2340 break;
2341 case OMP_CLAUSE_WORKER:
2342 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2343 break;
2344 case OMP_CLAUSE_VECTOR:
2345 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2346 break;
2347 case OMP_CLAUSE_SEQ:
2348 has_seq = true;
2349 break;
2350 case OMP_CLAUSE_AUTO:
2351 has_auto = true;
2352 break;
2353 default:
2354 break;
2355 }
2356 }
2357
2358 if (checking)
2359 {
2360 if (has_seq && (this_mask || has_auto))
2361 error_at (gimple_location (stmt), "%<seq%> overrides other"
2362 " OpenACC loop specifiers");
2363 else if (has_auto && this_mask)
2364 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2365 " OpenACC loop specifiers");
2366
2367 if (this_mask & outer_mask)
2368 error_at (gimple_location (stmt), "inner loop uses same"
2369 " OpenACC parallelism as containing loop");
2370 }
2371
2372 return outer_mask | this_mask;
2373 }
2374
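/* Example of the checks above (illustrative OpenACC source):

     #pragma acc kernels
     {
       #pragma acc loop gang
       for (i = 0; i < n; i++)
         {
           #pragma acc loop gang        // error: inner loop uses same OpenACC
           for (j = 0; j < m; j++)      // parallelism as containing loop
             a[i][j] = 0;
         }
     }

   Gang on the outer loop with, say, vector on the inner one is accepted,
   since the two dimension masks do not overlap.  */
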
2375 /* Scan a GIMPLE_OMP_FOR. */
2376
2377 static omp_context *
2378 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2379 {
2380 omp_context *ctx;
2381 size_t i;
2382 tree clauses = gimple_omp_for_clauses (stmt);
2383
2384 ctx = new_omp_context (stmt, outer_ctx);
2385
2386 if (is_gimple_omp_oacc (stmt))
2387 {
2388 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2389
2390 if (!tgt || is_oacc_parallel_or_serial (tgt))
2391 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2392 {
2393 char const *check = NULL;
2394
2395 switch (OMP_CLAUSE_CODE (c))
2396 {
2397 case OMP_CLAUSE_GANG:
2398 check = "gang";
2399 break;
2400
2401 case OMP_CLAUSE_WORKER:
2402 check = "worker";
2403 break;
2404
2405 case OMP_CLAUSE_VECTOR:
2406 check = "vector";
2407 break;
2408
2409 default:
2410 break;
2411 }
2412
2413 if (check && OMP_CLAUSE_OPERAND (c, 0))
2414 error_at (gimple_location (stmt),
2415 "argument not permitted on %qs clause in"
2416 " OpenACC %<parallel%> or %<serial%>", check);
2417 }
2418
2419 if (tgt && is_oacc_kernels (tgt))
2420 {
2421 /* Strip out reductions, as they are not handled yet. */
2422 tree *prev_ptr = &clauses;
2423
2424 while (tree probe = *prev_ptr)
2425 {
2426 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2427
2428 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2429 *prev_ptr = *next_ptr;
2430 else
2431 prev_ptr = next_ptr;
2432 }
2433
2434 gimple_omp_for_set_clauses (stmt, clauses);
2435 check_oacc_kernel_gwv (stmt, ctx);
2436 }
2437
2438 /* Collect all variables named in reductions on this loop. Ensure
2439 that, if this loop has a reduction on some variable v, and there is
2440 a reduction on v somewhere in an outer context, then there is a
2441 reduction on v on all intervening loops as well. */
2442 tree local_reduction_clauses = NULL;
2443 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2444 {
2445 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2446 local_reduction_clauses
2447 = tree_cons (NULL, c, local_reduction_clauses);
2448 }
2449 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2450 ctx->outer_reduction_clauses
2451 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2452 ctx->outer->outer_reduction_clauses);
2453 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2454 tree local_iter = local_reduction_clauses;
2455 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2456 {
2457 tree local_clause = TREE_VALUE (local_iter);
2458 tree local_var = OMP_CLAUSE_DECL (local_clause);
2459 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2460 bool have_outer_reduction = false;
2461 tree ctx_iter = outer_reduction_clauses;
2462 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2463 {
2464 tree outer_clause = TREE_VALUE (ctx_iter);
2465 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2466 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2467 if (outer_var == local_var && outer_op != local_op)
2468 {
2469 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2470 "conflicting reduction operations for %qE",
2471 local_var);
2472 inform (OMP_CLAUSE_LOCATION (outer_clause),
2473 "location of the previous reduction for %qE",
2474 outer_var);
2475 }
2476 if (outer_var == local_var)
2477 {
2478 have_outer_reduction = true;
2479 break;
2480 }
2481 }
2482 if (have_outer_reduction)
2483 {
2484 /* There is a reduction on outer_var both on this loop and on
2485 some enclosing loop. Walk up the context tree until such a
2486 loop with a reduction on outer_var is found, and complain
2487 about all intervening loops that do not have such a
2488 reduction. */
2489 struct omp_context *curr_loop = ctx->outer;
2490 bool found = false;
2491 while (curr_loop != NULL)
2492 {
2493 tree curr_iter = curr_loop->local_reduction_clauses;
2494 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2495 {
2496 tree curr_clause = TREE_VALUE (curr_iter);
2497 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2498 if (curr_var == local_var)
2499 {
2500 found = true;
2501 break;
2502 }
2503 }
2504 if (!found)
2505 warning_at (gimple_location (curr_loop->stmt), 0,
2506 "nested loop in reduction needs "
2507 "reduction clause for %qE",
2508 local_var);
2509 else
2510 break;
2511 curr_loop = curr_loop->outer;
2512 }
2513 }
2514 }
2515 ctx->local_reduction_clauses = local_reduction_clauses;
2516 ctx->outer_reduction_clauses
2517 = chainon (unshare_expr (ctx->local_reduction_clauses),
2518 ctx->outer_reduction_clauses);
2519 }
2520
2521 scan_sharing_clauses (clauses, ctx);
2522
2523 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2524 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2525 {
2526 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2527 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2528 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2529 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2530 }
2531 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2532 return ctx;
2533 }
2534
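/* Example of the intervening-loop reduction check above (illustrative):

     #pragma acc parallel loop reduction(+:sum)
     for (i = 0; i < n; i++)
       {
         #pragma acc loop               // warning: nested loop in reduction
         for (j = 0; j < m; j++)        // needs reduction clause for 'sum'
           {
             #pragma acc loop reduction(+:sum)
             for (k = 0; k < l; k++)
               sum += a[i][j][k];
           }
       }
*/
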
2535 /* Duplicate #pragma omp simd, one for SIMT, another for SIMD. */
2536
2537 static void
2538 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2539 omp_context *outer_ctx)
2540 {
2541 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2542 gsi_replace (gsi, bind, false);
2543 gimple_seq seq = NULL;
2544 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2545 tree cond = create_tmp_var_raw (integer_type_node);
2546 DECL_CONTEXT (cond) = current_function_decl;
2547 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2548 gimple_bind_set_vars (bind, cond);
2549 gimple_call_set_lhs (g, cond);
2550 gimple_seq_add_stmt (&seq, g);
2551 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2552 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2553 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2554 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2555 gimple_seq_add_stmt (&seq, g);
2556 g = gimple_build_label (lab1);
2557 gimple_seq_add_stmt (&seq, g);
2558 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2559 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2560 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2561 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2562 gimple_omp_for_set_clauses (new_stmt, clause);
2563 gimple_seq_add_stmt (&seq, new_stmt);
2564 g = gimple_build_goto (lab3);
2565 gimple_seq_add_stmt (&seq, g);
2566 g = gimple_build_label (lab2);
2567 gimple_seq_add_stmt (&seq, g);
2568 gimple_seq_add_stmt (&seq, stmt);
2569 g = gimple_build_label (lab3);
2570 gimple_seq_add_stmt (&seq, g);
2571 gimple_bind_set_body (bind, seq);
2572 update_stmt (bind);
2573 scan_omp_for (new_stmt, outer_ctx);
2574 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2575 }
2576
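/* The bind built above has roughly this shape (sketch of the resulting
   GIMPLE):

     cond = .GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1:
       // copy of the simd loop, carrying an artificial _simt_ clause
       goto lab3;
     lab2:
       // original simd loop, for ordinary vectorization
     lab3:

   Once the offload target is known, one version is selected and the other
   is discarded.  */
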
2577 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2578 struct walk_stmt_info *);
2579 static omp_context *maybe_lookup_ctx (gimple *);
2580
2581 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2582 for the scan phase loop. */
2583
2584 static void
2585 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2586 omp_context *outer_ctx)
2587 {
2588 /* The only change between inclusive and exclusive scan will be
2589 within the first simd loop, so just use inclusive in the
2590 worksharing loop. */
2591 outer_ctx->scan_inclusive = true;
2592 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2593 OMP_CLAUSE_DECL (c) = integer_zero_node;
2594
2595 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2596 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2597 gsi_replace (gsi, input_stmt, false);
2598 gimple_seq input_body = NULL;
2599 gimple_seq_add_stmt (&input_body, stmt);
2600 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2601
2602 gimple_stmt_iterator input1_gsi = gsi_none ();
2603 struct walk_stmt_info wi;
2604 memset (&wi, 0, sizeof (wi));
2605 wi.val_only = true;
2606 wi.info = (void *) &input1_gsi;
2607 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2608 gcc_assert (!gsi_end_p (input1_gsi));
2609
2610 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2611 gsi_next (&input1_gsi);
2612 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2613 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2614 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2615 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2616 std::swap (input_stmt1, scan_stmt1);
2617
2618 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2619 gimple_omp_set_body (input_stmt1, NULL);
2620
2621 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2622 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2623
2624 gimple_omp_set_body (input_stmt1, input_body1);
2625 gimple_omp_set_body (scan_stmt1, NULL);
2626
2627 gimple_stmt_iterator input2_gsi = gsi_none ();
2628 memset (&wi, 0, sizeof (wi));
2629 wi.val_only = true;
2630 wi.info = (void *) &input2_gsi;
2631 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2632 NULL, &wi);
2633 gcc_assert (!gsi_end_p (input2_gsi));
2634
2635 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2636 gsi_next (&input2_gsi);
2637 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2638 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2639 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2640 std::swap (input_stmt2, scan_stmt2);
2641
2642 gimple_omp_set_body (input_stmt2, NULL);
2643
2644 gimple_omp_set_body (input_stmt, input_body);
2645 gimple_omp_set_body (scan_stmt, scan_body);
2646
2647 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2648 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2649
2650 ctx = new_omp_context (scan_stmt, outer_ctx);
2651 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2652
2653 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2654 }
2655
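/* Illustrative source for this transformation:

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];                     // input phase
         #pragma omp scan inclusive (r)
         b[i] = r;                      // scan phase
       }

   The simd loop is duplicated: one copy keeps only the input phase, the
   other only the scan phase, and the copies are wrapped in the
   GIMPLE_OMP_SCAN statements built above.  */
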
2656 /* Scan an OpenMP sections directive. */
2657
2658 static void
2659 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2660 {
2661 omp_context *ctx;
2662
2663 ctx = new_omp_context (stmt, outer_ctx);
2664 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2665 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2666 }
2667
2668 /* Scan an OpenMP single directive. */
2669
2670 static void
2671 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2672 {
2673 omp_context *ctx;
2674 tree name;
2675
2676 ctx = new_omp_context (stmt, outer_ctx);
2677 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2678 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2679 name = create_tmp_var_name (".omp_copy_s");
2680 name = build_decl (gimple_location (stmt),
2681 TYPE_DECL, name, ctx->record_type);
2682 TYPE_NAME (ctx->record_type) = name;
2683
2684 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2685 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2686
2687 if (TYPE_FIELDS (ctx->record_type) == NULL)
2688 ctx->record_type = NULL;
2689 else
2690 layout_type (ctx->record_type);
2691 }
2692
2693 /* Scan a GIMPLE_OMP_TARGET. */
2694
2695 static void
2696 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2697 {
2698 omp_context *ctx;
2699 tree name;
2700 bool offloaded = is_gimple_omp_offloaded (stmt);
2701 tree clauses = gimple_omp_target_clauses (stmt);
2702
2703 ctx = new_omp_context (stmt, outer_ctx);
2704 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2705 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2706 name = create_tmp_var_name (".omp_data_t");
2707 name = build_decl (gimple_location (stmt),
2708 TYPE_DECL, name, ctx->record_type);
2709 DECL_ARTIFICIAL (name) = 1;
2710 DECL_NAMELESS (name) = 1;
2711 TYPE_NAME (ctx->record_type) = name;
2712 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2713
2714 if (offloaded)
2715 {
2716 create_omp_child_function (ctx, false);
2717 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2718 }
2719
2720 scan_sharing_clauses (clauses, ctx);
2721 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2722
2723 if (TYPE_FIELDS (ctx->record_type) == NULL)
2724 ctx->record_type = ctx->receiver_decl = NULL;
2725 else
2726 {
2727 TYPE_FIELDS (ctx->record_type)
2728 = nreverse (TYPE_FIELDS (ctx->record_type));
2729 if (flag_checking)
2730 {
2731 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2732 for (tree field = TYPE_FIELDS (ctx->record_type);
2733 field;
2734 field = DECL_CHAIN (field))
2735 gcc_assert (DECL_ALIGN (field) == align);
2736 }
2737 layout_type (ctx->record_type);
2738 if (offloaded)
2739 fixup_child_record_type (ctx);
2740 }
2741 }
2742
2743 /* Scan an OpenMP teams directive. */
2744
2745 static void
2746 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2747 {
2748 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2749
2750 if (!gimple_omp_teams_host (stmt))
2751 {
2752 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2753 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2754 return;
2755 }
2756 taskreg_contexts.safe_push (ctx);
2757 gcc_assert (taskreg_nesting_level == 1);
2758 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2759 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2760 tree name = create_tmp_var_name (".omp_data_s");
2761 name = build_decl (gimple_location (stmt),
2762 TYPE_DECL, name, ctx->record_type);
2763 DECL_ARTIFICIAL (name) = 1;
2764 DECL_NAMELESS (name) = 1;
2765 TYPE_NAME (ctx->record_type) = name;
2766 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2767 create_omp_child_function (ctx, false);
2768 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2769
2770 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2771 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2772
2773 if (TYPE_FIELDS (ctx->record_type) == NULL)
2774 ctx->record_type = ctx->receiver_decl = NULL;
2775 }
2776
2777 /* Check nesting restrictions. */
2778 static bool
2779 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2780 {
2781 tree c;
2782
2783 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2784 inside an OpenACC CTX. */
2785 if (!(is_gimple_omp (stmt)
2786 && is_gimple_omp_oacc (stmt))
2787 /* Except for atomic codes that we share with OpenMP. */
2788 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2789 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2790 {
2791 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2792 {
2793 error_at (gimple_location (stmt),
2794 "non-OpenACC construct inside of OpenACC routine");
2795 return false;
2796 }
2797 else
2798 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2799 if (is_gimple_omp (octx->stmt)
2800 && is_gimple_omp_oacc (octx->stmt))
2801 {
2802 error_at (gimple_location (stmt),
2803 "non-OpenACC construct inside of OpenACC region");
2804 return false;
2805 }
2806 }
2807
2808 if (ctx != NULL)
2809 {
2810 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2811 && ctx->outer
2812 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2813 ctx = ctx->outer;
2814 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2815 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2816 && !ctx->loop_p)
2817 {
2818 c = NULL_TREE;
2819 if (ctx->order_concurrent
2820 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2821 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2822 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2823 {
2824 error_at (gimple_location (stmt),
2825 "OpenMP constructs other than %<parallel%>, %<loop%>"
2826 " or %<simd%> may not be nested inside a region with"
2827 " the %<order(concurrent)%> clause");
2828 return false;
2829 }
2830 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2831 {
2832 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2833 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2834 {
2835 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2836 && (ctx->outer == NULL
2837 || !gimple_omp_for_combined_into_p (ctx->stmt)
2838 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2839 || (gimple_omp_for_kind (ctx->outer->stmt)
2840 != GF_OMP_FOR_KIND_FOR)
2841 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2842 {
2843 error_at (gimple_location (stmt),
2844 "%<ordered simd threads%> must be closely "
2845 "nested inside of %<for simd%> region");
2846 return false;
2847 }
2848 return true;
2849 }
2850 }
2851 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2852 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2853 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2854 return true;
2855 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2856 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2857 return true;
2858 error_at (gimple_location (stmt),
2859 "OpenMP constructs other than "
2860 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2861 "not be nested inside %<simd%> region");
2862 return false;
2863 }
2864 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2865 {
2866 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2867 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2868 && omp_find_clause (gimple_omp_for_clauses (stmt),
2869 OMP_CLAUSE_BIND) == NULL_TREE))
2870 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2871 {
2872 error_at (gimple_location (stmt),
2873 "only %<distribute%>, %<parallel%> or %<loop%> "
2874 "regions are allowed to be strictly nested inside "
2875 "%<teams%> region");
2876 return false;
2877 }
2878 }
2879 else if (ctx->order_concurrent
2880 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2881 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2882 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2883 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2884 {
2885 if (ctx->loop_p)
2886 error_at (gimple_location (stmt),
2887 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2888 "%<simd%> may not be nested inside a %<loop%> region");
2889 else
2890 error_at (gimple_location (stmt),
2891 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2892 "%<simd%> may not be nested inside a region with "
2893 "the %<order(concurrent)%> clause");
2894 return false;
2895 }
2896 }
2897 switch (gimple_code (stmt))
2898 {
2899 case GIMPLE_OMP_FOR:
2900 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2901 return true;
2902 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2903 {
2904 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2905 {
2906 error_at (gimple_location (stmt),
2907 "%<distribute%> region must be strictly nested "
2908 "inside %<teams%> construct");
2909 return false;
2910 }
2911 return true;
2912 }
2913 /* We split a taskloop into a task with a nested taskloop in it. */
2914 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2915 return true;
2916 /* For now, hope this will change and loop bind(parallel) will not
2917 be allowed in lots of contexts. */
2918 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
2919 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
2920 return true;
2921 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2922 {
2923 bool ok = false;
2924
2925 if (ctx)
2926 switch (gimple_code (ctx->stmt))
2927 {
2928 case GIMPLE_OMP_FOR:
2929 ok = (gimple_omp_for_kind (ctx->stmt)
2930 == GF_OMP_FOR_KIND_OACC_LOOP);
2931 break;
2932
2933 case GIMPLE_OMP_TARGET:
2934 switch (gimple_omp_target_kind (ctx->stmt))
2935 {
2936 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2937 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2938 case GF_OMP_TARGET_KIND_OACC_SERIAL:
2939 ok = true;
2940 break;
2941
2942 default:
2943 break;
2944 }
2945
2946 default:
2947 break;
2948 }
2949 else if (oacc_get_fn_attrib (current_function_decl))
2950 ok = true;
2951 if (!ok)
2952 {
2953 error_at (gimple_location (stmt),
2954 "OpenACC loop directive must be associated with"
2955 " an OpenACC compute region");
2956 return false;
2957 }
2958 }
2959 /* FALLTHRU */
2960 case GIMPLE_CALL:
2961 if (is_gimple_call (stmt)
2962 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2963 == BUILT_IN_GOMP_CANCEL
2964 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2965 == BUILT_IN_GOMP_CANCELLATION_POINT))
2966 {
2967 const char *bad = NULL;
2968 const char *kind = NULL;
2969 const char *construct
2970 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2971 == BUILT_IN_GOMP_CANCEL)
2972 ? "cancel"
2973 : "cancellation point";
2974 if (ctx == NULL)
2975 {
2976 error_at (gimple_location (stmt), "orphaned %qs construct",
2977 construct);
2978 return false;
2979 }
2980 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2981 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2982 : 0)
2983 {
2984 case 1:
2985 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2986 bad = "parallel";
2987 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2988 == BUILT_IN_GOMP_CANCEL
2989 && !integer_zerop (gimple_call_arg (stmt, 1)))
2990 ctx->cancellable = true;
2991 kind = "parallel";
2992 break;
2993 case 2:
2994 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2995 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2996 bad = "for";
2997 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2998 == BUILT_IN_GOMP_CANCEL
2999 && !integer_zerop (gimple_call_arg (stmt, 1)))
3000 {
3001 ctx->cancellable = true;
3002 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3003 OMP_CLAUSE_NOWAIT))
3004 warning_at (gimple_location (stmt), 0,
3005 "%<cancel for%> inside "
3006 "%<nowait%> for construct");
3007 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3008 OMP_CLAUSE_ORDERED))
3009 warning_at (gimple_location (stmt), 0,
3010 "%<cancel for%> inside "
3011 "%<ordered%> for construct");
3012 }
3013 kind = "for";
3014 break;
3015 case 4:
3016 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3017 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3018 bad = "sections";
3019 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3020 == BUILT_IN_GOMP_CANCEL
3021 && !integer_zerop (gimple_call_arg (stmt, 1)))
3022 {
3023 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3024 {
3025 ctx->cancellable = true;
3026 if (omp_find_clause (gimple_omp_sections_clauses
3027 (ctx->stmt),
3028 OMP_CLAUSE_NOWAIT))
3029 warning_at (gimple_location (stmt), 0,
3030 "%<cancel sections%> inside "
3031 "%<nowait%> sections construct");
3032 }
3033 else
3034 {
3035 gcc_assert (ctx->outer
3036 && gimple_code (ctx->outer->stmt)
3037 == GIMPLE_OMP_SECTIONS);
3038 ctx->outer->cancellable = true;
3039 if (omp_find_clause (gimple_omp_sections_clauses
3040 (ctx->outer->stmt),
3041 OMP_CLAUSE_NOWAIT))
3042 warning_at (gimple_location (stmt), 0,
3043 "%<cancel sections%> inside "
3044 "%<nowait%> sections construct");
3045 }
3046 }
3047 kind = "sections";
3048 break;
3049 case 8:
3050 if (!is_task_ctx (ctx)
3051 && (!is_taskloop_ctx (ctx)
3052 || ctx->outer == NULL
3053 || !is_task_ctx (ctx->outer)))
3054 bad = "task";
3055 else
3056 {
3057 for (omp_context *octx = ctx->outer;
3058 octx; octx = octx->outer)
3059 {
3060 switch (gimple_code (octx->stmt))
3061 {
3062 case GIMPLE_OMP_TASKGROUP:
3063 break;
3064 case GIMPLE_OMP_TARGET:
3065 if (gimple_omp_target_kind (octx->stmt)
3066 != GF_OMP_TARGET_KIND_REGION)
3067 continue;
3068 /* FALLTHRU */
3069 case GIMPLE_OMP_PARALLEL:
3070 case GIMPLE_OMP_TEAMS:
3071 error_at (gimple_location (stmt),
3072 "%<%s taskgroup%> construct not closely "
3073 "nested inside of %<taskgroup%> region",
3074 construct);
3075 return false;
3076 case GIMPLE_OMP_TASK:
3077 if (gimple_omp_task_taskloop_p (octx->stmt)
3078 && octx->outer
3079 && is_taskloop_ctx (octx->outer))
3080 {
3081 tree clauses
3082 = gimple_omp_for_clauses (octx->outer->stmt);
3083 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3084 break;
3085 }
3086 continue;
3087 default:
3088 continue;
3089 }
3090 break;
3091 }
3092 ctx->cancellable = true;
3093 }
3094 kind = "taskgroup";
3095 break;
3096 default:
3097 error_at (gimple_location (stmt), "invalid arguments");
3098 return false;
3099 }
3100 if (bad)
3101 {
3102 error_at (gimple_location (stmt),
3103 "%<%s %s%> construct not closely nested inside of %qs",
3104 construct, kind, bad);
3105 return false;
3106 }
3107 }
3108 /* FALLTHRU */
3109 case GIMPLE_OMP_SECTIONS:
3110 case GIMPLE_OMP_SINGLE:
3111 for (; ctx != NULL; ctx = ctx->outer)
3112 switch (gimple_code (ctx->stmt))
3113 {
3114 case GIMPLE_OMP_FOR:
3115 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3116 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3117 break;
3118 /* FALLTHRU */
3119 case GIMPLE_OMP_SECTIONS:
3120 case GIMPLE_OMP_SINGLE:
3121 case GIMPLE_OMP_ORDERED:
3122 case GIMPLE_OMP_MASTER:
3123 case GIMPLE_OMP_TASK:
3124 case GIMPLE_OMP_CRITICAL:
3125 if (is_gimple_call (stmt))
3126 {
3127 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3128 != BUILT_IN_GOMP_BARRIER)
3129 return true;
3130 error_at (gimple_location (stmt),
3131 "barrier region may not be closely nested inside "
3132 "of work-sharing, %<loop%>, %<critical%>, "
3133 "%<ordered%>, %<master%>, explicit %<task%> or "
3134 "%<taskloop%> region");
3135 return false;
3136 }
3137 error_at (gimple_location (stmt),
3138 "work-sharing region may not be closely nested inside "
3139 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3140 "%<master%>, explicit %<task%> or %<taskloop%> region");
3141 return false;
3142 case GIMPLE_OMP_PARALLEL:
3143 case GIMPLE_OMP_TEAMS:
3144 return true;
3145 case GIMPLE_OMP_TARGET:
3146 if (gimple_omp_target_kind (ctx->stmt)
3147 == GF_OMP_TARGET_KIND_REGION)
3148 return true;
3149 break;
3150 default:
3151 break;
3152 }
3153 break;
3154 case GIMPLE_OMP_MASTER:
3155 for (; ctx != NULL; ctx = ctx->outer)
3156 switch (gimple_code (ctx->stmt))
3157 {
3158 case GIMPLE_OMP_FOR:
3159 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3160 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3161 break;
3162 /* FALLTHRU */
3163 case GIMPLE_OMP_SECTIONS:
3164 case GIMPLE_OMP_SINGLE:
3165 case GIMPLE_OMP_TASK:
3166 error_at (gimple_location (stmt),
3167 "%<master%> region may not be closely nested inside "
3168 "of work-sharing, %<loop%>, explicit %<task%> or "
3169 "%<taskloop%> region");
3170 return false;
3171 case GIMPLE_OMP_PARALLEL:
3172 case GIMPLE_OMP_TEAMS:
3173 return true;
3174 case GIMPLE_OMP_TARGET:
3175 if (gimple_omp_target_kind (ctx->stmt)
3176 == GF_OMP_TARGET_KIND_REGION)
3177 return true;
3178 break;
3179 default:
3180 break;
3181 }
3182 break;
3183 case GIMPLE_OMP_TASK:
3184 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3185 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3186 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3187 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3188 {
3189 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3190 error_at (OMP_CLAUSE_LOCATION (c),
3191 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3192 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3193 return false;
3194 }
3195 break;
3196 case GIMPLE_OMP_ORDERED:
3197 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3198 c; c = OMP_CLAUSE_CHAIN (c))
3199 {
3200 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3201 {
3202 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3203 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3204 continue;
3205 }
3206 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3207 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3208 || kind == OMP_CLAUSE_DEPEND_SINK)
3209 {
3210 tree oclause;
3211 /* Look for containing ordered(N) loop. */
3212 if (ctx == NULL
3213 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3214 || (oclause
3215 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3216 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3217 {
3218 error_at (OMP_CLAUSE_LOCATION (c),
3219 "%<ordered%> construct with %<depend%> clause "
3220 "must be closely nested inside an %<ordered%> "
3221 "loop");
3222 return false;
3223 }
3224 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3225 {
3226 error_at (OMP_CLAUSE_LOCATION (c),
3227 "%<ordered%> construct with %<depend%> clause "
3228 "must be closely nested inside a loop with "
3229 "%<ordered%> clause with a parameter");
3230 return false;
3231 }
3232 }
3233 else
3234 {
3235 error_at (OMP_CLAUSE_LOCATION (c),
3236 "invalid depend kind in omp %<ordered%> %<depend%>");
3237 return false;
3238 }
3239 }
3240 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3241 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3242 {
3243 /* An ordered simd must be closely nested inside of a simd region,
3244 and a simd region must not encounter constructs other than
3245 ordered simd; therefore an ordered simd may either be orphaned,
3246 or ctx->stmt must be a simd. The latter case was already handled
3247 earlier. */
3248 if (ctx != NULL)
3249 {
3250 error_at (gimple_location (stmt),
3251 "%<ordered%> %<simd%> must be closely nested inside "
3252 "%<simd%> region");
3253 return false;
3254 }
3255 }
3256 for (; ctx != NULL; ctx = ctx->outer)
3257 switch (gimple_code (ctx->stmt))
3258 {
3259 case GIMPLE_OMP_CRITICAL:
3260 case GIMPLE_OMP_TASK:
3261 case GIMPLE_OMP_ORDERED:
3262 ordered_in_taskloop:
3263 error_at (gimple_location (stmt),
3264 "%<ordered%> region may not be closely nested inside "
3265 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3266 "%<taskloop%> region");
3267 return false;
3268 case GIMPLE_OMP_FOR:
3269 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3270 goto ordered_in_taskloop;
3271 tree o;
3272 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3273 OMP_CLAUSE_ORDERED);
3274 if (o == NULL)
3275 {
3276 error_at (gimple_location (stmt),
3277 "%<ordered%> region must be closely nested inside "
3278 "a loop region with an %<ordered%> clause");
3279 return false;
3280 }
3281 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3282 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3283 {
3284 error_at (gimple_location (stmt),
3285 "%<ordered%> region without %<depend%> clause may "
3286 "not be closely nested inside a loop region with "
3287 "an %<ordered%> clause with a parameter");
3288 return false;
3289 }
3290 return true;
3291 case GIMPLE_OMP_TARGET:
3292 if (gimple_omp_target_kind (ctx->stmt)
3293 != GF_OMP_TARGET_KIND_REGION)
3294 break;
3295 /* FALLTHRU */
3296 case GIMPLE_OMP_PARALLEL:
3297 case GIMPLE_OMP_TEAMS:
3298 error_at (gimple_location (stmt),
3299 "%<ordered%> region must be closely nested inside "
3300 "a loop region with an %<ordered%> clause");
3301 return false;
3302 default:
3303 break;
3304 }
3305 break;
3306 case GIMPLE_OMP_CRITICAL:
3307 {
3308 tree this_stmt_name
3309 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3310 for (; ctx != NULL; ctx = ctx->outer)
3311 if (gomp_critical *other_crit
3312 = dyn_cast <gomp_critical *> (ctx->stmt))
3313 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3314 {
3315 error_at (gimple_location (stmt),
3316 "%<critical%> region may not be nested inside "
3317 "a %<critical%> region with the same name");
3318 return false;
3319 }
3320 }
3321 break;
3322 case GIMPLE_OMP_TEAMS:
3323 if (ctx == NULL)
3324 break;
3325 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3326 || (gimple_omp_target_kind (ctx->stmt)
3327 != GF_OMP_TARGET_KIND_REGION))
3328 {
3329 /* A teams construct can appear either strictly nested inside of a
3330 target construct with no intervening stmts, or can be encountered
3331 only by an initial task (so it must not appear inside any OpenMP
3332 construct). */
3333 error_at (gimple_location (stmt),
3334 "%<teams%> construct must be closely nested inside of "
3335 "%<target%> construct or not nested in any OpenMP "
3336 "construct");
3337 return false;
3338 }
3339 break;
3340 case GIMPLE_OMP_TARGET:
3341 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3342 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3343 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3344 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3345 {
3346 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3347 error_at (OMP_CLAUSE_LOCATION (c),
3348 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3349 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3350 return false;
3351 }
3352 if (is_gimple_omp_offloaded (stmt)
3353 && oacc_get_fn_attrib (cfun->decl) != NULL)
3354 {
3355 error_at (gimple_location (stmt),
3356 "OpenACC region inside of OpenACC routine, nested "
3357 "parallelism not supported yet");
3358 return false;
3359 }
3360 for (; ctx != NULL; ctx = ctx->outer)
3361 {
3362 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3363 {
3364 if (is_gimple_omp (stmt)
3365 && is_gimple_omp_oacc (stmt)
3366 && is_gimple_omp (ctx->stmt))
3367 {
3368 error_at (gimple_location (stmt),
3369 "OpenACC construct inside of non-OpenACC region");
3370 return false;
3371 }
3372 continue;
3373 }
3374
3375 const char *stmt_name, *ctx_stmt_name;
3376 switch (gimple_omp_target_kind (stmt))
3377 {
3378 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3379 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3380 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3381 case GF_OMP_TARGET_KIND_ENTER_DATA:
3382 stmt_name = "target enter data"; break;
3383 case GF_OMP_TARGET_KIND_EXIT_DATA:
3384 stmt_name = "target exit data"; break;
3385 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3386 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3387 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3388 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3389 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3390 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3391 stmt_name = "enter/exit data"; break;
3392 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3393 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3394 break;
3395 default: gcc_unreachable ();
3396 }
3397 switch (gimple_omp_target_kind (ctx->stmt))
3398 {
3399 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3400 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3401 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3402 ctx_stmt_name = "parallel"; break;
3403 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3404 ctx_stmt_name = "kernels"; break;
3405 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3406 ctx_stmt_name = "serial"; break;
3407 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3408 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3409 ctx_stmt_name = "host_data"; break;
3410 default: gcc_unreachable ();
3411 }
3412
3413 /* OpenACC/OpenMP mismatch? */
3414 if (is_gimple_omp_oacc (stmt)
3415 != is_gimple_omp_oacc (ctx->stmt))
3416 {
3417 error_at (gimple_location (stmt),
3418 "%s %qs construct inside of %s %qs region",
3419 (is_gimple_omp_oacc (stmt)
3420 ? "OpenACC" : "OpenMP"), stmt_name,
3421 (is_gimple_omp_oacc (ctx->stmt)
3422 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3423 return false;
3424 }
3425 if (is_gimple_omp_offloaded (ctx->stmt))
3426 {
3427 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3428 if (is_gimple_omp_oacc (ctx->stmt))
3429 {
3430 error_at (gimple_location (stmt),
3431 "%qs construct inside of %qs region",
3432 stmt_name, ctx_stmt_name);
3433 return false;
3434 }
3435 else
3436 {
3437 warning_at (gimple_location (stmt), 0,
3438 "%qs construct inside of %qs region",
3439 stmt_name, ctx_stmt_name);
3440 }
3441 }
3442 }
3443 break;
3444 default:
3445 break;
3446 }
3447 return true;
3448 }
3449
3450
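/* Two illustrative rejections from the checks above:

     #pragma omp critical
     {
       #pragma omp barrier      // error: barrier region may not be closely
     }                          // nested inside of ... 'critical' ... region

     #pragma omp for            // note: no ordered clause on the loop
     for (i = 0; i < n; i++)
       {
         #pragma omp ordered    // error: 'ordered' region must be closely
         { }                    // nested inside a loop region with an
       }                        // 'ordered' clause
*/
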
3451 /* Helper function for scan_omp.
3452
3453 Callback for walk_tree or operators in walk_gimple_stmt used to
3454 scan for OMP directives in TP. */
3455
3456 static tree
3457 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3458 {
3459 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3460 omp_context *ctx = (omp_context *) wi->info;
3461 tree t = *tp;
3462
3463 switch (TREE_CODE (t))
3464 {
3465 case VAR_DECL:
3466 case PARM_DECL:
3467 case LABEL_DECL:
3468 case RESULT_DECL:
3469 if (ctx)
3470 {
3471 tree repl = remap_decl (t, &ctx->cb);
3472 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3473 *tp = repl;
3474 }
3475 break;
3476
3477 default:
3478 if (ctx && TYPE_P (t))
3479 *tp = remap_type (t, &ctx->cb);
3480 else if (!DECL_P (t))
3481 {
3482 *walk_subtrees = 1;
3483 if (ctx)
3484 {
3485 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3486 if (tem != TREE_TYPE (t))
3487 {
3488 if (TREE_CODE (t) == INTEGER_CST)
3489 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3490 else
3491 TREE_TYPE (t) = tem;
3492 }
3493 }
3494 }
3495 break;
3496 }
3497
3498 return NULL_TREE;
3499 }
3500
3501 /* Return true if FNDECL is a setjmp or a longjmp. */
3502
3503 static bool
3504 setjmp_or_longjmp_p (const_tree fndecl)
3505 {
3506 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3507 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3508 return true;
3509
3510 tree declname = DECL_NAME (fndecl);
3511 if (!declname
3512 || (DECL_CONTEXT (fndecl) != NULL_TREE
3513 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3514 || !TREE_PUBLIC (fndecl))
3515 return false;
3516
3517 const char *name = IDENTIFIER_POINTER (declname);
3518 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3519 }
3520
3521 /* Return true if FNDECL is an omp_* runtime API call. */
3522
3523 static bool
3524 omp_runtime_api_call (const_tree fndecl)
3525 {
3526 tree declname = DECL_NAME (fndecl);
3527 if (!declname
3528 || (DECL_CONTEXT (fndecl) != NULL_TREE
3529 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3530 || !TREE_PUBLIC (fndecl))
3531 return false;
3532
3533 const char *name = IDENTIFIER_POINTER (declname);
3534 if (strncmp (name, "omp_", 4) != 0)
3535 return false;
3536
3537 static const char *omp_runtime_apis[] =
3538 {
3539 /* This array has 3 sections. First, the omp_* calls that don't
3540 have any suffix. */
3541 "target_alloc",
3542 "target_associate_ptr",
3543 "target_disassociate_ptr",
3544 "target_free",
3545 "target_is_present",
3546 "target_memcpy",
3547 "target_memcpy_rect",
3548 NULL,
3549 /* Now omp_* calls that are available as omp_* and omp_*_. */
3550 "capture_affinity",
3551 "destroy_lock",
3552 "destroy_nest_lock",
3553 "display_affinity",
3554 "get_active_level",
3555 "get_affinity_format",
3556 "get_cancellation",
3557 "get_default_device",
3558 "get_dynamic",
3559 "get_initial_device",
3560 "get_level",
3561 "get_max_active_levels",
3562 "get_max_task_priority",
3563 "get_max_threads",
3564 "get_nested",
3565 "get_num_devices",
3566 "get_num_places",
3567 "get_num_procs",
3568 "get_num_teams",
3569 "get_num_threads",
3570 "get_partition_num_places",
3571 "get_place_num",
3572 "get_proc_bind",
3573 "get_team_num",
3574 "get_thread_limit",
3575 "get_thread_num",
3576 "get_wtick",
3577 "get_wtime",
3578 "in_final",
3579 "in_parallel",
3580 "init_lock",
3581 "init_nest_lock",
3582 "is_initial_device",
3583 "pause_resource",
3584 "pause_resource_all",
3585 "set_affinity_format",
3586 "set_lock",
3587 "set_nest_lock",
3588 "test_lock",
3589 "test_nest_lock",
3590 "unset_lock",
3591 "unset_nest_lock",
3592 NULL,
3593 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3594 "get_ancestor_thread_num",
3595 "get_partition_place_nums",
3596 "get_place_num_procs",
3597 "get_place_proc_ids",
3598 "get_schedule",
3599 "get_team_size",
3600 "set_default_device",
3601 "set_dynamic",
3602 "set_max_active_levels",
3603 "set_nested",
3604 "set_num_threads",
3605 "set_schedule"
3606 };
3607
3608 int mode = 0;
3609 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3610 {
3611 if (omp_runtime_apis[i] == NULL)
3612 {
3613 mode++;
3614 continue;
3615 }
3616 size_t len = strlen (omp_runtime_apis[i]);
3617 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3618 && (name[4 + len] == '\0'
3619 || (mode > 0
3620 && name[4 + len] == '_'
3621 && (name[4 + len + 1] == '\0'
3622 || (mode > 1
3623 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3624 return true;
3625 }
3626 return false;
3627 }
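/* A few worked examples of the suffix matching above (illustrative):
     "omp_target_alloc"      - section 1 entry, exact match -> true
     "omp_get_num_threads_"  - section 2 entry plus "_" (the Fortran
			       entry point) -> true
     "omp_set_schedule_8_"   - section 3 entry plus "_8_" (assumed to be
			       the Fortran 8-byte-integer variant) -> true
     "omp_target_alloc_"     - section 1 entries allow no suffix
			       -> false.  */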
3628
3629 /* Helper function for scan_omp.
3630
3631 Callback for walk_gimple_stmt used to scan for OMP directives in
3632 the current statement in GSI. */
3633
3634 static tree
3635 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3636 struct walk_stmt_info *wi)
3637 {
3638 gimple *stmt = gsi_stmt (*gsi);
3639 omp_context *ctx = (omp_context *) wi->info;
3640
3641 if (gimple_has_location (stmt))
3642 input_location = gimple_location (stmt);
3643
3644 /* Check the nesting restrictions. */
3645 bool remove = false;
3646 if (is_gimple_omp (stmt))
3647 remove = !check_omp_nesting_restrictions (stmt, ctx);
3648 else if (is_gimple_call (stmt))
3649 {
3650 tree fndecl = gimple_call_fndecl (stmt);
3651 if (fndecl)
3652 {
3653 if (ctx
3654 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3655 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3656 && setjmp_or_longjmp_p (fndecl)
3657 && !ctx->loop_p)
3658 {
3659 remove = true;
3660 error_at (gimple_location (stmt),
3661 "setjmp/longjmp inside %<simd%> construct");
3662 }
3663 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3664 switch (DECL_FUNCTION_CODE (fndecl))
3665 {
3666 case BUILT_IN_GOMP_BARRIER:
3667 case BUILT_IN_GOMP_CANCEL:
3668 case BUILT_IN_GOMP_CANCELLATION_POINT:
3669 case BUILT_IN_GOMP_TASKYIELD:
3670 case BUILT_IN_GOMP_TASKWAIT:
3671 case BUILT_IN_GOMP_TASKGROUP_START:
3672 case BUILT_IN_GOMP_TASKGROUP_END:
3673 remove = !check_omp_nesting_restrictions (stmt, ctx);
3674 break;
3675 default:
3676 break;
3677 }
3678 else if (ctx)
3679 {
3680 omp_context *octx = ctx;
3681 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3682 octx = ctx->outer;
3683 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3684 {
3685 remove = true;
3686 error_at (gimple_location (stmt),
3687 "OpenMP runtime API call %qD in a region with "
3688 "%<order(concurrent)%> clause", fndecl);
3689 }
3690 }
3691 }
3692 }
3693 if (remove)
3694 {
3695 stmt = gimple_build_nop ();
3696 gsi_replace (gsi, stmt, false);
3697 }
3698
3699 *handled_ops_p = true;
3700
3701 switch (gimple_code (stmt))
3702 {
3703 case GIMPLE_OMP_PARALLEL:
3704 taskreg_nesting_level++;
3705 scan_omp_parallel (gsi, ctx);
3706 taskreg_nesting_level--;
3707 break;
3708
3709 case GIMPLE_OMP_TASK:
3710 taskreg_nesting_level++;
3711 scan_omp_task (gsi, ctx);
3712 taskreg_nesting_level--;
3713 break;
3714
3715 case GIMPLE_OMP_FOR:
3716 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3717 == GF_OMP_FOR_KIND_SIMD)
3718 && gimple_omp_for_combined_into_p (stmt)
3719 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3720 {
3721 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3722 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3723 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3724 {
3725 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3726 break;
3727 }
3728 }
3729 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3730 == GF_OMP_FOR_KIND_SIMD)
3731 && omp_maybe_offloaded_ctx (ctx)
3732 && omp_max_simt_vf ())
3733 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3734 else
3735 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3736 break;
3737
3738 case GIMPLE_OMP_SECTIONS:
3739 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3740 break;
3741
3742 case GIMPLE_OMP_SINGLE:
3743 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3744 break;
3745
3746 case GIMPLE_OMP_SCAN:
3747 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3748 {
3749 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3750 ctx->scan_inclusive = true;
3751 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3752 ctx->scan_exclusive = true;
3753 }
3754 /* FALLTHRU */
3755 case GIMPLE_OMP_SECTION:
3756 case GIMPLE_OMP_MASTER:
3757 case GIMPLE_OMP_ORDERED:
3758 case GIMPLE_OMP_CRITICAL:
3759 ctx = new_omp_context (stmt, ctx);
3760 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3761 break;
3762
3763 case GIMPLE_OMP_TASKGROUP:
3764 ctx = new_omp_context (stmt, ctx);
3765 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3766 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3767 break;
3768
3769 case GIMPLE_OMP_TARGET:
3770 if (is_gimple_omp_offloaded (stmt))
3771 {
3772 taskreg_nesting_level++;
3773 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3774 taskreg_nesting_level--;
3775 }
3776 else
3777 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3778 break;
3779
3780 case GIMPLE_OMP_TEAMS:
3781 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3782 {
3783 taskreg_nesting_level++;
3784 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3785 taskreg_nesting_level--;
3786 }
3787 else
3788 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3789 break;
3790
3791 case GIMPLE_BIND:
3792 {
3793 tree var;
3794
3795 *handled_ops_p = false;
3796 if (ctx)
3797 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3798 var ;
3799 var = DECL_CHAIN (var))
3800 insert_decl_map (&ctx->cb, var, var);
3801 }
3802 break;
3803 default:
3804 *handled_ops_p = false;
3805 break;
3806 }
3807
3808 return NULL_TREE;
3809 }
3810
3811
3812 /* Scan all the statements starting at the current statement. CTX
3813 contains context information about the OMP directives and
3814 clauses found during the scan. */
3815
3816 static void
3817 scan_omp (gimple_seq *body_p, omp_context *ctx)
3818 {
3819 location_t saved_location;
3820 struct walk_stmt_info wi;
3821
3822 memset (&wi, 0, sizeof (wi));
3823 wi.info = ctx;
3824 wi.want_locations = true;
3825
3826 saved_location = input_location;
3827 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3828 input_location = saved_location;
3829 }
3830 \f
3831 /* Re-gimplification and code generation routines. */
3832
3833 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3834 of BIND if in a method. */
3835
3836 static void
3837 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3838 {
3839 if (DECL_ARGUMENTS (current_function_decl)
3840 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3841 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3842 == POINTER_TYPE))
3843 {
3844 tree vars = gimple_bind_vars (bind);
3845 for (tree *pvar = &vars; *pvar; )
3846 if (omp_member_access_dummy_var (*pvar))
3847 *pvar = DECL_CHAIN (*pvar);
3848 else
3849 pvar = &DECL_CHAIN (*pvar);
3850 gimple_bind_set_vars (bind, vars);
3851 }
3852 }
3853
3854 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3855 block and its subblocks. */
3856
3857 static void
3858 remove_member_access_dummy_vars (tree block)
3859 {
3860 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3861 if (omp_member_access_dummy_var (*pvar))
3862 *pvar = DECL_CHAIN (*pvar);
3863 else
3864 pvar = &DECL_CHAIN (*pvar);
3865
3866 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3867 remove_member_access_dummy_vars (block);
3868 }
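/* Note on the loop above: PVAR always points at the chain slot holding
   the current decl, so "*pvar = DECL_CHAIN (*pvar)" unlinks a dummy var
   in place, while "pvar = &DECL_CHAIN (*pvar)" merely steps over a decl
   that is kept.  */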
3869
3870 /* If a context was created for STMT when it was scanned, return it. */
3871
3872 static omp_context *
3873 maybe_lookup_ctx (gimple *stmt)
3874 {
3875 splay_tree_node n;
3876 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3877 return n ? (omp_context *) n->value : NULL;
3878 }
3879
3880
3881 /* Find the mapping for DECL in CTX or the immediately enclosing
3882 context that has a mapping for DECL.
3883
3884 If CTX is a nested parallel directive, we may have to use the decl
3885 mappings created in CTX's parent context. Suppose that we have the
3886 following parallel nesting (variable UIDs shown for clarity):
3887
3888 iD.1562 = 0;
3889 #omp parallel shared(iD.1562) -> outer parallel
3890 iD.1562 = iD.1562 + 1;
3891
3892 #omp parallel shared (iD.1562) -> inner parallel
3893 iD.1562 = iD.1562 - 1;
3894
3895 Each parallel structure will create a distinct .omp_data_s structure
3896 for copying iD.1562 in/out of the directive:
3897
3898 outer parallel .omp_data_s.1.i -> iD.1562
3899 inner parallel .omp_data_s.2.i -> iD.1562
3900
3901 A shared variable mapping will produce a copy-out operation before
3902 the parallel directive and a copy-in operation after it. So, in
3903 this case we would have:
3904
3905 iD.1562 = 0;
3906 .omp_data_o.1.i = iD.1562;
3907 #omp parallel shared(iD.1562) -> outer parallel
3908 .omp_data_i.1 = &.omp_data_o.1
3909 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3910
3911 .omp_data_o.2.i = iD.1562; -> **
3912 #omp parallel shared(iD.1562) -> inner parallel
3913 .omp_data_i.2 = &.omp_data_o.2
3914 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3915
3916
3917 ** This is a problem. The symbol iD.1562 cannot be referenced
3918 inside the body of the outer parallel region. But since we are
3919 emitting this copy operation while expanding the inner parallel
3920 directive, we need to access the CTX structure of the outer
3921 parallel directive to get the correct mapping:
3922
3923 .omp_data_o.2.i = .omp_data_i.1->i
3924
3925 Since there may be other workshare or parallel directives enclosing
3926 the parallel directive, it may be necessary to walk up the context
3927 parent chain. This is not a problem in general because nested
3928 parallelism happens only rarely. */
3929
3930 static tree
3931 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3932 {
3933 tree t;
3934 omp_context *up;
3935
3936 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3937 t = maybe_lookup_decl (decl, up);
3938
3939 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3940
3941 return t ? t : decl;
3942 }
3943
3944
3945 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3946 in outer contexts. */
3947
3948 static tree
3949 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3950 {
3951 tree t = NULL;
3952 omp_context *up;
3953
3954 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3955 t = maybe_lookup_decl (decl, up);
3956
3957 return t ? t : decl;
3958 }
3959
3960
3961 /* Construct the initialization value for reduction operation OP. */
3962
3963 tree
3964 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3965 {
3966 switch (op)
3967 {
3968 case PLUS_EXPR:
3969 case MINUS_EXPR:
3970 case BIT_IOR_EXPR:
3971 case BIT_XOR_EXPR:
3972 case TRUTH_OR_EXPR:
3973 case TRUTH_ORIF_EXPR:
3974 case TRUTH_XOR_EXPR:
3975 case NE_EXPR:
3976 return build_zero_cst (type);
3977
3978 case MULT_EXPR:
3979 case TRUTH_AND_EXPR:
3980 case TRUTH_ANDIF_EXPR:
3981 case EQ_EXPR:
3982 return fold_convert_loc (loc, type, integer_one_node);
3983
3984 case BIT_AND_EXPR:
3985 return fold_convert_loc (loc, type, integer_minus_one_node);
3986
3987 case MAX_EXPR:
3988 if (SCALAR_FLOAT_TYPE_P (type))
3989 {
3990 REAL_VALUE_TYPE max, min;
3991 if (HONOR_INFINITIES (type))
3992 {
3993 real_inf (&max);
3994 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3995 }
3996 else
3997 real_maxval (&min, 1, TYPE_MODE (type));
3998 return build_real (type, min);
3999 }
4000 else if (POINTER_TYPE_P (type))
4001 {
4002 wide_int min
4003 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4004 return wide_int_to_tree (type, min);
4005 }
4006 else
4007 {
4008 gcc_assert (INTEGRAL_TYPE_P (type));
4009 return TYPE_MIN_VALUE (type);
4010 }
4011
4012 case MIN_EXPR:
4013 if (SCALAR_FLOAT_TYPE_P (type))
4014 {
4015 REAL_VALUE_TYPE max;
4016 if (HONOR_INFINITIES (type))
4017 real_inf (&max);
4018 else
4019 real_maxval (&max, 0, TYPE_MODE (type));
4020 return build_real (type, max);
4021 }
4022 else if (POINTER_TYPE_P (type))
4023 {
4024 wide_int max
4025 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4026 return wide_int_to_tree (type, max);
4027 }
4028 else
4029 {
4030 gcc_assert (INTEGRAL_TYPE_P (type));
4031 return TYPE_MAX_VALUE (type);
4032 }
4033
4034 default:
4035 gcc_unreachable ();
4036 }
4037 }
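/* For reference, the identity values the switch above produces,
   e.g. for a 32-bit int:
     +, -, |, ^, ||, !=  ->  0
     *, &&, ==           ->  1
     &                   -> -1 (all bits set)
     max                 ->  INT_MIN (or -inf when honoring infinities)
     min                 ->  INT_MAX (or +inf when honoring infinities).  */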
4038
4039 /* Construct the initialization value for reduction CLAUSE. */
4040
4041 tree
4042 omp_reduction_init (tree clause, tree type)
4043 {
4044 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4045 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4046 }
4047
4048 /* Return alignment to be assumed for var in CLAUSE, which should be
4049 OMP_CLAUSE_ALIGNED. */
4050
4051 static tree
4052 omp_clause_aligned_alignment (tree clause)
4053 {
4054 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4055 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4056
4057 /* Otherwise return the implementation-defined alignment. */
4058 unsigned int al = 1;
4059 opt_scalar_mode mode_iter;
4060 auto_vector_modes modes;
4061 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4062 static enum mode_class classes[]
4063 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4064 for (int i = 0; i < 4; i += 2)
4065 /* The for loop above dictates that we only walk through scalar classes. */
4066 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4067 {
4068 scalar_mode mode = mode_iter.require ();
4069 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4070 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4071 continue;
4072 machine_mode alt_vmode;
4073 for (unsigned int j = 0; j < modes.length (); ++j)
4074 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4075 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4076 vmode = alt_vmode;
4077
4078 tree type = lang_hooks.types.type_for_mode (mode, 1);
4079 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4080 continue;
4081 type = build_vector_type_for_mode (type, vmode);
4082 if (TYPE_MODE (type) != vmode)
4083 continue;
4084 if (TYPE_ALIGN_UNIT (type) > al)
4085 al = TYPE_ALIGN_UNIT (type);
4086 }
4087 return build_int_cst (integer_type_node, al);
4088 }
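/* Example (target dependent, purely illustrative): for
   "aligned (p : 32)" the clause stores 32 and it is returned as-is;
   for plain "aligned (p)" the loop above derives the alignment of the
   widest preferred SIMD vector type, so a target with 256-bit vectors
   would yield 32 here.  */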
4089
4090
4091 /* This structure is part of the interface between lower_rec_simd_input_clauses
4092 and lower_rec_input_clauses. */
4093
4094 class omplow_simd_context {
4095 public:
4096 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4097 tree idx;
4098 tree lane;
4099 tree lastlane;
4100 vec<tree, va_heap> simt_eargs;
4101 gimple_seq simt_dlist;
4102 poly_uint64_pod max_vf;
4103 bool is_simt;
4104 };
4105
4106 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4107 privatization. */
4108
4109 static bool
4110 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4111 omplow_simd_context *sctx, tree &ivar,
4112 tree &lvar, tree *rvar = NULL,
4113 tree *rvar2 = NULL)
4114 {
4115 if (known_eq (sctx->max_vf, 0U))
4116 {
4117 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4118 if (maybe_gt (sctx->max_vf, 1U))
4119 {
4120 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4121 OMP_CLAUSE_SAFELEN);
4122 if (c)
4123 {
4124 poly_uint64 safe_len;
4125 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4126 || maybe_lt (safe_len, 1U))
4127 sctx->max_vf = 1;
4128 else
4129 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4130 }
4131 }
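/* E.g. (illustration): omp_max_vf () == 16 combined with safelen(4)
   clamps max_vf to 4, while a safelen that is not a (poly-)constant
   or is below 1 forces max_vf = 1, i.e. no SIMD privatization.  */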
4132 if (maybe_gt (sctx->max_vf, 1U))
4133 {
4134 sctx->idx = create_tmp_var (unsigned_type_node);
4135 sctx->lane = create_tmp_var (unsigned_type_node);
4136 }
4137 }
4138 if (known_eq (sctx->max_vf, 1U))
4139 return false;
4140
4141 if (sctx->is_simt)
4142 {
4143 if (is_gimple_reg (new_var))
4144 {
4145 ivar = lvar = new_var;
4146 return true;
4147 }
4148 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4149 ivar = lvar = create_tmp_var (type);
4150 TREE_ADDRESSABLE (ivar) = 1;
4151 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4152 NULL, DECL_ATTRIBUTES (ivar));
4153 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4154 tree clobber = build_clobber (type);
4155 gimple *g = gimple_build_assign (ivar, clobber);
4156 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4157 }
4158 else
4159 {
4160 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4161 tree avar = create_tmp_var_raw (atype);
4162 if (TREE_ADDRESSABLE (new_var))
4163 TREE_ADDRESSABLE (avar) = 1;
4164 DECL_ATTRIBUTES (avar)
4165 = tree_cons (get_identifier ("omp simd array"), NULL,
4166 DECL_ATTRIBUTES (avar));
4167 gimple_add_tmp_var (avar);
4168 tree iavar = avar;
4169 if (rvar && !ctx->for_simd_scan_phase)
4170 {
4171 /* For inscan reductions, create another array temporary,
4172 which will hold the reduced value. */
4173 iavar = create_tmp_var_raw (atype);
4174 if (TREE_ADDRESSABLE (new_var))
4175 TREE_ADDRESSABLE (iavar) = 1;
4176 DECL_ATTRIBUTES (iavar)
4177 = tree_cons (get_identifier ("omp simd array"), NULL,
4178 tree_cons (get_identifier ("omp simd inscan"), NULL,
4179 DECL_ATTRIBUTES (iavar)));
4180 gimple_add_tmp_var (iavar);
4181 ctx->cb.decl_map->put (avar, iavar);
4182 if (sctx->lastlane == NULL_TREE)
4183 sctx->lastlane = create_tmp_var (unsigned_type_node);
4184 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4185 sctx->lastlane, NULL_TREE, NULL_TREE);
4186 TREE_THIS_NOTRAP (*rvar) = 1;
4187
4188 if (ctx->scan_exclusive)
4189 {
4190 /* And for exclusive scan yet another one, which will
4191 hold the value during the scan phase. */
4192 tree savar = create_tmp_var_raw (atype);
4193 if (TREE_ADDRESSABLE (new_var))
4194 TREE_ADDRESSABLE (savar) = 1;
4195 DECL_ATTRIBUTES (savar)
4196 = tree_cons (get_identifier ("omp simd array"), NULL,
4197 tree_cons (get_identifier ("omp simd inscan "
4198 "exclusive"), NULL,
4199 DECL_ATTRIBUTES (savar)));
4200 gimple_add_tmp_var (savar);
4201 ctx->cb.decl_map->put (iavar, savar);
4202 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4203 sctx->idx, NULL_TREE, NULL_TREE);
4204 TREE_THIS_NOTRAP (*rvar2) = 1;
4205 }
4206 }
4207 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4208 NULL_TREE, NULL_TREE);
4209 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4210 NULL_TREE, NULL_TREE);
4211 TREE_THIS_NOTRAP (ivar) = 1;
4212 TREE_THIS_NOTRAP (lvar) = 1;
4213 }
4214 if (DECL_P (new_var))
4215 {
4216 SET_DECL_VALUE_EXPR (new_var, lvar);
4217 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4218 }
4219 return true;
4220 }
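/* Sketch of the effect of the non-SIMT path above (illustrative
   pseudo-GIMPLE): a privatized scalar D of type T becomes

       T D.simdarr[max_vf];               // "omp simd array"
       ... D.simdarr[idx] ...             // IVAR: per-iteration access
       ... D.simdarr[lane] ...            // LVAR: via DECL_VALUE_EXPR

   so every SIMD lane works on its own element rather than on a single
   shared copy; "D.simdarr" is a made-up name for the AVAR temporary.  */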
4221
4222 /* Helper function of lower_rec_input_clauses. For a reference used
4223 in a simd reduction, allocate an underlying variable for it to reference. */
4224
4225 static void
4226 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4227 {
4228 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4229 if (TREE_CONSTANT (z))
4230 {
4231 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4232 get_name (new_vard));
4233 gimple_add_tmp_var (z);
4234 TREE_ADDRESSABLE (z) = 1;
4235 z = build_fold_addr_expr_loc (loc, z);
4236 gimplify_assign (new_vard, z, ilist);
4237 }
4238 }
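/* Roughly (illustrative C), for a reference NEW_VARD of type T *:

       T tmp;              /* addressable temporary */
       new_vard = &tmp;

   emitted only when T's size is constant, so the reference points at
   valid storage before any reduction initializer dereferences it.  */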
4239
4240 /* Helper function for lower_rec_input_clauses. Emit into the ILIST
4241 sequence code that computes (type) (tskred_temp[idx]). */
4242
4243 static tree
4244 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4245 unsigned idx)
4246 {
4247 unsigned HOST_WIDE_INT sz
4248 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4249 tree r = build2 (MEM_REF, pointer_sized_int_node,
4250 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4251 idx * sz));
4252 tree v = create_tmp_var (pointer_sized_int_node);
4253 gimple *g = gimple_build_assign (v, r);
4254 gimple_seq_add_stmt (ilist, g);
4255 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4256 {
4257 v = create_tmp_var (type);
4258 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4259 gimple_seq_add_stmt (ilist, g);
4260 }
4261 return v;
4262 }
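/* The sequence built above corresponds to (illustrative C):

       __UINTPTR_TYPE__ v = ((__UINTPTR_TYPE__ *) tskred_temp)[idx];
       return (type) v;

   i.e. one pointer-sized load from slot IDX plus an optional conversion
   when TYPE differs from the pointer-sized integer type.  */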
4263
4264 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4265 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4266 private variables. Initialization statements go in ILIST, while calls
4267 to destructors go in DLIST. */
4268
4269 static void
4270 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4271 omp_context *ctx, struct omp_for_data *fd)
4272 {
4273 tree c, copyin_seq, x, ptr;
4274 bool copyin_by_ref = false;
4275 bool lastprivate_firstprivate = false;
4276 bool reduction_omp_orig_ref = false;
4277 int pass;
4278 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4279 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4280 omplow_simd_context sctx = omplow_simd_context ();
4281 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4282 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4283 gimple_seq llist[4] = { };
4284 tree nonconst_simd_if = NULL_TREE;
4285
4286 copyin_seq = NULL;
4287 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4288
4289 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4290 with data sharing clauses referencing variable sized vars. That
4291 is unnecessarily hard to support and very unlikely to result in
4292 vectorized code anyway. */
4293 if (is_simd)
4294 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4295 switch (OMP_CLAUSE_CODE (c))
4296 {
4297 case OMP_CLAUSE_LINEAR:
4298 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4299 sctx.max_vf = 1;
4300 /* FALLTHRU */
4301 case OMP_CLAUSE_PRIVATE:
4302 case OMP_CLAUSE_FIRSTPRIVATE:
4303 case OMP_CLAUSE_LASTPRIVATE:
4304 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4305 sctx.max_vf = 1;
4306 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4307 {
4308 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4309 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4310 sctx.max_vf = 1;
4311 }
4312 break;
4313 case OMP_CLAUSE_REDUCTION:
4314 case OMP_CLAUSE_IN_REDUCTION:
4315 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4316 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4317 sctx.max_vf = 1;
4318 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4319 {
4320 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4321 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4322 sctx.max_vf = 1;
4323 }
4324 break;
4325 case OMP_CLAUSE_IF:
4326 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4327 sctx.max_vf = 1;
4328 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4329 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4330 break;
4331 case OMP_CLAUSE_SIMDLEN:
4332 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4333 sctx.max_vf = 1;
4334 break;
4335 case OMP_CLAUSE__CONDTEMP_:
4336 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4337 if (sctx.is_simt)
4338 sctx.max_vf = 1;
4339 break;
4340 default:
4341 continue;
4342 }
4343
4344 /* Add a placeholder for simduid. */
4345 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4346 sctx.simt_eargs.safe_push (NULL_TREE);
4347
4348 unsigned task_reduction_cnt = 0;
4349 unsigned task_reduction_cntorig = 0;
4350 unsigned task_reduction_cnt_full = 0;
4351 unsigned task_reduction_cntorig_full = 0;
4352 unsigned task_reduction_other_cnt = 0;
4353 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4354 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4355 /* Do all the fixed sized types in the first pass, and the variable sized
4356 types in the second pass. This makes sure that the scalar arguments to
4357 the variable sized types are processed before we use them in the
4358 variable sized operations. For task reductions we use 4 passes: in the
4359 first two we ignore them, in the third we gather arguments for the
4360 GOMP_task_reduction_remap call, and in the last pass we actually handle
4361 the task reductions. */
4362 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4363 ? 4 : 2); ++pass)
4364 {
4365 if (pass == 2 && task_reduction_cnt)
4366 {
4367 tskred_atype
4368 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4369 + task_reduction_cntorig);
4370 tskred_avar = create_tmp_var_raw (tskred_atype);
4371 gimple_add_tmp_var (tskred_avar);
4372 TREE_ADDRESSABLE (tskred_avar) = 1;
4373 task_reduction_cnt_full = task_reduction_cnt;
4374 task_reduction_cntorig_full = task_reduction_cntorig;
4375 }
4376 else if (pass == 3 && task_reduction_cnt)
4377 {
4378 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4379 gimple *g
4380 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4381 size_int (task_reduction_cntorig),
4382 build_fold_addr_expr (tskred_avar));
4383 gimple_seq_add_stmt (ilist, g);
4384 }
4385 if (pass == 3 && task_reduction_other_cnt)
4386 {
4387 /* For reduction clauses, build
4388 tskred_base = (void *) tskred_temp[2]
4389 + omp_get_thread_num () * tskred_temp[1]
4390 or, if tskred_temp[1] is known to be constant, use that constant
4391 directly instead of reading it. This is the start of the private
4392 reduction copy block for the current thread. */
4393 tree v = create_tmp_var (integer_type_node);
4394 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4395 gimple *g = gimple_build_call (x, 0);
4396 gimple_call_set_lhs (g, v);
4397 gimple_seq_add_stmt (ilist, g);
4398 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4399 tskred_temp = OMP_CLAUSE_DECL (c);
4400 if (is_taskreg_ctx (ctx))
4401 tskred_temp = lookup_decl (tskred_temp, ctx);
4402 tree v2 = create_tmp_var (sizetype);
4403 g = gimple_build_assign (v2, NOP_EXPR, v);
4404 gimple_seq_add_stmt (ilist, g);
4405 if (ctx->task_reductions[0])
4406 v = fold_convert (sizetype, ctx->task_reductions[0]);
4407 else
4408 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4409 tree v3 = create_tmp_var (sizetype);
4410 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4411 gimple_seq_add_stmt (ilist, g);
4412 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4413 tskred_base = create_tmp_var (ptr_type_node);
4414 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4415 gimple_seq_add_stmt (ilist, g);
4416 }
4417 task_reduction_cnt = 0;
4418 task_reduction_cntorig = 0;
4419 task_reduction_other_cnt = 0;
4420 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4421 {
4422 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4423 tree var, new_var;
4424 bool by_ref;
4425 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4426 bool task_reduction_p = false;
4427 bool task_reduction_needs_orig_p = false;
4428 tree cond = NULL_TREE;
4429
4430 switch (c_kind)
4431 {
4432 case OMP_CLAUSE_PRIVATE:
4433 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4434 continue;
4435 break;
4436 case OMP_CLAUSE_SHARED:
4437 /* Ignore shared directives in teams construct inside
4438 of target construct. */
4439 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4440 && !is_host_teams_ctx (ctx))
4441 continue;
4442 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4443 {
4444 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4445 || is_global_var (OMP_CLAUSE_DECL (c)));
4446 continue;
4447 }
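/* FALLTHRU */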
4448 case OMP_CLAUSE_FIRSTPRIVATE:
4449 case OMP_CLAUSE_COPYIN:
4450 break;
4451 case OMP_CLAUSE_LINEAR:
4452 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4453 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4454 lastprivate_firstprivate = true;
4455 break;
4456 case OMP_CLAUSE_REDUCTION:
4457 case OMP_CLAUSE_IN_REDUCTION:
4458 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4459 {
4460 task_reduction_p = true;
4461 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4462 {
4463 task_reduction_other_cnt++;
4464 if (pass == 2)
4465 continue;
4466 }
4467 else
4468 task_reduction_cnt++;
4469 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4470 {
4471 var = OMP_CLAUSE_DECL (c);
4472 /* If var is a global variable that isn't privatized
4473 in outer contexts, we don't need to look up the
4474 original address; it is always the address of the
4475 global variable itself. */
4476 if (!DECL_P (var)
4477 || omp_is_reference (var)
4478 || !is_global_var
4479 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4480 {
4481 task_reduction_needs_orig_p = true;
4482 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4483 task_reduction_cntorig++;
4484 }
4485 }
4486 }
4487 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4488 reduction_omp_orig_ref = true;
4489 break;
4490 case OMP_CLAUSE__REDUCTEMP_:
4491 if (!is_taskreg_ctx (ctx))
4492 continue;
4493 /* FALLTHRU */
4494 case OMP_CLAUSE__LOOPTEMP_:
4495 /* Handle _looptemp_/_reductemp_ clauses only on
4496 parallel/task. */
4497 if (fd)
4498 continue;
4499 break;
4500 case OMP_CLAUSE_LASTPRIVATE:
4501 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4502 {
4503 lastprivate_firstprivate = true;
4504 if (pass != 0 || is_taskloop_ctx (ctx))
4505 continue;
4506 }
4507 /* Even without a corresponding firstprivate, if the
4508 decl is a Fortran allocatable, it needs the outer var
4509 reference. */
4510 else if (pass == 0
4511 && lang_hooks.decls.omp_private_outer_ref
4512 (OMP_CLAUSE_DECL (c)))
4513 lastprivate_firstprivate = true;
4514 break;
4515 case OMP_CLAUSE_ALIGNED:
4516 if (pass != 1)
4517 continue;
4518 var = OMP_CLAUSE_DECL (c);
4519 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4520 && !is_global_var (var))
4521 {
4522 new_var = maybe_lookup_decl (var, ctx);
4523 if (new_var == NULL_TREE)
4524 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4525 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4526 tree alarg = omp_clause_aligned_alignment (c);
4527 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4528 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4529 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4530 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4531 gimplify_and_add (x, ilist);
4532 }
4533 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4534 && is_global_var (var))
4535 {
4536 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4537 new_var = lookup_decl (var, ctx);
4538 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4539 t = build_fold_addr_expr_loc (clause_loc, t);
4540 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4541 tree alarg = omp_clause_aligned_alignment (c);
4542 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4543 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4544 t = fold_convert_loc (clause_loc, ptype, t);
4545 x = create_tmp_var (ptype);
4546 t = build2 (MODIFY_EXPR, ptype, x, t);
4547 gimplify_and_add (t, ilist);
4548 t = build_simple_mem_ref_loc (clause_loc, x);
4549 SET_DECL_VALUE_EXPR (new_var, t);
4550 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4551 }
4552 continue;
4553 case OMP_CLAUSE__CONDTEMP_:
4554 if (is_parallel_ctx (ctx)
4555 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4556 break;
4557 continue;
4558 default:
4559 continue;
4560 }
4561
4562 if (task_reduction_p != (pass >= 2))
4563 continue;
4564
4565 new_var = var = OMP_CLAUSE_DECL (c);
4566 if ((c_kind == OMP_CLAUSE_REDUCTION
4567 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4568 && TREE_CODE (var) == MEM_REF)
4569 {
4570 var = TREE_OPERAND (var, 0);
4571 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4572 var = TREE_OPERAND (var, 0);
4573 if (TREE_CODE (var) == INDIRECT_REF
4574 || TREE_CODE (var) == ADDR_EXPR)
4575 var = TREE_OPERAND (var, 0);
4576 if (is_variable_sized (var))
4577 {
4578 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4579 var = DECL_VALUE_EXPR (var);
4580 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4581 var = TREE_OPERAND (var, 0);
4582 gcc_assert (DECL_P (var));
4583 }
4584 new_var = var;
4585 }
4586 if (c_kind != OMP_CLAUSE_COPYIN)
4587 new_var = lookup_decl (var, ctx);
4588
4589 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4590 {
4591 if (pass != 0)
4592 continue;
4593 }
4594 /* C/C++ array section reductions. */
4595 else if ((c_kind == OMP_CLAUSE_REDUCTION
4596 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4597 && var != OMP_CLAUSE_DECL (c))
4598 {
4599 if (pass == 0)
4600 continue;
4601
4602 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4603 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4604
4605 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4606 {
4607 tree b = TREE_OPERAND (orig_var, 1);
4608 b = maybe_lookup_decl (b, ctx);
4609 if (b == NULL)
4610 {
4611 b = TREE_OPERAND (orig_var, 1);
4612 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4613 }
4614 if (integer_zerop (bias))
4615 bias = b;
4616 else
4617 {
4618 bias = fold_convert_loc (clause_loc,
4619 TREE_TYPE (b), bias);
4620 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4621 TREE_TYPE (b), b, bias);
4622 }
4623 orig_var = TREE_OPERAND (orig_var, 0);
4624 }
4625 if (pass == 2)
4626 {
4627 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4628 if (is_global_var (out)
4629 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4630 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4631 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4632 != POINTER_TYPE)))
4633 x = var;
4634 else
4635 {
4636 bool by_ref = use_pointer_for_field (var, NULL);
4637 x = build_receiver_ref (var, by_ref, ctx);
4638 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4639 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4640 == POINTER_TYPE))
4641 x = build_fold_addr_expr (x);
4642 }
4643 if (TREE_CODE (orig_var) == INDIRECT_REF)
4644 x = build_simple_mem_ref (x);
4645 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4646 {
4647 if (var == TREE_OPERAND (orig_var, 0))
4648 x = build_fold_addr_expr (x);
4649 }
4650 bias = fold_convert (sizetype, bias);
4651 x = fold_convert (ptr_type_node, x);
4652 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4653 TREE_TYPE (x), x, bias);
4654 unsigned cnt = task_reduction_cnt - 1;
4655 if (!task_reduction_needs_orig_p)
4656 cnt += (task_reduction_cntorig_full
4657 - task_reduction_cntorig);
4658 else
4659 cnt = task_reduction_cntorig - 1;
4660 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4661 size_int (cnt), NULL_TREE, NULL_TREE);
4662 gimplify_assign (r, x, ilist);
4663 continue;
4664 }
4665
4666 if (TREE_CODE (orig_var) == INDIRECT_REF
4667 || TREE_CODE (orig_var) == ADDR_EXPR)
4668 orig_var = TREE_OPERAND (orig_var, 0);
4669 tree d = OMP_CLAUSE_DECL (c);
4670 tree type = TREE_TYPE (d);
4671 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4672 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4673 const char *name = get_name (orig_var);
4674 if (pass == 3)
4675 {
4676 tree xv = create_tmp_var (ptr_type_node);
4677 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4678 {
4679 unsigned cnt = task_reduction_cnt - 1;
4680 if (!task_reduction_needs_orig_p)
4681 cnt += (task_reduction_cntorig_full
4682 - task_reduction_cntorig);
4683 else
4684 cnt = task_reduction_cntorig - 1;
4685 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4686 size_int (cnt), NULL_TREE, NULL_TREE);
4687
4688 gimple *g = gimple_build_assign (xv, x);
4689 gimple_seq_add_stmt (ilist, g);
4690 }
4691 else
4692 {
4693 unsigned int idx = *ctx->task_reduction_map->get (c);
4694 tree off;
4695 if (ctx->task_reductions[1 + idx])
4696 off = fold_convert (sizetype,
4697 ctx->task_reductions[1 + idx]);
4698 else
4699 off = task_reduction_read (ilist, tskred_temp, sizetype,
4700 7 + 3 * idx + 1);
4701 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4702 tskred_base, off);
4703 gimple_seq_add_stmt (ilist, g);
4704 }
4705 x = fold_convert (build_pointer_type (boolean_type_node),
4706 xv);
4707 if (TREE_CONSTANT (v))
4708 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4709 TYPE_SIZE_UNIT (type));
4710 else
4711 {
4712 tree t = maybe_lookup_decl (v, ctx);
4713 if (t)
4714 v = t;
4715 else
4716 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4717 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4718 fb_rvalue);
4719 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4720 TREE_TYPE (v), v,
4721 build_int_cst (TREE_TYPE (v), 1));
4722 t = fold_build2_loc (clause_loc, MULT_EXPR,
4723 TREE_TYPE (v), t,
4724 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4725 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4726 }
4727 cond = create_tmp_var (TREE_TYPE (x));
4728 gimplify_assign (cond, x, ilist);
4729 x = xv;
4730 }
4731 else if (TREE_CONSTANT (v))
4732 {
4733 x = create_tmp_var_raw (type, name);
4734 gimple_add_tmp_var (x);
4735 TREE_ADDRESSABLE (x) = 1;
4736 x = build_fold_addr_expr_loc (clause_loc, x);
4737 }
4738 else
4739 {
4740 tree atmp
4741 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4742 tree t = maybe_lookup_decl (v, ctx);
4743 if (t)
4744 v = t;
4745 else
4746 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4747 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4748 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4749 TREE_TYPE (v), v,
4750 build_int_cst (TREE_TYPE (v), 1));
4751 t = fold_build2_loc (clause_loc, MULT_EXPR,
4752 TREE_TYPE (v), t,
4753 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4754 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4755 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4756 }
4757
4758 tree ptype = build_pointer_type (TREE_TYPE (type));
4759 x = fold_convert_loc (clause_loc, ptype, x);
4760 tree y = create_tmp_var (ptype, name);
4761 gimplify_assign (y, x, ilist);
4762 x = y;
4763 tree yb = y;
4764
4765 if (!integer_zerop (bias))
4766 {
4767 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4768 bias);
4769 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4770 x);
4771 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4772 pointer_sized_int_node, yb, bias);
4773 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4774 yb = create_tmp_var (ptype, name);
4775 gimplify_assign (yb, x, ilist);
4776 x = yb;
4777 }
4778
4779 d = TREE_OPERAND (d, 0);
4780 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4781 d = TREE_OPERAND (d, 0);
4782 if (TREE_CODE (d) == ADDR_EXPR)
4783 {
4784 if (orig_var != var)
4785 {
4786 gcc_assert (is_variable_sized (orig_var));
4787 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4788 x);
4789 gimplify_assign (new_var, x, ilist);
4790 tree new_orig_var = lookup_decl (orig_var, ctx);
4791 tree t = build_fold_indirect_ref (new_var);
4792 DECL_IGNORED_P (new_var) = 0;
4793 TREE_THIS_NOTRAP (t) = 1;
4794 SET_DECL_VALUE_EXPR (new_orig_var, t);
4795 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4796 }
4797 else
4798 {
4799 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4800 build_int_cst (ptype, 0));
4801 SET_DECL_VALUE_EXPR (new_var, x);
4802 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4803 }
4804 }
4805 else
4806 {
4807 gcc_assert (orig_var == var);
4808 if (TREE_CODE (d) == INDIRECT_REF)
4809 {
4810 x = create_tmp_var (ptype, name);
4811 TREE_ADDRESSABLE (x) = 1;
4812 gimplify_assign (x, yb, ilist);
4813 x = build_fold_addr_expr_loc (clause_loc, x);
4814 }
4815 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4816 gimplify_assign (new_var, x, ilist);
4817 }
4818 /* GOMP_taskgroup_reduction_register memsets the whole
4819 array to zero. If the initializer is zero, we don't
4820 need to initialize it again, just mark it as ever
4821 used unconditionally, i.e. cond = true. */
4822 if (cond
4823 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4824 && initializer_zerop (omp_reduction_init (c,
4825 TREE_TYPE (type))))
4826 {
4827 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4828 boolean_true_node);
4829 gimple_seq_add_stmt (ilist, g);
4830 continue;
4831 }
4832 tree end = create_artificial_label (UNKNOWN_LOCATION);
4833 if (cond)
4834 {
4835 gimple *g;
4836 if (!is_parallel_ctx (ctx))
4837 {
4838 tree condv = create_tmp_var (boolean_type_node);
4839 g = gimple_build_assign (condv,
4840 build_simple_mem_ref (cond));
4841 gimple_seq_add_stmt (ilist, g);
4842 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4843 g = gimple_build_cond (NE_EXPR, condv,
4844 boolean_false_node, end, lab1);
4845 gimple_seq_add_stmt (ilist, g);
4846 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4847 }
4848 g = gimple_build_assign (build_simple_mem_ref (cond),
4849 boolean_true_node);
4850 gimple_seq_add_stmt (ilist, g);
4851 }
4852
4853 tree y1 = create_tmp_var (ptype);
4854 gimplify_assign (y1, y, ilist);
4855 tree i2 = NULL_TREE, y2 = NULL_TREE;
4856 tree body2 = NULL_TREE, end2 = NULL_TREE;
4857 tree y3 = NULL_TREE, y4 = NULL_TREE;
4858 if (task_reduction_needs_orig_p)
4859 {
4860 y3 = create_tmp_var (ptype);
4861 tree ref;
4862 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4863 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4864 size_int (task_reduction_cnt_full
4865 + task_reduction_cntorig - 1),
4866 NULL_TREE, NULL_TREE);
4867 else
4868 {
4869 unsigned int idx = *ctx->task_reduction_map->get (c);
4870 ref = task_reduction_read (ilist, tskred_temp, ptype,
4871 7 + 3 * idx);
4872 }
4873 gimplify_assign (y3, ref, ilist);
4874 }
4875 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4876 {
4877 if (pass != 3)
4878 {
4879 y2 = create_tmp_var (ptype);
4880 gimplify_assign (y2, y, ilist);
4881 }
4882 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4883 {
4884 tree ref = build_outer_var_ref (var, ctx);
4885 /* For references, build_outer_var_ref already performs this. */
4886 if (TREE_CODE (d) == INDIRECT_REF)
4887 gcc_assert (omp_is_reference (var));
4888 else if (TREE_CODE (d) == ADDR_EXPR)
4889 ref = build_fold_addr_expr (ref);
4890 else if (omp_is_reference (var))
4891 ref = build_fold_addr_expr (ref);
4892 ref = fold_convert_loc (clause_loc, ptype, ref);
4893 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4894 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4895 {
4896 y3 = create_tmp_var (ptype);
4897 gimplify_assign (y3, unshare_expr (ref), ilist);
4898 }
4899 if (is_simd)
4900 {
4901 y4 = create_tmp_var (ptype);
4902 gimplify_assign (y4, ref, dlist);
4903 }
4904 }
4905 }
4906 tree i = create_tmp_var (TREE_TYPE (v));
4907 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4908 tree body = create_artificial_label (UNKNOWN_LOCATION);
4909 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4910 if (y2)
4911 {
4912 i2 = create_tmp_var (TREE_TYPE (v));
4913 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4914 body2 = create_artificial_label (UNKNOWN_LOCATION);
4915 end2 = create_artificial_label (UNKNOWN_LOCATION);
4916 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4917 }
4918 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4919 {
4920 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4921 tree decl_placeholder
4922 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4923 SET_DECL_VALUE_EXPR (decl_placeholder,
4924 build_simple_mem_ref (y1));
4925 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4926 SET_DECL_VALUE_EXPR (placeholder,
4927 y3 ? build_simple_mem_ref (y3)
4928 : error_mark_node);
4929 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4930 x = lang_hooks.decls.omp_clause_default_ctor
4931 (c, build_simple_mem_ref (y1),
4932 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4933 if (x)
4934 gimplify_and_add (x, ilist);
4935 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4936 {
4937 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4938 lower_omp (&tseq, ctx);
4939 gimple_seq_add_seq (ilist, tseq);
4940 }
4941 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4942 if (is_simd)
4943 {
4944 SET_DECL_VALUE_EXPR (decl_placeholder,
4945 build_simple_mem_ref (y2));
4946 SET_DECL_VALUE_EXPR (placeholder,
4947 build_simple_mem_ref (y4));
4948 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4949 lower_omp (&tseq, ctx);
4950 gimple_seq_add_seq (dlist, tseq);
4951 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4952 }
4953 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4954 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4955 if (y2)
4956 {
4957 x = lang_hooks.decls.omp_clause_dtor
4958 (c, build_simple_mem_ref (y2));
4959 if (x)
4960 gimplify_and_add (x, dlist);
4961 }
4962 }
4963 else
4964 {
4965 x = omp_reduction_init (c, TREE_TYPE (type));
4966 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4967
4968 /* reduction(-:var) sums up the partial results, so it
4969 acts identically to reduction(+:var). */
4970 if (code == MINUS_EXPR)
4971 code = PLUS_EXPR;
4972
4973 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4974 if (is_simd)
4975 {
4976 x = build2 (code, TREE_TYPE (type),
4977 build_simple_mem_ref (y4),
4978 build_simple_mem_ref (y2));
4979 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4980 }
4981 }
4982 gimple *g
4983 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4984 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4985 gimple_seq_add_stmt (ilist, g);
4986 if (y3)
4987 {
4988 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4989 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4990 gimple_seq_add_stmt (ilist, g);
4991 }
4992 g = gimple_build_assign (i, PLUS_EXPR, i,
4993 build_int_cst (TREE_TYPE (i), 1));
4994 gimple_seq_add_stmt (ilist, g);
4995 g = gimple_build_cond (LE_EXPR, i, v, body, end);
4996 gimple_seq_add_stmt (ilist, g);
4997 gimple_seq_add_stmt (ilist, gimple_build_label (end));
4998 if (y2)
4999 {
5000 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5001 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5002 gimple_seq_add_stmt (dlist, g);
5003 if (y4)
5004 {
5005 g = gimple_build_assign
5006 (y4, POINTER_PLUS_EXPR, y4,
5007 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5008 gimple_seq_add_stmt (dlist, g);
5009 }
5010 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5011 build_int_cst (TREE_TYPE (i2), 1));
5012 gimple_seq_add_stmt (dlist, g);
5013 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5014 gimple_seq_add_stmt (dlist, g);
5015 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5016 }
5017 continue;
5018 }
5019 else if (pass == 2)
5020 {
5021 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5022 x = var;
5023 else
5024 {
5025 bool by_ref = use_pointer_for_field (var, ctx);
5026 x = build_receiver_ref (var, by_ref, ctx);
5027 }
5028 if (!omp_is_reference (var))
5029 x = build_fold_addr_expr (x);
5030 x = fold_convert (ptr_type_node, x);
5031 unsigned cnt = task_reduction_cnt - 1;
5032 if (!task_reduction_needs_orig_p)
5033 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5034 else
5035 cnt = task_reduction_cntorig - 1;
5036 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5037 size_int (cnt), NULL_TREE, NULL_TREE);
5038 gimplify_assign (r, x, ilist);
5039 continue;
5040 }
5041 else if (pass == 3)
5042 {
5043 tree type = TREE_TYPE (new_var);
5044 if (!omp_is_reference (var))
5045 type = build_pointer_type (type);
5046 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5047 {
5048 unsigned cnt = task_reduction_cnt - 1;
5049 if (!task_reduction_needs_orig_p)
5050 cnt += (task_reduction_cntorig_full
5051 - task_reduction_cntorig);
5052 else
5053 cnt = task_reduction_cntorig - 1;
5054 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5055 size_int (cnt), NULL_TREE, NULL_TREE);
5056 }
5057 else
5058 {
5059 unsigned int idx = *ctx->task_reduction_map->get (c);
5060 tree off;
5061 if (ctx->task_reductions[1 + idx])
5062 off = fold_convert (sizetype,
5063 ctx->task_reductions[1 + idx]);
5064 else
5065 off = task_reduction_read (ilist, tskred_temp, sizetype,
5066 7 + 3 * idx + 1);
5067 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5068 tskred_base, off);
5069 }
5070 x = fold_convert (type, x);
5071 tree t;
5072 if (omp_is_reference (var))
5073 {
5074 gimplify_assign (new_var, x, ilist);
5075 t = new_var;
5076 new_var = build_simple_mem_ref (new_var);
5077 }
5078 else
5079 {
5080 t = create_tmp_var (type);
5081 gimplify_assign (t, x, ilist);
5082 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5083 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5084 }
5085 t = fold_convert (build_pointer_type (boolean_type_node), t);
5086 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5087 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5088 cond = create_tmp_var (TREE_TYPE (t));
5089 gimplify_assign (cond, t, ilist);
5090 }
5091 else if (is_variable_sized (var))
5092 {
5093 /* For variable sized types, we need to allocate the
5094 actual storage here. Call alloca and store the
5095 result in the pointer decl that we created elsewhere. */
5096 if (pass == 0)
5097 continue;
5098
5099 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5100 {
5101 gcall *stmt;
5102 tree tmp, atmp;
5103
5104 ptr = DECL_VALUE_EXPR (new_var);
5105 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5106 ptr = TREE_OPERAND (ptr, 0);
5107 gcc_assert (DECL_P (ptr));
5108 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5109
5110 /* void *tmp = __builtin_alloca_with_align (x, align) */
5111 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5112 stmt = gimple_build_call (atmp, 2, x,
5113 size_int (DECL_ALIGN (var)));
5114 tmp = create_tmp_var_raw (ptr_type_node);
5115 gimple_add_tmp_var (tmp);
5116 gimple_call_set_lhs (stmt, tmp);
5117
5118 gimple_seq_add_stmt (ilist, stmt);
5119
5120 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5121 gimplify_assign (ptr, x, ilist);
5122 }
5123 }
5124 else if (omp_is_reference (var)
5125 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5126 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5127 {
5128 /* For references that are being privatized for Fortran,
5129 allocate new backing storage for the new pointer
5130 variable. This allows us to avoid changing all the
5131 code that expects a pointer to something that expects
5132 a direct variable. */
5133 if (pass == 0)
5134 continue;
5135
5136 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5137 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5138 {
5139 x = build_receiver_ref (var, false, ctx);
5140 x = build_fold_addr_expr_loc (clause_loc, x);
5141 }
5142 else if (TREE_CONSTANT (x))
5143 {
5144 /* For a reduction in a SIMD loop, defer adding the
5145 initialization of the reference, because if we decide
5146 to use a SIMD array for it, the initialization could cause
5147 an expansion ICE. Ditto for other privatization clauses. */
5148 if (is_simd)
5149 x = NULL_TREE;
5150 else
5151 {
5152 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5153 get_name (var));
5154 gimple_add_tmp_var (x);
5155 TREE_ADDRESSABLE (x) = 1;
5156 x = build_fold_addr_expr_loc (clause_loc, x);
5157 }
5158 }
5159 else
5160 {
5161 tree atmp
5162 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5163 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5164 tree al = size_int (TYPE_ALIGN (rtype));
5165 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5166 }
5167
5168 if (x)
5169 {
5170 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5171 gimplify_assign (new_var, x, ilist);
5172 }
5173
5174 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5175 }
5176 else if ((c_kind == OMP_CLAUSE_REDUCTION
5177 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5178 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5179 {
5180 if (pass == 0)
5181 continue;
5182 }
5183 else if (pass != 0)
5184 continue;
5185
5186 switch (OMP_CLAUSE_CODE (c))
5187 {
5188 case OMP_CLAUSE_SHARED:
5189 /* Ignore shared directives in teams construct inside
5190 target construct. */
5191 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5192 && !is_host_teams_ctx (ctx))
5193 continue;
5194 /* Shared global vars are just accessed directly. */
5195 if (is_global_var (new_var))
5196 break;
5197 /* For taskloop firstprivate/lastprivate, represented
5198 as firstprivate and shared clause on the task, new_var
5199 is the firstprivate var. */
5200 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5201 break;
5202 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5203 needs to be delayed until after fixup_child_record_type so
5204 that we get the correct type during the dereference. */
5205 by_ref = use_pointer_for_field (var, ctx);
5206 x = build_receiver_ref (var, by_ref, ctx);
5207 SET_DECL_VALUE_EXPR (new_var, x);
5208 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5209
5210 /* ??? If VAR is not passed by reference, and the variable
5211 hasn't been initialized yet, then we'll get a warning for
5212 the store into the omp_data_s structure. Ideally, we'd be
5213 able to notice this and not store anything at all, but
5214 we're generating code too early. Suppress the warning. */
5215 if (!by_ref)
5216 TREE_NO_WARNING (var) = 1;
5217 break;
5218
5219 case OMP_CLAUSE__CONDTEMP_:
5220 if (is_parallel_ctx (ctx))
5221 {
5222 x = build_receiver_ref (var, false, ctx);
5223 SET_DECL_VALUE_EXPR (new_var, x);
5224 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5225 }
5226 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5227 {
5228 x = build_zero_cst (TREE_TYPE (var));
5229 goto do_private;
5230 }
5231 break;
5232
5233 case OMP_CLAUSE_LASTPRIVATE:
5234 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5235 break;
5236 /* FALLTHRU */
5237
5238 case OMP_CLAUSE_PRIVATE:
5239 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5240 x = build_outer_var_ref (var, ctx);
5241 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5242 {
5243 if (is_task_ctx (ctx))
5244 x = build_receiver_ref (var, false, ctx);
5245 else
5246 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5247 }
5248 else
5249 x = NULL;
5250 do_private:
5251 tree nx;
5252 bool copy_ctor;
5253 copy_ctor = false;
5254 nx = unshare_expr (new_var);
5255 if (is_simd
5256 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5257 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5258 copy_ctor = true;
5259 if (copy_ctor)
5260 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5261 else
5262 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5263 if (is_simd)
5264 {
5265 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5266 if ((TREE_ADDRESSABLE (new_var) || nx || y
5267 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5268 && (gimple_omp_for_collapse (ctx->stmt) != 1
5269 || (gimple_omp_for_index (ctx->stmt, 0)
5270 != new_var)))
5271 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5272 || omp_is_reference (var))
5273 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5274 ivar, lvar))
5275 {
5276 if (omp_is_reference (var))
5277 {
5278 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5279 tree new_vard = TREE_OPERAND (new_var, 0);
5280 gcc_assert (DECL_P (new_vard));
5281 SET_DECL_VALUE_EXPR (new_vard,
5282 build_fold_addr_expr (lvar));
5283 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5284 }
5285
5286 if (nx)
5287 {
5288 tree iv = unshare_expr (ivar);
5289 if (copy_ctor)
5290 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5291 x);
5292 else
5293 x = lang_hooks.decls.omp_clause_default_ctor (c,
5294 iv,
5295 x);
5296 }
5297 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5298 {
5299 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5300 unshare_expr (ivar), x);
5301 nx = x;
5302 }
5303 if (nx && x)
5304 gimplify_and_add (x, &llist[0]);
5305 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5306 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5307 {
5308 tree v = new_var;
5309 if (!DECL_P (v))
5310 {
5311 gcc_assert (TREE_CODE (v) == MEM_REF);
5312 v = TREE_OPERAND (v, 0);
5313 gcc_assert (DECL_P (v));
5314 }
5315 v = *ctx->lastprivate_conditional_map->get (v);
5316 tree t = create_tmp_var (TREE_TYPE (v));
5317 tree z = build_zero_cst (TREE_TYPE (v));
5318 tree orig_v
5319 = build_outer_var_ref (var, ctx,
5320 OMP_CLAUSE_LASTPRIVATE);
5321 gimple_seq_add_stmt (dlist,
5322 gimple_build_assign (t, z));
5323 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5324 tree civar = DECL_VALUE_EXPR (v);
5325 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5326 civar = unshare_expr (civar);
5327 TREE_OPERAND (civar, 1) = sctx.idx;
5328 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5329 unshare_expr (civar));
5330 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5331 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5332 orig_v, unshare_expr (ivar)));
5333 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5334 civar);
5335 x = build3 (COND_EXPR, void_type_node, cond, x,
5336 void_node);
5337 gimple_seq tseq = NULL;
5338 gimplify_and_add (x, &tseq);
5339 if (ctx->outer)
5340 lower_omp (&tseq, ctx->outer);
5341 gimple_seq_add_seq (&llist[1], tseq);
5342 }
5343 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5344 && ctx->for_simd_scan_phase)
5345 {
5346 x = unshare_expr (ivar);
5347 tree orig_v
5348 = build_outer_var_ref (var, ctx,
5349 OMP_CLAUSE_LASTPRIVATE);
5350 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5351 orig_v);
5352 gimplify_and_add (x, &llist[0]);
5353 }
5354 if (y)
5355 {
5356 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5357 if (y)
5358 gimplify_and_add (y, &llist[1]);
5359 }
5360 break;
5361 }
5362 if (omp_is_reference (var))
5363 {
5364 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5365 tree new_vard = TREE_OPERAND (new_var, 0);
5366 gcc_assert (DECL_P (new_vard));
5367 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5368 x = TYPE_SIZE_UNIT (type);
5369 if (TREE_CONSTANT (x))
5370 {
5371 x = create_tmp_var_raw (type, get_name (var));
5372 gimple_add_tmp_var (x);
5373 TREE_ADDRESSABLE (x) = 1;
5374 x = build_fold_addr_expr_loc (clause_loc, x);
5375 x = fold_convert_loc (clause_loc,
5376 TREE_TYPE (new_vard), x);
5377 gimplify_assign (new_vard, x, ilist);
5378 }
5379 }
5380 }
5381 if (nx)
5382 gimplify_and_add (nx, ilist);
5383 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5384 && is_simd
5385 && ctx->for_simd_scan_phase)
5386 {
5387 tree orig_v = build_outer_var_ref (var, ctx,
5388 OMP_CLAUSE_LASTPRIVATE);
5389 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5390 orig_v);
5391 gimplify_and_add (x, ilist);
5392 }
5393 /* FALLTHRU */
5394
5395 do_dtor:
5396 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5397 if (x)
5398 gimplify_and_add (x, dlist);
5399 break;
5400
5401 case OMP_CLAUSE_LINEAR:
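/* Roughly: unless OMP_CLAUSE_LINEAR_NO_COPYIN is set, linear(v:step)
   is initialized like firstprivate below; otherwise only the private
   copy is constructed here, and any copy-out is emitted by
   lower_lastprivate_clauses.  */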
5402 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5403 goto do_firstprivate;
5404 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5405 x = NULL;
5406 else
5407 x = build_outer_var_ref (var, ctx);
5408 goto do_private;
5409
5410 case OMP_CLAUSE_FIRSTPRIVATE:
5411 if (is_task_ctx (ctx))
5412 {
5413 if ((omp_is_reference (var)
5414 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5415 || is_variable_sized (var))
5416 goto do_dtor;
5417 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5418 ctx))
5419 || use_pointer_for_field (var, NULL))
5420 {
5421 x = build_receiver_ref (var, false, ctx);
5422 SET_DECL_VALUE_EXPR (new_var, x);
5423 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5424 goto do_dtor;
5425 }
5426 }
5427 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5428 && omp_is_reference (var))
5429 {
5430 x = build_outer_var_ref (var, ctx);
5431 gcc_assert (TREE_CODE (x) == MEM_REF
5432 && integer_zerop (TREE_OPERAND (x, 1)));
5433 x = TREE_OPERAND (x, 0);
5434 x = lang_hooks.decls.omp_clause_copy_ctor
5435 (c, unshare_expr (new_var), x);
5436 gimplify_and_add (x, ilist);
5437 goto do_dtor;
5438 }
5439 do_firstprivate:
5440 x = build_outer_var_ref (var, ctx);
5441 if (is_simd)
5442 {
5443 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5444 && gimple_omp_for_combined_into_p (ctx->stmt))
5445 {
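/* Sketch of the computation below: in a combined construct
   (e.g. distribute parallel for simd) this body only sees a chunk of
   the iteration space starting at the _looptemp_ value L, so compute
     t = ((L - N1) / STEP) * LINEAR-STEP
   i.e. the linear variable's offset for the iterations preceding this
   chunk, which is then added to the outer value.  For an unsigned IV
   iterating downwards, both operands are negated first so that the
   truncating division yields the iteration count.  */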
5446 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5447 tree stept = TREE_TYPE (t);
5448 tree ct = omp_find_clause (clauses,
5449 OMP_CLAUSE__LOOPTEMP_);
5450 gcc_assert (ct);
5451 tree l = OMP_CLAUSE_DECL (ct);
5452 tree n1 = fd->loop.n1;
5453 tree step = fd->loop.step;
5454 tree itype = TREE_TYPE (l);
5455 if (POINTER_TYPE_P (itype))
5456 itype = signed_type_for (itype);
5457 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5458 if (TYPE_UNSIGNED (itype)
5459 && fd->loop.cond_code == GT_EXPR)
5460 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5461 fold_build1 (NEGATE_EXPR, itype, l),
5462 fold_build1 (NEGATE_EXPR,
5463 itype, step));
5464 else
5465 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5466 t = fold_build2 (MULT_EXPR, stept,
5467 fold_convert (stept, l), t);
5468
5469 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5470 {
5471 if (omp_is_reference (var))
5472 {
5473 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5474 tree new_vard = TREE_OPERAND (new_var, 0);
5475 gcc_assert (DECL_P (new_vard));
5476 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5477 nx = TYPE_SIZE_UNIT (type);
5478 if (TREE_CONSTANT (nx))
5479 {
5480 nx = create_tmp_var_raw (type,
5481 get_name (var));
5482 gimple_add_tmp_var (nx);
5483 TREE_ADDRESSABLE (nx) = 1;
5484 nx = build_fold_addr_expr_loc (clause_loc,
5485 nx);
5486 nx = fold_convert_loc (clause_loc,
5487 TREE_TYPE (new_vard),
5488 nx);
5489 gimplify_assign (new_vard, nx, ilist);
5490 }
5491 }
5492
5493 x = lang_hooks.decls.omp_clause_linear_ctor
5494 (c, new_var, x, t);
5495 gimplify_and_add (x, ilist);
5496 goto do_dtor;
5497 }
5498
5499 if (POINTER_TYPE_P (TREE_TYPE (x)))
5500 x = fold_build2 (POINTER_PLUS_EXPR,
5501 TREE_TYPE (x), x, t);
5502 else
5503 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5504 }
5505
5506 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5507 || TREE_ADDRESSABLE (new_var)
5508 || omp_is_reference (var))
5509 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5510 ivar, lvar))
5511 {
5512 if (omp_is_reference (var))
5513 {
5514 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5515 tree new_vard = TREE_OPERAND (new_var, 0);
5516 gcc_assert (DECL_P (new_vard));
5517 SET_DECL_VALUE_EXPR (new_vard,
5518 build_fold_addr_expr (lvar));
5519 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5520 }
5521 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5522 {
5523 tree iv = create_tmp_var (TREE_TYPE (new_var));
5524 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5525 gimplify_and_add (x, ilist);
5526 gimple_stmt_iterator gsi
5527 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5528 gassign *g
5529 = gimple_build_assign (unshare_expr (lvar), iv);
5530 gsi_insert_before_without_update (&gsi, g,
5531 GSI_SAME_STMT);
5532 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5533 enum tree_code code = PLUS_EXPR;
5534 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5535 code = POINTER_PLUS_EXPR;
5536 g = gimple_build_assign (iv, code, iv, t);
5537 gsi_insert_before_without_update (&gsi, g,
5538 GSI_SAME_STMT);
5539 break;
5540 }
5541 x = lang_hooks.decls.omp_clause_copy_ctor
5542 (c, unshare_expr (ivar), x);
5543 gimplify_and_add (x, &llist[0]);
5544 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5545 if (x)
5546 gimplify_and_add (x, &llist[1]);
5547 break;
5548 }
5549 if (omp_is_reference (var))
5550 {
5551 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5552 tree new_vard = TREE_OPERAND (new_var, 0);
5553 gcc_assert (DECL_P (new_vard));
5554 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5555 nx = TYPE_SIZE_UNIT (type);
5556 if (TREE_CONSTANT (nx))
5557 {
5558 nx = create_tmp_var_raw (type, get_name (var));
5559 gimple_add_tmp_var (nx);
5560 TREE_ADDRESSABLE (nx) = 1;
5561 nx = build_fold_addr_expr_loc (clause_loc, nx);
5562 nx = fold_convert_loc (clause_loc,
5563 TREE_TYPE (new_vard), nx);
5564 gimplify_assign (new_vard, nx, ilist);
5565 }
5566 }
5567 }
5568 x = lang_hooks.decls.omp_clause_copy_ctor
5569 (c, unshare_expr (new_var), x);
5570 gimplify_and_add (x, ilist);
5571 goto do_dtor;
5572
5573 case OMP_CLAUSE__LOOPTEMP_:
5574 case OMP_CLAUSE__REDUCTEMP_:
5575 gcc_assert (is_taskreg_ctx (ctx));
5576 x = build_outer_var_ref (var, ctx);
5577 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5578 gimplify_and_add (x, ilist);
5579 break;
5580
5581 case OMP_CLAUSE_COPYIN:
5582 by_ref = use_pointer_for_field (var, NULL);
5583 x = build_receiver_ref (var, by_ref, ctx);
5584 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5585 append_to_statement_list (x, &copyin_seq);
5586 copyin_by_ref |= by_ref;
5587 break;
5588
5589 case OMP_CLAUSE_REDUCTION:
5590 case OMP_CLAUSE_IN_REDUCTION:
5591 /* OpenACC reductions are initialized using the
5592 GOACC_REDUCTION internal function. */
5593 if (is_gimple_omp_oacc (ctx->stmt))
5594 break;
5595 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5596 {
5597 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5598 gimple *tseq;
5599 tree ptype = TREE_TYPE (placeholder);
5600 if (cond)
5601 {
5602 x = error_mark_node;
5603 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5604 && !task_reduction_needs_orig_p)
5605 x = var;
5606 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5607 {
5608 tree pptype = build_pointer_type (ptype);
5609 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5610 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5611 size_int (task_reduction_cnt_full
5612 + task_reduction_cntorig - 1),
5613 NULL_TREE, NULL_TREE);
5614 else
5615 {
5616 unsigned int idx
5617 = *ctx->task_reduction_map->get (c);
5618 x = task_reduction_read (ilist, tskred_temp,
5619 pptype, 7 + 3 * idx);
5620 }
5621 x = fold_convert (pptype, x);
5622 x = build_simple_mem_ref (x);
5623 }
5624 }
5625 else
5626 {
5627 x = build_outer_var_ref (var, ctx);
5628
5629 if (omp_is_reference (var)
5630 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5631 x = build_fold_addr_expr_loc (clause_loc, x);
5632 }
5633 SET_DECL_VALUE_EXPR (placeholder, x);
5634 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5635 tree new_vard = new_var;
5636 if (omp_is_reference (var))
5637 {
5638 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5639 new_vard = TREE_OPERAND (new_var, 0);
5640 gcc_assert (DECL_P (new_vard));
5641 }
5642 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5643 if (is_simd
5644 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5645 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5646 rvarp = &rvar;
5647 if (is_simd
5648 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5649 ivar, lvar, rvarp,
5650 &rvar2))
5651 {
5652 if (new_vard == new_var)
5653 {
5654 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5655 SET_DECL_VALUE_EXPR (new_var, ivar);
5656 }
5657 else
5658 {
5659 SET_DECL_VALUE_EXPR (new_vard,
5660 build_fold_addr_expr (ivar));
5661 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5662 }
5663 x = lang_hooks.decls.omp_clause_default_ctor
5664 (c, unshare_expr (ivar),
5665 build_outer_var_ref (var, ctx));
5666 if (rvarp && ctx->for_simd_scan_phase)
5667 {
5668 if (x)
5669 gimplify_and_add (x, &llist[0]);
5670 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5671 if (x)
5672 gimplify_and_add (x, &llist[1]);
5673 break;
5674 }
5675 else if (rvarp)
5676 {
5677 if (x)
5678 {
5679 gimplify_and_add (x, &llist[0]);
5680
5681 tree ivar2 = unshare_expr (lvar);
5682 TREE_OPERAND (ivar2, 1) = sctx.idx;
5683 x = lang_hooks.decls.omp_clause_default_ctor
5684 (c, ivar2, build_outer_var_ref (var, ctx));
5685 gimplify_and_add (x, &llist[0]);
5686
5687 if (rvar2)
5688 {
5689 x = lang_hooks.decls.omp_clause_default_ctor
5690 (c, unshare_expr (rvar2),
5691 build_outer_var_ref (var, ctx));
5692 gimplify_and_add (x, &llist[0]);
5693 }
5694
5695 /* For types that need construction, add another
5696 private var which will be default constructed
5697 and optionally initialized with
5698 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the loop
5699 we then want to assign this value instead of
5700 constructing and destructing it in each
5701 iteration. */
5702 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5703 gimple_add_tmp_var (nv);
5704 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5705 ? rvar2
5706 : ivar, 0),
5707 nv);
5708 x = lang_hooks.decls.omp_clause_default_ctor
5709 (c, nv, build_outer_var_ref (var, ctx));
5710 gimplify_and_add (x, ilist);
5711
5712 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5713 {
5714 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5715 x = DECL_VALUE_EXPR (new_vard);
5716 tree vexpr = nv;
5717 if (new_vard != new_var)
5718 vexpr = build_fold_addr_expr (nv);
5719 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5720 lower_omp (&tseq, ctx);
5721 SET_DECL_VALUE_EXPR (new_vard, x);
5722 gimple_seq_add_seq (ilist, tseq);
5723 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5724 }
5725
5726 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5727 if (x)
5728 gimplify_and_add (x, dlist);
5729 }
5730
5731 tree ref = build_outer_var_ref (var, ctx);
5732 x = unshare_expr (ivar);
5733 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5734 ref);
5735 gimplify_and_add (x, &llist[0]);
5736
5737 ref = build_outer_var_ref (var, ctx);
5738 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5739 rvar);
5740 gimplify_and_add (x, &llist[3]);
5741
5742 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5743 if (new_vard == new_var)
5744 SET_DECL_VALUE_EXPR (new_var, lvar);
5745 else
5746 SET_DECL_VALUE_EXPR (new_vard,
5747 build_fold_addr_expr (lvar));
5748
5749 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5750 if (x)
5751 gimplify_and_add (x, &llist[1]);
5752
5753 tree ivar2 = unshare_expr (lvar);
5754 TREE_OPERAND (ivar2, 1) = sctx.idx;
5755 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5756 if (x)
5757 gimplify_and_add (x, &llist[1]);
5758
5759 if (rvar2)
5760 {
5761 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5762 if (x)
5763 gimplify_and_add (x, &llist[1]);
5764 }
5765 break;
5766 }
5767 if (x)
5768 gimplify_and_add (x, &llist[0]);
5769 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5770 {
5771 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5772 lower_omp (&tseq, ctx);
5773 gimple_seq_add_seq (&llist[0], tseq);
5774 }
5775 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5776 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5777 lower_omp (&tseq, ctx);
5778 gimple_seq_add_seq (&llist[1], tseq);
5779 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5780 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5781 if (new_vard == new_var)
5782 SET_DECL_VALUE_EXPR (new_var, lvar);
5783 else
5784 SET_DECL_VALUE_EXPR (new_vard,
5785 build_fold_addr_expr (lvar));
5786 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5787 if (x)
5788 gimplify_and_add (x, &llist[1]);
5789 break;
5790 }
5791 /* If this is a reference to a constant-sized reduction var
5792 with a placeholder, we haven't emitted the initializer
5793 for it, because that is undesirable if SIMD arrays are used.
5794 But if they aren't used, we need to emit the deferred
5795 initialization now. */
5796 else if (omp_is_reference (var) && is_simd)
5797 handle_simd_reference (clause_loc, new_vard, ilist);
5798
5799 tree lab2 = NULL_TREE;
5800 if (cond)
5801 {
5802 gimple *g;
5803 if (!is_parallel_ctx (ctx))
5804 {
5805 tree condv = create_tmp_var (boolean_type_node);
5806 tree m = build_simple_mem_ref (cond);
5807 g = gimple_build_assign (condv, m);
5808 gimple_seq_add_stmt (ilist, g);
5809 tree lab1
5810 = create_artificial_label (UNKNOWN_LOCATION);
5811 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5812 g = gimple_build_cond (NE_EXPR, condv,
5813 boolean_false_node,
5814 lab2, lab1);
5815 gimple_seq_add_stmt (ilist, g);
5816 gimple_seq_add_stmt (ilist,
5817 gimple_build_label (lab1));
5818 }
5819 g = gimple_build_assign (build_simple_mem_ref (cond),
5820 boolean_true_node);
5821 gimple_seq_add_stmt (ilist, g);
5822 }
5823 x = lang_hooks.decls.omp_clause_default_ctor
5824 (c, unshare_expr (new_var),
5825 cond ? NULL_TREE
5826 : build_outer_var_ref (var, ctx));
5827 if (x)
5828 gimplify_and_add (x, ilist);
5829
5830 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5831 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5832 {
5833 if (ctx->for_simd_scan_phase)
5834 goto do_dtor;
5835 if (x || (!is_simd
5836 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5837 {
5838 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5839 gimple_add_tmp_var (nv);
5840 ctx->cb.decl_map->put (new_vard, nv);
5841 x = lang_hooks.decls.omp_clause_default_ctor
5842 (c, nv, build_outer_var_ref (var, ctx));
5843 if (x)
5844 gimplify_and_add (x, ilist);
5845 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5846 {
5847 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5848 tree vexpr = nv;
5849 if (new_vard != new_var)
5850 vexpr = build_fold_addr_expr (nv);
5851 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5852 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5853 lower_omp (&tseq, ctx);
5854 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5855 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5856 gimple_seq_add_seq (ilist, tseq);
5857 }
5858 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5859 if (is_simd && ctx->scan_exclusive)
5860 {
5861 tree nv2
5862 = create_tmp_var_raw (TREE_TYPE (new_var));
5863 gimple_add_tmp_var (nv2);
5864 ctx->cb.decl_map->put (nv, nv2);
5865 x = lang_hooks.decls.omp_clause_default_ctor
5866 (c, nv2, build_outer_var_ref (var, ctx));
5867 gimplify_and_add (x, ilist);
5868 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5869 if (x)
5870 gimplify_and_add (x, dlist);
5871 }
5872 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5873 if (x)
5874 gimplify_and_add (x, dlist);
5875 }
5876 else if (is_simd
5877 && ctx->scan_exclusive
5878 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5879 {
5880 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5881 gimple_add_tmp_var (nv2);
5882 ctx->cb.decl_map->put (new_vard, nv2);
5883 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5884 if (x)
5885 gimplify_and_add (x, dlist);
5886 }
5887 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5888 goto do_dtor;
5889 }
5890
5891 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5892 {
5893 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5894 lower_omp (&tseq, ctx);
5895 gimple_seq_add_seq (ilist, tseq);
5896 }
5897 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5898 if (is_simd)
5899 {
5900 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5901 lower_omp (&tseq, ctx);
5902 gimple_seq_add_seq (dlist, tseq);
5903 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5904 }
5905 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5906 if (cond)
5907 {
5908 if (lab2)
5909 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5910 break;
5911 }
5912 goto do_dtor;
5913 }
5914 else
5915 {
5916 x = omp_reduction_init (c, TREE_TYPE (new_var));
5917 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5918 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5919
5920 if (cond)
5921 {
5922 gimple *g;
5923 tree lab2 = NULL_TREE;
5924 /* GOMP_taskgroup_reduction_register memsets the whole
5925 array to zero. If the initializer is zero, we don't
5926 need to initialize it again, just mark it as ever
5927 used unconditionally, i.e. cond = true. */
5928 if (initializer_zerop (x))
5929 {
5930 g = gimple_build_assign (build_simple_mem_ref (cond),
5931 boolean_true_node);
5932 gimple_seq_add_stmt (ilist, g);
5933 break;
5934 }
5935
5936 /* Otherwise, emit
5937 if (!cond) { cond = true; new_var = x; } */
5938 if (!is_parallel_ctx (ctx))
5939 {
5940 tree condv = create_tmp_var (boolean_type_node);
5941 tree m = build_simple_mem_ref (cond);
5942 g = gimple_build_assign (condv, m);
5943 gimple_seq_add_stmt (ilist, g);
5944 tree lab1
5945 = create_artificial_label (UNKNOWN_LOCATION);
5946 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5947 g = gimple_build_cond (NE_EXPR, condv,
5948 boolean_false_node,
5949 lab2, lab1);
5950 gimple_seq_add_stmt (ilist, g);
5951 gimple_seq_add_stmt (ilist,
5952 gimple_build_label (lab1));
5953 }
5954 g = gimple_build_assign (build_simple_mem_ref (cond),
5955 boolean_true_node);
5956 gimple_seq_add_stmt (ilist, g);
5957 gimplify_assign (new_var, x, ilist);
5958 if (lab2)
5959 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5960 break;
5961 }
5962
5963 /* reduction(-:var) sums up the partial results, so it
5964 acts identically to reduction(+:var). */
5965 if (code == MINUS_EXPR)
5966 code = PLUS_EXPR;
5967
5968 tree new_vard = new_var;
5969 if (is_simd && omp_is_reference (var))
5970 {
5971 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5972 new_vard = TREE_OPERAND (new_var, 0);
5973 gcc_assert (DECL_P (new_vard));
5974 }
5975 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5976 if (is_simd
5977 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5978 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5979 rvarp = &rvar;
5980 if (is_simd
5981 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5982 ivar, lvar, rvarp,
5983 &rvar2))
5984 {
5985 if (new_vard != new_var)
5986 {
5987 SET_DECL_VALUE_EXPR (new_vard,
5988 build_fold_addr_expr (lvar));
5989 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5990 }
5991
5992 tree ref = build_outer_var_ref (var, ctx);
5993
5994 if (rvarp)
5995 {
5996 if (ctx->for_simd_scan_phase)
5997 break;
5998 gimplify_assign (ivar, ref, &llist[0]);
5999 ref = build_outer_var_ref (var, ctx);
6000 gimplify_assign (ref, rvar, &llist[3]);
6001 break;
6002 }
6003
6004 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6005
6006 if (sctx.is_simt)
6007 {
6008 if (!simt_lane)
6009 simt_lane = create_tmp_var (unsigned_type_node);
6010 x = build_call_expr_internal_loc
6011 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6012 TREE_TYPE (ivar), 2, ivar, simt_lane);
6013 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6014 gimplify_assign (ivar, x, &llist[2]);
6015 }
6016 x = build2 (code, TREE_TYPE (ref), ref, ivar);
6017 ref = build_outer_var_ref (var, ctx);
6018 gimplify_assign (ref, x, &llist[1]);
6019
6020 }
6021 else
6022 {
6023 if (omp_is_reference (var) && is_simd)
6024 handle_simd_reference (clause_loc, new_vard, ilist);
6025 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6026 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6027 break;
6028 gimplify_assign (new_var, x, ilist);
6029 if (is_simd)
6030 {
6031 tree ref = build_outer_var_ref (var, ctx);
6032
6033 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6034 ref = build_outer_var_ref (var, ctx);
6035 gimplify_assign (ref, x, dlist);
6036 }
6037 }
6038 }
6039 break;
6040
6041 default:
6042 gcc_unreachable ();
6043 }
6044 }
6045 }
6046 if (tskred_avar)
6047 {
6048 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6049 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6050 }
6051
6052 if (known_eq (sctx.max_vf, 1U))
6053 {
6054 sctx.is_simt = false;
6055 if (ctx->lastprivate_conditional_map)
6056 {
6057 if (gimple_omp_for_combined_into_p (ctx->stmt))
6058 {
6059 /* Signal to lower_omp_1 that it should use the parent context. */
6060 ctx->combined_into_simd_safelen1 = true;
6061 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6062 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6063 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6064 {
6065 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6066 omp_context *outer = ctx->outer;
6067 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6068 outer = outer->outer;
6069 tree *v = ctx->lastprivate_conditional_map->get (o);
6070 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6071 tree *pv = outer->lastprivate_conditional_map->get (po);
6072 *v = *pv;
6073 }
6074 }
6075 else
6076 {
6077 /* When not vectorized, treat lastprivate(conditional:) like
6078 normal lastprivate, as there will be just one simd lane
6079 writing the privatized variable. */
6080 delete ctx->lastprivate_conditional_map;
6081 ctx->lastprivate_conditional_map = NULL;
6082 }
6083 }
6084 }
6085
6086 if (nonconst_simd_if)
6087 {
6088 if (sctx.lane == NULL_TREE)
6089 {
6090 sctx.idx = create_tmp_var (unsigned_type_node);
6091 sctx.lane = create_tmp_var (unsigned_type_node);
6092 }
6093 /* FIXME: For now. */
6094 sctx.is_simt = false;
6095 }
6096
6097 if (sctx.lane || sctx.is_simt)
6098 {
6099 uid = create_tmp_var (ptr_type_node, "simduid");
6100 /* Don't want uninit warnings on simduid; it is always uninitialized,
6101 and we don't use its value, only its DECL_UID. */
6102 TREE_NO_WARNING (uid) = 1;
6103 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6104 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6105 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6106 gimple_omp_for_set_clauses (ctx->stmt, c);
6107 }
6108 /* Emit calls denoting privatized variables and initializing a pointer to a
6109 structure that holds private variables as fields (expanded in the ompdevlow pass). */
6110 if (sctx.is_simt)
6111 {
6112 sctx.simt_eargs[0] = uid;
6113 gimple *g
6114 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6115 gimple_call_set_lhs (g, uid);
6116 gimple_seq_add_stmt (ilist, g);
6117 sctx.simt_eargs.release ();
6118
6119 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6120 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6121 gimple_call_set_lhs (g, simtrec);
6122 gimple_seq_add_stmt (ilist, g);
6123 }
6124 if (sctx.lane)
6125 {
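/* Roughly: a lane = .GOMP_SIMD_LANE (simduid, 0, ...) call is placed
   at the start of the loop body and is replaced by the vectorizer
   with the actual lane number; the lane = 0 assignment emitted into
   ILIST covers the non-vectorized fallback.  */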
6126 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6127 2 + (nonconst_simd_if != NULL),
6128 uid, integer_zero_node,
6129 nonconst_simd_if);
6130 gimple_call_set_lhs (g, sctx.lane);
6131 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6132 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6133 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6134 build_int_cst (unsigned_type_node, 0));
6135 gimple_seq_add_stmt (ilist, g);
6136 if (sctx.lastlane)
6137 {
6138 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6139 2, uid, sctx.lane);
6140 gimple_call_set_lhs (g, sctx.lastlane);
6141 gimple_seq_add_stmt (dlist, g);
6142 gimple_seq_add_seq (dlist, llist[3]);
6143 }
6144 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
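/* Schematically the emitted tree reduction is
     simt_lane = 1;
     while (simt_lane < .GOMP_SIMT_VF ())
       {
         <llist[2]: ivar = ivar OP .GOMP_SIMT_XCHG_BFLY (ivar, simt_lane)>
         simt_lane <<= 1;
       }
   exchanging partial results between lanes pairwise ("butterfly").  */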
6145 if (llist[2])
6146 {
6147 tree simt_vf = create_tmp_var (unsigned_type_node);
6148 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6149 gimple_call_set_lhs (g, simt_vf);
6150 gimple_seq_add_stmt (dlist, g);
6151
6152 tree t = build_int_cst (unsigned_type_node, 1);
6153 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6154 gimple_seq_add_stmt (dlist, g);
6155
6156 t = build_int_cst (unsigned_type_node, 0);
6157 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6158 gimple_seq_add_stmt (dlist, g);
6159
6160 tree body = create_artificial_label (UNKNOWN_LOCATION);
6161 tree header = create_artificial_label (UNKNOWN_LOCATION);
6162 tree end = create_artificial_label (UNKNOWN_LOCATION);
6163 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6164 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6165
6166 gimple_seq_add_seq (dlist, llist[2]);
6167
6168 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6169 gimple_seq_add_stmt (dlist, g);
6170
6171 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6172 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6173 gimple_seq_add_stmt (dlist, g);
6174
6175 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6176 }
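/* llist[0] holds per-lane construction/initialization and llist[1]
   per-lane merge/destruction statements; wrap each in a loop over the
   chosen vectorization factor, roughly
     for (idx = 0; idx < .GOMP_SIMD_VF (simduid); idx++)
       <llist[i]>
   emitted into ILIST for i == 0 and into DLIST for i == 1.  */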
6177 for (int i = 0; i < 2; i++)
6178 if (llist[i])
6179 {
6180 tree vf = create_tmp_var (unsigned_type_node);
6181 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6182 gimple_call_set_lhs (g, vf);
6183 gimple_seq *seq = i == 0 ? ilist : dlist;
6184 gimple_seq_add_stmt (seq, g);
6185 tree t = build_int_cst (unsigned_type_node, 0);
6186 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6187 gimple_seq_add_stmt (seq, g);
6188 tree body = create_artificial_label (UNKNOWN_LOCATION);
6189 tree header = create_artificial_label (UNKNOWN_LOCATION);
6190 tree end = create_artificial_label (UNKNOWN_LOCATION);
6191 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6192 gimple_seq_add_stmt (seq, gimple_build_label (body));
6193 gimple_seq_add_seq (seq, llist[i]);
6194 t = build_int_cst (unsigned_type_node, 1);
6195 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6196 gimple_seq_add_stmt (seq, g);
6197 gimple_seq_add_stmt (seq, gimple_build_label (header));
6198 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6199 gimple_seq_add_stmt (seq, g);
6200 gimple_seq_add_stmt (seq, gimple_build_label (end));
6201 }
6202 }
6203 if (sctx.is_simt)
6204 {
6205 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6206 gimple *g
6207 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6208 gimple_seq_add_stmt (dlist, g);
6209 }
6210
6211 /* The copyin sequence is not to be executed by the main thread, since
6212 that would result in self-copies. This may not be visible for scalar
6213 copies, but it certainly is for C++ operator=. */
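/* Schematically:
     if (__builtin_omp_get_thread_num () != 0)
       <copyin_seq>  */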
6214 if (copyin_seq)
6215 {
6216 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6217 0);
6218 x = build2 (NE_EXPR, boolean_type_node, x,
6219 build_int_cst (TREE_TYPE (x), 0));
6220 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6221 gimplify_and_add (x, ilist);
6222 }
6223
6224 /* If any copyin variable is passed by reference, we must ensure the
6225 master thread doesn't modify it before it is copied over in all
6226 threads. Similarly, for variables in both firstprivate and
6227 lastprivate clauses we need to ensure the lastprivate copying
6228 happens after the firstprivate copying in all threads. And
6229 similarly for UDRs if the initializer expression refers to omp_orig. */
6230 if (copyin_by_ref || lastprivate_firstprivate
6231 || (reduction_omp_orig_ref
6232 && !ctx->scan_inclusive
6233 && !ctx->scan_exclusive))
6234 {
6235 /* Don't add any barrier for #pragma omp simd or
6236 #pragma omp distribute. */
6237 if (!is_task_ctx (ctx)
6238 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6239 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6240 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6241 }
6242
6243 /* If max_vf is non-zero, then we can use only a vectorization factor
6244 up to the max_vf we chose. So stick it into the safelen clause. */
6245 if (maybe_ne (sctx.max_vf, 0U))
6246 {
6247 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6248 OMP_CLAUSE_SAFELEN);
6249 poly_uint64 safe_len;
6250 if (c == NULL_TREE
6251 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6252 && maybe_gt (safe_len, sctx.max_vf)))
6253 {
6254 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6255 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6256 sctx.max_vf);
6257 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6258 gimple_omp_for_set_clauses (ctx->stmt, c);
6259 }
6260 }
6261 }
6262
6263 /* Create temporary variables for lastprivate(conditional:) implementation
6264 in context CTX with CLAUSES. */
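/* Roughly: for simd, the per-variable _condtemp_ arrays created
   during scanning are reused and an iterator temporary is added;
   otherwise a _condtemp_ buffer (COND_PTR) and an iteration counter
   are created, plus one private counter per conditional lastprivate
   variable, so each thread can record the iteration at which it last
   assigned the variable; the largest recorded iteration then wins in
   lower_lastprivate_clauses.  */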
6265
6266 static void
6267 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6268 {
6269 tree iter_type = NULL_TREE;
6270 tree cond_ptr = NULL_TREE;
6271 tree iter_var = NULL_TREE;
6272 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6273 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6274 tree next = *clauses;
6275 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6276 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6277 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6278 {
6279 if (is_simd)
6280 {
6281 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6282 gcc_assert (cc);
6283 if (iter_type == NULL_TREE)
6284 {
6285 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6286 iter_var = create_tmp_var_raw (iter_type);
6287 DECL_CONTEXT (iter_var) = current_function_decl;
6288 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6289 DECL_CHAIN (iter_var) = ctx->block_vars;
6290 ctx->block_vars = iter_var;
6291 tree c3
6292 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6293 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6294 OMP_CLAUSE_DECL (c3) = iter_var;
6295 OMP_CLAUSE_CHAIN (c3) = *clauses;
6296 *clauses = c3;
6297 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6298 }
6299 next = OMP_CLAUSE_CHAIN (cc);
6300 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6301 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6302 ctx->lastprivate_conditional_map->put (o, v);
6303 continue;
6304 }
6305 if (iter_type == NULL)
6306 {
6307 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6308 {
6309 struct omp_for_data fd;
6310 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6311 NULL);
6312 iter_type = unsigned_type_for (fd.iter_type);
6313 }
6314 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6315 iter_type = unsigned_type_node;
6316 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6317 if (c2)
6318 {
6319 cond_ptr
6320 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6321 OMP_CLAUSE_DECL (c2) = cond_ptr;
6322 }
6323 else
6324 {
6325 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6326 DECL_CONTEXT (cond_ptr) = current_function_decl;
6327 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6328 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6329 ctx->block_vars = cond_ptr;
6330 c2 = build_omp_clause (UNKNOWN_LOCATION,
6331 OMP_CLAUSE__CONDTEMP_);
6332 OMP_CLAUSE_DECL (c2) = cond_ptr;
6333 OMP_CLAUSE_CHAIN (c2) = *clauses;
6334 *clauses = c2;
6335 }
6336 iter_var = create_tmp_var_raw (iter_type);
6337 DECL_CONTEXT (iter_var) = current_function_decl;
6338 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6339 DECL_CHAIN (iter_var) = ctx->block_vars;
6340 ctx->block_vars = iter_var;
6341 tree c3
6342 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6343 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6344 OMP_CLAUSE_DECL (c3) = iter_var;
6345 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6346 OMP_CLAUSE_CHAIN (c2) = c3;
6347 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6348 }
6349 tree v = create_tmp_var_raw (iter_type);
6350 DECL_CONTEXT (v) = current_function_decl;
6351 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6352 DECL_CHAIN (v) = ctx->block_vars;
6353 ctx->block_vars = v;
6354 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6355 ctx->lastprivate_conditional_map->put (o, v);
6356 }
6357 }
6358
6359
6360 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6361 both parallel and workshare constructs. PREDICATE may be NULL if it's
6362 always true. BODY_P is the sequence into which early initialization
6363 is inserted if needed, STMT_LIST is where the non-conditional
6364 lastprivate handling goes, and CSTMT_LIST is a sequence that needs to
6365 be run in a critical section. */
6366
6367 static void
6368 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6369 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6370 omp_context *ctx)
6371 {
6372 tree x, c, label = NULL, orig_clauses = clauses;
6373 bool par_clauses = false;
6374 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6375 unsigned HOST_WIDE_INT conditional_off = 0;
6376 gimple_seq post_stmt_list = NULL;
6377
6378 /* Early exit if there are no lastprivate or linear clauses. */
6379 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6380 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6381 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6382 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6383 break;
6384 if (clauses == NULL)
6385 {
6386 /* If this was a workshare clause, see if it had been combined
6387 with its parallel. In that case, look for the clauses on the
6388 parallel statement itself. */
6389 if (is_parallel_ctx (ctx))
6390 return;
6391
6392 ctx = ctx->outer;
6393 if (ctx == NULL || !is_parallel_ctx (ctx))
6394 return;
6395
6396 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6397 OMP_CLAUSE_LASTPRIVATE);
6398 if (clauses == NULL)
6399 return;
6400 par_clauses = true;
6401 }
6402
6403 bool maybe_simt = false;
6404 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6405 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6406 {
6407 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6408 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6409 if (simduid)
6410 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6411 }
6412
6413 if (predicate)
6414 {
6415 gcond *stmt;
6416 tree label_true, arm1, arm2;
6417 enum tree_code pred_code = TREE_CODE (predicate);
6418
6419 label = create_artificial_label (UNKNOWN_LOCATION);
6420 label_true = create_artificial_label (UNKNOWN_LOCATION);
6421 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6422 {
6423 arm1 = TREE_OPERAND (predicate, 0);
6424 arm2 = TREE_OPERAND (predicate, 1);
6425 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6426 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6427 }
6428 else
6429 {
6430 arm1 = predicate;
6431 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6432 arm2 = boolean_false_node;
6433 pred_code = NE_EXPR;
6434 }
6435 if (maybe_simt)
6436 {
6437 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6438 c = fold_convert (integer_type_node, c);
6439 simtcond = create_tmp_var (integer_type_node);
6440 gimplify_assign (simtcond, c, stmt_list);
6441 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6442 1, simtcond);
6443 c = create_tmp_var (integer_type_node);
6444 gimple_call_set_lhs (g, c);
6445 gimple_seq_add_stmt (stmt_list, g);
6446 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6447 label_true, label);
6448 }
6449 else
6450 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6451 gimple_seq_add_stmt (stmt_list, stmt);
6452 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6453 }
6454
6455 tree cond_ptr = NULL_TREE;
6456 for (c = clauses; c ;)
6457 {
6458 tree var, new_var;
6459 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6460 gimple_seq *this_stmt_list = stmt_list;
6461 tree lab2 = NULL_TREE;
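/* Sketch of the code built below for lastprivate(conditional:):
   compare this thread's recorded iteration V with the value stored in
   the shared _condtemp_ buffer and copy out only when it is larger,
     if (v > cond_ptr[off]) { cond_ptr[off] = v; <copy-out below> }
   all of it inside the critical section (CSTMT_LIST).  */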
6462
6463 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6464 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6465 && ctx->lastprivate_conditional_map
6466 && !ctx->combined_into_simd_safelen1)
6467 {
6468 gcc_assert (body_p);
6469 if (simduid)
6470 goto next;
6471 if (cond_ptr == NULL_TREE)
6472 {
6473 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6474 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6475 }
6476 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6477 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6478 tree v = *ctx->lastprivate_conditional_map->get (o);
6479 gimplify_assign (v, build_zero_cst (type), body_p);
6480 this_stmt_list = cstmt_list;
6481 tree mem;
6482 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6483 {
6484 mem = build2 (MEM_REF, type, cond_ptr,
6485 build_int_cst (TREE_TYPE (cond_ptr),
6486 conditional_off));
6487 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6488 }
6489 else
6490 mem = build4 (ARRAY_REF, type, cond_ptr,
6491 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6492 tree mem2 = copy_node (mem);
6493 gimple_seq seq = NULL;
6494 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6495 gimple_seq_add_seq (this_stmt_list, seq);
6496 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6497 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6498 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6499 gimple_seq_add_stmt (this_stmt_list, g);
6500 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6501 gimplify_assign (mem2, v, this_stmt_list);
6502 }
6503 else if (predicate
6504 && ctx->combined_into_simd_safelen1
6505 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6506 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6507 && ctx->lastprivate_conditional_map)
6508 this_stmt_list = &post_stmt_list;
6509
6510 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6511 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6512 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6513 {
6514 var = OMP_CLAUSE_DECL (c);
6515 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6516 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6517 && is_taskloop_ctx (ctx))
6518 {
6519 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6520 new_var = lookup_decl (var, ctx->outer);
6521 }
6522 else
6523 {
6524 new_var = lookup_decl (var, ctx);
6525 /* Avoid uninitialized warnings for lastprivate and
6526 for linear iterators. */
6527 if (predicate
6528 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6529 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6530 TREE_NO_WARNING (new_var) = 1;
6531 }
6532
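/* When the privatized variable lives in an "omp simd array", the
   copy-out below is redirected to the element written last, i.e.
   array[.GOMP_SIMD_LAST_LANE (simduid, idx)]; under SIMT the value is
   instead fetched from the last active lane via .GOMP_SIMT_LAST_LANE
   and .GOMP_SIMT_XCHG_IDX.  */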
6533 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6534 {
6535 tree val = DECL_VALUE_EXPR (new_var);
6536 if (TREE_CODE (val) == ARRAY_REF
6537 && VAR_P (TREE_OPERAND (val, 0))
6538 && lookup_attribute ("omp simd array",
6539 DECL_ATTRIBUTES (TREE_OPERAND (val,
6540 0))))
6541 {
6542 if (lastlane == NULL)
6543 {
6544 lastlane = create_tmp_var (unsigned_type_node);
6545 gcall *g
6546 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6547 2, simduid,
6548 TREE_OPERAND (val, 1));
6549 gimple_call_set_lhs (g, lastlane);
6550 gimple_seq_add_stmt (this_stmt_list, g);
6551 }
6552 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6553 TREE_OPERAND (val, 0), lastlane,
6554 NULL_TREE, NULL_TREE);
6555 TREE_THIS_NOTRAP (new_var) = 1;
6556 }
6557 }
6558 else if (maybe_simt)
6559 {
6560 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6561 ? DECL_VALUE_EXPR (new_var)
6562 : new_var);
6563 if (simtlast == NULL)
6564 {
6565 simtlast = create_tmp_var (unsigned_type_node);
6566 gcall *g = gimple_build_call_internal
6567 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6568 gimple_call_set_lhs (g, simtlast);
6569 gimple_seq_add_stmt (this_stmt_list, g);
6570 }
6571 x = build_call_expr_internal_loc
6572 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6573 TREE_TYPE (val), 2, val, simtlast);
6574 new_var = unshare_expr (new_var);
6575 gimplify_assign (new_var, x, this_stmt_list);
6576 new_var = unshare_expr (new_var);
6577 }
6578
6579 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6580 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6581 {
6582 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6583 gimple_seq_add_seq (this_stmt_list,
6584 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6585 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6586 }
6587 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6588 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6589 {
6590 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6591 gimple_seq_add_seq (this_stmt_list,
6592 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6593 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6594 }
6595
6596 x = NULL_TREE;
6597 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6598 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6599 && is_taskloop_ctx (ctx))
6600 {
6601 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6602 ctx->outer->outer);
6603 if (is_global_var (ovar))
6604 x = ovar;
6605 }
6606 if (!x)
6607 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6608 if (omp_is_reference (var))
6609 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6610 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6611 gimplify_and_add (x, this_stmt_list);
6612
6613 if (lab2)
6614 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6615 }
6616
6617 next:
6618 c = OMP_CLAUSE_CHAIN (c);
6619 if (c == NULL && !par_clauses)
6620 {
6621 /* If this was a workshare clause, see if it had been combined
6622 with its parallel. In that case, continue looking for the
6623 clauses also on the parallel statement itself. */
6624 if (is_parallel_ctx (ctx))
6625 break;
6626
6627 ctx = ctx->outer;
6628 if (ctx == NULL || !is_parallel_ctx (ctx))
6629 break;
6630
6631 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6632 OMP_CLAUSE_LASTPRIVATE);
6633 par_clauses = true;
6634 }
6635 }
6636
6637 if (label)
6638 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6639 gimple_seq_add_seq (stmt_list, post_stmt_list);
6640 }
6641
6642 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6643 (which might be a placeholder). INNER is true if this is an inner
6644 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6645 join markers. Generate the before-loop forking sequence in
6646 FORK_SEQ and the after-loop joining sequence in JOIN_SEQ. The
6647 general form of these sequences is
6648
6649 GOACC_REDUCTION_SETUP
6650 GOACC_FORK
6651 GOACC_REDUCTION_INIT
6652 ...
6653 GOACC_REDUCTION_FINI
6654 GOACC_JOIN
6655 GOACC_REDUCTION_TEARDOWN. */
6656
6657 static void
6658 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6659 gcall *fork, gcall *join, gimple_seq *fork_seq,
6660 gimple_seq *join_seq, omp_context *ctx)
6661 {
6662 gimple_seq before_fork = NULL;
6663 gimple_seq after_fork = NULL;
6664 gimple_seq before_join = NULL;
6665 gimple_seq after_join = NULL;
6666 tree init_code = NULL_TREE, fini_code = NULL_TREE,
6667 setup_code = NULL_TREE, teardown_code = NULL_TREE;
6668 unsigned offset = 0;
6669
6670 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6671 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6672 {
6673 tree orig = OMP_CLAUSE_DECL (c);
6674 tree var = maybe_lookup_decl (orig, ctx);
6675 tree ref_to_res = NULL_TREE;
6676 tree incoming, outgoing, v1, v2, v3;
6677 bool is_private = false;
6678
6679 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6680 if (rcode == MINUS_EXPR)
6681 rcode = PLUS_EXPR;
6682 else if (rcode == TRUTH_ANDIF_EXPR)
6683 rcode = BIT_AND_EXPR;
6684 else if (rcode == TRUTH_ORIF_EXPR)
6685 rcode = BIT_IOR_EXPR;
6686 tree op = build_int_cst (unsigned_type_node, rcode);
6687
6688 if (!var)
6689 var = orig;
6690
6691 incoming = outgoing = var;
6692
6693 if (!inner)
6694 {
6695 /* See if an outer construct also reduces this variable. */
6696 omp_context *outer = ctx;
6697
6698 while (omp_context *probe = outer->outer)
6699 {
6700 enum gimple_code type = gimple_code (probe->stmt);
6701 tree cls;
6702
6703 switch (type)
6704 {
6705 case GIMPLE_OMP_FOR:
6706 cls = gimple_omp_for_clauses (probe->stmt);
6707 break;
6708
6709 case GIMPLE_OMP_TARGET:
6710 if ((gimple_omp_target_kind (probe->stmt)
6711 != GF_OMP_TARGET_KIND_OACC_PARALLEL)
6712 && (gimple_omp_target_kind (probe->stmt)
6713 != GF_OMP_TARGET_KIND_OACC_SERIAL))
6714 goto do_lookup;
6715
6716 cls = gimple_omp_target_clauses (probe->stmt);
6717 break;
6718
6719 default:
6720 goto do_lookup;
6721 }
6722
6723 outer = probe;
6724 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6725 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6726 && orig == OMP_CLAUSE_DECL (cls))
6727 {
6728 incoming = outgoing = lookup_decl (orig, probe);
6729 goto has_outer_reduction;
6730 }
6731 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6732 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6733 && orig == OMP_CLAUSE_DECL (cls))
6734 {
6735 is_private = true;
6736 goto do_lookup;
6737 }
6738 }
6739
6740 do_lookup:
6741 /* This is the outermost construct with this reduction;
6742 see if there's a mapping for it. */
6743 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6744 && maybe_lookup_field (orig, outer) && !is_private)
6745 {
6746 ref_to_res = build_receiver_ref (orig, false, outer);
6747 if (omp_is_reference (orig))
6748 ref_to_res = build_simple_mem_ref (ref_to_res);
6749
6750 tree type = TREE_TYPE (var);
6751 if (POINTER_TYPE_P (type))
6752 type = TREE_TYPE (type);
6753
6754 outgoing = var;
6755 incoming = omp_reduction_init_op (loc, rcode, type);
6756 }
6757 else
6758 {
6759 /* Try to find the reduction var in enclosing contexts;
6760 use the original if no mapping is found. */
6761 tree t = NULL_TREE;
6762 omp_context *c = ctx->outer;
6763 while (c && !t)
6764 {
6765 t = maybe_lookup_decl (orig, c);
6766 c = c->outer;
6767 }
6768 incoming = outgoing = (t ? t : orig);
6769 }
6770
6771 has_outer_reduction:;
6772 }
6773
6774 if (!ref_to_res)
6775 ref_to_res = integer_zero_node;
6776
6777 if (omp_is_reference (orig))
6778 {
6779 tree type = TREE_TYPE (var);
6780 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6781
6782 if (!inner)
6783 {
6784 tree x = create_tmp_var (TREE_TYPE (type), id);
6785 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6786 }
6787
6788 v1 = create_tmp_var (type, id);
6789 v2 = create_tmp_var (type, id);
6790 v3 = create_tmp_var (type, id);
6791
6792 gimplify_assign (v1, var, fork_seq);
6793 gimplify_assign (v2, var, fork_seq);
6794 gimplify_assign (v3, var, fork_seq);
6795
6796 var = build_simple_mem_ref (var);
6797 v1 = build_simple_mem_ref (v1);
6798 v2 = build_simple_mem_ref (v2);
6799 v3 = build_simple_mem_ref (v3);
6800 outgoing = build_simple_mem_ref (outgoing);
6801
6802 if (!TREE_CONSTANT (incoming))
6803 incoming = build_simple_mem_ref (incoming);
6804 }
6805 else
6806 v1 = v2 = v3 = var;
6807
6808 /* Determine the position in the reduction buffer, which may be
6809 used by the target. The parser has ensured that this is not a
6810 variable-sized type. */
6811 fixed_size_mode mode
6812 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6813 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6814 offset = (offset + align - 1) & ~(align - 1);
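/* E.g. a 4-byte aligned entry following offset 6 is placed at
   offset (6 + 3) & ~3 == 8.  */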
6815 tree off = build_int_cst (sizetype, offset);
6816 offset += GET_MODE_SIZE (mode);
6817
6818 if (!init_code)
6819 {
6820 init_code = build_int_cst (integer_type_node,
6821 IFN_GOACC_REDUCTION_INIT);
6822 fini_code = build_int_cst (integer_type_node,
6823 IFN_GOACC_REDUCTION_FINI);
6824 setup_code = build_int_cst (integer_type_node,
6825 IFN_GOACC_REDUCTION_SETUP);
6826 teardown_code = build_int_cst (integer_type_node,
6827 IFN_GOACC_REDUCTION_TEARDOWN);
6828 }
6829
6830 tree setup_call
6831 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6832 TREE_TYPE (var), 6, setup_code,
6833 unshare_expr (ref_to_res),
6834 incoming, level, op, off);
6835 tree init_call
6836 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6837 TREE_TYPE (var), 6, init_code,
6838 unshare_expr (ref_to_res),
6839 v1, level, op, off);
6840 tree fini_call
6841 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6842 TREE_TYPE (var), 6, fini_code,
6843 unshare_expr (ref_to_res),
6844 v2, level, op, off);
6845 tree teardown_call
6846 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6847 TREE_TYPE (var), 6, teardown_code,
6848 ref_to_res, v3, level, op, off);
6849
6850 gimplify_assign (v1, setup_call, &before_fork);
6851 gimplify_assign (v2, init_call, &after_fork);
6852 gimplify_assign (v3, fini_call, &before_join);
6853 gimplify_assign (outgoing, teardown_call, &after_join);
6854 }
6855
6856 /* Now stitch things together. */
6857 gimple_seq_add_seq (fork_seq, before_fork);
6858 if (fork)
6859 gimple_seq_add_stmt (fork_seq, fork);
6860 gimple_seq_add_seq (fork_seq, after_fork);
6861
6862 gimple_seq_add_seq (join_seq, before_join);
6863 if (join)
6864 gimple_seq_add_stmt (join_seq, join);
6865 gimple_seq_add_seq (join_seq, after_join);
6866 }
6867
6868 /* Generate code to implement the REDUCTION clauses and append it
6869 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
6870 that should also be emitted inside the critical section; in
6871 that case clear *CLIST afterwards, otherwise leave it as is
6872 and let the caller emit it itself. */
6873
6874 static void
6875 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
6876 gimple_seq *clist, omp_context *ctx)
6877 {
6878 gimple_seq sub_seq = NULL;
6879 gimple *stmt;
6880 tree x, c;
6881 int count = 0;
6882
6883 /* OpenACC loop reductions are handled elsewhere. */
6884 if (is_gimple_omp_oacc (ctx->stmt))
6885 return;
6886
6887 /* SIMD reductions are handled in lower_rec_input_clauses. */
6888 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6889 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6890 return;
6891
6892 /* inscan reductions are handled elsewhere. */
6893 if (ctx->scan_inclusive || ctx->scan_exclusive)
6894 return;
6895
6896 /* First see if there is exactly one reduction clause. Use an OMP_ATOMIC
6897 update in that case; otherwise use a lock. */
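/* E.g. a lone reduction(+:sum) is merged with a single relaxed
   atomic update, roughly "#pragma omp atomic sum_orig += sum_priv",
   whereas multiple clauses, array sections and UDRs are merged
   between GOMP_atomic_start ()/GOMP_atomic_end () calls.  */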
6898 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
6899 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6900 && !OMP_CLAUSE_REDUCTION_TASK (c))
6901 {
6902 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
6903 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6904 {
6905 /* Never use OMP_ATOMIC for array reductions or UDRs. */
6906 count = -1;
6907 break;
6908 }
6909 count++;
6910 }
6911
6912 if (count == 0)
6913 return;
6914
6915 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6916 {
6917 tree var, ref, new_var, orig_var;
6918 enum tree_code code;
6919 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6920
6921 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6922 || OMP_CLAUSE_REDUCTION_TASK (c))
6923 continue;
6924
6925 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
6926 orig_var = var = OMP_CLAUSE_DECL (c);
6927 if (TREE_CODE (var) == MEM_REF)
6928 {
6929 var = TREE_OPERAND (var, 0);
6930 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
6931 var = TREE_OPERAND (var, 0);
6932 if (TREE_CODE (var) == ADDR_EXPR)
6933 var = TREE_OPERAND (var, 0);
6934 else
6935 {
6936 /* If this is a pointer- or reference-based array
6937 section, the var could be private in the outer
6938 context, e.g. on an orphaned loop construct. Pretend
6939 this is a private variable's outer reference. */
6940 ccode = OMP_CLAUSE_PRIVATE;
6941 if (TREE_CODE (var) == INDIRECT_REF)
6942 var = TREE_OPERAND (var, 0);
6943 }
6944 orig_var = var;
6945 if (is_variable_sized (var))
6946 {
6947 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
6948 var = DECL_VALUE_EXPR (var);
6949 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
6950 var = TREE_OPERAND (var, 0);
6951 gcc_assert (DECL_P (var));
6952 }
6953 }
6954 new_var = lookup_decl (var, ctx);
6955 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
6956 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6957 ref = build_outer_var_ref (var, ctx, ccode);
6958 code = OMP_CLAUSE_REDUCTION_CODE (c);
6959
6960 /* reduction(-:var) sums up the partial results, so it acts
6961 identically to reduction(+:var). */
6962 if (code == MINUS_EXPR)
6963 code = PLUS_EXPR;
6964
6965 if (count == 1)
6966 {
6967 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
6968
6969 addr = save_expr (addr);
6970 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
6971 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
6972 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
6973 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
6974 gimplify_and_add (x, stmt_seqp);
6975 return;
6976 }
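/* For an array section reduction, walk the private and original
   arrays in lockstep, schematically
     for (i = 0; i <= v; i++, priv++, orig++)
       *orig = *orig OP *priv;   (or the UDR merge)
   where v is the maximum index of the section's domain.  */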
6977 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6978 {
6979 tree d = OMP_CLAUSE_DECL (c);
6980 tree type = TREE_TYPE (d);
6981 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6982 tree i = create_tmp_var (TREE_TYPE (v));
6983 tree ptype = build_pointer_type (TREE_TYPE (type));
6984 tree bias = TREE_OPERAND (d, 1);
6985 d = TREE_OPERAND (d, 0);
6986 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
6987 {
6988 tree b = TREE_OPERAND (d, 1);
6989 b = maybe_lookup_decl (b, ctx);
6990 if (b == NULL)
6991 {
6992 b = TREE_OPERAND (d, 1);
6993 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
6994 }
6995 if (integer_zerop (bias))
6996 bias = b;
6997 else
6998 {
6999 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7000 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7001 TREE_TYPE (b), b, bias);
7002 }
7003 d = TREE_OPERAND (d, 0);
7004 }
7005 /* For ref, build_outer_var_ref already performs this, so
7006 only new_var needs a dereference. */
7007 if (TREE_CODE (d) == INDIRECT_REF)
7008 {
7009 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7010 gcc_assert (omp_is_reference (var) && var == orig_var);
7011 }
7012 else if (TREE_CODE (d) == ADDR_EXPR)
7013 {
7014 if (orig_var == var)
7015 {
7016 new_var = build_fold_addr_expr (new_var);
7017 ref = build_fold_addr_expr (ref);
7018 }
7019 }
7020 else
7021 {
7022 gcc_assert (orig_var == var);
7023 if (omp_is_reference (var))
7024 ref = build_fold_addr_expr (ref);
7025 }
7026 if (DECL_P (v))
7027 {
7028 tree t = maybe_lookup_decl (v, ctx);
7029 if (t)
7030 v = t;
7031 else
7032 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7033 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7034 }
7035 if (!integer_zerop (bias))
7036 {
7037 bias = fold_convert_loc (clause_loc, sizetype, bias);
7038 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7039 TREE_TYPE (new_var), new_var,
7040 unshare_expr (bias));
7041 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7042 TREE_TYPE (ref), ref, bias);
7043 }
7044 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7045 ref = fold_convert_loc (clause_loc, ptype, ref);
7046 tree m = create_tmp_var (ptype);
7047 gimplify_assign (m, new_var, stmt_seqp);
7048 new_var = m;
7049 m = create_tmp_var (ptype);
7050 gimplify_assign (m, ref, stmt_seqp);
7051 ref = m;
7052 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7053 tree body = create_artificial_label (UNKNOWN_LOCATION);
7054 tree end = create_artificial_label (UNKNOWN_LOCATION);
7055 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7056 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7057 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7058 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7059 {
7060 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7061 tree decl_placeholder
7062 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7063 SET_DECL_VALUE_EXPR (placeholder, out);
7064 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7065 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7066 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7067 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7068 gimple_seq_add_seq (&sub_seq,
7069 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7070 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7071 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7072 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7073 }
7074 else
7075 {
7076 x = build2 (code, TREE_TYPE (out), out, priv);
7077 out = unshare_expr (out);
7078 gimplify_assign (out, x, &sub_seq);
7079 }
7080 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7081 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7082 gimple_seq_add_stmt (&sub_seq, g);
7083 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7084 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7085 gimple_seq_add_stmt (&sub_seq, g);
7086 g = gimple_build_assign (i, PLUS_EXPR, i,
7087 build_int_cst (TREE_TYPE (i), 1));
7088 gimple_seq_add_stmt (&sub_seq, g);
7089 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7090 gimple_seq_add_stmt (&sub_seq, g);
7091 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7092 }
7093 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7094 {
7095 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7096
7097 if (omp_is_reference (var)
7098 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7099 TREE_TYPE (ref)))
7100 ref = build_fold_addr_expr_loc (clause_loc, ref);
7101 SET_DECL_VALUE_EXPR (placeholder, ref);
7102 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7103 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7104 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7105 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7106 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7107 }
7108 else
7109 {
7110 x = build2 (code, TREE_TYPE (ref), ref, new_var);
7111 ref = build_outer_var_ref (var, ctx);
7112 gimplify_assign (ref, x, &sub_seq);
7113 }
7114 }
7115
7116 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7117 0);
7118 gimple_seq_add_stmt (stmt_seqp, stmt);
7119
7120 gimple_seq_add_seq (stmt_seqp, sub_seq);
7121
7122 if (clist)
7123 {
7124 gimple_seq_add_seq (stmt_seqp, *clist);
7125 *clist = NULL;
7126 }
7127
7128 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7129 0);
7130 gimple_seq_add_stmt (stmt_seqp, stmt);
7131 }
7132
7133
7134 /* Generate code to implement the COPYPRIVATE clauses. */
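/* A hedged sketch of the copies generated below (pseudo-C; .omp_copy_o
   and .omp_copy_i are the sender/receiver temporaries created by
   lower_omp_single_copy further down, A is a copyprivate variable):

     .omp_copy_o.a = a;       // into SLIST; &a instead when by_ref
     ...
     a = (*.omp_copy_i).a;    // into RLIST, via the langhook assign op
*/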
7135
7136 static void
7137 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7138 omp_context *ctx)
7139 {
7140 tree c;
7141
7142 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7143 {
7144 tree var, new_var, ref, x;
7145 bool by_ref;
7146 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7147
7148 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7149 continue;
7150
7151 var = OMP_CLAUSE_DECL (c);
7152 by_ref = use_pointer_for_field (var, NULL);
7153
7154 ref = build_sender_ref (var, ctx);
7155 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7156 if (by_ref)
7157 {
7158 x = build_fold_addr_expr_loc (clause_loc, new_var);
7159 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7160 }
7161 gimplify_assign (ref, x, slist);
7162
7163 ref = build_receiver_ref (var, false, ctx);
7164 if (by_ref)
7165 {
7166 ref = fold_convert_loc (clause_loc,
7167 build_pointer_type (TREE_TYPE (new_var)),
7168 ref);
7169 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7170 }
7171 if (omp_is_reference (var))
7172 {
7173 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7174 ref = build_simple_mem_ref_loc (clause_loc, ref);
7175 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7176 }
7177 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7178 gimplify_and_add (x, rlist);
7179 }
7180 }
7181
7182
7183 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7184 and REDUCTION from the sender (aka parent) side. */
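/* A rough sketch of the per-clause copies (illustrative only; the
   sender record CTX->sender_decl is shown under GCC's conventional
   .omp_data_o name, X stands for a clause's decl):

     .omp_data_o.x = x;       // do_in: into ILIST; &x when by_ref
     ...child region runs...
     x = .omp_data_o.x;       // do_out: into OLIST, e.g. lastprivate
*/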
7185
7186 static void
7187 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7188 omp_context *ctx)
7189 {
7190 tree c, t;
7191 int ignored_looptemp = 0;
7192 bool is_taskloop = false;
7193
7194 /* For taskloop, ignore the first two _looptemp_ clauses; those are initialized
7195 by GOMP_taskloop. */
7196 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7197 {
7198 ignored_looptemp = 2;
7199 is_taskloop = true;
7200 }
7201
7202 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7203 {
7204 tree val, ref, x, var;
7205 bool by_ref, do_in = false, do_out = false;
7206 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7207
7208 switch (OMP_CLAUSE_CODE (c))
7209 {
7210 case OMP_CLAUSE_PRIVATE:
7211 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7212 break;
7213 continue;
7214 case OMP_CLAUSE_FIRSTPRIVATE:
7215 case OMP_CLAUSE_COPYIN:
7216 case OMP_CLAUSE_LASTPRIVATE:
7217 case OMP_CLAUSE_IN_REDUCTION:
7218 case OMP_CLAUSE__REDUCTEMP_:
7219 break;
7220 case OMP_CLAUSE_REDUCTION:
7221 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7222 continue;
7223 break;
7224 case OMP_CLAUSE_SHARED:
7225 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7226 break;
7227 continue;
7228 case OMP_CLAUSE__LOOPTEMP_:
7229 if (ignored_looptemp)
7230 {
7231 ignored_looptemp--;
7232 continue;
7233 }
7234 break;
7235 default:
7236 continue;
7237 }
7238
7239 val = OMP_CLAUSE_DECL (c);
7240 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7241 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7242 && TREE_CODE (val) == MEM_REF)
7243 {
7244 val = TREE_OPERAND (val, 0);
7245 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7246 val = TREE_OPERAND (val, 0);
7247 if (TREE_CODE (val) == INDIRECT_REF
7248 || TREE_CODE (val) == ADDR_EXPR)
7249 val = TREE_OPERAND (val, 0);
7250 if (is_variable_sized (val))
7251 continue;
7252 }
7253
7254 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7255 outer taskloop region. */
7256 omp_context *ctx_for_o = ctx;
7257 if (is_taskloop
7258 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7259 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7260 ctx_for_o = ctx->outer;
7261
7262 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7263
7264 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7265 && is_global_var (var)
7266 && (val == OMP_CLAUSE_DECL (c)
7267 || !is_task_ctx (ctx)
7268 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7269 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7270 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7271 != POINTER_TYPE)))))
7272 continue;
7273
7274 t = omp_member_access_dummy_var (var);
7275 if (t)
7276 {
7277 var = DECL_VALUE_EXPR (var);
7278 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7279 if (o != t)
7280 var = unshare_and_remap (var, t, o);
7281 else
7282 var = unshare_expr (var);
7283 }
7284
7285 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7286 {
7287 /* Handle taskloop firstprivate/lastprivate, where the
7288 lastprivate on GIMPLE_OMP_TASK is represented as
7289 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7290 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7291 x = omp_build_component_ref (ctx->sender_decl, f);
7292 if (use_pointer_for_field (val, ctx))
7293 var = build_fold_addr_expr (var);
7294 gimplify_assign (x, var, ilist);
7295 DECL_ABSTRACT_ORIGIN (f) = NULL;
7296 continue;
7297 }
7298
7299 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7300 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7301 || val == OMP_CLAUSE_DECL (c))
7302 && is_variable_sized (val))
7303 continue;
7304 by_ref = use_pointer_for_field (val, NULL);
7305
7306 switch (OMP_CLAUSE_CODE (c))
7307 {
7308 case OMP_CLAUSE_FIRSTPRIVATE:
7309 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7310 && !by_ref
7311 && is_task_ctx (ctx))
7312 TREE_NO_WARNING (var) = 1;
7313 do_in = true;
7314 break;
7315
7316 case OMP_CLAUSE_PRIVATE:
7317 case OMP_CLAUSE_COPYIN:
7318 case OMP_CLAUSE__LOOPTEMP_:
7319 case OMP_CLAUSE__REDUCTEMP_:
7320 do_in = true;
7321 break;
7322
7323 case OMP_CLAUSE_LASTPRIVATE:
7324 if (by_ref || omp_is_reference (val))
7325 {
7326 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7327 continue;
7328 do_in = true;
7329 }
7330 else
7331 {
7332 do_out = true;
7333 if (lang_hooks.decls.omp_private_outer_ref (val))
7334 do_in = true;
7335 }
7336 break;
7337
7338 case OMP_CLAUSE_REDUCTION:
7339 case OMP_CLAUSE_IN_REDUCTION:
7340 do_in = true;
7341 if (val == OMP_CLAUSE_DECL (c))
7342 {
7343 if (is_task_ctx (ctx))
7344 by_ref = use_pointer_for_field (val, ctx);
7345 else
7346 do_out = !(by_ref || omp_is_reference (val));
7347 }
7348 else
7349 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7350 break;
7351
7352 default:
7353 gcc_unreachable ();
7354 }
7355
7356 if (do_in)
7357 {
7358 ref = build_sender_ref (val, ctx);
7359 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7360 gimplify_assign (ref, x, ilist);
7361 if (is_task_ctx (ctx))
7362 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7363 }
7364
7365 if (do_out)
7366 {
7367 ref = build_sender_ref (val, ctx);
7368 gimplify_assign (var, ref, olist);
7369 }
7370 }
7371 }
7372
7373 /* Generate code to implement SHARED from the sender (aka parent)
7374 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7375 list things that got automatically shared. */
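/* Sketch of what is emitted per implicitly shared variable V (hedged;
   same .omp_data_o naming convention as above):

     .omp_data_o.v = v;       // or &v when passed by reference
     ...child region runs...
     v = .omp_data_o.v;       // copy-back for by-value fields, omitted
                              // when V is read-only
*/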
7376
7377 static void
7378 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7379 {
7380 tree var, ovar, nvar, t, f, x, record_type;
7381
7382 if (ctx->record_type == NULL)
7383 return;
7384
7385 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7386 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7387 {
7388 ovar = DECL_ABSTRACT_ORIGIN (f);
7389 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7390 continue;
7391
7392 nvar = maybe_lookup_decl (ovar, ctx);
7393 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7394 continue;
7395
7396 /* If CTX is a nested parallel directive, find the immediately
7397 enclosing parallel or workshare construct that contains a
7398 mapping for OVAR. */
7399 var = lookup_decl_in_outer_ctx (ovar, ctx);
7400
7401 t = omp_member_access_dummy_var (var);
7402 if (t)
7403 {
7404 var = DECL_VALUE_EXPR (var);
7405 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7406 if (o != t)
7407 var = unshare_and_remap (var, t, o);
7408 else
7409 var = unshare_expr (var);
7410 }
7411
7412 if (use_pointer_for_field (ovar, ctx))
7413 {
7414 x = build_sender_ref (ovar, ctx);
7415 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7416 && TREE_TYPE (f) == TREE_TYPE (ovar))
7417 {
7418 gcc_assert (is_parallel_ctx (ctx)
7419 && DECL_ARTIFICIAL (ovar));
7420 /* _condtemp_ clause. */
7421 var = build_constructor (TREE_TYPE (x), NULL);
7422 }
7423 else
7424 var = build_fold_addr_expr (var);
7425 gimplify_assign (x, var, ilist);
7426 }
7427 else
7428 {
7429 x = build_sender_ref (ovar, ctx);
7430 gimplify_assign (x, var, ilist);
7431
7432 if (!TREE_READONLY (var)
7433 /* We don't need to receive a new reference to a result
7434 or parm decl. In fact we may not store to it as we will
7435 invalidate any pending RSO and generate wrong gimple
7436 during inlining. */
7437 && !((TREE_CODE (var) == RESULT_DECL
7438 || TREE_CODE (var) == PARM_DECL)
7439 && DECL_BY_REFERENCE (var)))
7440 {
7441 x = build_sender_ref (ovar, ctx);
7442 gimplify_assign (var, x, olist);
7443 }
7444 }
7445 }
7446 }
7447
7448 /* Emit an OpenACC head marker call, encapsulating the partitioning and
7449 other information that must be processed by the target compiler.
7450 Return the maximum number of dimensions the associated loop might
7451 be partitioned over. */
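/* The marker built below is a single internal-function call, roughly
   (as it would appear in a GIMPLE dump; LEVELS and TAG are the integer
   constants computed here, the last argument only present for
   gang(static:N)):

     DDVAR = .UNIQUE (OACC_HEAD_MARK, DDVAR, LEVELS, TAG [, GANG_STATIC]);
*/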
7452
7453 static unsigned
7454 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7455 gimple_seq *seq, omp_context *ctx)
7456 {
7457 unsigned levels = 0;
7458 unsigned tag = 0;
7459 tree gang_static = NULL_TREE;
7460 auto_vec<tree, 5> args;
7461
7462 args.quick_push (build_int_cst
7463 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7464 args.quick_push (ddvar);
7465 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7466 {
7467 switch (OMP_CLAUSE_CODE (c))
7468 {
7469 case OMP_CLAUSE_GANG:
7470 tag |= OLF_DIM_GANG;
7471 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7472 /* static:* is represented by -1, and we can ignore it, as
7473 scheduling is always static. */
7474 if (gang_static && integer_minus_onep (gang_static))
7475 gang_static = NULL_TREE;
7476 levels++;
7477 break;
7478
7479 case OMP_CLAUSE_WORKER:
7480 tag |= OLF_DIM_WORKER;
7481 levels++;
7482 break;
7483
7484 case OMP_CLAUSE_VECTOR:
7485 tag |= OLF_DIM_VECTOR;
7486 levels++;
7487 break;
7488
7489 case OMP_CLAUSE_SEQ:
7490 tag |= OLF_SEQ;
7491 break;
7492
7493 case OMP_CLAUSE_AUTO:
7494 tag |= OLF_AUTO;
7495 break;
7496
7497 case OMP_CLAUSE_INDEPENDENT:
7498 tag |= OLF_INDEPENDENT;
7499 break;
7500
7501 case OMP_CLAUSE_TILE:
7502 tag |= OLF_TILE;
7503 break;
7504
7505 default:
7506 continue;
7507 }
7508 }
7509
7510 if (gang_static)
7511 {
7512 if (DECL_P (gang_static))
7513 gang_static = build_outer_var_ref (gang_static, ctx);
7514 tag |= OLF_GANG_STATIC;
7515 }
7516
7517 /* In a parallel region, loops are implicitly INDEPENDENT. */
7518 omp_context *tgt = enclosing_target_ctx (ctx);
7519 if (!tgt || is_oacc_parallel_or_serial (tgt))
7520 tag |= OLF_INDEPENDENT;
7521
7522 if (tag & OLF_TILE)
7523 /* Tiling could use all 3 levels. */
7524 levels = 3;
7525 else
7526 {
7527 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7528 Ensure at least one level, or 2 for possible auto
7529 partitioning.  */
7530 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7531 << OLF_DIM_BASE) | OLF_SEQ));
7532
7533 if (levels < 1u + maybe_auto)
7534 levels = 1u + maybe_auto;
7535 }
7536
7537 args.quick_push (build_int_cst (integer_type_node, levels));
7538 args.quick_push (build_int_cst (integer_type_node, tag));
7539 if (gang_static)
7540 args.quick_push (gang_static);
7541
7542 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7543 gimple_set_location (call, loc);
7544 gimple_set_lhs (call, ddvar);
7545 gimple_seq_add_stmt (seq, call);
7546
7547 return levels;
7548 }
7549
7550 /* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW, when
7551 non-NULL, gives the partitioning level of the enclosed region. */
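/* Sketch of the emitted call (pseudo-GIMPLE; the TOFOLLOW argument is
   simply dropped when it is NULL_TREE):

     DDVAR = .UNIQUE (OACC_HEAD_MARK or OACC_TAIL_MARK, DDVAR [, TOFOLLOW]);
*/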
7552
7553 static void
7554 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7555 tree tofollow, gimple_seq *seq)
7556 {
7557 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7558 : IFN_UNIQUE_OACC_TAIL_MARK);
7559 tree marker = build_int_cst (integer_type_node, marker_kind);
7560 int nargs = 2 + (tofollow != NULL_TREE);
7561 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7562 marker, ddvar, tofollow);
7563 gimple_set_location (call, loc);
7564 gimple_set_lhs (call, ddvar);
7565 gimple_seq_add_stmt (seq, call);
7566 }
7567
7568 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7569 the loop clauses, from which we extract reductions. Initialize
7570 HEAD and TAIL. */
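/* The resulting sequences nest one fork/join pair per partitioning
   level, roughly as follows (a sketch, two levels shown):

     HEAD: level-1 reduction setup; .UNIQUE (OACC_FORK, ...)
           level-2 reduction setup; .UNIQUE (OACC_FORK, ...)
       ...loop body...
     TAIL: .UNIQUE (OACC_JOIN, ...); level-2 reduction teardown
           .UNIQUE (OACC_JOIN, ...); level-1 reduction teardown
*/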
7571
7572 static void
7573 lower_oacc_head_tail (location_t loc, tree clauses,
7574 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7575 {
7576 bool inner = false;
7577 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7578 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7579
7580 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7581 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7582 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7583
7584 gcc_assert (count);
7585 for (unsigned done = 1; count; count--, done++)
7586 {
7587 gimple_seq fork_seq = NULL;
7588 gimple_seq join_seq = NULL;
7589
7590 tree place = build_int_cst (integer_type_node, -1);
7591 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7592 fork_kind, ddvar, place);
7593 gimple_set_location (fork, loc);
7594 gimple_set_lhs (fork, ddvar);
7595
7596 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7597 join_kind, ddvar, place);
7598 gimple_set_location (join, loc);
7599 gimple_set_lhs (join, ddvar);
7600
7601 /* Mark the beginning of this level sequence. */
7602 if (inner)
7603 lower_oacc_loop_marker (loc, ddvar, true,
7604 build_int_cst (integer_type_node, count),
7605 &fork_seq);
7606 lower_oacc_loop_marker (loc, ddvar, false,
7607 build_int_cst (integer_type_node, done),
7608 &join_seq);
7609
7610 lower_oacc_reductions (loc, clauses, place, inner,
7611 fork, join, &fork_seq, &join_seq, ctx);
7612
7613 /* Append this level to head. */
7614 gimple_seq_add_seq (head, fork_seq);
7615 /* Prepend it to tail. */
7616 gimple_seq_add_seq (&join_seq, *tail);
7617 *tail = join_seq;
7618
7619 inner = true;
7620 }
7621
7622 /* Mark the end of the sequence. */
7623 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
7624 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
7625 }
7626
7627 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7628 catch handler and return it. This prevents programs from violating the
7629 structured block semantics with throws. */
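/* Sketch of the wrapping (pseudo-GIMPLE):

     try { BODY } catch { MUST_NOT_THROW (CLEANUP) }

   where CLEANUP is the language's protect-cleanup action when the
   langhook provides one, and __builtin_trap otherwise.  */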
7630
7631 static gimple_seq
7632 maybe_catch_exception (gimple_seq body)
7633 {
7634 gimple *g;
7635 tree decl;
7636
7637 if (!flag_exceptions)
7638 return body;
7639
7640 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7641 decl = lang_hooks.eh_protect_cleanup_actions ();
7642 else
7643 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7644
7645 g = gimple_build_eh_must_not_throw (decl);
7646 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7647 GIMPLE_TRY_CATCH);
7648
7649 return gimple_seq_alloc_with_stmt (g);
7650 }
7651
7652 \f
7653 /* Routines to lower OMP directives into OMP-GIMPLE. */
7654
7655 /* If ctx is a worksharing context inside of a cancellable parallel
7656 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
7657 and conditional branch to parallel's cancel_label to handle
7658 cancellation in the implicit barrier. */
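/* Sketch of what gets appended to BODY (pseudo-GIMPLE; LHS receives
   the cancellable barrier's result through the GIMPLE_OMP_RETURN):

     if (lhs != 0) goto <parallel's cancel_label>; else goto fallthru;
     fallthru:
*/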
7659
7660 static void
7661 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
7662 gimple_seq *body)
7663 {
7664 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
7665 if (gimple_omp_return_nowait_p (omp_return))
7666 return;
7667 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7668 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7669 && outer->cancellable)
7670 {
7671 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
7672 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
7673 tree lhs = create_tmp_var (c_bool_type);
7674 gimple_omp_return_set_lhs (omp_return, lhs);
7675 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
7676 gimple *g = gimple_build_cond (NE_EXPR, lhs,
7677 fold_convert (c_bool_type,
7678 boolean_false_node),
7679 outer->cancel_label, fallthru_label);
7680 gimple_seq_add_stmt (body, g);
7681 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
7682 }
7683 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7684 return;
7685 }
7686
7687 /* Find the first task_reduction or reduction clause, or return NULL_TREE
7688 if there are none. */
7689
7690 static inline tree
7691 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7692 enum omp_clause_code ccode)
7693 {
7694 while (1)
7695 {
7696 clauses = omp_find_clause (clauses, ccode);
7697 if (clauses == NULL_TREE)
7698 return NULL_TREE;
7699 if (ccode != OMP_CLAUSE_REDUCTION
7700 || code == OMP_TASKLOOP
7701 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7702 return clauses;
7703 clauses = OMP_CLAUSE_CHAIN (clauses);
7704 }
7705 }
7706
7707 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7708 gimple_seq *, gimple_seq *);
7709
7710 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7711 CTX is the enclosing OMP context for the current statement. */
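/* Overall shape of the lowered construct, in emission order (sketch):

     <ilist: reduction registration, input clause setup>
     GIMPLE_OMP_SECTIONS <control var ".section">
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { <each lowered section body, ending in GIMPLE_OMP_RETURN> }
     GIMPLE_OMP_CONTINUE (.section, .section)
     <olist: reductions>  <dlist: destructors>
     GIMPLE_OMP_RETURN [nowait]
     <tred_dlist: task reduction teardown>
*/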
7712
7713 static void
7714 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7715 {
7716 tree block, control;
7717 gimple_stmt_iterator tgsi;
7718 gomp_sections *stmt;
7719 gimple *t;
7720 gbind *new_stmt, *bind;
7721 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7722
7723 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7724
7725 push_gimplify_context ();
7726
7727 dlist = NULL;
7728 ilist = NULL;
7729
7730 tree rclauses
7731 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7732 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7733 tree rtmp = NULL_TREE;
7734 if (rclauses)
7735 {
7736 tree type = build_pointer_type (pointer_sized_int_node);
7737 tree temp = create_tmp_var (type);
7738 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7739 OMP_CLAUSE_DECL (c) = temp;
7740 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7741 gimple_omp_sections_set_clauses (stmt, c);
7742 lower_omp_task_reductions (ctx, OMP_SECTIONS,
7743 gimple_omp_sections_clauses (stmt),
7744 &ilist, &tred_dlist);
7745 rclauses = c;
7746 rtmp = make_ssa_name (type);
7747 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7748 }
7749
7750 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7751 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7752
7753 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7754 &ilist, &dlist, ctx, NULL);
7755
7756 control = create_tmp_var (unsigned_type_node, ".section");
7757 gimple_omp_sections_set_control (stmt, control);
7758
7759 new_body = gimple_omp_body (stmt);
7760 gimple_omp_set_body (stmt, NULL);
7761 tgsi = gsi_start (new_body);
7762 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7763 {
7764 omp_context *sctx;
7765 gimple *sec_start;
7766
7767 sec_start = gsi_stmt (tgsi);
7768 sctx = maybe_lookup_ctx (sec_start);
7769 gcc_assert (sctx);
7770
7771 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7772 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7773 GSI_CONTINUE_LINKING);
7774 gimple_omp_set_body (sec_start, NULL);
7775
7776 if (gsi_one_before_end_p (tgsi))
7777 {
7778 gimple_seq l = NULL;
7779 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7780 &ilist, &l, &clist, ctx);
7781 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7782 gimple_omp_section_set_last (sec_start);
7783 }
7784
7785 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7786 GSI_CONTINUE_LINKING);
7787 }
7788
7789 block = make_node (BLOCK);
7790 bind = gimple_build_bind (NULL, new_body, block);
7791
7792 olist = NULL;
7793 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7794 &clist, ctx);
7795 if (clist)
7796 {
7797 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7798 gcall *g = gimple_build_call (fndecl, 0);
7799 gimple_seq_add_stmt (&olist, g);
7800 gimple_seq_add_seq (&olist, clist);
7801 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7802 g = gimple_build_call (fndecl, 0);
7803 gimple_seq_add_stmt (&olist, g);
7804 }
7805
7806 block = make_node (BLOCK);
7807 new_stmt = gimple_build_bind (NULL, NULL, block);
7808 gsi_replace (gsi_p, new_stmt, true);
7809
7810 pop_gimplify_context (new_stmt);
7811 gimple_bind_append_vars (new_stmt, ctx->block_vars);
7812 BLOCK_VARS (block) = gimple_bind_vars (bind);
7813 if (BLOCK_VARS (block))
7814 TREE_USED (block) = 1;
7815
7816 new_body = NULL;
7817 gimple_seq_add_seq (&new_body, ilist);
7818 gimple_seq_add_stmt (&new_body, stmt);
7819 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7820 gimple_seq_add_stmt (&new_body, bind);
7821
7822 t = gimple_build_omp_continue (control, control);
7823 gimple_seq_add_stmt (&new_body, t);
7824
7825 gimple_seq_add_seq (&new_body, olist);
7826 if (ctx->cancellable)
7827 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7828 gimple_seq_add_seq (&new_body, dlist);
7829
7830 new_body = maybe_catch_exception (new_body);
7831
7832 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7833 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7834 t = gimple_build_omp_return (nowait);
7835 gimple_seq_add_stmt (&new_body, t);
7836 gimple_seq_add_seq (&new_body, tred_dlist);
7837 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7838
7839 if (rclauses)
7840 OMP_CLAUSE_DECL (rclauses) = rtmp;
7841
7842 gimple_bind_set_body (new_stmt, new_body);
7843 }
7844
7845
7846 /* A subroutine of lower_omp_single. Expand the simple form of
7847 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7848
7849 if (GOMP_single_start ())
7850 BODY;
7851 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7852
7853 FIXME. It may be better to delay expanding the logic of this until
7854 pass_expand_omp. The expanded logic may make the job more difficult
7855 for a synchronization analysis pass. */
7856
7857 static void
7858 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7859 {
7860 location_t loc = gimple_location (single_stmt);
7861 tree tlabel = create_artificial_label (loc);
7862 tree flabel = create_artificial_label (loc);
7863 gimple *call, *cond;
7864 tree lhs, decl;
7865
7866 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7867 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7868 call = gimple_build_call (decl, 0);
7869 gimple_call_set_lhs (call, lhs);
7870 gimple_seq_add_stmt (pre_p, call);
7871
7872 cond = gimple_build_cond (EQ_EXPR, lhs,
7873 fold_convert_loc (loc, TREE_TYPE (lhs),
7874 boolean_true_node),
7875 tlabel, flabel);
7876 gimple_seq_add_stmt (pre_p, cond);
7877 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7878 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7879 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7880 }
7881
7882
7883 /* A subroutine of lower_omp_single. Expand the simple form of
7884 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7885
7886 #pragma omp single copyprivate (a, b, c)
7887
7888 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7889
7890 {
7891 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7892 {
7893 BODY;
7894 copyout.a = a;
7895 copyout.b = b;
7896 copyout.c = c;
7897 GOMP_single_copy_end (&copyout);
7898 }
7899 else
7900 {
7901 a = copyout_p->a;
7902 b = copyout_p->b;
7903 c = copyout_p->c;
7904 }
7905 GOMP_barrier ();
7906 }
7907
7908 FIXME. It may be better to delay expanding the logic of this until
7909 pass_expand_omp. The expanded logic may make the job more difficult
7910 for a synchronization analysis pass. */
7911
7912 static void
7913 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7914 omp_context *ctx)
7915 {
7916 tree ptr_type, t, l0, l1, l2, bfn_decl;
7917 gimple_seq copyin_seq;
7918 location_t loc = gimple_location (single_stmt);
7919
7920 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7921
7922 ptr_type = build_pointer_type (ctx->record_type);
7923 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7924
7925 l0 = create_artificial_label (loc);
7926 l1 = create_artificial_label (loc);
7927 l2 = create_artificial_label (loc);
7928
7929 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7930 t = build_call_expr_loc (loc, bfn_decl, 0);
7931 t = fold_convert_loc (loc, ptr_type, t);
7932 gimplify_assign (ctx->receiver_decl, t, pre_p);
7933
7934 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7935 build_int_cst (ptr_type, 0));
7936 t = build3 (COND_EXPR, void_type_node, t,
7937 build_and_jump (&l0), build_and_jump (&l1));
7938 gimplify_and_add (t, pre_p);
7939
7940 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
7941
7942 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7943
7944 copyin_seq = NULL;
7945 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
7946 &copyin_seq, ctx);
7947
7948 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7949 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
7950 t = build_call_expr_loc (loc, bfn_decl, 1, t);
7951 gimplify_and_add (t, pre_p);
7952
7953 t = build_and_jump (&l2);
7954 gimplify_and_add (t, pre_p);
7955
7956 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
7957
7958 gimple_seq_add_seq (pre_p, copyin_seq);
7959
7960 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
7961 }
7962
7963
7964 /* Expand code for an OpenMP single directive. */
7965
7966 static void
7967 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7968 {
7969 tree block;
7970 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
7971 gbind *bind;
7972 gimple_seq bind_body, bind_body_tail = NULL, dlist;
7973
7974 push_gimplify_context ();
7975
7976 block = make_node (BLOCK);
7977 bind = gimple_build_bind (NULL, NULL, block);
7978 gsi_replace (gsi_p, bind, true);
7979 bind_body = NULL;
7980 dlist = NULL;
7981 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
7982 &bind_body, &dlist, ctx, NULL);
7983 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
7984
7985 gimple_seq_add_stmt (&bind_body, single_stmt);
7986
7987 if (ctx->record_type)
7988 lower_omp_single_copy (single_stmt, &bind_body, ctx);
7989 else
7990 lower_omp_single_simple (single_stmt, &bind_body);
7991
7992 gimple_omp_set_body (single_stmt, NULL);
7993
7994 gimple_seq_add_seq (&bind_body, dlist);
7995
7996 bind_body = maybe_catch_exception (bind_body);
7997
7998 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
7999 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8000 gimple *g = gimple_build_omp_return (nowait);
8001 gimple_seq_add_stmt (&bind_body_tail, g);
8002 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8003 if (ctx->record_type)
8004 {
8005 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8006 tree clobber = build_clobber (ctx->record_type);
8007 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8008 clobber), GSI_SAME_STMT);
8009 }
8010 gimple_seq_add_seq (&bind_body, bind_body_tail);
8011 gimple_bind_set_body (bind, bind_body);
8012
8013 pop_gimplify_context (bind);
8014
8015 gimple_bind_append_vars (bind, ctx->block_vars);
8016 BLOCK_VARS (block) = ctx->block_vars;
8017 if (BLOCK_VARS (block))
8018 TREE_USED (block) = 1;
8019 }
8020
8021
8022 /* Expand code for an OpenMP master directive. */
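/* Sketch of the lowering: the body runs only in thread 0, and master
   has no implicit barrier (pseudo-C):

     if (omp_get_thread_num () != 0) goto lab;
     BODY;
     lab:
     GIMPLE_OMP_RETURN (nowait);
*/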
8023
8024 static void
8025 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8026 {
8027 tree block, lab = NULL, x, bfn_decl;
8028 gimple *stmt = gsi_stmt (*gsi_p);
8029 gbind *bind;
8030 location_t loc = gimple_location (stmt);
8031 gimple_seq tseq;
8032
8033 push_gimplify_context ();
8034
8035 block = make_node (BLOCK);
8036 bind = gimple_build_bind (NULL, NULL, block);
8037 gsi_replace (gsi_p, bind, true);
8038 gimple_bind_add_stmt (bind, stmt);
8039
8040 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8041 x = build_call_expr_loc (loc, bfn_decl, 0);
8042 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8043 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8044 tseq = NULL;
8045 gimplify_and_add (x, &tseq);
8046 gimple_bind_add_seq (bind, tseq);
8047
8048 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8049 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8050 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8051 gimple_omp_set_body (stmt, NULL);
8052
8053 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8054
8055 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8056
8057 pop_gimplify_context (bind);
8058
8059 gimple_bind_append_vars (bind, ctx->block_vars);
8060 BLOCK_VARS (block) = ctx->block_vars;
8061 }
8062
8063 /* Helper function for lower_omp_task_reductions. For a specific PASS,
8064 find the current clause that should be processed, or return false
8065 if all have been processed already. */
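/* Pass 0 covers clauses whose type has a compile-time constant size;
   pass 1 covers MEM_REF (array section) clauses and variable-sized
   ones, which need the pointer-based handling.  */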
8066
8067 static inline bool
8068 omp_task_reduction_iterate (int pass, enum tree_code code,
8069 enum omp_clause_code ccode, tree *c, tree *decl,
8070 tree *type, tree *next)
8071 {
8072 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8073 {
8074 if (ccode == OMP_CLAUSE_REDUCTION
8075 && code != OMP_TASKLOOP
8076 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8077 continue;
8078 *decl = OMP_CLAUSE_DECL (*c);
8079 *type = TREE_TYPE (*decl);
8080 if (TREE_CODE (*decl) == MEM_REF)
8081 {
8082 if (pass != 1)
8083 continue;
8084 }
8085 else
8086 {
8087 if (omp_is_reference (*decl))
8088 *type = TREE_TYPE (*type);
8089 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8090 continue;
8091 }
8092 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8093 return true;
8094 }
8095 *decl = NULL_TREE;
8096 *type = NULL_TREE;
8097 *next = NULL_TREE;
8098 return false;
8099 }
8100
8101 /* Lower task_reduction and reduction clauses (the latter, unless CODE is
8102 OMP_TASKGROUP, only those with the task modifier). Register mappings of
8103 those in the START sequence; reduce and unregister them in the END sequence. */
8104
8105 static void
8106 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8107 gimple_seq *start, gimple_seq *end)
8108 {
8109 enum omp_clause_code ccode
8110 = (code == OMP_TASKGROUP
8111 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8112 tree cancellable = NULL_TREE;
8113 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8114 if (clauses == NULL_TREE)
8115 return;
8116 if (code == OMP_FOR || code == OMP_SECTIONS)
8117 {
8118 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8119 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8120 && outer->cancellable)
8121 {
8122 cancellable = error_mark_node;
8123 break;
8124 }
8125 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8126 break;
8127 }
8128 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8129 tree *last = &TYPE_FIELDS (record_type);
8130 unsigned cnt = 0;
8131 if (cancellable)
8132 {
8133 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8134 ptr_type_node);
8135 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8136 integer_type_node);
8137 *last = field;
8138 DECL_CHAIN (field) = ifield;
8139 last = &DECL_CHAIN (ifield);
8140 DECL_CONTEXT (field) = record_type;
8141 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8142 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8143 DECL_CONTEXT (ifield) = record_type;
8144 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8145 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8146 }
8147 for (int pass = 0; pass < 2; pass++)
8148 {
8149 tree decl, type, next;
8150 for (tree c = clauses;
8151 omp_task_reduction_iterate (pass, code, ccode,
8152 &c, &decl, &type, &next); c = next)
8153 {
8154 ++cnt;
8155 tree new_type = type;
8156 if (ctx->outer)
8157 new_type = remap_type (type, &ctx->outer->cb);
8158 tree field
8159 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8160 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8161 new_type);
8162 if (DECL_P (decl) && type == TREE_TYPE (decl))
8163 {
8164 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8165 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8166 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8167 }
8168 else
8169 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8170 DECL_CONTEXT (field) = record_type;
8171 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8172 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8173 *last = field;
8174 last = &DECL_CHAIN (field);
8175 tree bfield
8176 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8177 boolean_type_node);
8178 DECL_CONTEXT (bfield) = record_type;
8179 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8180 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8181 *last = bfield;
8182 last = &DECL_CHAIN (bfield);
8183 }
8184 }
8185 *last = NULL_TREE;
8186 layout_type (record_type);
8187
8188 /* Build up an array which registers with the runtime all the reductions
8189 and deregisters them at the end. Format documented in libgomp/task.c. */
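/* Summary of the slots written in this function (the remaining slots
   are described in libgomp/task.c and filled in elsewhere):
     avar[0]           number of reductions (CNT)
     avar[1]           per-thread chunk size, rounded up to CACHESZ
     avar[2]           alignment, at least CACHESZ
     avar[3] = -1      avar[4] = 0
     avar[7 + 3*i]     address of the i-th reduction's original var
     avar[7 + 3*i + 1] byte offset of its field within RECORD_TYPE */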
8190 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8191 tree avar = create_tmp_var_raw (atype);
8192 gimple_add_tmp_var (avar);
8193 TREE_ADDRESSABLE (avar) = 1;
8194 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8195 NULL_TREE, NULL_TREE);
8196 tree t = build_int_cst (pointer_sized_int_node, cnt);
8197 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8198 gimple_seq seq = NULL;
8199 tree sz = fold_convert (pointer_sized_int_node,
8200 TYPE_SIZE_UNIT (record_type));
8201 int cachesz = 64;
8202 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8203 build_int_cst (pointer_sized_int_node, cachesz - 1));
8204 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8205 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8206 ctx->task_reductions.create (1 + cnt);
8207 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8208 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8209 ? sz : NULL_TREE);
8210 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8211 gimple_seq_add_seq (start, seq);
8212 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8213 NULL_TREE, NULL_TREE);
8214 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8215 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8216 NULL_TREE, NULL_TREE);
8217 t = build_int_cst (pointer_sized_int_node,
8218 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8219 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8220 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8221 NULL_TREE, NULL_TREE);
8222 t = build_int_cst (pointer_sized_int_node, -1);
8223 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8224 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8225 NULL_TREE, NULL_TREE);
8226 t = build_int_cst (pointer_sized_int_node, 0);
8227 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8228
8229 /* In the END sequence, build a loop that iterates from 0 to
8230 omp_get_num_threads () - 1 and for each task reduction checks a bool right
8231 after the private variable within that thread's chunk; if the bool is
8232 clear, it hasn't been initialized and thus isn't going to be reduced or
8233 destructed; otherwise reduce and destruct it. */
8234 tree idx = create_tmp_var (size_type_node);
8235 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8236 tree num_thr_sz = create_tmp_var (size_type_node);
8237 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8238 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8239 tree lab3 = NULL_TREE;
8240 gimple *g;
8241 if (code == OMP_FOR || code == OMP_SECTIONS)
8242 {
8243 /* For worksharing constructs, only perform it in the master thread,
8244 with the exception of cancelled implicit barriers; then only handle
8245 the current thread. */
8246 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8247 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8248 tree thr_num = create_tmp_var (integer_type_node);
8249 g = gimple_build_call (t, 0);
8250 gimple_call_set_lhs (g, thr_num);
8251 gimple_seq_add_stmt (end, g);
8252 if (cancellable)
8253 {
8254 tree c;
8255 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8256 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8257 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8258 if (code == OMP_FOR)
8259 c = gimple_omp_for_clauses (ctx->stmt);
8260 else /* if (code == OMP_SECTIONS) */
8261 c = gimple_omp_sections_clauses (ctx->stmt);
8262 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8263 cancellable = c;
8264 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8265 lab5, lab6);
8266 gimple_seq_add_stmt (end, g);
8267 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8268 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8269 gimple_seq_add_stmt (end, g);
8270 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8271 build_one_cst (TREE_TYPE (idx)));
8272 gimple_seq_add_stmt (end, g);
8273 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8274 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8275 }
8276 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8277 gimple_seq_add_stmt (end, g);
8278 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8279 }
8280 if (code != OMP_PARALLEL)
8281 {
8282 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8283 tree num_thr = create_tmp_var (integer_type_node);
8284 g = gimple_build_call (t, 0);
8285 gimple_call_set_lhs (g, num_thr);
8286 gimple_seq_add_stmt (end, g);
8287 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8288 gimple_seq_add_stmt (end, g);
8289 if (cancellable)
8290 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8291 }
8292 else
8293 {
8294 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8295 OMP_CLAUSE__REDUCTEMP_);
8296 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8297 t = fold_convert (size_type_node, t);
8298 gimplify_assign (num_thr_sz, t, end);
8299 }
8300 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8301 NULL_TREE, NULL_TREE);
8302 tree data = create_tmp_var (pointer_sized_int_node);
8303 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8304 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8305 tree ptr;
8306 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8307 ptr = create_tmp_var (build_pointer_type (record_type));
8308 else
8309 ptr = create_tmp_var (ptr_type_node);
8310 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8311
8312 tree field = TYPE_FIELDS (record_type);
8313 cnt = 0;
8314 if (cancellable)
8315 field = DECL_CHAIN (DECL_CHAIN (field));
8316 for (int pass = 0; pass < 2; pass++)
8317 {
8318 tree decl, type, next;
8319 for (tree c = clauses;
8320 omp_task_reduction_iterate (pass, code, ccode,
8321 &c, &decl, &type, &next); c = next)
8322 {
8323 tree var = decl, ref;
8324 if (TREE_CODE (decl) == MEM_REF)
8325 {
8326 var = TREE_OPERAND (var, 0);
8327 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8328 var = TREE_OPERAND (var, 0);
8329 tree v = var;
8330 if (TREE_CODE (var) == ADDR_EXPR)
8331 var = TREE_OPERAND (var, 0);
8332 else if (TREE_CODE (var) == INDIRECT_REF)
8333 var = TREE_OPERAND (var, 0);
8334 tree orig_var = var;
8335 if (is_variable_sized (var))
8336 {
8337 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8338 var = DECL_VALUE_EXPR (var);
8339 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8340 var = TREE_OPERAND (var, 0);
8341 gcc_assert (DECL_P (var));
8342 }
8343 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8344 if (orig_var != var)
8345 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8346 else if (TREE_CODE (v) == ADDR_EXPR)
8347 t = build_fold_addr_expr (t);
8348 else if (TREE_CODE (v) == INDIRECT_REF)
8349 t = build_fold_indirect_ref (t);
8350 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8351 {
8352 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8353 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8354 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8355 }
8356 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8357 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8358 fold_convert (size_type_node,
8359 TREE_OPERAND (decl, 1)));
8360 }
8361 else
8362 {
8363 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8364 if (!omp_is_reference (decl))
8365 t = build_fold_addr_expr (t);
8366 }
8367 t = fold_convert (pointer_sized_int_node, t);
8368 seq = NULL;
8369 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8370 gimple_seq_add_seq (start, seq);
8371 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8372 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8373 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8374 t = unshare_expr (byte_position (field));
8375 t = fold_convert (pointer_sized_int_node, t);
8376 ctx->task_reduction_map->put (c, cnt);
8377 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8378 ? t : NULL_TREE);
8379 seq = NULL;
8380 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8381 gimple_seq_add_seq (start, seq);
8382 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8383 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8384 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8385
8386 tree bfield = DECL_CHAIN (field);
8387 tree cond;
8388 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8389 /* In parallel or worksharing constructs, all threads unconditionally
8390 initialize all their task reduction private variables. */
8391 cond = boolean_true_node;
8392 else if (TREE_TYPE (ptr) == ptr_type_node)
8393 {
8394 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8395 unshare_expr (byte_position (bfield)));
8396 seq = NULL;
8397 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8398 gimple_seq_add_seq (end, seq);
8399 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8400 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8401 build_int_cst (pbool, 0));
8402 }
8403 else
8404 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8405 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8406 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8407 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8408 tree condv = create_tmp_var (boolean_type_node);
8409 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8410 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8411 lab3, lab4);
8412 gimple_seq_add_stmt (end, g);
8413 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8414 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8415 {
8416 /* If this reduction doesn't need destruction and parallel
8417 has been cancelled, there is nothing to do for this
8418 reduction, so jump around the merge operation. */
8419 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8420 g = gimple_build_cond (NE_EXPR, cancellable,
8421 build_zero_cst (TREE_TYPE (cancellable)),
8422 lab4, lab5);
8423 gimple_seq_add_stmt (end, g);
8424 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8425 }
8426
8427 tree new_var;
8428 if (TREE_TYPE (ptr) == ptr_type_node)
8429 {
8430 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8431 unshare_expr (byte_position (field)));
8432 seq = NULL;
8433 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8434 gimple_seq_add_seq (end, seq);
8435 tree pbool = build_pointer_type (TREE_TYPE (field));
8436 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8437 build_int_cst (pbool, 0));
8438 }
8439 else
8440 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8441 build_simple_mem_ref (ptr), field, NULL_TREE);
8442
8443 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8444 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8445 ref = build_simple_mem_ref (ref);
8446 /* reduction(-:var) sums up the partial results, so it acts
8447 identically to reduction(+:var). */
8448 if (rcode == MINUS_EXPR)
8449 rcode = PLUS_EXPR;
8450 if (TREE_CODE (decl) == MEM_REF)
8451 {
8452 tree type = TREE_TYPE (new_var);
8453 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8454 tree i = create_tmp_var (TREE_TYPE (v));
8455 tree ptype = build_pointer_type (TREE_TYPE (type));
8456 if (DECL_P (v))
8457 {
8458 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8459 tree vv = create_tmp_var (TREE_TYPE (v));
8460 gimplify_assign (vv, v, start);
8461 v = vv;
8462 }
8463 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8464 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8465 new_var = build_fold_addr_expr (new_var);
8466 new_var = fold_convert (ptype, new_var);
8467 ref = fold_convert (ptype, ref);
8468 tree m = create_tmp_var (ptype);
8469 gimplify_assign (m, new_var, end);
8470 new_var = m;
8471 m = create_tmp_var (ptype);
8472 gimplify_assign (m, ref, end);
8473 ref = m;
8474 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8475 tree body = create_artificial_label (UNKNOWN_LOCATION);
8476 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8477 gimple_seq_add_stmt (end, gimple_build_label (body));
8478 tree priv = build_simple_mem_ref (new_var);
8479 tree out = build_simple_mem_ref (ref);
8480 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8481 {
8482 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8483 tree decl_placeholder
8484 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8485 tree lab6 = NULL_TREE;
8486 if (cancellable)
8487 {
8488 /* If this reduction needs destruction and parallel
8489 has been cancelled, jump around the merge operation
8490 to the destruction. */
8491 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8492 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8493 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8494 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8495 lab6, lab5);
8496 gimple_seq_add_stmt (end, g);
8497 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8498 }
8499 SET_DECL_VALUE_EXPR (placeholder, out);
8500 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8501 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8502 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8503 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8504 gimple_seq_add_seq (end,
8505 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8506 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8507 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8508 {
8509 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8510 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8511 }
8512 if (cancellable)
8513 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8514 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8515 if (x)
8516 {
8517 gimple_seq tseq = NULL;
8518 gimplify_stmt (&x, &tseq);
8519 gimple_seq_add_seq (end, tseq);
8520 }
8521 }
8522 else
8523 {
8524 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8525 out = unshare_expr (out);
8526 gimplify_assign (out, x, end);
8527 }
8528 gimple *g
8529 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8530 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8531 gimple_seq_add_stmt (end, g);
8532 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8533 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8534 gimple_seq_add_stmt (end, g);
8535 g = gimple_build_assign (i, PLUS_EXPR, i,
8536 build_int_cst (TREE_TYPE (i), 1));
8537 gimple_seq_add_stmt (end, g);
8538 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8539 gimple_seq_add_stmt (end, g);
8540 gimple_seq_add_stmt (end, gimple_build_label (endl));
8541 }
8542 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8543 {
8544 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8545 tree oldv = NULL_TREE;
8546 tree lab6 = NULL_TREE;
8547 if (cancellable)
8548 {
8549 /* If this reduction needs destruction and parallel
8550 has been cancelled, jump around the merge operation
8551 to the destruction. */
8552 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8553 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8554 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8555 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8556 lab6, lab5);
8557 gimple_seq_add_stmt (end, g);
8558 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8559 }
8560 if (omp_is_reference (decl)
8561 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8562 TREE_TYPE (ref)))
8563 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8564 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8565 tree refv = create_tmp_var (TREE_TYPE (ref));
8566 gimplify_assign (refv, ref, end);
8567 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8568 SET_DECL_VALUE_EXPR (placeholder, ref);
8569 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8570 tree d = maybe_lookup_decl (decl, ctx);
8571 gcc_assert (d);
8572 if (DECL_HAS_VALUE_EXPR_P (d))
8573 oldv = DECL_VALUE_EXPR (d);
8574 if (omp_is_reference (var))
8575 {
8576 tree v = fold_convert (TREE_TYPE (d),
8577 build_fold_addr_expr (new_var));
8578 SET_DECL_VALUE_EXPR (d, v);
8579 }
8580 else
8581 SET_DECL_VALUE_EXPR (d, new_var);
8582 DECL_HAS_VALUE_EXPR_P (d) = 1;
8583 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8584 if (oldv)
8585 SET_DECL_VALUE_EXPR (d, oldv);
8586 else
8587 {
8588 SET_DECL_VALUE_EXPR (d, NULL_TREE);
8589 DECL_HAS_VALUE_EXPR_P (d) = 0;
8590 }
8591 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8592 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8593 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8594 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8595 if (cancellable)
8596 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8597 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8598 if (x)
8599 {
8600 gimple_seq tseq = NULL;
8601 gimplify_stmt (&x, &tseq);
8602 gimple_seq_add_seq (end, tseq);
8603 }
8604 }
8605 else
8606 {
8607 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8608 ref = unshare_expr (ref);
8609 gimplify_assign (ref, x, end);
8610 }
8611 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8612 ++cnt;
8613 field = DECL_CHAIN (bfield);
8614 }
8615 }
8616
8617 if (code == OMP_TASKGROUP)
8618 {
8619 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8620 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8621 gimple_seq_add_stmt (start, g);
8622 }
8623 else
8624 {
8625 tree c;
8626 if (code == OMP_FOR)
8627 c = gimple_omp_for_clauses (ctx->stmt);
8628 else if (code == OMP_SECTIONS)
8629 c = gimple_omp_sections_clauses (ctx->stmt);
8630 else
8631 c = gimple_omp_taskreg_clauses (ctx->stmt);
8632 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8633 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8634 build_fold_addr_expr (avar));
8635 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8636 }
8637
8638 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8639 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8640 size_one_node));
8641 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8642 gimple_seq_add_stmt (end, g);
8643 gimple_seq_add_stmt (end, gimple_build_label (lab2));
8644 if (code == OMP_FOR || code == OMP_SECTIONS)
8645 {
8646 enum built_in_function bfn
8647 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8648 t = builtin_decl_explicit (bfn);
8649 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8650 tree arg;
8651 if (cancellable)
8652 {
8653 arg = create_tmp_var (c_bool_type);
8654 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8655 cancellable));
8656 }
8657 else
8658 arg = build_int_cst (c_bool_type, 0);
8659 g = gimple_build_call (t, 1, arg);
8660 }
8661 else
8662 {
8663 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8664 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8665 }
8666 gimple_seq_add_stmt (end, g);
8667 t = build_constructor (atype, NULL);
8668 TREE_THIS_VOLATILE (t) = 1;
8669 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8670 }
8671
8672 /* Expand code for an OpenMP taskgroup directive. */
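/* Sketch of the result (pseudo-GIMPLE; the GIMPLE_OMP_RETURN is
   expected to expand later to the matching GOMP_taskgroup_end call):

     GIMPLE_OMP_TASKGROUP
     GOMP_taskgroup_start ();
     <task reduction registration>
     BODY;
     GIMPLE_OMP_RETURN
     <task reduction teardown (DSEQ)>
*/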
8673
8674 static void
8675 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8676 {
8677 gimple *stmt = gsi_stmt (*gsi_p);
8678 gcall *x;
8679 gbind *bind;
8680 gimple_seq dseq = NULL;
8681 tree block = make_node (BLOCK);
8682
8683 bind = gimple_build_bind (NULL, NULL, block);
8684 gsi_replace (gsi_p, bind, true);
8685 gimple_bind_add_stmt (bind, stmt);
8686
8687 push_gimplify_context ();
8688
8689 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8690 0);
8691 gimple_bind_add_stmt (bind, x);
8692
8693 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8694 gimple_omp_taskgroup_clauses (stmt),
8695 gimple_bind_body_ptr (bind), &dseq);
8696
8697 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8698 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8699 gimple_omp_set_body (stmt, NULL);
8700
8701 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8702 gimple_bind_add_seq (bind, dseq);
8703
8704 pop_gimplify_context (bind);
8705
8706 gimple_bind_append_vars (bind, ctx->block_vars);
8707 BLOCK_VARS (block) = ctx->block_vars;
8708 }
8709
8710
8711 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
8712
8713 static void
8714 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8715 omp_context *ctx)
8716 {
8717 struct omp_for_data fd;
8718 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8719 return;
8720
8721 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8722 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8723 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8724 if (!fd.ordered)
8725 return;
8726
8727 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8728 tree c = gimple_omp_ordered_clauses (ord_stmt);
8729 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8730 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8731 {
8732 /* Merge depend clauses from multiple adjacent
8733 #pragma omp ordered depend(sink:...) constructs
8734 into one #pragma omp ordered depend(sink:...), so that
8735 we can optimize them together. */
8736 gimple_stmt_iterator gsi = *gsi_p;
8737 gsi_next (&gsi);
8738 while (!gsi_end_p (gsi))
8739 {
8740 gimple *stmt = gsi_stmt (gsi);
8741 if (is_gimple_debug (stmt)
8742 || gimple_code (stmt) == GIMPLE_NOP)
8743 {
8744 gsi_next (&gsi);
8745 continue;
8746 }
8747 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8748 break;
8749 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8750 c = gimple_omp_ordered_clauses (ord_stmt2);
8751 if (c == NULL_TREE
8752 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8753 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8754 break;
8755 while (*list_p)
8756 list_p = &OMP_CLAUSE_CHAIN (*list_p);
8757 *list_p = c;
8758 gsi_remove (&gsi, true);
8759 }
8760 }
8761
8762 /* Canonicalize sink dependence clauses into one folded clause if
8763 possible.
8764
8765 The basic algorithm is to create a sink vector whose first
8766 element is the GCD of all the first elements, and whose remaining
8767 elements are the minimum of the subsequent columns.
8768
8769 We ignore dependence vectors whose first element is zero because
8770 such dependencies are known to be executed by the same thread.
8771
8772 We take into account the direction of the loop, so a minimum
8773 becomes a maximum if the loop is iterating forwards. We also
8774 ignore sink clauses where the loop direction is unknown, or where
8775 the offsets are clearly invalid because they are not a multiple
8776 of the loop increment.
8777
8778 For example:
8779
8780 #pragma omp for ordered(2)
8781 for (i=0; i < N; ++i)
8782 for (j=0; j < M; ++j)
8783 {
8784 #pragma omp ordered \
8785 depend(sink:i-8,j-2) \
8786 depend(sink:i,j-1) \ // Completely ignored because i+0.
8787 depend(sink:i-4,j-3) \
8788 depend(sink:i-6,j-4)
8789 #pragma omp ordered depend(source)
8790 }
8791
8792 Folded clause is:
8793
8794 depend(sink:-gcd(8,4,6),-min(2,3,4))
8795 -or-
8796 depend(sink:-2,-2)
8797 */
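/* For illustration only -- a minimal sketch, in plain C, of the GCD/min
   folding rule above (gcd_u is a hypothetical helper, not a GCC
   function):

     static unsigned gcd_u (unsigned a, unsigned b)
     {
       while (b) { unsigned t = a % b; a = b; b = t; }
       return a;
     }

   For the example above, the folded first element is
   -gcd_u (gcd_u (8, 4), 6) == -2 and the folded second element is
   -MIN (2, MIN (3, 4)) == -2, giving depend(sink:i-2,j-2).  */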
8798
8799 /* FIXME: Computing GCDs where the first element is zero is
8800 non-trivial in the presence of collapsed loops. Do this later. */
8801 if (fd.collapse > 1)
8802 return;
8803
8804 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8805
8806 /* wide_int is not a POD so it must be default-constructed. */
8807 for (unsigned i = 0; i != 2 * len - 1; ++i)
8808 new (static_cast<void*>(folded_deps + i)) wide_int ();
8809
8810 tree folded_dep = NULL_TREE;
8811 /* TRUE if the first dimension's offset is negative. */
8812 bool neg_offset_p = false;
8813
8814 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8815 unsigned int i;
8816 while ((c = *list_p) != NULL)
8817 {
8818 bool remove = false;
8819
8820 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8821 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8822 goto next_ordered_clause;
8823
8824 tree vec;
8825 for (vec = OMP_CLAUSE_DECL (c), i = 0;
8826 vec && TREE_CODE (vec) == TREE_LIST;
8827 vec = TREE_CHAIN (vec), ++i)
8828 {
8829 gcc_assert (i < len);
8830
8831 /* omp_extract_for_data has canonicalized the condition. */
8832 gcc_assert (fd.loops[i].cond_code == LT_EXPR
8833 || fd.loops[i].cond_code == GT_EXPR);
8834 bool forward = fd.loops[i].cond_code == LT_EXPR;
8835 bool maybe_lexically_later = true;
8836
8837 /* While the committee makes up its mind, bail if we have any
8838 non-constant steps. */
8839 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8840 goto lower_omp_ordered_ret;
8841
8842 tree itype = TREE_TYPE (TREE_VALUE (vec));
8843 if (POINTER_TYPE_P (itype))
8844 itype = sizetype;
8845 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8846 TYPE_PRECISION (itype),
8847 TYPE_SIGN (itype));
8848
8849 /* Ignore invalid offsets that are not multiples of the step. */
8850 if (!wi::multiple_of_p (wi::abs (offset),
8851 wi::abs (wi::to_wide (fd.loops[i].step)),
8852 UNSIGNED))
8853 {
8854 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8855 "ignoring sink clause with offset that is not "
8856 "a multiple of the loop step");
8857 remove = true;
8858 goto next_ordered_clause;
8859 }
8860
8861 /* Calculate the first dimension. The first dimension of
8862 the folded dependency vector is the GCD of the first
8863 elements, while ignoring any first elements whose offset
8864 is 0. */
8865 if (i == 0)
8866 {
8867 /* Ignore dependence vectors whose first dimension is 0. */
8868 if (offset == 0)
8869 {
8870 remove = true;
8871 goto next_ordered_clause;
8872 }
8873 else
8874 {
8875 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
8876 {
8877 error_at (OMP_CLAUSE_LOCATION (c),
8878 "first offset must be in opposite direction "
8879 "of loop iterations");
8880 goto lower_omp_ordered_ret;
8881 }
8882 if (forward)
8883 offset = -offset;
8884 neg_offset_p = forward;
8885 /* Initialize the first time around. */
8886 if (folded_dep == NULL_TREE)
8887 {
8888 folded_dep = c;
8889 folded_deps[0] = offset;
8890 }
8891 else
8892 folded_deps[0] = wi::gcd (folded_deps[0],
8893 offset, UNSIGNED);
8894 }
8895 }
8896 /* Calculate minimum for the remaining dimensions. */
8897 else
8898 {
8899 folded_deps[len + i - 1] = offset;
8900 if (folded_dep == c)
8901 folded_deps[i] = offset;
8902 else if (maybe_lexically_later
8903 && !wi::eq_p (folded_deps[i], offset))
8904 {
8905 if (forward ^ wi::gts_p (folded_deps[i], offset))
8906 {
8907 unsigned int j;
8908 folded_dep = c;
8909 for (j = 1; j <= i; j++)
8910 folded_deps[j] = folded_deps[len + j - 1];
8911 }
8912 else
8913 maybe_lexically_later = false;
8914 }
8915 }
8916 }
8917 gcc_assert (i == len);
8918
8919 remove = true;
8920
8921 next_ordered_clause:
8922 if (remove)
8923 *list_p = OMP_CLAUSE_CHAIN (c);
8924 else
8925 list_p = &OMP_CLAUSE_CHAIN (c);
8926 }
8927
8928 if (folded_dep)
8929 {
8930 if (neg_offset_p)
8931 folded_deps[0] = -folded_deps[0];
8932
8933 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
8934 if (POINTER_TYPE_P (itype))
8935 itype = sizetype;
8936
8937 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
8938 = wide_int_to_tree (itype, folded_deps[0]);
8939 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
8940 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
8941 }
8942
8943 lower_omp_ordered_ret:
8944
8945 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
8946 while we want a nop instead if we remove all clauses. */
8947 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
8948 gsi_replace (gsi_p, gimple_build_nop (), true);
8949 }
8950
8951
8952 /* Expand code for an OpenMP ordered directive. */
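/* A rough sketch of the lowering in the plain (non-simd, non-depend)
   case, using the libgomp builtins emitted below:

     #pragma omp ordered            GOMP_ordered_start ();
     { body }                =>     { lowered body }
                                    GOMP_ordered_end ();

   With a simd clause, the internal functions GOMP_SIMD_ORDERED_START
   and GOMP_SIMD_ORDERED_END are emitted instead and matched with the
   enclosing loop's simduid later.  */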
8953
8954 static void
8955 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8956 {
8957 tree block;
8958 gimple *stmt = gsi_stmt (*gsi_p), *g;
8959 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
8960 gcall *x;
8961 gbind *bind;
8962 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8963 OMP_CLAUSE_SIMD);
8964 /* FIXME: This should check for the presence of OMP_CLAUSE__SIMT_ on the
8965 enclosing loop. */
8966 bool maybe_simt
8967 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
8968 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8969 OMP_CLAUSE_THREADS);
8970
8971 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
8972 OMP_CLAUSE_DEPEND))
8973 {
8974 /* FIXME: This needs to be moved to the expansion to verify various
8975 conditions only testable on a CFG with dominators computed; also,
8976 all the depend clauses to be merged might still need to be available
8977 for the runtime checks. */
8978 if (0)
8979 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
8980 return;
8981 }
8982
8983 push_gimplify_context ();
8984
8985 block = make_node (BLOCK);
8986 bind = gimple_build_bind (NULL, NULL, block);
8987 gsi_replace (gsi_p, bind, true);
8988 gimple_bind_add_stmt (bind, stmt);
8989
8990 if (simd)
8991 {
8992 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
8993 build_int_cst (NULL_TREE, threads));
8994 cfun->has_simduid_loops = true;
8995 }
8996 else
8997 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8998 0);
8999 gimple_bind_add_stmt (bind, x);
9000
9001 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
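/* For SIMT execution, a sketch of the control flow built below: lanes
   take turns through the ordered body, sequenced by their lane ids:

     counter = GOMP_SIMT_LANE ();
   body:
     if (GOMP_SIMT_ORDERED_PRED (counter) == 0) goto t; else goto test;
   t:
     ... lowered ordered body ...
   test:
     counter = counter - 1;
     if (GOMP_SIMT_VOTE_ANY (counter >= 0)) goto body; else goto end;
   end:  */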
9002 if (maybe_simt)
9003 {
9004 counter = create_tmp_var (integer_type_node);
9005 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9006 gimple_call_set_lhs (g, counter);
9007 gimple_bind_add_stmt (bind, g);
9008
9009 body = create_artificial_label (UNKNOWN_LOCATION);
9010 test = create_artificial_label (UNKNOWN_LOCATION);
9011 gimple_bind_add_stmt (bind, gimple_build_label (body));
9012
9013 tree simt_pred = create_tmp_var (integer_type_node);
9014 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9015 gimple_call_set_lhs (g, simt_pred);
9016 gimple_bind_add_stmt (bind, g);
9017
9018 tree t = create_artificial_label (UNKNOWN_LOCATION);
9019 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9020 gimple_bind_add_stmt (bind, g);
9021
9022 gimple_bind_add_stmt (bind, gimple_build_label (t));
9023 }
9024 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9025 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9026 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9027 gimple_omp_set_body (stmt, NULL);
9028
9029 if (maybe_simt)
9030 {
9031 gimple_bind_add_stmt (bind, gimple_build_label (test));
9032 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9033 gimple_bind_add_stmt (bind, g);
9034
9035 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9036 tree nonneg = create_tmp_var (integer_type_node);
9037 gimple_seq tseq = NULL;
9038 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9039 gimple_bind_add_seq (bind, tseq);
9040
9041 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9042 gimple_call_set_lhs (g, nonneg);
9043 gimple_bind_add_stmt (bind, g);
9044
9045 tree end = create_artificial_label (UNKNOWN_LOCATION);
9046 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9047 gimple_bind_add_stmt (bind, g);
9048
9049 gimple_bind_add_stmt (bind, gimple_build_label (end));
9050 }
9051 if (simd)
9052 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9053 build_int_cst (NULL_TREE, threads));
9054 else
9055 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9056 0);
9057 gimple_bind_add_stmt (bind, x);
9058
9059 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9060
9061 pop_gimplify_context (bind);
9062
9063 gimple_bind_append_vars (bind, ctx->block_vars);
9064 BLOCK_VARS (block) = gimple_bind_vars (bind);
9065 }
9066
9067
9068 /* Expand code for an OpenMP scan directive and the structured block
9069 before the scan directive. */
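/* For reference, a sketch of the source form being handled (inclusive
   scan; for exclusive scan the two statements around the directive are
   interchanged):

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];                     // input phase
         #pragma omp scan inclusive (r)
         b[i] = r;                      // scan phase
       }  */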
9070
9071 static void
9072 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9073 {
9074 gimple *stmt = gsi_stmt (*gsi_p);
9075 bool has_clauses
9076 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9077 tree lane = NULL_TREE;
9078 gimple_seq before = NULL;
9079 omp_context *octx = ctx->outer;
9080 gcc_assert (octx);
9081 if (octx->scan_exclusive && !has_clauses)
9082 {
9083 gimple_stmt_iterator gsi2 = *gsi_p;
9084 gsi_next (&gsi2);
9085 gimple *stmt2 = gsi_stmt (gsi2);
9086 /* For exclusive scan, swap the GIMPLE_OMP_SCAN without clauses
9087 with the following GIMPLE_OMP_SCAN with clauses, so that the input
9088 phase, the one with the exclusive clause(s), comes first. */
9089 if (stmt2
9090 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9091 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9092 {
9093 gsi_remove (gsi_p, false);
9094 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9095 ctx = maybe_lookup_ctx (stmt2);
9096 gcc_assert (ctx);
9097 lower_omp_scan (gsi_p, ctx);
9098 return;
9099 }
9100 }
9101
9102 bool input_phase = has_clauses ^ octx->scan_inclusive;
9103 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9104 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9105 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9106 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9107 && !gimple_omp_for_combined_p (octx->stmt));
9108 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9109 if (is_for_simd && octx->for_simd_scan_phase)
9110 is_simd = false;
9111 if (is_simd)
9112 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9113 OMP_CLAUSE__SIMDUID_))
9114 {
9115 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9116 lane = create_tmp_var (unsigned_type_node);
9117 tree t = build_int_cst (integer_type_node,
9118 input_phase ? 1
9119 : octx->scan_inclusive ? 2 : 3);
9120 gimple *g
9121 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9122 gimple_call_set_lhs (g, lane);
9123 gimple_seq_add_stmt (&before, g);
9124 }
9125
9126 if (is_simd || is_for)
9127 {
9128 for (tree c = gimple_omp_for_clauses (octx->stmt);
9129 c; c = OMP_CLAUSE_CHAIN (c))
9130 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9131 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9132 {
9133 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9134 tree var = OMP_CLAUSE_DECL (c);
9135 tree new_var = lookup_decl (var, octx);
9136 tree val = new_var;
9137 tree var2 = NULL_TREE;
9138 tree var3 = NULL_TREE;
9139 tree var4 = NULL_TREE;
9140 tree lane0 = NULL_TREE;
9141 tree new_vard = new_var;
9142 if (omp_is_reference (var))
9143 {
9144 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9145 val = new_var;
9146 }
9147 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9148 {
9149 val = DECL_VALUE_EXPR (new_vard);
9150 if (new_vard != new_var)
9151 {
9152 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9153 val = TREE_OPERAND (val, 0);
9154 }
9155 if (TREE_CODE (val) == ARRAY_REF
9156 && VAR_P (TREE_OPERAND (val, 0)))
9157 {
9158 tree v = TREE_OPERAND (val, 0);
9159 if (lookup_attribute ("omp simd array",
9160 DECL_ATTRIBUTES (v)))
9161 {
9162 val = unshare_expr (val);
9163 lane0 = TREE_OPERAND (val, 1);
9164 TREE_OPERAND (val, 1) = lane;
9165 var2 = lookup_decl (v, octx);
9166 if (octx->scan_exclusive)
9167 var4 = lookup_decl (var2, octx);
9168 if (input_phase
9169 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9170 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9171 if (!input_phase)
9172 {
9173 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9174 var2, lane, NULL_TREE, NULL_TREE);
9175 TREE_THIS_NOTRAP (var2) = 1;
9176 if (octx->scan_exclusive)
9177 {
9178 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9179 var4, lane, NULL_TREE,
9180 NULL_TREE);
9181 TREE_THIS_NOTRAP (var4) = 1;
9182 }
9183 }
9184 else
9185 var2 = val;
9186 }
9187 }
9188 gcc_assert (var2);
9189 }
9190 else
9191 {
9192 var2 = build_outer_var_ref (var, octx);
9193 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9194 {
9195 var3 = maybe_lookup_decl (new_vard, octx);
9196 if (var3 == new_vard || var3 == NULL_TREE)
9197 var3 = NULL_TREE;
9198 else if (is_simd && octx->scan_exclusive && !input_phase)
9199 {
9200 var4 = maybe_lookup_decl (var3, octx);
9201 if (var4 == var3 || var4 == NULL_TREE)
9202 {
9203 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9204 {
9205 var4 = var3;
9206 var3 = NULL_TREE;
9207 }
9208 else
9209 var4 = NULL_TREE;
9210 }
9211 }
9212 }
9213 if (is_simd
9214 && octx->scan_exclusive
9215 && !input_phase
9216 && var4 == NULL_TREE)
9217 var4 = create_tmp_var (TREE_TYPE (val));
9218 }
9219 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9220 {
9221 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9222 if (input_phase)
9223 {
9224 if (var3)
9225 {
9226 /* If we've added a separate identity element
9227 variable, copy it over into val. */
9228 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9229 var3);
9230 gimplify_and_add (x, &before);
9231 }
9232 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9233 {
9234 /* Otherwise, assign to it the identity element. */
9235 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9236 if (is_for)
9237 tseq = copy_gimple_seq_and_replace_locals (tseq);
9238 tree ref = build_outer_var_ref (var, octx);
9239 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9240 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9241 if (x)
9242 {
9243 if (new_vard != new_var)
9244 val = build_fold_addr_expr_loc (clause_loc, val);
9245 SET_DECL_VALUE_EXPR (new_vard, val);
9246 }
9247 SET_DECL_VALUE_EXPR (placeholder, ref);
9248 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9249 lower_omp (&tseq, octx);
9250 if (x)
9251 SET_DECL_VALUE_EXPR (new_vard, x);
9252 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9253 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9254 gimple_seq_add_seq (&before, tseq);
9255 if (is_simd)
9256 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9257 }
9258 }
9259 else if (is_simd)
9260 {
9261 tree x;
9262 if (octx->scan_exclusive)
9263 {
9264 tree v4 = unshare_expr (var4);
9265 tree v2 = unshare_expr (var2);
9266 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9267 gimplify_and_add (x, &before);
9268 }
9269 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9270 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9271 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9272 tree vexpr = val;
9273 if (x && new_vard != new_var)
9274 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9275 if (x)
9276 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9277 SET_DECL_VALUE_EXPR (placeholder, var2);
9278 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9279 lower_omp (&tseq, octx);
9280 gimple_seq_add_seq (&before, tseq);
9281 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9282 if (x)
9283 SET_DECL_VALUE_EXPR (new_vard, x);
9284 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9285 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9286 if (octx->scan_inclusive)
9287 {
9288 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9289 var2);
9290 gimplify_and_add (x, &before);
9291 }
9292 else if (lane0 == NULL_TREE)
9293 {
9294 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9295 var4);
9296 gimplify_and_add (x, &before);
9297 }
9298 }
9299 }
9300 else
9301 {
9302 if (input_phase)
9303 {
9304 /* Input phase. Set val to the initializer before
9305 the body. */
9306 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9307 gimplify_assign (val, x, &before);
9308 }
9309 else if (is_simd)
9310 {
9311 /* Scan phase. */
9312 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9313 if (code == MINUS_EXPR)
9314 code = PLUS_EXPR;
9315
9316 tree x = build2 (code, TREE_TYPE (var2),
9317 unshare_expr (var2), unshare_expr (val));
9318 if (octx->scan_inclusive)
9319 {
9320 gimplify_assign (unshare_expr (var2), x, &before);
9321 gimplify_assign (val, var2, &before);
9322 }
9323 else
9324 {
9325 gimplify_assign (unshare_expr (var4),
9326 unshare_expr (var2), &before);
9327 gimplify_assign (var2, x, &before);
9328 if (lane0 == NULL_TREE)
9329 gimplify_assign (val, var4, &before);
9330 }
9331 }
9332 }
9333 if (octx->scan_exclusive && !input_phase && lane0)
9334 {
9335 tree vexpr = unshare_expr (var4);
9336 TREE_OPERAND (vexpr, 1) = lane0;
9337 if (new_vard != new_var)
9338 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9339 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9340 }
9341 }
9342 }
9343 if (is_simd && !is_for_simd)
9344 {
9345 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9346 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9347 gsi_replace (gsi_p, gimple_build_nop (), true);
9348 return;
9349 }
9350 lower_omp (gimple_omp_body_ptr (stmt), octx);
9351 if (before)
9352 {
9353 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9354 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9355 }
9356 }
9357
9358
9359 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9360 substitution of a couple of function calls. But the NAMED case
9361 requires that languages coordinate a symbol name, so it is
9362 best put here in common code. */
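/* For example (a sketch; the decorated symbol name matches what the
   code below constructs with ACONCAT):

     #pragma omp critical (lock)     GOMP_critical_name_start
     { body }                 =>         (&.gomp_critical_user_lock);
                                     { lowered body }
                                     GOMP_critical_name_end
                                         (&.gomp_critical_user_lock);

   The unnamed form uses GOMP_critical_start / GOMP_critical_end.  */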
9363
9364 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9365
9366 static void
9367 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9368 {
9369 tree block;
9370 tree name, lock, unlock;
9371 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9372 gbind *bind;
9373 location_t loc = gimple_location (stmt);
9374 gimple_seq tbody;
9375
9376 name = gimple_omp_critical_name (stmt);
9377 if (name)
9378 {
9379 tree decl;
9380
9381 if (!critical_name_mutexes)
9382 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9383
9384 tree *n = critical_name_mutexes->get (name);
9385 if (n == NULL)
9386 {
9387 char *new_str;
9388
9389 decl = create_tmp_var_raw (ptr_type_node);
9390
9391 new_str = ACONCAT ((".gomp_critical_user_",
9392 IDENTIFIER_POINTER (name), NULL));
9393 DECL_NAME (decl) = get_identifier (new_str);
9394 TREE_PUBLIC (decl) = 1;
9395 TREE_STATIC (decl) = 1;
9396 DECL_COMMON (decl) = 1;
9397 DECL_ARTIFICIAL (decl) = 1;
9398 DECL_IGNORED_P (decl) = 1;
9399
9400 varpool_node::finalize_decl (decl);
9401
9402 critical_name_mutexes->put (name, decl);
9403 }
9404 else
9405 decl = *n;
9406
9407 /* If '#pragma omp critical' is inside an offloaded region or
9408 inside a function marked as offloadable, the symbol must be
9409 marked as offloadable too. */
9410 omp_context *octx;
9411 if (cgraph_node::get (current_function_decl)->offloadable)
9412 varpool_node::get_create (decl)->offloadable = 1;
9413 else
9414 for (octx = ctx->outer; octx; octx = octx->outer)
9415 if (is_gimple_omp_offloaded (octx->stmt))
9416 {
9417 varpool_node::get_create (decl)->offloadable = 1;
9418 break;
9419 }
9420
9421 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9422 lock = build_call_expr_loc (loc, lock, 1,
9423 build_fold_addr_expr_loc (loc, decl));
9424
9425 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9426 unlock = build_call_expr_loc (loc, unlock, 1,
9427 build_fold_addr_expr_loc (loc, decl));
9428 }
9429 else
9430 {
9431 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9432 lock = build_call_expr_loc (loc, lock, 0);
9433
9434 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9435 unlock = build_call_expr_loc (loc, unlock, 0);
9436 }
9437
9438 push_gimplify_context ();
9439
9440 block = make_node (BLOCK);
9441 bind = gimple_build_bind (NULL, NULL, block);
9442 gsi_replace (gsi_p, bind, true);
9443 gimple_bind_add_stmt (bind, stmt);
9444
9445 tbody = gimple_bind_body (bind);
9446 gimplify_and_add (lock, &tbody);
9447 gimple_bind_set_body (bind, tbody);
9448
9449 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9450 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9451 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9452 gimple_omp_set_body (stmt, NULL);
9453
9454 tbody = gimple_bind_body (bind);
9455 gimplify_and_add (unlock, &tbody);
9456 gimple_bind_set_body (bind, tbody);
9457
9458 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9459
9460 pop_gimplify_context (bind);
9461 gimple_bind_append_vars (bind, ctx->block_vars);
9462 BLOCK_VARS (block) = gimple_bind_vars (bind);
9463 }
9464
9465 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9466 for a lastprivate clause. Given a loop control predicate of (V
9467 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9468 is appended to *DLIST, iterator initialization is appended to
9469 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9470 to be emitted in a critical section. */
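/* A sketch of the gating, assuming a loop of the form
   for (V = N1; V < N2; V += STEP):

     V = N1;                       // appended to *BODY_P, so threads that
                                   // run no iterations don't hit the gate
     ... workshared loop ...
     if (V >= N2)                  // V == N2 when STEP is +-1
       { lastprivate assignments } // appended to *DLIST  */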
9471
9472 static void
9473 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9474 gimple_seq *dlist, gimple_seq *clist,
9475 struct omp_context *ctx)
9476 {
9477 tree clauses, cond, vinit;
9478 enum tree_code cond_code;
9479 gimple_seq stmts;
9480
9481 cond_code = fd->loop.cond_code;
9482 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9483
9484 /* When possible, use a strict equality expression. This can let
9485 VRP-like optimizations deduce the value and remove a copy. */
9486 if (tree_fits_shwi_p (fd->loop.step))
9487 {
9488 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9489 if (step == 1 || step == -1)
9490 cond_code = EQ_EXPR;
9491 }
9492
9493 tree n2 = fd->loop.n2;
9494 if (fd->collapse > 1
9495 && TREE_CODE (n2) != INTEGER_CST
9496 && gimple_omp_for_combined_into_p (fd->for_stmt))
9497 {
9498 struct omp_context *taskreg_ctx = NULL;
9499 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9500 {
9501 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9502 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9503 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9504 {
9505 if (gimple_omp_for_combined_into_p (gfor))
9506 {
9507 gcc_assert (ctx->outer->outer
9508 && is_parallel_ctx (ctx->outer->outer));
9509 taskreg_ctx = ctx->outer->outer;
9510 }
9511 else
9512 {
9513 struct omp_for_data outer_fd;
9514 omp_extract_for_data (gfor, &outer_fd, NULL);
9515 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9516 }
9517 }
9518 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9519 taskreg_ctx = ctx->outer->outer;
9520 }
9521 else if (is_taskreg_ctx (ctx->outer))
9522 taskreg_ctx = ctx->outer;
9523 if (taskreg_ctx)
9524 {
9525 int i;
9526 tree taskreg_clauses
9527 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9528 tree innerc = omp_find_clause (taskreg_clauses,
9529 OMP_CLAUSE__LOOPTEMP_);
9530 gcc_assert (innerc);
9531 for (i = 0; i < fd->collapse; i++)
9532 {
9533 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9534 OMP_CLAUSE__LOOPTEMP_);
9535 gcc_assert (innerc);
9536 }
9537 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9538 OMP_CLAUSE__LOOPTEMP_);
9539 if (innerc)
9540 n2 = fold_convert (TREE_TYPE (n2),
9541 lookup_decl (OMP_CLAUSE_DECL (innerc),
9542 taskreg_ctx));
9543 }
9544 }
9545 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9546
9547 clauses = gimple_omp_for_clauses (fd->for_stmt);
9548 stmts = NULL;
9549 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9550 if (!gimple_seq_empty_p (stmts))
9551 {
9552 gimple_seq_add_seq (&stmts, *dlist);
9553 *dlist = stmts;
9554
9555 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9556 vinit = fd->loop.n1;
9557 if (cond_code == EQ_EXPR
9558 && tree_fits_shwi_p (fd->loop.n2)
9559 && ! integer_zerop (fd->loop.n2))
9560 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9561 else
9562 vinit = unshare_expr (vinit);
9563
9564 /* Initialize the iterator variable, so that threads that don't execute
9565 any iterations don't execute the lastprivate clauses by accident. */
9566 gimplify_assign (fd->loop.v, vinit, body_p);
9567 }
9568 }
9569
9570 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9571
9572 static tree
9573 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9574 struct walk_stmt_info *wi)
9575 {
9576 gimple *stmt = gsi_stmt (*gsi_p);
9577
9578 *handled_ops_p = true;
9579 switch (gimple_code (stmt))
9580 {
9581 WALK_SUBSTMTS;
9582
9583 case GIMPLE_OMP_FOR:
9584 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9585 && gimple_omp_for_combined_into_p (stmt))
9586 *handled_ops_p = false;
9587 break;
9588
9589 case GIMPLE_OMP_SCAN:
9590 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9591 return integer_zero_node;
9592 default:
9593 break;
9594 }
9595 return NULL;
9596 }
9597
9598 /* Helper function for lower_omp_for; adds transformations for a worksharing
9599 loop with scan directives inside it.
9600 For a worksharing loop not combined with simd, transform:
9601 #pragma omp for reduction(inscan,+:r) private(i)
9602 for (i = 0; i < n; i = i + 1)
9603 {
9604 {
9605 update (r);
9606 }
9607 #pragma omp scan inclusive(r)
9608 {
9609 use (r);
9610 }
9611 }
9612
9613 into two worksharing loops + code to merge results:
9614
9615 num_threads = omp_get_num_threads ();
9616 thread_num = omp_get_thread_num ();
9617 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9618 <D.2099>:
9619 var2 = r;
9620 goto <D.2101>;
9621 <D.2100>:
9622 // For UDRs this is UDR init, or if ctors are needed, copy from
9623 // var3 that has been constructed to contain the neutral element.
9624 var2 = 0;
9625 <D.2101>:
9626 ivar = 0;
9627 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9628 // a shared array with num_threads elements and rprivb to a local array
9629 // with a number of elements equal to the number of (contiguous) iterations the
9630 // current thread will perform. controlb and controlp variables are
9631 // temporaries to handle deallocation of rprivb at the end of second
9632 // GOMP_FOR.
9633 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9634 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9635 for (i = 0; i < n; i = i + 1)
9636 {
9637 {
9638 // For UDRs this is UDR init or copy from var3.
9639 r = 0;
9640 // This is the input phase from user code.
9641 update (r);
9642 }
9643 {
9644 // For UDRs this is UDR merge.
9645 var2 = var2 + r;
9646 // Rather than handing it over to the user, save it to the local
9647 // thread's array.
9648 rprivb[ivar] = var2;
9649 // For exclusive scan, the above two statements are swapped.
9650 ivar = ivar + 1;
9651 }
9652 }
9653 // And remember this thread's final value in the shared
9654 // rpriva array.
9655 rpriva[(sizetype) thread_num] = var2;
9656 // If there is more than one thread, compute the inclusive parallel
9657 // scan of the rpriva array using a work-efficient prefix sum.
9658 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9659 <D.2102>:
9660 GOMP_barrier ();
9661 down = 0;
9662 k = 1;
9663 num_threadsu = (unsigned int) num_threads;
9664 thread_nump1 = (unsigned int) thread_num + 1;
9665 <D.2108>:
9666 twok = k << 1;
9667 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9668 <D.2110>:
9669 down = 4294967295;
9670 k = k >> 1;
9671 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9672 <D.2112>:
9673 k = k >> 1;
9674 <D.2111>:
9675 twok = k << 1;
9676 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9677 mul = REALPART_EXPR <cplx>;
9678 ovf = IMAGPART_EXPR <cplx>;
9679 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9680 <D.2116>:
9681 andv = k & down;
9682 andvm1 = andv + 4294967295;
9683 l = mul + andvm1;
9684 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9685 <D.2120>:
9686 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9687 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9688 rpriva[l] = rpriva[l - k] + rpriva[l];
9689 <D.2117>:
9690 if (down == 0) goto <D.2121>; else goto <D.2122>;
9691 <D.2121>:
9692 k = k << 1;
9693 goto <D.2123>;
9694 <D.2122>:
9695 k = k >> 1;
9696 <D.2123>:
9697 GOMP_barrier ();
9698 if (k != 0) goto <D.2108>; else goto <D.2103>;
9699 <D.2103>:
9700 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9701 <D.2124>:
9702 // For UDRs this is UDR init or copy from var3.
9703 var2 = 0;
9704 goto <D.2126>;
9705 <D.2125>:
9706 var2 = rpriva[thread_num - 1];
9707 <D.2126>:
9708 ivar = 0;
9709 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9710 reduction(inscan,+:r) private(i)
9711 for (i = 0; i < n; i = i + 1)
9712 {
9713 {
9714 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9715 r = var2 + rprivb[ivar];
9716 }
9717 {
9718 // This is the scan phase from user code.
9719 use (r);
9720 // Plus a bump of the iterator.
9721 ivar = ivar + 1;
9722 }
9723 } */
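/* For instance (a sketch), with num_threads == 4 and per-thread partial
   sums rpriva = { a, b, c, d }, the barriers and up/down sweeps above
   leave rpriva holding the inclusive scan { a, a+b, a+b+c, a+b+c+d },
   after which thread 0 reinitializes var2 and every other thread i
   seeds var2 from rpriva[i - 1] before running its second loop.  */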
9724
9725 static void
9726 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9727 struct omp_for_data *fd, omp_context *ctx)
9728 {
9729 bool is_for_simd = gimple_omp_for_combined_p (stmt);
9730 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9731
9732 gimple_seq body = gimple_omp_body (stmt);
9733 gimple_stmt_iterator input1_gsi = gsi_none ();
9734 struct walk_stmt_info wi;
9735 memset (&wi, 0, sizeof (wi));
9736 wi.val_only = true;
9737 wi.info = (void *) &input1_gsi;
9738 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9739 gcc_assert (!gsi_end_p (input1_gsi));
9740
9741 gimple *input_stmt1 = gsi_stmt (input1_gsi);
9742 gimple_stmt_iterator gsi = input1_gsi;
9743 gsi_next (&gsi);
9744 gimple_stmt_iterator scan1_gsi = gsi;
9745 gimple *scan_stmt1 = gsi_stmt (gsi);
9746 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9747
9748 gimple_seq input_body = gimple_omp_body (input_stmt1);
9749 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9750 gimple_omp_set_body (input_stmt1, NULL);
9751 gimple_omp_set_body (scan_stmt1, NULL);
9752 gimple_omp_set_body (stmt, NULL);
9753
9754 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9755 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9756 gimple_omp_set_body (stmt, body);
9757 gimple_omp_set_body (input_stmt1, input_body);
9758
9759 gimple_stmt_iterator input2_gsi = gsi_none ();
9760 memset (&wi, 0, sizeof (wi));
9761 wi.val_only = true;
9762 wi.info = (void *) &input2_gsi;
9763 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9764 gcc_assert (!gsi_end_p (input2_gsi));
9765
9766 gimple *input_stmt2 = gsi_stmt (input2_gsi);
9767 gsi = input2_gsi;
9768 gsi_next (&gsi);
9769 gimple_stmt_iterator scan2_gsi = gsi;
9770 gimple *scan_stmt2 = gsi_stmt (gsi);
9771 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9772 gimple_omp_set_body (scan_stmt2, scan_body);
9773
9774 gimple_stmt_iterator input3_gsi = gsi_none ();
9775 gimple_stmt_iterator scan3_gsi = gsi_none ();
9776 gimple_stmt_iterator input4_gsi = gsi_none ();
9777 gimple_stmt_iterator scan4_gsi = gsi_none ();
9778 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9779 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9780 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9781 if (is_for_simd)
9782 {
9783 memset (&wi, 0, sizeof (wi));
9784 wi.val_only = true;
9785 wi.info = (void *) &input3_gsi;
9786 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9787 gcc_assert (!gsi_end_p (input3_gsi));
9788
9789 input_stmt3 = gsi_stmt (input3_gsi);
9790 gsi = input3_gsi;
9791 gsi_next (&gsi);
9792 scan3_gsi = gsi;
9793 scan_stmt3 = gsi_stmt (gsi);
9794 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9795
9796 memset (&wi, 0, sizeof (wi));
9797 wi.val_only = true;
9798 wi.info = (void *) &input4_gsi;
9799 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9800 gcc_assert (!gsi_end_p (input4_gsi));
9801
9802 input_stmt4 = gsi_stmt (input4_gsi);
9803 gsi = input4_gsi;
9804 gsi_next (&gsi);
9805 scan4_gsi = gsi;
9806 scan_stmt4 = gsi_stmt (gsi);
9807 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9808
9809 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9810 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9811 }
9812
9813 tree num_threads = create_tmp_var (integer_type_node);
9814 tree thread_num = create_tmp_var (integer_type_node);
9815 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9816 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9817 gimple *g = gimple_build_call (nthreads_decl, 0);
9818 gimple_call_set_lhs (g, num_threads);
9819 gimple_seq_add_stmt (body_p, g);
9820 g = gimple_build_call (threadnum_decl, 0);
9821 gimple_call_set_lhs (g, thread_num);
9822 gimple_seq_add_stmt (body_p, g);
9823
9824 tree ivar = create_tmp_var (sizetype);
9825 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9826 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9827 tree k = create_tmp_var (unsigned_type_node);
9828 tree l = create_tmp_var (unsigned_type_node);
9829
9830 gimple_seq clist = NULL, mdlist = NULL;
9831 gimple_seq thr01_list = NULL, thrn1_list = NULL;
9832 gimple_seq thr02_list = NULL, thrn2_list = NULL;
9833 gimple_seq scan1_list = NULL, input2_list = NULL;
9834 gimple_seq last_list = NULL, reduc_list = NULL;
9835 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9836 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9837 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9838 {
9839 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9840 tree var = OMP_CLAUSE_DECL (c);
9841 tree new_var = lookup_decl (var, ctx);
9842 tree var3 = NULL_TREE;
9843 tree new_vard = new_var;
9844 if (omp_is_reference (var))
9845 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9846 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9847 {
9848 var3 = maybe_lookup_decl (new_vard, ctx);
9849 if (var3 == new_vard)
9850 var3 = NULL_TREE;
9851 }
9852
9853 tree ptype = build_pointer_type (TREE_TYPE (new_var));
9854 tree rpriva = create_tmp_var (ptype);
9855 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9856 OMP_CLAUSE_DECL (nc) = rpriva;
9857 *cp1 = nc;
9858 cp1 = &OMP_CLAUSE_CHAIN (nc);
9859
9860 tree rprivb = create_tmp_var (ptype);
9861 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9862 OMP_CLAUSE_DECL (nc) = rprivb;
9863 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
9864 *cp1 = nc;
9865 cp1 = &OMP_CLAUSE_CHAIN (nc);
9866
9867 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
9868 if (new_vard != new_var)
9869 TREE_ADDRESSABLE (var2) = 1;
9870 gimple_add_tmp_var (var2);
9871
9872 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
9873 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9874 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9875 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9876 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
9877
9878 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
9879 thread_num, integer_minus_one_node);
9880 x = fold_convert_loc (clause_loc, sizetype, x);
9881 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9882 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9883 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9884 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
9885
9886 x = fold_convert_loc (clause_loc, sizetype, l);
9887 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9888 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9889 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9890 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
9891
9892 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
9893 x = fold_convert_loc (clause_loc, sizetype, x);
9894 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
9895 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9896 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
9897 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
9898
9899 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
9900 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
9901 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
9902 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
9903
9904 tree var4 = is_for_simd ? new_var : var2;
9905 tree var5 = NULL_TREE, var6 = NULL_TREE;
9906 if (is_for_simd)
9907 {
9908 var5 = lookup_decl (var, input_simd_ctx);
9909 var6 = lookup_decl (var, scan_simd_ctx);
9910 if (new_vard != new_var)
9911 {
9912 var5 = build_simple_mem_ref_loc (clause_loc, var5);
9913 var6 = build_simple_mem_ref_loc (clause_loc, var6);
9914 }
9915 }
9916 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9917 {
9918 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9919 tree val = var2;
9920
9921 x = lang_hooks.decls.omp_clause_default_ctor
9922 (c, var2, build_outer_var_ref (var, ctx));
9923 if (x)
9924 gimplify_and_add (x, &clist);
9925
9926 x = build_outer_var_ref (var, ctx);
9927 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
9928 x);
9929 gimplify_and_add (x, &thr01_list);
9930
9931 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
9932 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9933 if (var3)
9934 {
9935 x = unshare_expr (var4);
9936 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9937 gimplify_and_add (x, &thrn1_list);
9938 x = unshare_expr (var4);
9939 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
9940 gimplify_and_add (x, &thr02_list);
9941 }
9942 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9943 {
9944 /* Otherwise, assign to it the identity element. */
9945 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9946 tseq = copy_gimple_seq_and_replace_locals (tseq);
9947 if (!is_for_simd)
9948 {
9949 if (new_vard != new_var)
9950 val = build_fold_addr_expr_loc (clause_loc, val);
9951 SET_DECL_VALUE_EXPR (new_vard, val);
9952 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
9953 }
9954 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
9955 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9956 lower_omp (&tseq, ctx);
9957 gimple_seq_add_seq (&thrn1_list, tseq);
9958 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9959 lower_omp (&tseq, ctx);
9960 gimple_seq_add_seq (&thr02_list, tseq);
9961 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9962 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9963 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9964 if (y)
9965 SET_DECL_VALUE_EXPR (new_vard, y);
9966 else
9967 {
9968 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
9969 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
9970 }
9971 }
9972
9973 x = unshare_expr (var4);
9974 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
9975 gimplify_and_add (x, &thrn2_list);
9976
9977 if (is_for_simd)
9978 {
9979 x = unshare_expr (rprivb_ref);
9980 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
9981 gimplify_and_add (x, &scan1_list);
9982 }
9983 else
9984 {
9985 if (ctx->scan_exclusive)
9986 {
9987 x = unshare_expr (rprivb_ref);
9988 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
9989 gimplify_and_add (x, &scan1_list);
9990 }
9991
9992 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9993 tseq = copy_gimple_seq_and_replace_locals (tseq);
9994 SET_DECL_VALUE_EXPR (placeholder, var2);
9995 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9996 lower_omp (&tseq, ctx);
9997 gimple_seq_add_seq (&scan1_list, tseq);
9998
9999 if (ctx->scan_inclusive)
10000 {
10001 x = unshare_expr (rprivb_ref);
10002 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10003 gimplify_and_add (x, &scan1_list);
10004 }
10005 }
10006
10007 x = unshare_expr (rpriva_ref);
10008 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10009 unshare_expr (var4));
10010 gimplify_and_add (x, &mdlist);
10011
10012 x = unshare_expr (is_for_simd ? var6 : new_var);
10013 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10014 gimplify_and_add (x, &input2_list);
10015
10016 val = rprivb_ref;
10017 if (new_vard != new_var)
10018 val = build_fold_addr_expr_loc (clause_loc, val);
10019
10020 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10021 tseq = copy_gimple_seq_and_replace_locals (tseq);
10022 SET_DECL_VALUE_EXPR (new_vard, val);
10023 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10024 if (is_for_simd)
10025 {
10026 SET_DECL_VALUE_EXPR (placeholder, var6);
10027 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10028 }
10029 else
10030 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10031 lower_omp (&tseq, ctx);
10032 if (y)
10033 SET_DECL_VALUE_EXPR (new_vard, y);
10034 else
10035 {
10036 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10037 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10038 }
10039 if (!is_for_simd)
10040 {
10041 SET_DECL_VALUE_EXPR (placeholder, new_var);
10042 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10043 lower_omp (&tseq, ctx);
10044 }
10045 gimple_seq_add_seq (&input2_list, tseq);
10046
10047 x = build_outer_var_ref (var, ctx);
10048 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10049 gimplify_and_add (x, &last_list);
10050
10051 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10052 gimplify_and_add (x, &reduc_list);
10053 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10054 tseq = copy_gimple_seq_and_replace_locals (tseq);
10055 val = rprival_ref;
10056 if (new_vard != new_var)
10057 val = build_fold_addr_expr_loc (clause_loc, val);
10058 SET_DECL_VALUE_EXPR (new_vard, val);
10059 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10060 SET_DECL_VALUE_EXPR (placeholder, var2);
10061 lower_omp (&tseq, ctx);
10062 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10063 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10064 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10065 if (y)
10066 SET_DECL_VALUE_EXPR (new_vard, y);
10067 else
10068 {
10069 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10070 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10071 }
10072 gimple_seq_add_seq (&reduc_list, tseq);
10073 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10074 gimplify_and_add (x, &reduc_list);
10075
10076 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10077 if (x)
10078 gimplify_and_add (x, dlist);
10079 }
10080 else
10081 {
10082 x = build_outer_var_ref (var, ctx);
10083 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10084
10085 x = omp_reduction_init (c, TREE_TYPE (new_var));
10086 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10087 &thrn1_list);
10088 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10089
10090 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10091
10092 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10093 if (code == MINUS_EXPR)
10094 code = PLUS_EXPR;
10095
10096 if (is_for_simd)
10097 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10098 else
10099 {
10100 if (ctx->scan_exclusive)
10101 gimplify_assign (unshare_expr (rprivb_ref), var2,
10102 &scan1_list);
10103 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10104 gimplify_assign (var2, x, &scan1_list);
10105 if (ctx->scan_inclusive)
10106 gimplify_assign (unshare_expr (rprivb_ref), var2,
10107 &scan1_list);
10108 }
10109
10110 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10111 &mdlist);
10112
10113 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10114 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10115
10116 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10117 &last_list);
10118
10119 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10120 unshare_expr (rprival_ref));
10121 gimplify_assign (rprival_ref, x, &reduc_list);
10122 }
10123 }
10124
10125 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10126 gimple_seq_add_stmt (&scan1_list, g);
10127 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10128 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10129 ? scan_stmt4 : scan_stmt2), g);
10130
10131 tree controlb = create_tmp_var (boolean_type_node);
10132 tree controlp = create_tmp_var (ptr_type_node);
10133 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10134 OMP_CLAUSE_DECL (nc) = controlb;
10135 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10136 *cp1 = nc;
10137 cp1 = &OMP_CLAUSE_CHAIN (nc);
10138 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10139 OMP_CLAUSE_DECL (nc) = controlp;
10140 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10141 *cp1 = nc;
10142 cp1 = &OMP_CLAUSE_CHAIN (nc);
10143 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10144 OMP_CLAUSE_DECL (nc) = controlb;
10145 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10146 *cp2 = nc;
10147 cp2 = &OMP_CLAUSE_CHAIN (nc);
10148 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10149 OMP_CLAUSE_DECL (nc) = controlp;
10150 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10151 *cp2 = nc;
10152 cp2 = &OMP_CLAUSE_CHAIN (nc);
10153
10154 *cp1 = gimple_omp_for_clauses (stmt);
10155 gimple_omp_for_set_clauses (stmt, new_clauses1);
10156 *cp2 = gimple_omp_for_clauses (new_stmt);
10157 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10158
10159 if (is_for_simd)
10160 {
10161 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10162 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10163
10164 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10165 GSI_SAME_STMT);
10166 gsi_remove (&input3_gsi, true);
10167 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10168 GSI_SAME_STMT);
10169 gsi_remove (&scan3_gsi, true);
10170 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10171 GSI_SAME_STMT);
10172 gsi_remove (&input4_gsi, true);
10173 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10174 GSI_SAME_STMT);
10175 gsi_remove (&scan4_gsi, true);
10176 }
10177 else
10178 {
10179 gimple_omp_set_body (scan_stmt1, scan1_list);
10180 gimple_omp_set_body (input_stmt2, input2_list);
10181 }
10182
10183 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10184 GSI_SAME_STMT);
10185 gsi_remove (&input1_gsi, true);
10186 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10187 GSI_SAME_STMT);
10188 gsi_remove (&scan1_gsi, true);
10189 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10190 GSI_SAME_STMT);
10191 gsi_remove (&input2_gsi, true);
10192 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10193 GSI_SAME_STMT);
10194 gsi_remove (&scan2_gsi, true);
10195
10196 gimple_seq_add_seq (body_p, clist);
10197
10198 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10199 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10200 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10201 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10202 gimple_seq_add_stmt (body_p, g);
10203 g = gimple_build_label (lab1);
10204 gimple_seq_add_stmt (body_p, g);
10205 gimple_seq_add_seq (body_p, thr01_list);
10206 g = gimple_build_goto (lab3);
10207 gimple_seq_add_stmt (body_p, g);
10208 g = gimple_build_label (lab2);
10209 gimple_seq_add_stmt (body_p, g);
10210 gimple_seq_add_seq (body_p, thrn1_list);
10211 g = gimple_build_label (lab3);
10212 gimple_seq_add_stmt (body_p, g);
10213
10214 g = gimple_build_assign (ivar, size_zero_node);
10215 gimple_seq_add_stmt (body_p, g);
10216
10217 gimple_seq_add_stmt (body_p, stmt);
10218 gimple_seq_add_seq (body_p, body);
10219 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10220 fd->loop.v));
10221
10222 g = gimple_build_omp_return (true);
10223 gimple_seq_add_stmt (body_p, g);
10224 gimple_seq_add_seq (body_p, mdlist);
10225
10226 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10227 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10228 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10229 gimple_seq_add_stmt (body_p, g);
10230 g = gimple_build_label (lab1);
10231 gimple_seq_add_stmt (body_p, g);
10232
10233 g = omp_build_barrier (NULL);
10234 gimple_seq_add_stmt (body_p, g);
10235
10236 tree down = create_tmp_var (unsigned_type_node);
10237 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10238 gimple_seq_add_stmt (body_p, g);
10239
10240 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10241 gimple_seq_add_stmt (body_p, g);
10242
10243 tree num_threadsu = create_tmp_var (unsigned_type_node);
10244 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10245 gimple_seq_add_stmt (body_p, g);
10246
10247 tree thread_numu = create_tmp_var (unsigned_type_node);
10248 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10249 gimple_seq_add_stmt (body_p, g);
10250
10251 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10252 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10253 build_int_cst (unsigned_type_node, 1));
10254 gimple_seq_add_stmt (body_p, g);
10255
10256 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10257 g = gimple_build_label (lab3);
10258 gimple_seq_add_stmt (body_p, g);
10259
10260 tree twok = create_tmp_var (unsigned_type_node);
10261 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10262 gimple_seq_add_stmt (body_p, g);
10263
10264 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10265 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10266 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10267 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10268 gimple_seq_add_stmt (body_p, g);
10269 g = gimple_build_label (lab4);
10270 gimple_seq_add_stmt (body_p, g);
10271 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10272 gimple_seq_add_stmt (body_p, g);
10273 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10274 gimple_seq_add_stmt (body_p, g);
10275
10276 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10277 gimple_seq_add_stmt (body_p, g);
10278 g = gimple_build_label (lab6);
10279 gimple_seq_add_stmt (body_p, g);
10280
10281 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10282 gimple_seq_add_stmt (body_p, g);
10283
10284 g = gimple_build_label (lab5);
10285 gimple_seq_add_stmt (body_p, g);
10286
10287 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10288 gimple_seq_add_stmt (body_p, g);
10289
10290 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10291 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10292 gimple_call_set_lhs (g, cplx);
10293 gimple_seq_add_stmt (body_p, g);
10294 tree mul = create_tmp_var (unsigned_type_node);
10295 g = gimple_build_assign (mul, REALPART_EXPR,
10296 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10297 gimple_seq_add_stmt (body_p, g);
10298 tree ovf = create_tmp_var (unsigned_type_node);
10299 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10300 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10301 gimple_seq_add_stmt (body_p, g);
10302
10303 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10304 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10305 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10306 lab7, lab8);
10307 gimple_seq_add_stmt (body_p, g);
10308 g = gimple_build_label (lab7);
10309 gimple_seq_add_stmt (body_p, g);
10310
10311 tree andv = create_tmp_var (unsigned_type_node);
10312 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10313 gimple_seq_add_stmt (body_p, g);
10314 tree andvm1 = create_tmp_var (unsigned_type_node);
10315 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10316 build_minus_one_cst (unsigned_type_node));
10317 gimple_seq_add_stmt (body_p, g);
10318
10319 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10320 gimple_seq_add_stmt (body_p, g);
10321
10322 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10323 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10324 gimple_seq_add_stmt (body_p, g);
10325 g = gimple_build_label (lab9);
10326 gimple_seq_add_stmt (body_p, g);
10327 gimple_seq_add_seq (body_p, reduc_list);
10328 g = gimple_build_label (lab8);
10329 gimple_seq_add_stmt (body_p, g);
10330
10331 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10332 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10333 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10334 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10335 lab10, lab11);
10336 gimple_seq_add_stmt (body_p, g);
10337 g = gimple_build_label (lab10);
10338 gimple_seq_add_stmt (body_p, g);
10339 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10340 gimple_seq_add_stmt (body_p, g);
10341 g = gimple_build_goto (lab12);
10342 gimple_seq_add_stmt (body_p, g);
10343 g = gimple_build_label (lab11);
10344 gimple_seq_add_stmt (body_p, g);
10345 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10346 gimple_seq_add_stmt (body_p, g);
10347 g = gimple_build_label (lab12);
10348 gimple_seq_add_stmt (body_p, g);
10349
10350 g = omp_build_barrier (NULL);
10351 gimple_seq_add_stmt (body_p, g);
10352
10353 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10354 lab3, lab2);
10355 gimple_seq_add_stmt (body_p, g);
10356
10357 g = gimple_build_label (lab2);
10358 gimple_seq_add_stmt (body_p, g);
10359
10360 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10361 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10362 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10363 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10364 gimple_seq_add_stmt (body_p, g);
10365 g = gimple_build_label (lab1);
10366 gimple_seq_add_stmt (body_p, g);
10367 gimple_seq_add_seq (body_p, thr02_list);
10368 g = gimple_build_goto (lab3);
10369 gimple_seq_add_stmt (body_p, g);
10370 g = gimple_build_label (lab2);
10371 gimple_seq_add_stmt (body_p, g);
10372 gimple_seq_add_seq (body_p, thrn2_list);
10373 g = gimple_build_label (lab3);
10374 gimple_seq_add_stmt (body_p, g);
10375
10376 g = gimple_build_assign (ivar, size_zero_node);
10377 gimple_seq_add_stmt (body_p, g);
10378 gimple_seq_add_stmt (body_p, new_stmt);
10379 gimple_seq_add_seq (body_p, new_body);
10380
10381 gimple_seq new_dlist = NULL;
10382 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10383 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10384 tree num_threadsm1 = create_tmp_var (integer_type_node);
10385 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10386 integer_minus_one_node);
10387 gimple_seq_add_stmt (&new_dlist, g);
10388 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10389 gimple_seq_add_stmt (&new_dlist, g);
10390 g = gimple_build_label (lab1);
10391 gimple_seq_add_stmt (&new_dlist, g);
10392 gimple_seq_add_seq (&new_dlist, last_list);
10393 g = gimple_build_label (lab2);
10394 gimple_seq_add_stmt (&new_dlist, g);
10395 gimple_seq_add_seq (&new_dlist, *dlist);
10396 *dlist = new_dlist;
10397 }
10398
10399 /* Lower code for an OMP loop directive. */
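/* A rough sketch of the resulting shape (many details, e.g. task
   reductions and OpenACC handling, omitted):

     gimple_bind {
       <lowered pre-body; loop bound computations>
       GIMPLE_OMP_FOR <clauses>
         { lowered loop body }
       GIMPLE_OMP_CONTINUE
       <lastprivate / dlist cleanup>
       GIMPLE_OMP_RETURN
     }  */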
10400
10401 static void
10402 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10403 {
10404 tree *rhs_p, block;
10405 struct omp_for_data fd, *fdp = NULL;
10406 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10407 gbind *new_stmt;
10408 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10409 gimple_seq cnt_list = NULL, clist = NULL;
10410 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10411 size_t i;
10412
10413 push_gimplify_context ();
10414
10415 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10416
10417 block = make_node (BLOCK);
10418 new_stmt = gimple_build_bind (NULL, NULL, block);
10419 /* Replace at gsi right away, so that 'stmt' is no longer a member
10420 of a sequence, as we're going to add it to a different
10421 one below. */
10422 gsi_replace (gsi_p, new_stmt, true);
10423
10424 /* Move declarations of temporaries out of the loop body before we
10425 make it go away. */
10426 omp_for_body = gimple_omp_body (stmt);
10427 if (!gimple_seq_empty_p (omp_for_body)
10428 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10429 {
10430 gbind *inner_bind
10431 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10432 tree vars = gimple_bind_vars (inner_bind);
10433 gimple_bind_append_vars (new_stmt, vars);
10434 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10435 keep them on the inner_bind and its block. */
10436 gimple_bind_set_vars (inner_bind, NULL_TREE);
10437 if (gimple_bind_block (inner_bind))
10438 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10439 }
10440
10441 if (gimple_omp_for_combined_into_p (stmt))
10442 {
10443 omp_extract_for_data (stmt, &fd, NULL);
10444 fdp = &fd;
10445
10446 /* We need two temporaries of fd.iter_type (istart/iend)
10447 and then (fd.collapse - 1) temporaries of the same
10448 type for the count2 ... countN-1 vars if they are not constant. */
10449 size_t count = 2;
10450 tree type = fd.iter_type;
10451 if (fd.collapse > 1
10452 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10453 count += fd.collapse - 1;
10454 bool taskreg_for
10455 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10456 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10457 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10458 tree simtc = NULL;
10459 tree clauses = *pc;
10460 if (taskreg_for)
10461 outerc
10462 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10463 OMP_CLAUSE__LOOPTEMP_);
10464 if (ctx->simt_stmt)
10465 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10466 OMP_CLAUSE__LOOPTEMP_);
10467 for (i = 0; i < count; i++)
10468 {
10469 tree temp;
10470 if (taskreg_for)
10471 {
10472 gcc_assert (outerc);
10473 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10474 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10475 OMP_CLAUSE__LOOPTEMP_);
10476 }
10477 else
10478 {
10479 /* If there are 2 adjacent SIMD stmts, one with _simt_
10480 clause, another without, make sure they have the same
10481 decls in _looptemp_ clauses, because the outer stmt
10482 they are combined into will look up just one inner_stmt. */
10483 if (ctx->simt_stmt)
10484 temp = OMP_CLAUSE_DECL (simtc);
10485 else
10486 temp = create_tmp_var (type);
10487 insert_decl_map (&ctx->outer->cb, temp, temp);
10488 }
10489 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10490 OMP_CLAUSE_DECL (*pc) = temp;
10491 pc = &OMP_CLAUSE_CHAIN (*pc);
10492 if (ctx->simt_stmt)
10493 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10494 OMP_CLAUSE__LOOPTEMP_);
10495 }
10496 *pc = clauses;
10497 }
10498
10499 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10500 dlist = NULL;
10501 body = NULL;
10502 tree rclauses
10503 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10504 OMP_CLAUSE_REDUCTION);
10505 tree rtmp = NULL_TREE;
10506 if (rclauses)
10507 {
10508 tree type = build_pointer_type (pointer_sized_int_node);
10509 tree temp = create_tmp_var (type);
10510 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10511 OMP_CLAUSE_DECL (c) = temp;
10512 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10513 gimple_omp_for_set_clauses (stmt, c);
10514 lower_omp_task_reductions (ctx, OMP_FOR,
10515 gimple_omp_for_clauses (stmt),
10516 &tred_ilist, &tred_dlist);
10517 rclauses = c;
10518 rtmp = make_ssa_name (type);
10519 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10520 }
10521
10522 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10523 ctx);
10524
10525 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10526 fdp);
10527 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10528 gimple_omp_for_pre_body (stmt));
10529
10530 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10531
10532 /* Lower the header expressions. At this point, we can assume that
10533 the header is of the form:
10534
10535 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10536
10537 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10538 using the .omp_data_s mapping, if needed. */
10539 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10540 {
10541 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10542 if (TREE_CODE (*rhs_p) == TREE_VEC)
10543 {
10544 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10545 TREE_VEC_ELT (*rhs_p, 1)
10546 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10547 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10548 TREE_VEC_ELT (*rhs_p, 2)
10549 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10550 }
10551 else if (!is_gimple_min_invariant (*rhs_p))
10552 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10553 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10554 recompute_tree_invariant_for_addr_expr (*rhs_p);
10555
10556 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10557 if (TREE_CODE (*rhs_p) == TREE_VEC)
10558 {
10559 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10560 TREE_VEC_ELT (*rhs_p, 1)
10561 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10562 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10563 TREE_VEC_ELT (*rhs_p, 2)
10564 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10565 }
10566 else if (!is_gimple_min_invariant (*rhs_p))
10567 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10568 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10569 recompute_tree_invariant_for_addr_expr (*rhs_p);
10570
10571 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10572 if (!is_gimple_min_invariant (*rhs_p))
10573 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10574 }
10575 if (rclauses)
10576 gimple_seq_add_seq (&tred_ilist, cnt_list);
10577 else
10578 gimple_seq_add_seq (&body, cnt_list);
10579
10580 /* Once lowered, extract the bounds and clauses. */
10581 omp_extract_for_data (stmt, &fd, NULL);
10582
10583 if (is_gimple_omp_oacc (ctx->stmt)
10584 && !ctx_in_oacc_kernels_region (ctx))
10585 lower_oacc_head_tail (gimple_location (stmt),
10586 gimple_omp_for_clauses (stmt),
10587 &oacc_head, &oacc_tail, ctx);
10588
10589 /* Add OpenACC partitioning and reduction markers just before the loop. */
10590 if (oacc_head)
10591 gimple_seq_add_seq (&body, oacc_head);
10592
10593 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10594
10595 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10596 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10597 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10598 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10599 {
10600 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10601 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10602 OMP_CLAUSE_LINEAR_STEP (c)
10603 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10604 ctx);
10605 }
10606
10607 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10608 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10609 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10610 else
10611 {
10612 gimple_seq_add_stmt (&body, stmt);
10613 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10614 }
10615
10616 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10617 fd.loop.v));
10618
10619 /* After the loop, add exit clauses. */
10620 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10621
10622 if (clist)
10623 {
10624 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10625 gcall *g = gimple_build_call (fndecl, 0);
10626 gimple_seq_add_stmt (&body, g);
10627 gimple_seq_add_seq (&body, clist);
10628 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10629 g = gimple_build_call (fndecl, 0);
10630 gimple_seq_add_stmt (&body, g);
10631 }
10632
10633 if (ctx->cancellable)
10634 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10635
10636 gimple_seq_add_seq (&body, dlist);
10637
10638 if (rclauses)
10639 {
10640 gimple_seq_add_seq (&tred_ilist, body);
10641 body = tred_ilist;
10642 }
10643
10644 body = maybe_catch_exception (body);
10645
10646 /* Region exit marker goes at the end of the loop body. */
10647 gimple *g = gimple_build_omp_return (fd.have_nowait);
10648 gimple_seq_add_stmt (&body, g);
10649
10650 gimple_seq_add_seq (&body, tred_dlist);
10651
10652 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10653
10654 if (rclauses)
10655 OMP_CLAUSE_DECL (rclauses) = rtmp;
10656
10657 /* Add OpenACC joining and reduction markers just after the loop. */
10658 if (oacc_tail)
10659 gimple_seq_add_seq (&body, oacc_tail);
10660
10661 pop_gimplify_context (new_stmt);
10662
10663 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10664 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10665 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10666 if (BLOCK_VARS (block))
10667 TREE_USED (block) = 1;
10668
10669 gimple_bind_set_body (new_stmt, body);
10670 gimple_omp_set_body (stmt, NULL);
10671 gimple_omp_for_set_pre_body (stmt, NULL);
10672 }
10673
10674 /* Callback for walk_stmts. Check whether the walked sequence contains
10675 a single GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS and nothing else. */
10676
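/* For example, a parallel body consisting of nothing but

     #pragma omp for ...

   leaves *(int *) WI->INFO at 1, so the enclosing parallel can be marked
   as a combined construct; any other statement (except debug stmts)
   forces it to -1.  */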
10677 static tree
10678 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10679 bool *handled_ops_p,
10680 struct walk_stmt_info *wi)
10681 {
10682 int *info = (int *) wi->info;
10683 gimple *stmt = gsi_stmt (*gsi_p);
10684
10685 *handled_ops_p = true;
10686 switch (gimple_code (stmt))
10687 {
10688 WALK_SUBSTMTS;
10689
10690 case GIMPLE_DEBUG:
10691 break;
10692 case GIMPLE_OMP_FOR:
10693 case GIMPLE_OMP_SECTIONS:
10694 *info = *info == 0 ? 1 : -1;
10695 break;
10696 default:
10697 *info = -1;
10698 break;
10699 }
10700 return NULL;
10701 }
10702
10703 struct omp_taskcopy_context
10704 {
10705 /* This field must be at the beginning, as we do "inheritance": Some
10706 callback functions for tree-inline.c (e.g., omp_copy_decl)
10707 receive a copy_body_data pointer that is up-casted to an
10708 omp_context pointer. */
10709 copy_body_data cb;
10710 omp_context *ctx;
10711 };
10712
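/* copy_body_data callback for the task copy function: variables that
   have a field in the sender record (sfield_map) get a fresh temporary
   in the copy function; everything else is left untouched.  */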
10713 static tree
10714 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10715 {
10716 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10717
10718 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10719 return create_tmp_var (TREE_TYPE (var));
10720
10721 return var;
10722 }
10723
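/* Build a variant of record ORIG_TYPE with field types and size/offset
   expressions remapped through TCCTX; needed when the record refers to
   variably modified types.  */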
10724 static tree
10725 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10726 {
10727 tree name, new_fields = NULL, type, f;
10728
10729 type = lang_hooks.types.make_type (RECORD_TYPE);
10730 name = DECL_NAME (TYPE_NAME (orig_type));
10731 name = build_decl (gimple_location (tcctx->ctx->stmt),
10732 TYPE_DECL, name, type);
10733 TYPE_NAME (type) = name;
10734
10735 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10736 {
10737 tree new_f = copy_node (f);
10738 DECL_CONTEXT (new_f) = type;
10739 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10740 TREE_CHAIN (new_f) = new_fields;
10741 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10742 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10743 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10744 &tcctx->cb, NULL);
10745 new_fields = new_f;
10746 tcctx->cb.decl_map->put (f, new_f);
10747 }
10748 TYPE_FIELDS (type) = nreverse (new_fields);
10749 layout_type (type);
10750 return type;
10751 }
10752
10753 /* Create the task copy function: copy shared variable pointers and
copy-construct firstprivate variables into a new task's record. */
10754
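/* A rough sketch of the generated function (all names illustrative):

     void task.copyfn (struct .omp_data_t *arg, struct .omp_data_s *sarg)
     {
       arg->shared_ptr = sarg->shared_ptr;
       arg->fpvar = <copy-construct from sarg->fpvar>;
       ...
     }

   where ARG points at the new task's data block and SARG at the sender
   record filled in by the parent; GOMP_task invokes it to initialize the
   task's copy of the data.  */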
10755 static void
10756 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
10757 {
10758 struct function *child_cfun;
10759 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
10760 tree record_type, srecord_type, bind, list;
10761 bool record_needs_remap = false, srecord_needs_remap = false;
10762 splay_tree_node n;
10763 struct omp_taskcopy_context tcctx;
10764 location_t loc = gimple_location (task_stmt);
10765 size_t looptempno = 0;
10766
10767 child_fn = gimple_omp_task_copy_fn (task_stmt);
10768 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
10769 gcc_assert (child_cfun->cfg == NULL);
10770 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
10771
10772 /* Reset DECL_CONTEXT on function arguments. */
10773 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
10774 DECL_CONTEXT (t) = child_fn;
10775
10776 /* Populate the function. */
10777 push_gimplify_context ();
10778 push_cfun (child_cfun);
10779
10780 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
10781 TREE_SIDE_EFFECTS (bind) = 1;
10782 list = NULL;
10783 DECL_SAVED_TREE (child_fn) = bind;
10784 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
10785
10786 /* Remap src and dst argument types if needed. */
10787 record_type = ctx->record_type;
10788 srecord_type = ctx->srecord_type;
10789 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
10790 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10791 {
10792 record_needs_remap = true;
10793 break;
10794 }
10795 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
10796 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
10797 {
10798 srecord_needs_remap = true;
10799 break;
10800 }
10801
10802 if (record_needs_remap || srecord_needs_remap)
10803 {
10804 memset (&tcctx, '\0', sizeof (tcctx));
10805 tcctx.cb.src_fn = ctx->cb.src_fn;
10806 tcctx.cb.dst_fn = child_fn;
10807 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
10808 gcc_checking_assert (tcctx.cb.src_node);
10809 tcctx.cb.dst_node = tcctx.cb.src_node;
10810 tcctx.cb.src_cfun = ctx->cb.src_cfun;
10811 tcctx.cb.copy_decl = task_copyfn_copy_decl;
10812 tcctx.cb.eh_lp_nr = 0;
10813 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
10814 tcctx.cb.decl_map = new hash_map<tree, tree>;
10815 tcctx.ctx = ctx;
10816
10817 if (record_needs_remap)
10818 record_type = task_copyfn_remap_type (&tcctx, record_type);
10819 if (srecord_needs_remap)
10820 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
10821 }
10822 else
10823 tcctx.cb.decl_map = NULL;
10824
10825 arg = DECL_ARGUMENTS (child_fn);
10826 TREE_TYPE (arg) = build_pointer_type (record_type);
10827 sarg = DECL_CHAIN (arg);
10828 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
10829
10830 /* First pass: initialize temporaries used in record_type and srecord_type
10831 sizes and field offsets. */
10832 if (tcctx.cb.decl_map)
10833 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10834 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10835 {
10836 tree *p;
10837
10838 decl = OMP_CLAUSE_DECL (c);
10839 p = tcctx.cb.decl_map->get (decl);
10840 if (p == NULL)
10841 continue;
10842 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10843 sf = (tree) n->value;
10844 sf = *tcctx.cb.decl_map->get (sf);
10845 src = build_simple_mem_ref_loc (loc, sarg);
10846 src = omp_build_component_ref (src, sf);
10847 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
10848 append_to_statement_list (t, &list);
10849 }
10850
10851 /* Second pass: copy shared variable pointers and copy-construct
10852 non-VLA firstprivate variables. */
10853 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10854 switch (OMP_CLAUSE_CODE (c))
10855 {
10856 splay_tree_key key;
10857 case OMP_CLAUSE_SHARED:
10858 decl = OMP_CLAUSE_DECL (c);
10859 key = (splay_tree_key) decl;
10860 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
10861 key = (splay_tree_key) &DECL_UID (decl);
10862 n = splay_tree_lookup (ctx->field_map, key);
10863 if (n == NULL)
10864 break;
10865 f = (tree) n->value;
10866 if (tcctx.cb.decl_map)
10867 f = *tcctx.cb.decl_map->get (f);
10868 n = splay_tree_lookup (ctx->sfield_map, key);
10869 sf = (tree) n->value;
10870 if (tcctx.cb.decl_map)
10871 sf = *tcctx.cb.decl_map->get (sf);
10872 src = build_simple_mem_ref_loc (loc, sarg);
10873 src = omp_build_component_ref (src, sf);
10874 dst = build_simple_mem_ref_loc (loc, arg);
10875 dst = omp_build_component_ref (dst, f);
10876 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10877 append_to_statement_list (t, &list);
10878 break;
10879 case OMP_CLAUSE_REDUCTION:
10880 case OMP_CLAUSE_IN_REDUCTION:
10881 decl = OMP_CLAUSE_DECL (c);
10882 if (TREE_CODE (decl) == MEM_REF)
10883 {
10884 decl = TREE_OPERAND (decl, 0);
10885 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10886 decl = TREE_OPERAND (decl, 0);
10887 if (TREE_CODE (decl) == INDIRECT_REF
10888 || TREE_CODE (decl) == ADDR_EXPR)
10889 decl = TREE_OPERAND (decl, 0);
10890 }
10891 key = (splay_tree_key) decl;
10892 n = splay_tree_lookup (ctx->field_map, key);
10893 if (n == NULL)
10894 break;
10895 f = (tree) n->value;
10896 if (tcctx.cb.decl_map)
10897 f = *tcctx.cb.decl_map->get (f);
10898 n = splay_tree_lookup (ctx->sfield_map, key);
10899 sf = (tree) n->value;
10900 if (tcctx.cb.decl_map)
10901 sf = *tcctx.cb.decl_map->get (sf);
10902 src = build_simple_mem_ref_loc (loc, sarg);
10903 src = omp_build_component_ref (src, sf);
10904 if (decl != OMP_CLAUSE_DECL (c)
10905 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10906 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10907 src = build_simple_mem_ref_loc (loc, src);
10908 dst = build_simple_mem_ref_loc (loc, arg);
10909 dst = omp_build_component_ref (dst, f);
10910 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10911 append_to_statement_list (t, &list);
10912 break;
10913 case OMP_CLAUSE__LOOPTEMP_:
10914 /* Fields for first two _looptemp_ clauses are initialized by
10915 GOMP_taskloop*, the rest are handled like firstprivate. */
10916 if (looptempno < 2)
10917 {
10918 looptempno++;
10919 break;
10920 }
10921 /* FALLTHRU */
10922 case OMP_CLAUSE__REDUCTEMP_:
10923 case OMP_CLAUSE_FIRSTPRIVATE:
10924 decl = OMP_CLAUSE_DECL (c);
10925 if (is_variable_sized (decl))
10926 break;
10927 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10928 if (n == NULL)
10929 break;
10930 f = (tree) n->value;
10931 if (tcctx.cb.decl_map)
10932 f = *tcctx.cb.decl_map->get (f);
10933 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10934 if (n != NULL)
10935 {
10936 sf = (tree) n->value;
10937 if (tcctx.cb.decl_map)
10938 sf = *tcctx.cb.decl_map->get (sf);
10939 src = build_simple_mem_ref_loc (loc, sarg);
10940 src = omp_build_component_ref (src, sf);
10941 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
10942 src = build_simple_mem_ref_loc (loc, src);
10943 }
10944 else
10945 src = decl;
10946 dst = build_simple_mem_ref_loc (loc, arg);
10947 dst = omp_build_component_ref (dst, f);
10948 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
10949 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10950 else
10951 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
10952 append_to_statement_list (t, &list);
10953 break;
10954 case OMP_CLAUSE_PRIVATE:
10955 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
10956 break;
10957 decl = OMP_CLAUSE_DECL (c);
10958 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10959 f = (tree) n->value;
10960 if (tcctx.cb.decl_map)
10961 f = *tcctx.cb.decl_map->get (f);
10962 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
10963 if (n != NULL)
10964 {
10965 sf = (tree) n->value;
10966 if (tcctx.cb.decl_map)
10967 sf = *tcctx.cb.decl_map->get (sf);
10968 src = build_simple_mem_ref_loc (loc, sarg);
10969 src = omp_build_component_ref (src, sf);
10970 if (use_pointer_for_field (decl, NULL))
10971 src = build_simple_mem_ref_loc (loc, src);
10972 }
10973 else
10974 src = decl;
10975 dst = build_simple_mem_ref_loc (loc, arg);
10976 dst = omp_build_component_ref (dst, f);
10977 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
10978 append_to_statement_list (t, &list);
10979 break;
10980 default:
10981 break;
10982 }
10983
10984 /* Last pass: handle VLA firstprivates. */
10985 if (tcctx.cb.decl_map)
10986 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10987 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10988 {
10989 tree ind, ptr, df;
10990
10991 decl = OMP_CLAUSE_DECL (c);
10992 if (!is_variable_sized (decl))
10993 continue;
10994 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
10995 if (n == NULL)
10996 continue;
10997 f = (tree) n->value;
10998 f = *tcctx.cb.decl_map->get (f);
10999 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
11000 ind = DECL_VALUE_EXPR (decl);
11001 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
11002 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
11003 n = splay_tree_lookup (ctx->sfield_map,
11004 (splay_tree_key) TREE_OPERAND (ind, 0));
11005 sf = (tree) n->value;
11006 sf = *tcctx.cb.decl_map->get (sf);
11007 src = build_simple_mem_ref_loc (loc, sarg);
11008 src = omp_build_component_ref (src, sf);
11009 src = build_simple_mem_ref_loc (loc, src);
11010 dst = build_simple_mem_ref_loc (loc, arg);
11011 dst = omp_build_component_ref (dst, f);
11012 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11013 append_to_statement_list (t, &list);
11014 n = splay_tree_lookup (ctx->field_map,
11015 (splay_tree_key) TREE_OPERAND (ind, 0));
11016 df = (tree) n->value;
11017 df = *tcctx.cb.decl_map->get (df);
11018 ptr = build_simple_mem_ref_loc (loc, arg);
11019 ptr = omp_build_component_ref (ptr, df);
11020 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
11021 build_fold_addr_expr_loc (loc, dst));
11022 append_to_statement_list (t, &list);
11023 }
11024
11025 t = build1 (RETURN_EXPR, void_type_node, NULL);
11026 append_to_statement_list (t, &list);
11027
11028 if (tcctx.cb.decl_map)
11029 delete tcctx.cb.decl_map;
11030 pop_gimplify_context (NULL);
11031 BIND_EXPR_BODY (bind) = list;
11032 pop_cfun ();
11033 }
11034
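/* Lower the depend clauses in *PCLAUSES: build an array holding the
   dependence counts and addresses, emit its initialization into *ISEQ
   and a clobber into *OSEQ, and prepend an OMP_CLAUSE_DEPEND_LAST clause
   pointing at it.  The array layout, as constructed below:

     only in/out/inout clauses:
       [0] total count N, [1] out/inout count,
       [2..] addresses (out/inout first, then in)
     with mutexinoutset or depobj clauses present:
       [0] 0 (marks this format), [1] total count N, [2] out/inout count,
       [3] mutexinoutset count, [4] in count,
       [5..] addresses (grouped out/inout, mutexinoutset, in, depobj)  */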
11035 static void
11036 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
11037 {
11038 tree c, clauses;
11039 gimple *g;
11040 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
11041
11042 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
11043 gcc_assert (clauses);
11044 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11045 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
11046 switch (OMP_CLAUSE_DEPEND_KIND (c))
11047 {
11048 case OMP_CLAUSE_DEPEND_LAST:
11049 /* Lowering already done at gimplification. */
11050 return;
11051 case OMP_CLAUSE_DEPEND_IN:
11052 cnt[2]++;
11053 break;
11054 case OMP_CLAUSE_DEPEND_OUT:
11055 case OMP_CLAUSE_DEPEND_INOUT:
11056 cnt[0]++;
11057 break;
11058 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11059 cnt[1]++;
11060 break;
11061 case OMP_CLAUSE_DEPEND_DEPOBJ:
11062 cnt[3]++;
11063 break;
11064 case OMP_CLAUSE_DEPEND_SOURCE:
11065 case OMP_CLAUSE_DEPEND_SINK:
11066 /* FALLTHRU */
11067 default:
11068 gcc_unreachable ();
11069 }
11070 if (cnt[1] || cnt[3])
11071 idx = 5;
11072 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
11073 tree type = build_array_type_nelts (ptr_type_node, total + idx);
11074 tree array = create_tmp_var (type);
11075 TREE_ADDRESSABLE (array) = 1;
11076 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
11077 NULL_TREE);
11078 if (idx == 5)
11079 {
11080 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
11081 gimple_seq_add_stmt (iseq, g);
11082 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
11083 NULL_TREE);
11084 }
11085 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
11086 gimple_seq_add_stmt (iseq, g);
11087 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
11088 {
11089 r = build4 (ARRAY_REF, ptr_type_node, array,
11090 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
11091 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
11092 gimple_seq_add_stmt (iseq, g);
11093 }
11094 for (i = 0; i < 4; i++)
11095 {
11096 if (cnt[i] == 0)
11097 continue;
11098 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11099 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
11100 continue;
11101 else
11102 {
11103 switch (OMP_CLAUSE_DEPEND_KIND (c))
11104 {
11105 case OMP_CLAUSE_DEPEND_IN:
11106 if (i != 2)
11107 continue;
11108 break;
11109 case OMP_CLAUSE_DEPEND_OUT:
11110 case OMP_CLAUSE_DEPEND_INOUT:
11111 if (i != 0)
11112 continue;
11113 break;
11114 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11115 if (i != 1)
11116 continue;
11117 break;
11118 case OMP_CLAUSE_DEPEND_DEPOBJ:
11119 if (i != 3)
11120 continue;
11121 break;
11122 default:
11123 gcc_unreachable ();
11124 }
11125 tree t = OMP_CLAUSE_DECL (c);
11126 t = fold_convert (ptr_type_node, t);
11127 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11128 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11129 NULL_TREE, NULL_TREE);
11130 g = gimple_build_assign (r, t);
11131 gimple_seq_add_stmt (iseq, g);
11132 }
11133 }
11134 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11135 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11136 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11137 OMP_CLAUSE_CHAIN (c) = *pclauses;
11138 *pclauses = c;
11139 tree clobber = build_clobber (type);
11140 g = gimple_build_assign (array, clobber);
11141 gimple_seq_add_stmt (oseq, g);
11142 }
11143
11144 /* Lower the OpenMP parallel or task directive in the current statement
11145 in GSI_P. CTX holds context information for the directive. */
11146
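/* A rough sketch for "#pragma omp parallel shared(x)" (names are
   illustrative): the parent fills in a sender record and the lowered
   body starts by binding the receiver pointer to it,

     .omp_data_o.x = &x;
     #pragma omp parallel [data arg .omp_data_o]
       {
         .omp_data_i = &.omp_data_o;
         ... *.omp_data_i->x ...
       }
     .omp_data_o = {CLOBBER};

   pass_expand_omp later outlines the region into the child function.  */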
11147 static void
11148 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11149 {
11150 tree clauses;
11151 tree child_fn, t;
11152 gimple *stmt = gsi_stmt (*gsi_p);
11153 gbind *par_bind, *bind, *dep_bind = NULL;
11154 gimple_seq par_body;
11155 location_t loc = gimple_location (stmt);
11156
11157 clauses = gimple_omp_taskreg_clauses (stmt);
11158 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11159 && gimple_omp_task_taskwait_p (stmt))
11160 {
11161 par_bind = NULL;
11162 par_body = NULL;
11163 }
11164 else
11165 {
11166 par_bind
11167 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11168 par_body = gimple_bind_body (par_bind);
11169 }
11170 child_fn = ctx->cb.dst_fn;
11171 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11172 && !gimple_omp_parallel_combined_p (stmt))
11173 {
11174 struct walk_stmt_info wi;
11175 int ws_num = 0;
11176
11177 memset (&wi, 0, sizeof (wi));
11178 wi.info = &ws_num;
11179 wi.val_only = true;
11180 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11181 if (ws_num == 1)
11182 gimple_omp_parallel_set_combined_p (stmt, true);
11183 }
11184 gimple_seq dep_ilist = NULL;
11185 gimple_seq dep_olist = NULL;
11186 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11187 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11188 {
11189 push_gimplify_context ();
11190 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11191 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11192 &dep_ilist, &dep_olist);
11193 }
11194
11195 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11196 && gimple_omp_task_taskwait_p (stmt))
11197 {
11198 if (dep_bind)
11199 {
11200 gsi_replace (gsi_p, dep_bind, true);
11201 gimple_bind_add_seq (dep_bind, dep_ilist);
11202 gimple_bind_add_stmt (dep_bind, stmt);
11203 gimple_bind_add_seq (dep_bind, dep_olist);
11204 pop_gimplify_context (dep_bind);
11205 }
11206 return;
11207 }
11208
11209 if (ctx->srecord_type)
11210 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11211
11212 gimple_seq tskred_ilist = NULL;
11213 gimple_seq tskred_olist = NULL;
11214 if ((is_task_ctx (ctx)
11215 && gimple_omp_task_taskloop_p (ctx->stmt)
11216 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11217 OMP_CLAUSE_REDUCTION))
11218 || (is_parallel_ctx (ctx)
11219 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11220 OMP_CLAUSE__REDUCTEMP_)))
11221 {
11222 if (dep_bind == NULL)
11223 {
11224 push_gimplify_context ();
11225 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11226 }
11227 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11228 : OMP_PARALLEL,
11229 gimple_omp_taskreg_clauses (ctx->stmt),
11230 &tskred_ilist, &tskred_olist);
11231 }
11232
11233 push_gimplify_context ();
11234
11235 gimple_seq par_olist = NULL;
11236 gimple_seq par_ilist = NULL;
11237 gimple_seq par_rlist = NULL;
11238 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11239 lower_omp (&par_body, ctx);
11240 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
11241 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11242
11243 /* Declare all the variables created by mapping and the variables
11244 declared in the scope of the parallel body. */
11245 record_vars_into (ctx->block_vars, child_fn);
11246 maybe_remove_omp_member_access_dummy_vars (par_bind);
11247 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11248
11249 if (ctx->record_type)
11250 {
11251 ctx->sender_decl
11252 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11253 : ctx->record_type, ".omp_data_o");
11254 DECL_NAMELESS (ctx->sender_decl) = 1;
11255 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11256 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
11257 }
11258
11259 gimple_seq olist = NULL;
11260 gimple_seq ilist = NULL;
11261 lower_send_clauses (clauses, &ilist, &olist, ctx);
11262 lower_send_shared_vars (&ilist, &olist, ctx);
11263
11264 if (ctx->record_type)
11265 {
11266 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
11267 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11268 clobber));
11269 }
11270
11271 /* Once all the expansions are done, sequence all the different
11272 fragments inside gimple_omp_body. */
11273
11274 gimple_seq new_body = NULL;
11275
11276 if (ctx->record_type)
11277 {
11278 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11279 /* fixup_child_record_type might have changed receiver_decl's type. */
11280 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11281 gimple_seq_add_stmt (&new_body,
11282 gimple_build_assign (ctx->receiver_decl, t));
11283 }
11284
11285 gimple_seq_add_seq (&new_body, par_ilist);
11286 gimple_seq_add_seq (&new_body, par_body);
11287 gimple_seq_add_seq (&new_body, par_rlist);
11288 if (ctx->cancellable)
11289 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11290 gimple_seq_add_seq (&new_body, par_olist);
11291 new_body = maybe_catch_exception (new_body);
11292 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11293 gimple_seq_add_stmt (&new_body,
11294 gimple_build_omp_continue (integer_zero_node,
11295 integer_zero_node));
11296 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11297 gimple_omp_set_body (stmt, new_body);
11298
11299 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11300 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11301 else
11302 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11303 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11304 gimple_bind_add_seq (bind, ilist);
11305 gimple_bind_add_stmt (bind, stmt);
11306 gimple_bind_add_seq (bind, olist);
11307
11308 pop_gimplify_context (NULL);
11309
11310 if (dep_bind)
11311 {
11312 gimple_bind_add_seq (dep_bind, dep_ilist);
11313 gimple_bind_add_seq (dep_bind, tskred_ilist);
11314 gimple_bind_add_stmt (dep_bind, bind);
11315 gimple_bind_add_seq (dep_bind, tskred_olist);
11316 gimple_bind_add_seq (dep_bind, dep_olist);
11317 pop_gimplify_context (dep_bind);
11318 }
11319 }
11320
11321 /* Lower the GIMPLE_OMP_TARGET in the current statement
11322 in GSI_P. CTX holds context information for the directive. */
11323
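/* For regions with mappings, this builds three parallel arrays, stored
   as a TREE_VEC in the target's data_arg: .omp_data_arr (host
   addresses), .omp_data_sizes (byte sizes) and .omp_data_kinds (packed
   GOMP_MAP_* kinds); GOMP_target_ext and friends receive these after
   expansion.  */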
11324 static void
11325 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11326 {
11327 tree clauses;
11328 tree child_fn, t, c;
11329 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11330 gbind *tgt_bind, *bind, *dep_bind = NULL;
11331 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11332 location_t loc = gimple_location (stmt);
11333 bool offloaded, data_region;
11334 unsigned int map_cnt = 0;
11335
11336 offloaded = is_gimple_omp_offloaded (stmt);
11337 switch (gimple_omp_target_kind (stmt))
11338 {
11339 case GF_OMP_TARGET_KIND_REGION:
11340 case GF_OMP_TARGET_KIND_UPDATE:
11341 case GF_OMP_TARGET_KIND_ENTER_DATA:
11342 case GF_OMP_TARGET_KIND_EXIT_DATA:
11343 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11344 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11345 case GF_OMP_TARGET_KIND_OACC_SERIAL:
11346 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11347 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11348 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11349 data_region = false;
11350 break;
11351 case GF_OMP_TARGET_KIND_DATA:
11352 case GF_OMP_TARGET_KIND_OACC_DATA:
11353 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11354 data_region = true;
11355 break;
11356 default:
11357 gcc_unreachable ();
11358 }
11359
11360 clauses = gimple_omp_target_clauses (stmt);
11361
11362 gimple_seq dep_ilist = NULL;
11363 gimple_seq dep_olist = NULL;
11364 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11365 {
11366 push_gimplify_context ();
11367 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11368 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11369 &dep_ilist, &dep_olist);
11370 }
11371
11372 tgt_bind = NULL;
11373 tgt_body = NULL;
11374 if (offloaded)
11375 {
11376 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11377 tgt_body = gimple_bind_body (tgt_bind);
11378 }
11379 else if (data_region)
11380 tgt_body = gimple_omp_body (stmt);
11381 child_fn = ctx->cb.dst_fn;
11382
11383 push_gimplify_context ();
11384 fplist = NULL;
11385
11386 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11387 switch (OMP_CLAUSE_CODE (c))
11388 {
11389 tree var, x;
11390
11391 default:
11392 break;
11393 case OMP_CLAUSE_MAP:
11394 #if CHECKING_P
11395 /* First check what we're prepared to handle in the following. */
11396 switch (OMP_CLAUSE_MAP_KIND (c))
11397 {
11398 case GOMP_MAP_ALLOC:
11399 case GOMP_MAP_TO:
11400 case GOMP_MAP_FROM:
11401 case GOMP_MAP_TOFROM:
11402 case GOMP_MAP_POINTER:
11403 case GOMP_MAP_TO_PSET:
11404 case GOMP_MAP_DELETE:
11405 case GOMP_MAP_RELEASE:
11406 case GOMP_MAP_ALWAYS_TO:
11407 case GOMP_MAP_ALWAYS_FROM:
11408 case GOMP_MAP_ALWAYS_TOFROM:
11409 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11410 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11411 case GOMP_MAP_STRUCT:
11412 case GOMP_MAP_ALWAYS_POINTER:
11413 break;
11414 case GOMP_MAP_IF_PRESENT:
11415 case GOMP_MAP_FORCE_ALLOC:
11416 case GOMP_MAP_FORCE_TO:
11417 case GOMP_MAP_FORCE_FROM:
11418 case GOMP_MAP_FORCE_TOFROM:
11419 case GOMP_MAP_FORCE_PRESENT:
11420 case GOMP_MAP_FORCE_DEVICEPTR:
11421 case GOMP_MAP_DEVICE_RESIDENT:
11422 case GOMP_MAP_LINK:
11423 case GOMP_MAP_ATTACH:
11424 case GOMP_MAP_DETACH:
11425 case GOMP_MAP_FORCE_DETACH:
11426 gcc_assert (is_gimple_omp_oacc (stmt));
11427 break;
11428 default:
11429 gcc_unreachable ();
11430 }
11431 #endif
11432 /* FALLTHRU */
11433 case OMP_CLAUSE_TO:
11434 case OMP_CLAUSE_FROM:
11435 oacc_firstprivate:
11436 var = OMP_CLAUSE_DECL (c);
11437 if (!DECL_P (var))
11438 {
11439 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11440 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11441 && (OMP_CLAUSE_MAP_KIND (c)
11442 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11443 map_cnt++;
11444 continue;
11445 }
11446
11447 if (DECL_SIZE (var)
11448 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11449 {
11450 tree var2 = DECL_VALUE_EXPR (var);
11451 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11452 var2 = TREE_OPERAND (var2, 0);
11453 gcc_assert (DECL_P (var2));
11454 var = var2;
11455 }
11456
11457 if (offloaded
11458 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11459 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11460 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11461 {
11462 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11463 {
11464 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11465 && varpool_node::get_create (var)->offloadable)
11466 continue;
11467
11468 tree type = build_pointer_type (TREE_TYPE (var));
11469 tree new_var = lookup_decl (var, ctx);
11470 x = create_tmp_var_raw (type, get_name (new_var));
11471 gimple_add_tmp_var (x);
11472 x = build_simple_mem_ref (x);
11473 SET_DECL_VALUE_EXPR (new_var, x);
11474 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11475 }
11476 continue;
11477 }
11478
11479 if (!maybe_lookup_field (var, ctx))
11480 continue;
11481
11482 /* Don't remap compute constructs' reduction variables, because the
11483 intermediate result must be local to each gang. */
11484 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11485 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11486 {
11487 x = build_receiver_ref (var, true, ctx);
11488 tree new_var = lookup_decl (var, ctx);
11489
11490 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11491 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11492 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11493 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11494 x = build_simple_mem_ref (x);
11495 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11496 {
11497 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11498 if (omp_is_reference (new_var)
11499 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11500 || DECL_BY_REFERENCE (var)))
11501 {
11502 /* Create a local object to hold the instance
11503 value. */
11504 tree type = TREE_TYPE (TREE_TYPE (new_var));
11505 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11506 tree inst = create_tmp_var (type, id);
11507 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11508 x = build_fold_addr_expr (inst);
11509 }
11510 gimplify_assign (new_var, x, &fplist);
11511 }
11512 else if (DECL_P (new_var))
11513 {
11514 SET_DECL_VALUE_EXPR (new_var, x);
11515 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11516 }
11517 else
11518 gcc_unreachable ();
11519 }
11520 map_cnt++;
11521 break;
11522
11523 case OMP_CLAUSE_FIRSTPRIVATE:
11524 if (is_oacc_parallel_or_serial (ctx))
11525 goto oacc_firstprivate;
11526 map_cnt++;
11527 var = OMP_CLAUSE_DECL (c);
11528 if (!omp_is_reference (var)
11529 && !is_gimple_reg_type (TREE_TYPE (var)))
11530 {
11531 tree new_var = lookup_decl (var, ctx);
11532 if (is_variable_sized (var))
11533 {
11534 tree pvar = DECL_VALUE_EXPR (var);
11535 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11536 pvar = TREE_OPERAND (pvar, 0);
11537 gcc_assert (DECL_P (pvar));
11538 tree new_pvar = lookup_decl (pvar, ctx);
11539 x = build_fold_indirect_ref (new_pvar);
11540 TREE_THIS_NOTRAP (x) = 1;
11541 }
11542 else
11543 x = build_receiver_ref (var, true, ctx);
11544 SET_DECL_VALUE_EXPR (new_var, x);
11545 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11546 }
11547 break;
11548
11549 case OMP_CLAUSE_PRIVATE:
11550 if (is_gimple_omp_oacc (ctx->stmt))
11551 break;
11552 var = OMP_CLAUSE_DECL (c);
11553 if (is_variable_sized (var))
11554 {
11555 tree new_var = lookup_decl (var, ctx);
11556 tree pvar = DECL_VALUE_EXPR (var);
11557 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11558 pvar = TREE_OPERAND (pvar, 0);
11559 gcc_assert (DECL_P (pvar));
11560 tree new_pvar = lookup_decl (pvar, ctx);
11561 x = build_fold_indirect_ref (new_pvar);
11562 TREE_THIS_NOTRAP (x) = 1;
11563 SET_DECL_VALUE_EXPR (new_var, x);
11564 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11565 }
11566 break;
11567
11568 case OMP_CLAUSE_USE_DEVICE_PTR:
11569 case OMP_CLAUSE_USE_DEVICE_ADDR:
11570 case OMP_CLAUSE_IS_DEVICE_PTR:
11571 var = OMP_CLAUSE_DECL (c);
11572 map_cnt++;
11573 if (is_variable_sized (var))
11574 {
11575 tree new_var = lookup_decl (var, ctx);
11576 tree pvar = DECL_VALUE_EXPR (var);
11577 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11578 pvar = TREE_OPERAND (pvar, 0);
11579 gcc_assert (DECL_P (pvar));
11580 tree new_pvar = lookup_decl (pvar, ctx);
11581 x = build_fold_indirect_ref (new_pvar);
11582 TREE_THIS_NOTRAP (x) = 1;
11583 SET_DECL_VALUE_EXPR (new_var, x);
11584 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11585 }
11586 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11587 && !omp_is_reference (var)
11588 && !omp_is_allocatable_or_ptr (var)
11589 && !lang_hooks.decls.omp_array_data (var, true))
11590 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11591 {
11592 tree new_var = lookup_decl (var, ctx);
11593 tree type = build_pointer_type (TREE_TYPE (var));
11594 x = create_tmp_var_raw (type, get_name (new_var));
11595 gimple_add_tmp_var (x);
11596 x = build_simple_mem_ref (x);
11597 SET_DECL_VALUE_EXPR (new_var, x);
11598 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11599 }
11600 else
11601 {
11602 tree new_var = lookup_decl (var, ctx);
11603 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11604 gimple_add_tmp_var (x);
11605 SET_DECL_VALUE_EXPR (new_var, x);
11606 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11607 }
11608 break;
11609 }
11610
11611 if (offloaded)
11612 {
11613 target_nesting_level++;
11614 lower_omp (&tgt_body, ctx);
11615 target_nesting_level--;
11616 }
11617 else if (data_region)
11618 lower_omp (&tgt_body, ctx);
11619
11620 if (offloaded)
11621 {
11622 /* Declare all the variables created by mapping and the variables
11623 declared in the scope of the target body. */
11624 record_vars_into (ctx->block_vars, child_fn);
11625 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11626 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11627 }
11628
11629 olist = NULL;
11630 ilist = NULL;
11631 if (ctx->record_type)
11632 {
11633 ctx->sender_decl
11634 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11635 DECL_NAMELESS (ctx->sender_decl) = 1;
11636 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11637 t = make_tree_vec (3);
11638 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11639 TREE_VEC_ELT (t, 1)
11640 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11641 ".omp_data_sizes");
11642 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11643 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11644 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11645 tree tkind_type = short_unsigned_type_node;
11646 int talign_shift = 8;
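/* Each element of .omp_data_kinds packs the GOMP_MAP_* kind into its low
   TALIGN_SHIFT (8) bits and ceil_log2 of the alignment into the bits
   above; see the talign handling further down.  */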
11647 TREE_VEC_ELT (t, 2)
11648 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11649 ".omp_data_kinds");
11650 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11651 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11652 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11653 gimple_omp_target_set_data_arg (stmt, t);
11654
11655 vec<constructor_elt, va_gc> *vsize;
11656 vec<constructor_elt, va_gc> *vkind;
11657 vec_alloc (vsize, map_cnt);
11658 vec_alloc (vkind, map_cnt);
11659 unsigned int map_idx = 0;
11660
11661 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11662 switch (OMP_CLAUSE_CODE (c))
11663 {
11664 tree ovar, nc, s, purpose, var, x, type;
11665 unsigned int talign;
11666
11667 default:
11668 break;
11669
11670 case OMP_CLAUSE_MAP:
11671 case OMP_CLAUSE_TO:
11672 case OMP_CLAUSE_FROM:
11673 oacc_firstprivate_map:
11674 nc = c;
11675 ovar = OMP_CLAUSE_DECL (c);
11676 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11677 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11678 || (OMP_CLAUSE_MAP_KIND (c)
11679 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11680 break;
11681 if (!DECL_P (ovar))
11682 {
11683 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11684 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11685 {
11686 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11687 == get_base_address (ovar));
11688 nc = OMP_CLAUSE_CHAIN (c);
11689 ovar = OMP_CLAUSE_DECL (nc);
11690 }
11691 else
11692 {
11693 tree x = build_sender_ref (ovar, ctx);
11694 tree v
11695 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11696 gimplify_assign (x, v, &ilist);
11697 nc = NULL_TREE;
11698 }
11699 }
11700 else
11701 {
11702 if (DECL_SIZE (ovar)
11703 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11704 {
11705 tree ovar2 = DECL_VALUE_EXPR (ovar);
11706 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11707 ovar2 = TREE_OPERAND (ovar2, 0);
11708 gcc_assert (DECL_P (ovar2));
11709 ovar = ovar2;
11710 }
11711 if (!maybe_lookup_field (ovar, ctx))
11712 continue;
11713 }
11714
11715 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11716 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11717 talign = DECL_ALIGN_UNIT (ovar);
11718 if (nc)
11719 {
11720 var = lookup_decl_in_outer_ctx (ovar, ctx);
11721 x = build_sender_ref (ovar, ctx);
11722
11723 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11724 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11725 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11726 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11727 {
11728 gcc_assert (offloaded);
11729 tree avar
11730 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11731 mark_addressable (avar);
11732 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11733 talign = DECL_ALIGN_UNIT (avar);
11734 avar = build_fold_addr_expr (avar);
11735 gimplify_assign (x, avar, &ilist);
11736 }
11737 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11738 {
11739 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11740 if (!omp_is_reference (var))
11741 {
11742 if (is_gimple_reg (var)
11743 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11744 TREE_NO_WARNING (var) = 1;
11745 var = build_fold_addr_expr (var);
11746 }
11747 else
11748 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11749 gimplify_assign (x, var, &ilist);
11750 }
11751 else if (is_gimple_reg (var))
11752 {
11753 gcc_assert (offloaded);
11754 tree avar = create_tmp_var (TREE_TYPE (var));
11755 mark_addressable (avar);
11756 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11757 if (GOMP_MAP_COPY_TO_P (map_kind)
11758 || map_kind == GOMP_MAP_POINTER
11759 || map_kind == GOMP_MAP_TO_PSET
11760 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11761 {
11762 /* If we need to initialize a temporary
11763 with VAR because it is not addressable, and
11764 the variable hasn't been initialized yet, then
11765 we'll get a warning for the store to avar.
11766 Don't warn in that case, the mapping might
11767 be implicit. */
11768 TREE_NO_WARNING (var) = 1;
11769 gimplify_assign (avar, var, &ilist);
11770 }
11771 avar = build_fold_addr_expr (avar);
11772 gimplify_assign (x, avar, &ilist);
11773 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11774 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11775 && !TYPE_READONLY (TREE_TYPE (var)))
11776 {
11777 x = unshare_expr (x);
11778 x = build_simple_mem_ref (x);
11779 gimplify_assign (var, x, &olist);
11780 }
11781 }
11782 else
11783 {
11784 /* While MAP is handled explicitly by the FE,
11785 for 'target update' only the variable itself is passed. */
11786 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
11787 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
11788 && (omp_is_allocatable_or_ptr (var)
11789 && omp_check_optional_argument (var, false)))
11790 var = build_fold_indirect_ref (var);
11791 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
11792 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
11793 || (!omp_is_allocatable_or_ptr (var)
11794 && !omp_check_optional_argument (var, false)))
11795 var = build_fold_addr_expr (var);
11796 gimplify_assign (x, var, &ilist);
11797 }
11798 }
11799 s = NULL_TREE;
11800 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11801 {
11802 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11803 s = TREE_TYPE (ovar);
11804 if (TREE_CODE (s) == REFERENCE_TYPE
11805 || omp_check_optional_argument (ovar, false))
11806 s = TREE_TYPE (s);
11807 s = TYPE_SIZE_UNIT (s);
11808 }
11809 else
11810 s = OMP_CLAUSE_SIZE (c);
11811 if (s == NULL_TREE)
11812 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11813 s = fold_convert (size_type_node, s);
11814 purpose = size_int (map_idx++);
11815 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11816 if (TREE_CODE (s) != INTEGER_CST)
11817 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11818
11819 unsigned HOST_WIDE_INT tkind, tkind_zero;
11820 switch (OMP_CLAUSE_CODE (c))
11821 {
11822 case OMP_CLAUSE_MAP:
11823 tkind = OMP_CLAUSE_MAP_KIND (c);
11824 tkind_zero = tkind;
11825 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11826 switch (tkind)
11827 {
11828 case GOMP_MAP_ALLOC:
11829 case GOMP_MAP_IF_PRESENT:
11830 case GOMP_MAP_TO:
11831 case GOMP_MAP_FROM:
11832 case GOMP_MAP_TOFROM:
11833 case GOMP_MAP_ALWAYS_TO:
11834 case GOMP_MAP_ALWAYS_FROM:
11835 case GOMP_MAP_ALWAYS_TOFROM:
11836 case GOMP_MAP_RELEASE:
11837 case GOMP_MAP_FORCE_TO:
11838 case GOMP_MAP_FORCE_FROM:
11839 case GOMP_MAP_FORCE_TOFROM:
11840 case GOMP_MAP_FORCE_PRESENT:
11841 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11842 break;
11843 case GOMP_MAP_DELETE:
11844 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11845 default:
11846 break;
11847 }
11848 if (tkind_zero != tkind)
11849 {
11850 if (integer_zerop (s))
11851 tkind = tkind_zero;
11852 else if (integer_nonzerop (s))
11853 tkind_zero = tkind;
11854 }
11855 break;
11856 case OMP_CLAUSE_FIRSTPRIVATE:
11857 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11858 tkind = GOMP_MAP_TO;
11859 tkind_zero = tkind;
11860 break;
11861 case OMP_CLAUSE_TO:
11862 tkind = GOMP_MAP_TO;
11863 tkind_zero = tkind;
11864 break;
11865 case OMP_CLAUSE_FROM:
11866 tkind = GOMP_MAP_FROM;
11867 tkind_zero = tkind;
11868 break;
11869 default:
11870 gcc_unreachable ();
11871 }
11872 gcc_checking_assert (tkind
11873 < (HOST_WIDE_INT_C (1U) << talign_shift));
11874 gcc_checking_assert (tkind_zero
11875 < (HOST_WIDE_INT_C (1U) << talign_shift));
11876 talign = ceil_log2 (talign);
11877 tkind |= talign << talign_shift;
11878 tkind_zero |= talign << talign_shift;
11879 gcc_checking_assert (tkind
11880 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11881 gcc_checking_assert (tkind_zero
11882 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11883 if (tkind == tkind_zero)
11884 x = build_int_cstu (tkind_type, tkind);
11885 else
11886 {
11887 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11888 x = build3 (COND_EXPR, tkind_type,
11889 fold_build2 (EQ_EXPR, boolean_type_node,
11890 unshare_expr (s), size_zero_node),
11891 build_int_cstu (tkind_type, tkind_zero),
11892 build_int_cstu (tkind_type, tkind));
11893 }
11894 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11895 if (nc && nc != c)
11896 c = nc;
11897 break;
11898
11899 case OMP_CLAUSE_FIRSTPRIVATE:
11900 if (is_oacc_parallel_or_serial (ctx))
11901 goto oacc_firstprivate_map;
11902 ovar = OMP_CLAUSE_DECL (c);
11903 if (omp_is_reference (ovar))
11904 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11905 else
11906 talign = DECL_ALIGN_UNIT (ovar);
11907 var = lookup_decl_in_outer_ctx (ovar, ctx);
11908 x = build_sender_ref (ovar, ctx);
11909 tkind = GOMP_MAP_FIRSTPRIVATE;
11910 type = TREE_TYPE (ovar);
11911 if (omp_is_reference (ovar))
11912 type = TREE_TYPE (type);
11913 if ((INTEGRAL_TYPE_P (type)
11914 && TYPE_PRECISION (type) <= POINTER_SIZE)
11915 || TREE_CODE (type) == POINTER_TYPE)
11916 {
11917 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11918 tree t = var;
11919 if (omp_is_reference (var))
11920 t = build_simple_mem_ref (var);
11921 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11922 TREE_NO_WARNING (var) = 1;
11923 if (TREE_CODE (type) != POINTER_TYPE)
11924 t = fold_convert (pointer_sized_int_node, t);
11925 t = fold_convert (TREE_TYPE (x), t);
11926 gimplify_assign (x, t, &ilist);
11927 }
11928 else if (omp_is_reference (var))
11929 gimplify_assign (x, var, &ilist);
11930 else if (is_gimple_reg (var))
11931 {
11932 tree avar = create_tmp_var (TREE_TYPE (var));
11933 mark_addressable (avar);
11934 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11935 TREE_NO_WARNING (var) = 1;
11936 gimplify_assign (avar, var, &ilist);
11937 avar = build_fold_addr_expr (avar);
11938 gimplify_assign (x, avar, &ilist);
11939 }
11940 else
11941 {
11942 var = build_fold_addr_expr (var);
11943 gimplify_assign (x, var, &ilist);
11944 }
11945 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
11946 s = size_int (0);
11947 else if (omp_is_reference (ovar))
11948 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11949 else
11950 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11951 s = fold_convert (size_type_node, s);
11952 purpose = size_int (map_idx++);
11953 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11954 if (TREE_CODE (s) != INTEGER_CST)
11955 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11956
11957 gcc_checking_assert (tkind
11958 < (HOST_WIDE_INT_C (1U) << talign_shift));
11959 talign = ceil_log2 (talign);
11960 tkind |= talign << talign_shift;
11961 gcc_checking_assert (tkind
11962 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11963 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
11964 build_int_cstu (tkind_type, tkind));
11965 break;
11966
11967 case OMP_CLAUSE_USE_DEVICE_PTR:
11968 case OMP_CLAUSE_USE_DEVICE_ADDR:
11969 case OMP_CLAUSE_IS_DEVICE_PTR:
11970 ovar = OMP_CLAUSE_DECL (c);
11971 var = lookup_decl_in_outer_ctx (ovar, ctx);
11972
11973 if (lang_hooks.decls.omp_array_data (ovar, true))
11974 {
11975 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
11976 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
11977 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
11978 }
11979 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
11980 {
11981 tkind = GOMP_MAP_USE_DEVICE_PTR;
11982 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
11983 }
11984 else
11985 {
11986 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
11987 x = build_sender_ref (ovar, ctx);
11988 }
11989
11990 if (is_gimple_omp_oacc (ctx->stmt))
11991 {
11992 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
11993
11994 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
11995 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
11996 }
11997
11998 type = TREE_TYPE (ovar);
11999 if (lang_hooks.decls.omp_array_data (ovar, true))
12000 var = lang_hooks.decls.omp_array_data (ovar, false);
12001 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12002 && !omp_is_reference (ovar)
12003 && !omp_is_allocatable_or_ptr (ovar))
12004 || TREE_CODE (type) == ARRAY_TYPE)
12005 var = build_fold_addr_expr (var);
12006 else
12007 {
12008 if (omp_is_reference (ovar)
12009 || omp_check_optional_argument (ovar, false)
12010 || omp_is_allocatable_or_ptr (ovar))
12011 {
12012 type = TREE_TYPE (type);
12013 if (TREE_CODE (type) != ARRAY_TYPE
12014 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12015 && !omp_is_allocatable_or_ptr (ovar))
12016 || (omp_is_reference (ovar)
12017 && omp_is_allocatable_or_ptr (ovar))))
12018 var = build_simple_mem_ref (var);
12019 var = fold_convert (TREE_TYPE (x), var);
12020 }
12021 }
12022 tree present;
12023 present = omp_check_optional_argument (ovar, true);
12024 if (present)
12025 {
12026 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12027 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12028 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12029 tree new_x = unshare_expr (x);
12030 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12031 fb_rvalue);
12032 gcond *cond = gimple_build_cond_from_tree (present,
12033 notnull_label,
12034 null_label);
12035 gimple_seq_add_stmt (&ilist, cond);
12036 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12037 gimplify_assign (new_x, null_pointer_node, &ilist);
12038 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12039 gimple_seq_add_stmt (&ilist,
12040 gimple_build_label (notnull_label));
12041 gimplify_assign (x, var, &ilist);
12042 gimple_seq_add_stmt (&ilist,
12043 gimple_build_label (opt_arg_label));
12044 }
12045 else
12046 gimplify_assign (x, var, &ilist);
12047 s = size_int (0);
12048 purpose = size_int (map_idx++);
12049 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12050 gcc_checking_assert (tkind
12051 < (HOST_WIDE_INT_C (1U) << talign_shift));
12052 gcc_checking_assert (tkind
12053 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12054 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12055 build_int_cstu (tkind_type, tkind));
12056 break;
12057 }
12058
12059 gcc_assert (map_idx == map_cnt);
12060
12061 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12062 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12063 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12064 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12065 for (int i = 1; i <= 2; i++)
12066 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12067 {
12068 gimple_seq initlist = NULL;
12069 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12070 TREE_VEC_ELT (t, i)),
12071 &initlist, true, NULL_TREE);
12072 gimple_seq_add_seq (&ilist, initlist);
12073
12074 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12075 gimple_seq_add_stmt (&olist,
12076 gimple_build_assign (TREE_VEC_ELT (t, i),
12077 clobber));
12078 }
12079
12080 tree clobber = build_clobber (ctx->record_type);
12081 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12082 clobber));
12083 }
12084
12085 /* Once all the expansions are done, sequence all the different
12086 fragments inside gimple_omp_body. */
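/* An illustrative sketch (not authoritative) of the sequence assembled
   below for an offloaded region:

     receiver_decl = (cast) &sender_decl;        <- if ctx->record_type
     <fplist: firstprivate initialization>
     <receiver-side clause handling, two passes>
     <fork_seq: OpenACC dummy-GANG reduction setup, if any>
     <tgt_body: the original region body>
     <join_seq>
     OMP_RETURN  */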
12087
12088 new_body = NULL;
12089
12090 if (offloaded
12091 && ctx->record_type)
12092 {
12093 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12094 /* fixup_child_record_type might have changed receiver_decl's type. */
12095 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12096 gimple_seq_add_stmt (&new_body,
12097 gimple_build_assign (ctx->receiver_decl, t));
12098 }
12099 gimple_seq_add_seq (&new_body, fplist);
12100
12101 if (offloaded || data_region)
12102 {
12103 tree prev = NULL_TREE;
12104 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12105 switch (OMP_CLAUSE_CODE (c))
12106 {
12107 tree var, x;
12108 default:
12109 break;
12110 case OMP_CLAUSE_FIRSTPRIVATE:
12111 if (is_gimple_omp_oacc (ctx->stmt))
12112 break;
12113 var = OMP_CLAUSE_DECL (c);
12114 if (omp_is_reference (var)
12115 || is_gimple_reg_type (TREE_TYPE (var)))
12116 {
12117 tree new_var = lookup_decl (var, ctx);
12118 tree type;
12119 type = TREE_TYPE (var);
12120 if (omp_is_reference (var))
12121 type = TREE_TYPE (type);
12122 if ((INTEGRAL_TYPE_P (type)
12123 && TYPE_PRECISION (type) <= POINTER_SIZE)
12124 || TREE_CODE (type) == POINTER_TYPE)
12125 {
12126 x = build_receiver_ref (var, false, ctx);
12127 if (TREE_CODE (type) != POINTER_TYPE)
12128 x = fold_convert (pointer_sized_int_node, x);
12129 x = fold_convert (type, x);
12130 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12131 fb_rvalue);
12132 if (omp_is_reference (var))
12133 {
12134 tree v = create_tmp_var_raw (type, get_name (var));
12135 gimple_add_tmp_var (v);
12136 TREE_ADDRESSABLE (v) = 1;
12137 gimple_seq_add_stmt (&new_body,
12138 gimple_build_assign (v, x));
12139 x = build_fold_addr_expr (v);
12140 }
12141 gimple_seq_add_stmt (&new_body,
12142 gimple_build_assign (new_var, x));
12143 }
12144 else
12145 {
12146 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12147 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12148 fb_rvalue);
12149 gimple_seq_add_stmt (&new_body,
12150 gimple_build_assign (new_var, x));
12151 }
12152 }
12153 else if (is_variable_sized (var))
12154 {
12155 tree pvar = DECL_VALUE_EXPR (var);
12156 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12157 pvar = TREE_OPERAND (pvar, 0);
12158 gcc_assert (DECL_P (pvar));
12159 tree new_var = lookup_decl (pvar, ctx);
12160 x = build_receiver_ref (var, false, ctx);
12161 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12162 gimple_seq_add_stmt (&new_body,
12163 gimple_build_assign (new_var, x));
12164 }
12165 break;
12166 case OMP_CLAUSE_PRIVATE:
12167 if (is_gimple_omp_oacc (ctx->stmt))
12168 break;
12169 var = OMP_CLAUSE_DECL (c);
12170 if (omp_is_reference (var))
12171 {
12172 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12173 tree new_var = lookup_decl (var, ctx);
12174 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12175 if (TREE_CONSTANT (x))
12176 {
12177 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12178 get_name (var));
12179 gimple_add_tmp_var (x);
12180 TREE_ADDRESSABLE (x) = 1;
12181 x = build_fold_addr_expr_loc (clause_loc, x);
12182 }
12183 else
12184 break;
12185
12186 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12187 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12188 gimple_seq_add_stmt (&new_body,
12189 gimple_build_assign (new_var, x));
12190 }
12191 break;
12192 case OMP_CLAUSE_USE_DEVICE_PTR:
12193 case OMP_CLAUSE_USE_DEVICE_ADDR:
12194 case OMP_CLAUSE_IS_DEVICE_PTR:
12195 tree new_var;
12196 gimple_seq assign_body;
12197 bool is_array_data;
12198 bool do_optional_check;
12199 assign_body = NULL;
12200 do_optional_check = false;
12201 var = OMP_CLAUSE_DECL (c);
12202 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12203
12204 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12205 x = build_sender_ref (is_array_data
12206 ? (splay_tree_key) &DECL_NAME (var)
12207 : (splay_tree_key) &DECL_UID (var), ctx);
12208 else
12209 x = build_receiver_ref (var, false, ctx);
12210
12211 if (is_array_data)
12212 {
12213 bool is_ref = omp_is_reference (var);
12214 do_optional_check = true;
12215 /* First, we copy the descriptor data from the host; then
12216 we update its data to point to the target address. */
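/* Illustrative sketch (identifiers as in the code below, semantics
   approximate): for a Fortran descriptor-backed array this emits

     new_var = var;                      copy the whole descriptor
     omp_array_data (new_var) = x;       then redirect its data pointer

   where X is the address obtained above.  */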
12217 new_var = lookup_decl (var, ctx);
12218 new_var = DECL_VALUE_EXPR (new_var);
12219 tree v = new_var;
12220
12221 if (is_ref)
12222 {
12223 var = build_fold_indirect_ref (var);
12224 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12225 fb_rvalue);
12226 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12227 gimple_add_tmp_var (v);
12228 TREE_ADDRESSABLE (v) = 1;
12229 gimple_seq_add_stmt (&assign_body,
12230 gimple_build_assign (v, var));
12231 tree rhs = build_fold_addr_expr (v);
12232 gimple_seq_add_stmt (&assign_body,
12233 gimple_build_assign (new_var, rhs));
12234 }
12235 else
12236 gimple_seq_add_stmt (&assign_body,
12237 gimple_build_assign (new_var, var));
12238
12239 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12240 gcc_assert (v2);
12241 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12242 gimple_seq_add_stmt (&assign_body,
12243 gimple_build_assign (v2, x));
12244 }
12245 else if (is_variable_sized (var))
12246 {
12247 tree pvar = DECL_VALUE_EXPR (var);
12248 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12249 pvar = TREE_OPERAND (pvar, 0);
12250 gcc_assert (DECL_P (pvar));
12251 new_var = lookup_decl (pvar, ctx);
12252 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12253 gimple_seq_add_stmt (&assign_body,
12254 gimple_build_assign (new_var, x));
12255 }
12256 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12257 && !omp_is_reference (var)
12258 && !omp_is_allocatable_or_ptr (var))
12259 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12260 {
12261 new_var = lookup_decl (var, ctx);
12262 new_var = DECL_VALUE_EXPR (new_var);
12263 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12264 new_var = TREE_OPERAND (new_var, 0);
12265 gcc_assert (DECL_P (new_var));
12266 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12267 gimple_seq_add_stmt (&assign_body,
12268 gimple_build_assign (new_var, x));
12269 }
12270 else
12271 {
12272 tree type = TREE_TYPE (var);
12273 new_var = lookup_decl (var, ctx);
12274 if (omp_is_reference (var))
12275 {
12276 type = TREE_TYPE (type);
12277 if (TREE_CODE (type) != ARRAY_TYPE
12278 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12279 || (omp_is_reference (var)
12280 && omp_is_allocatable_or_ptr (var))))
12281 {
12282 tree v = create_tmp_var_raw (type, get_name (var));
12283 gimple_add_tmp_var (v);
12284 TREE_ADDRESSABLE (v) = 1;
12285 x = fold_convert (type, x);
12286 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12287 fb_rvalue);
12288 gimple_seq_add_stmt (&assign_body,
12289 gimple_build_assign (v, x));
12290 x = build_fold_addr_expr (v);
12291 do_optional_check = true;
12292 }
12293 }
12294 new_var = DECL_VALUE_EXPR (new_var);
12295 x = fold_convert (TREE_TYPE (new_var), x);
12296 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12297 gimple_seq_add_stmt (&assign_body,
12298 gimple_build_assign (new_var, x));
12299 }
12300 tree present;
12301 present = (do_optional_check
12302 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12303 : NULL_TREE);
12304 if (present)
12305 {
12306 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12307 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12308 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12309 glabel *null_glabel = gimple_build_label (null_label);
12310 glabel *notnull_glabel = gimple_build_label (notnull_label);
12311 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12312 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12313 fb_rvalue);
12314 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12315 fb_rvalue);
12316 gcond *cond = gimple_build_cond_from_tree (present,
12317 notnull_label,
12318 null_label);
12319 gimple_seq_add_stmt (&new_body, cond);
12320 gimple_seq_add_stmt (&new_body, null_glabel);
12321 gimplify_assign (new_var, null_pointer_node, &new_body);
12322 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12323 gimple_seq_add_stmt (&new_body, notnull_glabel);
12324 gimple_seq_add_seq (&new_body, assign_body);
12325 gimple_seq_add_stmt (&new_body,
12326 gimple_build_label (opt_arg_label));
12327 }
12328 else
12329 gimple_seq_add_seq (&new_body, assign_body);
12330 break;
12331 }
12332 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
12333 so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE are
12334 already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs or
12335 references to VLAs. */
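/* Rough sketch (illustrative only): for GOMP_MAP_FIRSTPRIVATE_POINTER
   attached to a previous map clause PREV with bias B, the receiver side
   below effectively performs

     P = (type) (receiver_ref (prev) + (-B));

   i.e. the address received for the previous clause, rebased by the
   negated OMP_CLAUSE_SIZE bias (the bias step is skipped when B is 0).  */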
12336 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12337 switch (OMP_CLAUSE_CODE (c))
12338 {
12339 tree var;
12340 default:
12341 break;
12342 case OMP_CLAUSE_MAP:
12343 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12344 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12345 {
12346 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12347 poly_int64 offset = 0;
12348 gcc_assert (prev);
12349 var = OMP_CLAUSE_DECL (c);
12350 if (DECL_P (var)
12351 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12352 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12353 ctx))
12354 && varpool_node::get_create (var)->offloadable)
12355 break;
12356 if (TREE_CODE (var) == INDIRECT_REF
12357 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12358 var = TREE_OPERAND (var, 0);
12359 if (TREE_CODE (var) == COMPONENT_REF)
12360 {
12361 var = get_addr_base_and_unit_offset (var, &offset);
12362 gcc_assert (var != NULL_TREE && DECL_P (var));
12363 }
12364 else if (DECL_SIZE (var)
12365 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12366 {
12367 tree var2 = DECL_VALUE_EXPR (var);
12368 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12369 var2 = TREE_OPERAND (var2, 0);
12370 gcc_assert (DECL_P (var2));
12371 var = var2;
12372 }
12373 tree new_var = lookup_decl (var, ctx), x;
12374 tree type = TREE_TYPE (new_var);
12375 bool is_ref;
12376 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12377 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12378 == COMPONENT_REF))
12379 {
12380 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12381 is_ref = true;
12382 new_var = build2 (MEM_REF, type,
12383 build_fold_addr_expr (new_var),
12384 build_int_cst (build_pointer_type (type),
12385 offset));
12386 }
12387 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12388 {
12389 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12390 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12391 new_var = build2 (MEM_REF, type,
12392 build_fold_addr_expr (new_var),
12393 build_int_cst (build_pointer_type (type),
12394 offset));
12395 }
12396 else
12397 is_ref = omp_is_reference (var);
12398 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12399 is_ref = false;
12400 bool ref_to_array = false;
12401 if (is_ref)
12402 {
12403 type = TREE_TYPE (type);
12404 if (TREE_CODE (type) == ARRAY_TYPE)
12405 {
12406 type = build_pointer_type (type);
12407 ref_to_array = true;
12408 }
12409 }
12410 else if (TREE_CODE (type) == ARRAY_TYPE)
12411 {
12412 tree decl2 = DECL_VALUE_EXPR (new_var);
12413 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12414 decl2 = TREE_OPERAND (decl2, 0);
12415 gcc_assert (DECL_P (decl2));
12416 new_var = decl2;
12417 type = TREE_TYPE (new_var);
12418 }
12419 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12420 x = fold_convert_loc (clause_loc, type, x);
12421 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12422 {
12423 tree bias = OMP_CLAUSE_SIZE (c);
12424 if (DECL_P (bias))
12425 bias = lookup_decl (bias, ctx);
12426 bias = fold_convert_loc (clause_loc, sizetype, bias);
12427 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12428 bias);
12429 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12430 TREE_TYPE (x), x, bias);
12431 }
12432 if (ref_to_array)
12433 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12434 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12435 if (is_ref && !ref_to_array)
12436 {
12437 tree t = create_tmp_var_raw (type, get_name (var));
12438 gimple_add_tmp_var (t);
12439 TREE_ADDRESSABLE (t) = 1;
12440 gimple_seq_add_stmt (&new_body,
12441 gimple_build_assign (t, x));
12442 x = build_fold_addr_expr_loc (clause_loc, t);
12443 }
12444 gimple_seq_add_stmt (&new_body,
12445 gimple_build_assign (new_var, x));
12446 prev = NULL_TREE;
12447 }
12448 else if (OMP_CLAUSE_CHAIN (c)
12449 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12450 == OMP_CLAUSE_MAP
12451 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12452 == GOMP_MAP_FIRSTPRIVATE_POINTER
12453 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12454 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12455 prev = c;
12456 break;
12457 case OMP_CLAUSE_PRIVATE:
12458 var = OMP_CLAUSE_DECL (c);
12459 if (is_variable_sized (var))
12460 {
12461 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12462 tree new_var = lookup_decl (var, ctx);
12463 tree pvar = DECL_VALUE_EXPR (var);
12464 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12465 pvar = TREE_OPERAND (pvar, 0);
12466 gcc_assert (DECL_P (pvar));
12467 tree new_pvar = lookup_decl (pvar, ctx);
12468 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12469 tree al = size_int (DECL_ALIGN (var));
12470 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12471 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12472 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12473 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12474 gimple_seq_add_stmt (&new_body,
12475 gimple_build_assign (new_pvar, x));
12476 }
12477 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12478 {
12479 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12480 tree new_var = lookup_decl (var, ctx);
12481 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12482 if (TREE_CONSTANT (x))
12483 break;
12484 else
12485 {
12486 tree atmp
12487 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12488 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12489 tree al = size_int (TYPE_ALIGN (rtype));
12490 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12491 }
12492
12493 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12494 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12495 gimple_seq_add_stmt (&new_body,
12496 gimple_build_assign (new_var, x));
12497 }
12498 break;
12499 }
12500
12501 gimple_seq fork_seq = NULL;
12502 gimple_seq join_seq = NULL;
12503
12504 if (is_oacc_parallel_or_serial (ctx))
12505 {
12506 /* If there are reductions on the offloaded region itself, treat
12507 them as a dummy GANG loop. */
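/* E.g. (illustrative) for "#pragma acc parallel reduction (+:sum)"
   the reduction is lowered here at GOMP_DIM_GANG level and its
   fork/join fragments wrap the region body sequenced below.  */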
12508 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12509
12510 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12511 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12512 }
12513
12514 gimple_seq_add_seq (&new_body, fork_seq);
12515 gimple_seq_add_seq (&new_body, tgt_body);
12516 gimple_seq_add_seq (&new_body, join_seq);
12517
12518 if (offloaded)
12519 new_body = maybe_catch_exception (new_body);
12520
12521 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12522 gimple_omp_set_body (stmt, new_body);
12523 }
12524
12525 bind = gimple_build_bind (NULL, NULL,
12526 tgt_bind ? gimple_bind_block (tgt_bind)
12527 : NULL_TREE);
12528 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12529 gimple_bind_add_seq (bind, ilist);
12530 gimple_bind_add_stmt (bind, stmt);
12531 gimple_bind_add_seq (bind, olist);
12532
12533 pop_gimplify_context (NULL);
12534
12535 if (dep_bind)
12536 {
12537 gimple_bind_add_seq (dep_bind, dep_ilist);
12538 gimple_bind_add_stmt (dep_bind, bind);
12539 gimple_bind_add_seq (dep_bind, dep_olist);
12540 pop_gimplify_context (dep_bind);
12541 }
12542 }
12543
12544 /* Lower code for an OpenMP teams directive. */
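/* For instance (a non-authoritative sketch),

     #pragma omp teams num_teams (4) thread_limit (8)
       body;

   is lowered to roughly

     __builtin_GOMP_teams (4, 8);
     body;
     OMP_RETURN

   with a missing num_teams or thread_limit clause defaulting to 0,
   leaving the choice to the runtime.  */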
12545
12546 static void
12547 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12548 {
12549 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12550 push_gimplify_context ();
12551
12552 tree block = make_node (BLOCK);
12553 gbind *bind = gimple_build_bind (NULL, NULL, block);
12554 gsi_replace (gsi_p, bind, true);
12555 gimple_seq bind_body = NULL;
12556 gimple_seq dlist = NULL;
12557 gimple_seq olist = NULL;
12558
12559 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12560 OMP_CLAUSE_NUM_TEAMS);
12561 if (num_teams == NULL_TREE)
12562 num_teams = build_int_cst (unsigned_type_node, 0);
12563 else
12564 {
12565 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12566 num_teams = fold_convert (unsigned_type_node, num_teams);
12567 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
12568 }
12569 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12570 OMP_CLAUSE_THREAD_LIMIT);
12571 if (thread_limit == NULL_TREE)
12572 thread_limit = build_int_cst (unsigned_type_node, 0);
12573 else
12574 {
12575 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12576 thread_limit = fold_convert (unsigned_type_node, thread_limit);
12577 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12578 fb_rvalue);
12579 }
12580
12581 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12582 &bind_body, &dlist, ctx, NULL);
12583 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
12584 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12585 NULL, ctx);
12586 gimple_seq_add_stmt (&bind_body, teams_stmt);
12587
12588 location_t loc = gimple_location (teams_stmt);
12589 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12590 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12591 gimple_set_location (call, loc);
12592 gimple_seq_add_stmt (&bind_body, call);
12593
12594 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12595 gimple_omp_set_body (teams_stmt, NULL);
12596 gimple_seq_add_seq (&bind_body, olist);
12597 gimple_seq_add_seq (&bind_body, dlist);
12598 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12599 gimple_bind_set_body (bind, bind_body);
12600
12601 pop_gimplify_context (bind);
12602
12603 gimple_bind_append_vars (bind, ctx->block_vars);
12604 BLOCK_VARS (block) = ctx->block_vars;
12605 if (BLOCK_VARS (block))
12606 TREE_USED (block) = 1;
12607 }
12608
12609 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12610 regimplified. If DATA is non-NULL, lower_omp_1 is being invoked
12611 outside of an OMP context, but with task_shared_vars set. */
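/* For example (illustrative): a privatized VLA "int a[n]" has
   DECL_VALUE_EXPR (a) of the form *a.ptr, so once A's replacement is
   substituted, any statement mentioning A must be regimplified.  */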
12612
12613 static tree
12614 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12615 void *data)
12616 {
12617 tree t = *tp;
12618
12619 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12620 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12621 return t;
12622
12623 if (task_shared_vars
12624 && DECL_P (t)
12625 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12626 return t;
12627
12628 /* If a global variable has been privatized, TREE_CONSTANT on
12629 ADDR_EXPR might be wrong. */
12630 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12631 recompute_tree_invariant_for_addr_expr (t);
12632
12633 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12634 return NULL_TREE;
12635 }
12636
12637 /* Data to be communicated between lower_omp_regimplify_operands and
12638 lower_omp_regimplify_operands_p. */
12639
12640 struct lower_omp_regimplify_operands_data
12641 {
12642 omp_context *ctx;
12643 vec<tree> *decls;
12644 };
12645
12646 /* Helper function for lower_omp_regimplify_operands. Find
12647 omp_member_access_dummy_var vars and temporarily adjust their
12648 DECL_VALUE_EXPRs if needed. */
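/* Sketch (not authoritative): for a member-access dummy var *TP whose
   DECL_VALUE_EXPR mentions T, with T remapped to O in this context,
   the walk does approximately

     SET_DECL_VALUE_EXPR (*tp, unshare_and_remap (DECL_VALUE_EXPR (*tp),
                                                  t, o));

   saving the old value expr so the caller can restore it afterwards.  */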
12649
12650 static tree
12651 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12652 void *data)
12653 {
12654 tree t = omp_member_access_dummy_var (*tp);
12655 if (t)
12656 {
12657 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12658 lower_omp_regimplify_operands_data *ldata
12659 = (lower_omp_regimplify_operands_data *) wi->info;
12660 tree o = maybe_lookup_decl (t, ldata->ctx);
12661 if (o != t)
12662 {
12663 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12664 ldata->decls->safe_push (*tp);
12665 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12666 SET_DECL_VALUE_EXPR (*tp, v);
12667 }
12668 }
12669 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12670 return NULL_TREE;
12671 }
12672
12673 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12674 of omp_member_access_dummy_var vars during regimplification. */
12675
12676 static void
12677 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12678 gimple_stmt_iterator *gsi_p)
12679 {
12680 auto_vec<tree, 10> decls;
12681 if (ctx)
12682 {
12683 struct walk_stmt_info wi;
12684 memset (&wi, '\0', sizeof (wi));
12685 struct lower_omp_regimplify_operands_data data;
12686 data.ctx = ctx;
12687 data.decls = &decls;
12688 wi.info = &data;
12689 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12690 }
12691 gimple_regimplify_operands (stmt, gsi_p);
12692 while (!decls.is_empty ())
12693 {
12694 tree t = decls.pop ();
12695 tree v = decls.pop ();
12696 SET_DECL_VALUE_EXPR (t, v);
12697 }
12698 }
12699
12700 static void
12701 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12702 {
12703 gimple *stmt = gsi_stmt (*gsi_p);
12704 struct walk_stmt_info wi;
12705 gcall *call_stmt;
12706
12707 if (gimple_has_location (stmt))
12708 input_location = gimple_location (stmt);
12709
12710 if (task_shared_vars)
12711 memset (&wi, '\0', sizeof (wi));
12712
12713 /* If we have issued syntax errors, avoid doing any heavy lifting.
12714 Just replace the OMP directives with a NOP to avoid
12715 confusing RTL expansion. */
12716 if (seen_error () && is_gimple_omp (stmt))
12717 {
12718 gsi_replace (gsi_p, gimple_build_nop (), true);
12719 return;
12720 }
12721
12722 switch (gimple_code (stmt))
12723 {
12724 case GIMPLE_COND:
12725 {
12726 gcond *cond_stmt = as_a <gcond *> (stmt);
12727 if ((ctx || task_shared_vars)
12728 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
12729 lower_omp_regimplify_p,
12730 ctx ? NULL : &wi, NULL)
12731 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
12732 lower_omp_regimplify_p,
12733 ctx ? NULL : &wi, NULL)))
12734 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
12735 }
12736 break;
12737 case GIMPLE_CATCH:
12738 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
12739 break;
12740 case GIMPLE_EH_FILTER:
12741 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
12742 break;
12743 case GIMPLE_TRY:
12744 lower_omp (gimple_try_eval_ptr (stmt), ctx);
12745 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
12746 break;
12747 case GIMPLE_TRANSACTION:
12748 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
12749 ctx);
12750 break;
12751 case GIMPLE_BIND:
12752 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
12753 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
12754 break;
12755 case GIMPLE_OMP_PARALLEL:
12756 case GIMPLE_OMP_TASK:
12757 ctx = maybe_lookup_ctx (stmt);
12758 gcc_assert (ctx);
12759 if (ctx->cancellable)
12760 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12761 lower_omp_taskreg (gsi_p, ctx);
12762 break;
12763 case GIMPLE_OMP_FOR:
12764 ctx = maybe_lookup_ctx (stmt);
12765 gcc_assert (ctx);
12766 if (ctx->cancellable)
12767 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12768 lower_omp_for (gsi_p, ctx);
12769 break;
12770 case GIMPLE_OMP_SECTIONS:
12771 ctx = maybe_lookup_ctx (stmt);
12772 gcc_assert (ctx);
12773 if (ctx->cancellable)
12774 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
12775 lower_omp_sections (gsi_p, ctx);
12776 break;
12777 case GIMPLE_OMP_SINGLE:
12778 ctx = maybe_lookup_ctx (stmt);
12779 gcc_assert (ctx);
12780 lower_omp_single (gsi_p, ctx);
12781 break;
12782 case GIMPLE_OMP_MASTER:
12783 ctx = maybe_lookup_ctx (stmt);
12784 gcc_assert (ctx);
12785 lower_omp_master (gsi_p, ctx);
12786 break;
12787 case GIMPLE_OMP_TASKGROUP:
12788 ctx = maybe_lookup_ctx (stmt);
12789 gcc_assert (ctx);
12790 lower_omp_taskgroup (gsi_p, ctx);
12791 break;
12792 case GIMPLE_OMP_ORDERED:
12793 ctx = maybe_lookup_ctx (stmt);
12794 gcc_assert (ctx);
12795 lower_omp_ordered (gsi_p, ctx);
12796 break;
12797 case GIMPLE_OMP_SCAN:
12798 ctx = maybe_lookup_ctx (stmt);
12799 gcc_assert (ctx);
12800 lower_omp_scan (gsi_p, ctx);
12801 break;
12802 case GIMPLE_OMP_CRITICAL:
12803 ctx = maybe_lookup_ctx (stmt);
12804 gcc_assert (ctx);
12805 lower_omp_critical (gsi_p, ctx);
12806 break;
12807 case GIMPLE_OMP_ATOMIC_LOAD:
12808 if ((ctx || task_shared_vars)
12809 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
12810 as_a <gomp_atomic_load *> (stmt)),
12811 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
12812 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12813 break;
12814 case GIMPLE_OMP_TARGET:
12815 ctx = maybe_lookup_ctx (stmt);
12816 gcc_assert (ctx);
12817 lower_omp_target (gsi_p, ctx);
12818 break;
12819 case GIMPLE_OMP_TEAMS:
12820 ctx = maybe_lookup_ctx (stmt);
12821 gcc_assert (ctx);
12822 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
12823 lower_omp_taskreg (gsi_p, ctx);
12824 else
12825 lower_omp_teams (gsi_p, ctx);
12826 break;
12827 case GIMPLE_CALL:
12828 tree fndecl;
12829 call_stmt = as_a <gcall *> (stmt);
12830 fndecl = gimple_call_fndecl (call_stmt);
12831 if (fndecl
12832 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12833 switch (DECL_FUNCTION_CODE (fndecl))
12834 {
12835 case BUILT_IN_GOMP_BARRIER:
12836 if (ctx == NULL)
12837 break;
12838 /* FALLTHRU */
12839 case BUILT_IN_GOMP_CANCEL:
12840 case BUILT_IN_GOMP_CANCELLATION_POINT:
12841 omp_context *cctx;
12842 cctx = ctx;
12843 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
12844 cctx = cctx->outer;
12845 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
12846 if (!cctx->cancellable)
12847 {
12848 if (DECL_FUNCTION_CODE (fndecl)
12849 == BUILT_IN_GOMP_CANCELLATION_POINT)
12850 {
12851 stmt = gimple_build_nop ();
12852 gsi_replace (gsi_p, stmt, false);
12853 }
12854 break;
12855 }
12856 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
12857 {
12858 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
12859 gimple_call_set_fndecl (call_stmt, fndecl);
12860 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
12861 }
12862 tree lhs;
12863 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
12864 gimple_call_set_lhs (call_stmt, lhs);
12865 tree fallthru_label;
12866 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
12867 gimple *g;
12868 g = gimple_build_label (fallthru_label);
12869 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12870 g = gimple_build_cond (NE_EXPR, lhs,
12871 fold_convert (TREE_TYPE (lhs),
12872 boolean_false_node),
12873 cctx->cancel_label, fallthru_label);
12874 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12875 break;
12876 default:
12877 break;
12878 }
12879 goto regimplify;
12880
12881 case GIMPLE_ASSIGN:
12882 for (omp_context *up = ctx; up; up = up->outer)
12883 {
12884 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
12885 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
12886 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
12887 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
12888 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
12889 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
12890 && (gimple_omp_target_kind (up->stmt)
12891 == GF_OMP_TARGET_KIND_DATA)))
12892 continue;
12893 else if (!up->lastprivate_conditional_map)
12894 break;
12895 tree lhs = get_base_address (gimple_assign_lhs (stmt));
12896 if (TREE_CODE (lhs) == MEM_REF
12897 && DECL_P (TREE_OPERAND (lhs, 0))
12898 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
12899 0))) == REFERENCE_TYPE)
12900 lhs = TREE_OPERAND (lhs, 0);
12901 if (DECL_P (lhs))
12902 if (tree *v = up->lastprivate_conditional_map->get (lhs))
12903 {
12904 tree clauses;
12905 if (up->combined_into_simd_safelen1)
12906 {
12907 up = up->outer;
12908 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
12909 up = up->outer;
12910 }
12911 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
12912 clauses = gimple_omp_for_clauses (up->stmt);
12913 else
12914 clauses = gimple_omp_sections_clauses (up->stmt);
12915 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
12916 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
12917 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12918 OMP_CLAUSE__CONDTEMP_);
12919 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
12920 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
12921 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
12922 }
12923 }
12924 /* FALLTHRU */
12925
12926 default:
12927 regimplify:
12928 if ((ctx || task_shared_vars)
12929 && walk_gimple_op (stmt, lower_omp_regimplify_p,
12930 ctx ? NULL : &wi))
12931 {
12932 /* Just remove clobbers; this should happen only if we have
12933 "privatized" local addressable variables in SIMD regions.
12934 The clobber isn't needed in that case, and gimplifying the
12935 address of the ARRAY_REF into a pointer and creating a
12936 MEM_REF-based clobber would create worse code than we get
12937 with the clobber dropped. */
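/* Illustrative: e.g. a "var = {CLOBBER};" statement for such a
   variable is replaced by a GIMPLE_NOP below instead of being
   regimplified.  */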
12938 if (gimple_clobber_p (stmt))
12939 {
12940 gsi_replace (gsi_p, gimple_build_nop (), true);
12941 break;
12942 }
12943 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
12944 }
12945 break;
12946 }
12947 }
12948
12949 static void
12950 lower_omp (gimple_seq *body, omp_context *ctx)
12951 {
12952 location_t saved_location = input_location;
12953 gimple_stmt_iterator gsi;
12954 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12955 lower_omp_1 (&gsi, ctx);
12956 /* During gimplification, we haven't folded statements inside offloading
12957 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
12958 if (target_nesting_level || taskreg_nesting_level)
12959 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
12960 fold_stmt (&gsi);
12961 input_location = saved_location;
12962 }
12963
12964 /* Main entry point. */
12965
12966 static unsigned int
12967 execute_lower_omp (void)
12968 {
12969 gimple_seq body;
12970 int i;
12971 omp_context *ctx;
12972
12973 /* This pass always runs, to provide PROP_gimple_lomp.
12974 But often, there is nothing to do. */
12975 if (flag_openacc == 0 && flag_openmp == 0
12976 && flag_openmp_simd == 0)
12977 return 0;
12978
12979 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
12980 delete_omp_context);
12981
12982 body = gimple_body (current_function_decl);
12983
12984 scan_omp (&body, NULL);
12985 gcc_assert (taskreg_nesting_level == 0);
12986 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
12987 finish_taskreg_scan (ctx);
12988 taskreg_contexts.release ();
12989
12990 if (all_contexts->root)
12991 {
12992 if (task_shared_vars)
12993 push_gimplify_context ();
12994 lower_omp (&body, NULL);
12995 if (task_shared_vars)
12996 pop_gimplify_context (NULL);
12997 }
12998
12999 if (all_contexts)
13000 {
13001 splay_tree_delete (all_contexts);
13002 all_contexts = NULL;
13003 }
13004 BITMAP_FREE (task_shared_vars);
13005 BITMAP_FREE (global_nonaddressable_vars);
13006
13007 /* If the current function is a method, remove the artificial dummy
13008 VAR_DECLs created for non-static data member privatization; they
13009 aren't needed for debuginfo or anything else, have already been
13010 replaced everywhere in the IL, and cause problems with LTO. */
13011 if (DECL_ARGUMENTS (current_function_decl)
13012 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
13013 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
13014 == POINTER_TYPE))
13015 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
13016 return 0;
13017 }
13018
13019 namespace {
13020
13021 const pass_data pass_data_lower_omp =
13022 {
13023 GIMPLE_PASS, /* type */
13024 "omplower", /* name */
13025 OPTGROUP_OMP, /* optinfo_flags */
13026 TV_NONE, /* tv_id */
13027 PROP_gimple_any, /* properties_required */
13028 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
13029 0, /* properties_destroyed */
13030 0, /* todo_flags_start */
13031 0, /* todo_flags_finish */
13032 };
13033
13034 class pass_lower_omp : public gimple_opt_pass
13035 {
13036 public:
13037 pass_lower_omp (gcc::context *ctxt)
13038 : gimple_opt_pass (pass_data_lower_omp, ctxt)
13039 {}
13040
13041 /* opt_pass methods: */
13042 virtual unsigned int execute (function *) { return execute_lower_omp (); }
13043
13044 }; // class pass_lower_omp
13045
13046 } // anon namespace
13047
13048 gimple_opt_pass *
13049 make_pass_lower_omp (gcc::context *ctxt)
13050 {
13051 return new pass_lower_omp (ctxt);
13052 }
13053 \f
13054 /* The following is a utility to diagnose structured block violations.
13055 It is not part of the "omplower" pass, as that's invoked too late. It
13056 should be invoked by the respective front ends after gimplification. */
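/* For example (illustrative), given

     #pragma omp parallel
     {
       goto out;
     }
     out:;

   diagnose_sb_0 reports "invalid branch to/from OpenMP structured
   block", because the branch and the label have different
   structured-block contexts.  */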
13057
13058 static splay_tree all_labels;
13059
13060 /* Check for mismatched contexts and generate an error if needed. Return
13061 true if an error is detected. */
13062
13063 static bool
13064 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
13065 gimple *branch_ctx, gimple *label_ctx)
13066 {
13067 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
13068 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
13069
13070 if (label_ctx == branch_ctx)
13071 return false;
13072
13073 const char *kind = NULL;
13074
13075 if (flag_openacc)
13076 {
13077 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
13078 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
13079 {
13080 gcc_checking_assert (kind == NULL);
13081 kind = "OpenACC";
13082 }
13083 }
13084 if (kind == NULL)
13085 {
13086 gcc_checking_assert (flag_openmp || flag_openmp_simd);
13087 kind = "OpenMP";
13088 }
13089
13090 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
13091 so we could traverse it and issue a correct "exit" or "enter" error
13092 message upon a structured block violation.
13093
13094 We built the context by tree_cons'ing up a list, but there is
13095 no easy counterpart in gimple tuples. It seems like far too much work
13096 for issuing exit/enter error messages. If someone really misses the
13097 distinct error message... patches welcome. */
13098
13099 #if 0
13100 /* Try to avoid confusing the user by producing an error message
13101 with correct "exit" or "enter" verbiage. We prefer "exit"
13102 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
13103 if (branch_ctx == NULL)
13104 exit_p = false;
13105 else
13106 {
13107 while (label_ctx)
13108 {
13109 if (TREE_VALUE (label_ctx) == branch_ctx)
13110 {
13111 exit_p = false;
13112 break;
13113 }
13114 label_ctx = TREE_CHAIN (label_ctx);
13115 }
13116 }
13117
13118 if (exit_p)
13119 error ("invalid exit from %s structured block", kind);
13120 else
13121 error ("invalid entry to %s structured block", kind);
13122 #endif
13123
13124 /* If it's obvious we have an invalid entry, be specific about the error. */
13125 if (branch_ctx == NULL)
13126 error ("invalid entry to %s structured block", kind);
13127 else
13128 {
13129 /* Otherwise, be vague and lazy, but efficient. */
13130 error ("invalid branch to/from %s structured block", kind);
13131 }
13132
13133 gsi_replace (gsi_p, gimple_build_nop (), false);
13134 return true;
13135 }
13136
13137 /* Pass 1: Create a minimal tree of structured blocks, and record
13138 where each label is found. */
13139
13140 static tree
13141 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13142 struct walk_stmt_info *wi)
13143 {
13144 gimple *context = (gimple *) wi->info;
13145 gimple *inner_context;
13146 gimple *stmt = gsi_stmt (*gsi_p);
13147
13148 *handled_ops_p = true;
13149
13150 switch (gimple_code (stmt))
13151 {
13152 WALK_SUBSTMTS;
13153
13154 case GIMPLE_OMP_PARALLEL:
13155 case GIMPLE_OMP_TASK:
13156 case GIMPLE_OMP_SECTIONS:
13157 case GIMPLE_OMP_SINGLE:
13158 case GIMPLE_OMP_SECTION:
13159 case GIMPLE_OMP_MASTER:
13160 case GIMPLE_OMP_ORDERED:
13161 case GIMPLE_OMP_SCAN:
13162 case GIMPLE_OMP_CRITICAL:
13163 case GIMPLE_OMP_TARGET:
13164 case GIMPLE_OMP_TEAMS:
13165 case GIMPLE_OMP_TASKGROUP:
13166 /* The minimal context here is just the current OMP construct. */
13167 inner_context = stmt;
13168 wi->info = inner_context;
13169 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13170 wi->info = context;
13171 break;
13172
13173 case GIMPLE_OMP_FOR:
13174 inner_context = stmt;
13175 wi->info = inner_context;
13176 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13177 walk them. */
13178 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13179 diagnose_sb_1, NULL, wi);
13180 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13181 wi->info = context;
13182 break;
13183
13184 case GIMPLE_LABEL:
13185 splay_tree_insert (all_labels,
13186 (splay_tree_key) gimple_label_label (
13187 as_a <glabel *> (stmt)),
13188 (splay_tree_value) context);
13189 break;
13190
13191 default:
13192 break;
13193 }
13194
13195 return NULL_TREE;
13196 }
13197
13198 /* Pass 2: Check each branch and see if its context differs from the
13199 context of the destination label. */
13200
13201 static tree
13202 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13203 struct walk_stmt_info *wi)
13204 {
13205 gimple *context = (gimple *) wi->info;
13206 splay_tree_node n;
13207 gimple *stmt = gsi_stmt (*gsi_p);
13208
13209 *handled_ops_p = true;
13210
13211 switch (gimple_code (stmt))
13212 {
13213 WALK_SUBSTMTS;
13214
13215 case GIMPLE_OMP_PARALLEL:
13216 case GIMPLE_OMP_TASK:
13217 case GIMPLE_OMP_SECTIONS:
13218 case GIMPLE_OMP_SINGLE:
13219 case GIMPLE_OMP_SECTION:
13220 case GIMPLE_OMP_MASTER:
13221 case GIMPLE_OMP_ORDERED:
13222 case GIMPLE_OMP_SCAN:
13223 case GIMPLE_OMP_CRITICAL:
13224 case GIMPLE_OMP_TARGET:
13225 case GIMPLE_OMP_TEAMS:
13226 case GIMPLE_OMP_TASKGROUP:
13227 wi->info = stmt;
13228 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13229 wi->info = context;
13230 break;
13231
13232 case GIMPLE_OMP_FOR:
13233 wi->info = stmt;
13234 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13235 walk them. */
13236 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
13237 diagnose_sb_2, NULL, wi);
13238 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13239 wi->info = context;
13240 break;
13241
13242 case GIMPLE_COND:
13243 {
13244 gcond *cond_stmt = as_a <gcond *> (stmt);
13245 tree lab = gimple_cond_true_label (cond_stmt);
13246 if (lab)
13247 {
13248 n = splay_tree_lookup (all_labels,
13249 (splay_tree_key) lab);
13250 diagnose_sb_0 (gsi_p, context,
13251 n ? (gimple *) n->value : NULL);
13252 }
13253 lab = gimple_cond_false_label (cond_stmt);
13254 if (lab)
13255 {
13256 n = splay_tree_lookup (all_labels,
13257 (splay_tree_key) lab);
13258 diagnose_sb_0 (gsi_p, context,
13259 n ? (gimple *) n->value : NULL);
13260 }
13261 }
13262 break;
13263
13264 case GIMPLE_GOTO:
13265 {
13266 tree lab = gimple_goto_dest (stmt);
13267 if (TREE_CODE (lab) != LABEL_DECL)
13268 break;
13269
13270 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13271 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
13272 }
13273 break;
13274
13275 case GIMPLE_SWITCH:
13276 {
13277 gswitch *switch_stmt = as_a <gswitch *> (stmt);
13278 unsigned int i;
13279 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
13280 {
13281 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
13282 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13283 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
13284 break;
13285 }
13286 }
13287 break;
13288
13289 case GIMPLE_RETURN:
13290 diagnose_sb_0 (gsi_p, context, NULL);
13291 break;
13292
13293 default:
13294 break;
13295 }
13296
13297 return NULL_TREE;
13298 }
13299
13300 static unsigned int
13301 diagnose_omp_structured_block_errors (void)
13302 {
13303 struct walk_stmt_info wi;
13304 gimple_seq body = gimple_body (current_function_decl);
13305
13306 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13307
13308 memset (&wi, 0, sizeof (wi));
13309 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13310
13311 memset (&wi, 0, sizeof (wi));
13312 wi.want_locations = true;
13313 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13314
13315 gimple_set_body (current_function_decl, body);
13316
13317 splay_tree_delete (all_labels);
13318 all_labels = NULL;
13319
13320 return 0;
13321 }
13322
13323 namespace {
13324
13325 const pass_data pass_data_diagnose_omp_blocks =
13326 {
13327 GIMPLE_PASS, /* type */
13328 "*diagnose_omp_blocks", /* name */
13329 OPTGROUP_OMP, /* optinfo_flags */
13330 TV_NONE, /* tv_id */
13331 PROP_gimple_any, /* properties_required */
13332 0, /* properties_provided */
13333 0, /* properties_destroyed */
13334 0, /* todo_flags_start */
13335 0, /* todo_flags_finish */
13336 };
13337
13338 class pass_diagnose_omp_blocks : public gimple_opt_pass
13339 {
13340 public:
13341 pass_diagnose_omp_blocks (gcc::context *ctxt)
13342 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
13343 {}
13344
13345 /* opt_pass methods: */
13346 virtual bool gate (function *)
13347 {
13348 return flag_openacc || flag_openmp || flag_openmp_simd;
13349 }
13350 virtual unsigned int execute (function *)
13351 {
13352 return diagnose_omp_structured_block_errors ();
13353 }
13354
13355 }; // class pass_diagnose_omp_blocks
13356
13357 } // anon namespace
13358
13359 gimple_opt_pass *
13360 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
13361 {
13362 return new pass_diagnose_omp_blocks (ctxt);
13363 }
13364 \f
13365
13366 #include "gt-omp-low.h"