/* Lowering pass for OpenMP directives.  Converts OpenMP directives
   into explicit calls to the runtime library (libgomp) and data
   marshalling to implement data sharing and copying clauses.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "rtl.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "tree-pass.h"
#include "ggc.h"
#include "except.h"
#include "splay-tree.h"
#include "optabs.h"
#include "cfgloop.h"
#include "target.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "tree-cfgcleanup.h"
#include "tree-nested.h"


/* Lowering of OpenMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for parallel regions which are then moved to a new
   function, to be invoked by the thread library.  */

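/* As a hedged illustration (an editorial sketch, not from the original
   sources): a directive such as

        #pragma omp parallel shared (x)
          x++;

   is conceptually outlined into a child function that receives the shared
   data through a record, roughly

        void foo._omp_fn.0 (struct .omp_data_s *data) { data->x++; }
        ...
        .omp_data_o.x = x;
        GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);
        x = .omp_data_o.x;

   where the exact libgomp entry point and data layout are chosen later
   by pass_expand_omp.  */
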
/* Parallel region information.  Every parallel and workshare
   directive is enclosed between two markers, the OMP_* directive
   and a corresponding OMP_RETURN statement.  */

struct omp_region
{
  /* The enclosing region.  */
  struct omp_region *outer;

  /* First child region.  */
  struct omp_region *inner;

  /* Next peer region.  */
  struct omp_region *next;

  /* Block containing the omp directive as its last stmt.  */
  basic_block entry;

  /* Block containing the OMP_RETURN as its last stmt.  */
  basic_block exit;

  /* Block containing the OMP_CONTINUE as its last stmt.  */
  basic_block cont;

  /* If this is a combined parallel+workshare region, this is a list
     of additional arguments needed by the combined parallel+workshare
     library call.  */
  vec<tree, va_gc> *ws_args;

  /* The code for the omp directive of this region.  */
  enum gimple_code type;

  /* Schedule kind, only used for OMP_FOR type regions.  */
  enum omp_clause_schedule_kind sched_kind;

  /* True if this is a combined parallel+workshare region.  */
  bool is_combined_parallel;
};

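/* Editorial illustration (inferred from the fields above, not original
   text): for a nest such as

        #pragma omp parallel        <-- region A
          {
            #pragma omp for         <-- region B
            #pragma omp sections    <-- region C
          }

   A->inner points at one of B or C, B and C are linked through their
   `next' pointers, and both point back at A through `outer'.  */
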
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

typedef struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
} omp_context;


struct omp_for_data_loop
{
  tree v, n1, n2, step;
  enum tree_code cond_code;
};

/* A structure describing the main elements of a parallel loop.  */

struct omp_for_data
{
  struct omp_for_data_loop loop;
  tree chunk_size;
  gimple for_stmt;
  tree pre, iter_type;
  int collapse;
  bool have_nowait, have_ordered;
  enum omp_clause_schedule_kind sched_kind;
  struct omp_for_data_loop *loops;
};


static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static struct omp_region *root_omp_region;
static bitmap task_shared_vars;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
  case GIMPLE_TRANSACTION: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Find an OpenMP clause of type KIND within CLAUSES.  */

tree
find_omp_clause (tree clauses, enum omp_clause_code kind)
{
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == kind)
      return clauses;

  return NULL_TREE;
}

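/* For example (hypothetical caller, for illustration only):

     tree c = find_omp_clause (gimple_omp_for_clauses (for_stmt),
                               OMP_CLAUSE_SCHEDULE);

   yields the first schedule clause on FOR_STMT, or NULL_TREE if the
   directive carries none.  */
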
/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL
         || gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if REGION is a combined parallel+workshare region.  */

static inline bool
is_combined_parallel (struct omp_region *region)
{
  return region->is_combined_parallel;
}


/* Extract the header elements of parallel loop FOR_STMT and store
   them into *FD.  */

static void
extract_omp_for_data (gimple for_stmt, struct omp_for_data *fd,
                      struct omp_for_data_loop *loops)
{
  tree t, var, *collapse_iter, *collapse_count;
  tree count = NULL_TREE, iter_type = long_integer_type_node;
  struct omp_for_data_loop *loop;
  int i;
  struct omp_for_data_loop dummy_loop;
  location_t loc = gimple_location (for_stmt);
  bool simd = gimple_omp_for_kind (for_stmt) & GF_OMP_FOR_KIND_SIMD;
  bool distribute = gimple_omp_for_kind (for_stmt)
                    == GF_OMP_FOR_KIND_DISTRIBUTE;

  fd->for_stmt = for_stmt;
  fd->pre = NULL;
  fd->collapse = gimple_omp_for_collapse (for_stmt);
  if (fd->collapse > 1)
    fd->loops = loops;
  else
    fd->loops = &fd->loop;

  fd->have_nowait = distribute || simd;
  fd->have_ordered = false;
  fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
  fd->chunk_size = NULL_TREE;
  collapse_iter = NULL;
  collapse_count = NULL;

  for (t = gimple_omp_for_clauses (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
    switch (OMP_CLAUSE_CODE (t))
      {
      case OMP_CLAUSE_NOWAIT:
        fd->have_nowait = true;
        break;
      case OMP_CLAUSE_ORDERED:
        fd->have_ordered = true;
        break;
      case OMP_CLAUSE_SCHEDULE:
        gcc_assert (!distribute);
        fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
        fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
        break;
      case OMP_CLAUSE_DIST_SCHEDULE:
        gcc_assert (distribute);
        fd->chunk_size = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (t);
        break;
      case OMP_CLAUSE_COLLAPSE:
        if (fd->collapse > 1)
          {
            collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
            collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
          }
      default:
        break;
      }

  /* FIXME: for now map schedule(auto) to schedule(static).
     There should be analysis to determine whether all iterations
     are approximately the same amount of work (then schedule(static)
     is best) or if it varies (then schedule(dynamic,N) is better).  */
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
    {
      fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
      gcc_assert (fd->chunk_size == NULL);
    }
  gcc_assert (fd->collapse == 1 || collapse_iter != NULL);
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
    gcc_assert (fd->chunk_size == NULL);
  else if (fd->chunk_size == NULL)
    {
      /* We only need to compute a default chunk size for ordered
         static loops and dynamic loops.  */
      if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
          || fd->have_ordered)
        fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
                         ? integer_zero_node : integer_one_node;
    }

  for (i = 0; i < fd->collapse; i++)
    {
      if (fd->collapse == 1)
        loop = &fd->loop;
      else if (loops != NULL)
        loop = loops + i;
      else
        loop = &dummy_loop;

      loop->v = gimple_omp_for_index (for_stmt, i);
      gcc_assert (SSA_VAR_P (loop->v));
      gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
                  || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
      var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
      loop->n1 = gimple_omp_for_initial (for_stmt, i);

      loop->cond_code = gimple_omp_for_cond (for_stmt, i);
      loop->n2 = gimple_omp_for_final (for_stmt, i);
      switch (loop->cond_code)
        {
        case LT_EXPR:
        case GT_EXPR:
          break;
        case NE_EXPR:
          gcc_assert (gimple_omp_for_kind (for_stmt)
                      == GF_OMP_FOR_KIND_CILKSIMD);
          break;
        case LE_EXPR:
          if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
            loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, 1);
          else
            loop->n2 = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (loop->n2),
                                        loop->n2,
                                        build_int_cst (TREE_TYPE (loop->n2),
                                                       1));
          loop->cond_code = LT_EXPR;
          break;
        case GE_EXPR:
          if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
            loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, -1);
          else
            loop->n2 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (loop->n2),
                                        loop->n2,
                                        build_int_cst (TREE_TYPE (loop->n2),
                                                       1));
          loop->cond_code = GT_EXPR;
          break;
        default:
          gcc_unreachable ();
        }

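      /* Editorial note: at this point the condition has been normalized
         to LT_EXPR or GT_EXPR (e.g. "i <= n2" was rewritten above as
         "i < n2 + 1"), except for the NE_EXPR form allowed for Cilk Plus
         simd loops.  */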
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (TREE_OPERAND (t, 0) == var);
      switch (TREE_CODE (t))
        {
        case PLUS_EXPR:
          loop->step = TREE_OPERAND (t, 1);
          break;
        case POINTER_PLUS_EXPR:
          loop->step = fold_convert (ssizetype, TREE_OPERAND (t, 1));
          break;
        case MINUS_EXPR:
          loop->step = TREE_OPERAND (t, 1);
          loop->step = fold_build1_loc (loc, NEGATE_EXPR,
                                        TREE_TYPE (loop->step), loop->step);
          break;
        default:
          gcc_unreachable ();
        }

      if (simd
          || (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
              && !fd->have_ordered))
        {
          if (fd->collapse == 1)
            iter_type = TREE_TYPE (loop->v);
          else if (i == 0
                   || TYPE_PRECISION (iter_type)
                      < TYPE_PRECISION (TREE_TYPE (loop->v)))
            iter_type
              = build_nonstandard_integer_type
                  (TYPE_PRECISION (TREE_TYPE (loop->v)), 1);
        }
      else if (iter_type != long_long_unsigned_type_node)
        {
          if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
            iter_type = long_long_unsigned_type_node;
          else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
                   && TYPE_PRECISION (TREE_TYPE (loop->v))
                      >= TYPE_PRECISION (iter_type))
            {
              tree n;

              if (loop->cond_code == LT_EXPR)
                n = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (loop->v),
                                     loop->n2, loop->step);
              else
                n = loop->n1;
              if (TREE_CODE (n) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
                iter_type = long_long_unsigned_type_node;
            }
          else if (TYPE_PRECISION (TREE_TYPE (loop->v))
                   > TYPE_PRECISION (iter_type))
            {
              tree n1, n2;

              if (loop->cond_code == LT_EXPR)
                {
                  n1 = loop->n1;
                  n2 = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (loop->v),
                                        loop->n2, loop->step);
                }
              else
                {
                  n1 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (loop->v),
                                        loop->n2, loop->step);
                  n2 = loop->n1;
                }
              if (TREE_CODE (n1) != INTEGER_CST
                  || TREE_CODE (n2) != INTEGER_CST
                  || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
                  || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
                iter_type = long_long_unsigned_type_node;
            }
        }

      if (collapse_count && *collapse_count == NULL)
        {
          t = fold_binary (loop->cond_code, boolean_type_node,
                           fold_convert (TREE_TYPE (loop->v), loop->n1),
                           fold_convert (TREE_TYPE (loop->v), loop->n2));
          if (t && integer_zerop (t))
            count = build_zero_cst (long_long_unsigned_type_node);
          else if ((i == 0 || count != NULL_TREE)
                   && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
                   && TREE_CONSTANT (loop->n1)
                   && TREE_CONSTANT (loop->n2)
                   && TREE_CODE (loop->step) == INTEGER_CST)
            {
              tree itype = TREE_TYPE (loop->v);

              if (POINTER_TYPE_P (itype))
                itype = signed_type_for (itype);
              t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
              t = fold_build2_loc (loc, PLUS_EXPR, itype,
                                   fold_convert_loc (loc, itype, loop->step),
                                   t);
              t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
                                   fold_convert_loc (loc, itype, loop->n2));
              t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
                                   fold_convert_loc (loc, itype, loop->n1));
              if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
                t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
                                     fold_build1_loc (loc, NEGATE_EXPR,
                                                      itype, t),
                                     fold_build1_loc (loc, NEGATE_EXPR, itype,
                                                      fold_convert_loc
                                                        (loc, itype,
                                                         loop->step)));
              else
                t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
                                     fold_convert_loc (loc, itype,
                                                       loop->step));
              t = fold_convert_loc (loc, long_long_unsigned_type_node, t);
              if (count != NULL_TREE)
                count = fold_build2_loc (loc, MULT_EXPR,
                                         long_long_unsigned_type_node,
                                         count, t);
              else
                count = t;
              if (TREE_CODE (count) != INTEGER_CST)
                count = NULL_TREE;
            }
          else if (count && !integer_zerop (count))
            count = NULL_TREE;
        }
    }

  if (count
      && !simd
      && (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
          || fd->have_ordered))
    {
      if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
        iter_type = long_long_unsigned_type_node;
      else
        iter_type = long_integer_type_node;
    }
  else if (collapse_iter && *collapse_iter != NULL)
    iter_type = TREE_TYPE (*collapse_iter);
  fd->iter_type = iter_type;
  if (collapse_iter && *collapse_iter == NULL)
    *collapse_iter = create_tmp_var (iter_type, ".iter");
  if (collapse_count && *collapse_count == NULL)
    {
      if (count)
        *collapse_count = fold_convert_loc (loc, iter_type, count);
      else
        *collapse_count = create_tmp_var (iter_type, ".count");
    }

  if (fd->collapse > 1)
    {
      fd->loop.v = *collapse_iter;
      fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      fd->loop.n2 = *collapse_count;
      fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
      fd->loop.cond_code = LT_EXPR;
    }
}
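
/* A worked example of the trip-count computation above (editorial, not
   from the original sources): for a loop with n1 = 3, n2 = 10, step = 2
   and cond_code LT_EXPR, count = (step + (-1) + n2 - n1) / step
   = (2 - 1 + 10 - 3) / 2 = 4, matching the iterations i = 3, 5, 7, 9.  */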


/* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
   is the immediate dominator of PAR_ENTRY_BB, return true if there
   are no data dependencies that would prevent expanding the parallel
   directive at PAR_ENTRY_BB as a combined parallel+workshare region.

   When expanding a combined parallel+workshare region, the call to
   the child function may need additional arguments in the case of
   GIMPLE_OMP_FOR regions.  In some cases, these arguments are
   computed out of variables passed in from the parent to the child
   via 'struct .omp_data_s'.  For instance:

        #pragma omp parallel for schedule (guided, i * 4)
        for (j ...)

   Is lowered into:

        # BLOCK 2 (PAR_ENTRY_BB)
        .omp_data_o.i = i;
        #pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)

        # BLOCK 3 (WS_ENTRY_BB)
        .omp_data_i = &.omp_data_o;
        D.1667 = .omp_data_i->i;
        D.1598 = D.1667 * 4;
        #pragma omp for schedule (guided, D.1598)

   When we outline the parallel region, the call to the child function
   'bar.omp_fn.0' will need the value D.1598 in its argument list, but
   that value is computed *after* the call site.  So, in principle we
   cannot do the transformation.

   To see whether the code in WS_ENTRY_BB blocks the combined
   parallel+workshare call, we collect all the variables used in the
   GIMPLE_OMP_FOR header and check whether they appear on the LHS of
   any statement in WS_ENTRY_BB.  If so, then we cannot emit the
   combined call.

   FIXME.  If we had the SSA form built at this point, we could merely
   hoist the code in block 3 into block 2 and be done with it.  But at
   this point we don't have dataflow information and though we could
   hack something up here, it is really not worth the aggravation.  */

static bool
workshare_safe_to_combine_p (basic_block ws_entry_bb)
{
  struct omp_for_data fd;
  gimple ws_stmt = last_stmt (ws_entry_bb);

  if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
    return true;

  gcc_assert (gimple_code (ws_stmt) == GIMPLE_OMP_FOR);

  extract_omp_for_data (ws_stmt, &fd, NULL);

  if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
    return false;
  if (fd.iter_type != long_integer_type_node)
    return false;

  /* FIXME.  We give up too easily here.  If any of these arguments
     are not constants, they will likely involve variables that have
     been mapped into fields of .omp_data_s for sharing with the child
     function.  With appropriate data flow, it would be possible to
     see through this.  */
  if (!is_gimple_min_invariant (fd.loop.n1)
      || !is_gimple_min_invariant (fd.loop.n2)
      || !is_gimple_min_invariant (fd.loop.step)
      || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
    return false;

  return true;
}


/* Collect additional arguments needed to emit a combined
   parallel+workshare call.  WS_STMT is the workshare directive being
   expanded.  */

static vec<tree, va_gc> *
get_ws_args_for (gimple par_stmt, gimple ws_stmt)
{
  tree t;
  location_t loc = gimple_location (ws_stmt);
  vec<tree, va_gc> *ws_args;

  if (gimple_code (ws_stmt) == GIMPLE_OMP_FOR)
    {
      struct omp_for_data fd;
      tree n1, n2;

      extract_omp_for_data (ws_stmt, &fd, NULL);
      n1 = fd.loop.n1;
      n2 = fd.loop.n2;

      if (gimple_omp_for_combined_into_p (ws_stmt))
        {
          tree innerc
            = find_omp_clause (gimple_omp_parallel_clauses (par_stmt),
                               OMP_CLAUSE__LOOPTEMP_);
          gcc_assert (innerc);
          n1 = OMP_CLAUSE_DECL (innerc);
          innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
                                    OMP_CLAUSE__LOOPTEMP_);
          gcc_assert (innerc);
          n2 = OMP_CLAUSE_DECL (innerc);
        }

      vec_alloc (ws_args, 3 + (fd.chunk_size != 0));

      t = fold_convert_loc (loc, long_integer_type_node, n1);
      ws_args->quick_push (t);

      t = fold_convert_loc (loc, long_integer_type_node, n2);
      ws_args->quick_push (t);

      t = fold_convert_loc (loc, long_integer_type_node, fd.loop.step);
      ws_args->quick_push (t);

      if (fd.chunk_size)
        {
          t = fold_convert_loc (loc, long_integer_type_node, fd.chunk_size);
          ws_args->quick_push (t);
        }

      return ws_args;
    }
  else if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
    {
      /* Number of sections is equal to the number of edges from the
         GIMPLE_OMP_SECTIONS_SWITCH statement, except for the one to
         the exit of the sections region.  */
      basic_block bb = single_succ (gimple_bb (ws_stmt));
      t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
      vec_alloc (ws_args, 1);
      ws_args->quick_push (t);
      return ws_args;
    }

  gcc_unreachable ();
}
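
/* For instance (editorial illustration): for "#pragma omp parallel for
   schedule (dynamic, 4)" over i = 0 .. n-1, WS_ARGS becomes the vector
   { (long) 0, (long) n, (long) 1, (long) 4 }, ready to be appended to
   the combined parallel+workshare library call.  */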


/* Discover whether REGION is a combined parallel+workshare region.  */

static void
determine_parallel_type (struct omp_region *region)
{
  basic_block par_entry_bb, par_exit_bb;
  basic_block ws_entry_bb, ws_exit_bb;

  if (region == NULL || region->inner == NULL
      || region->exit == NULL || region->inner->exit == NULL
      || region->inner->cont == NULL)
    return;

  /* We only support parallel+for and parallel+sections.  */
  if (region->type != GIMPLE_OMP_PARALLEL
      || (region->inner->type != GIMPLE_OMP_FOR
          && region->inner->type != GIMPLE_OMP_SECTIONS))
    return;

  /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
     WS_EXIT_BB -> PAR_EXIT_BB.  */
  par_entry_bb = region->entry;
  par_exit_bb = region->exit;
  ws_entry_bb = region->inner->entry;
  ws_exit_bb = region->inner->exit;

  if (single_succ (par_entry_bb) == ws_entry_bb
      && single_succ (ws_exit_bb) == par_exit_bb
      && workshare_safe_to_combine_p (ws_entry_bb)
      && (gimple_omp_parallel_combined_p (last_stmt (par_entry_bb))
          || (last_and_only_stmt (ws_entry_bb)
              && last_and_only_stmt (par_exit_bb))))
    {
      gimple par_stmt = last_stmt (par_entry_bb);
      gimple ws_stmt = last_stmt (ws_entry_bb);

      if (region->inner->type == GIMPLE_OMP_FOR)
        {
          /* If this is a combined parallel loop, we need to determine
             whether or not to use the combined library calls.  There
             are two cases where we do not apply the transformation:
             static loops and any kind of ordered loop.  In the first
             case, we already open code the loop so there is no need
             to do anything else.  In the latter case, the combined
             parallel loop call would still need extra synchronization
             to implement ordered semantics, so there would not be any
             gain in using the combined call.  */
          tree clauses = gimple_omp_for_clauses (ws_stmt);
          tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
          if (c == NULL
              || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
              || find_omp_clause (clauses, OMP_CLAUSE_ORDERED))
            {
              region->is_combined_parallel = false;
              region->inner->is_combined_parallel = false;
              return;
            }
        }

      region->is_combined_parallel = true;
      region->inner->is_combined_parallel = true;
      region->ws_args = get_ws_args_for (par_stmt, ws_stmt);
    }
}


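/* Editorial sketch of what the flag enables (hedged): for a combined
   region, expansion can emit a single call such as

     GOMP_parallel_loop_dynamic (fn, data, num_threads, n1, n2, step,
                                 chunk, flags);

   using the WS_ARGS collected above, instead of a plain GOMP_parallel
   whose child function starts the loop itself; the precise entry point
   is chosen during expansion.  */
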
/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Return true if DECL is a reference type.  */

static inline bool
is_reference (tree decl)
{
  return lang_hooks.decls.omp_privatize_by_reference (decl);
}

/* Lookup variables in the decl or field splay trees.  The "maybe" form
   allows for the variable form to not have been entered, otherwise we
   assert that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n;
  n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n;
  n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map,
                         (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return n ? (tree) n->value : NULL_TREE;
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (TREE_ADDRESSABLE (decl))
        return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
         for these.  */
      if (TREE_READONLY (decl)
          || ((TREE_CODE (decl) == RESULT_DECL
               || TREE_CODE (decl) == PARM_DECL)
              && DECL_BY_REFERENCE (decl)))
        return false;

      /* Disallow copy-in/out in nested parallel if
         decl is shared in outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
              break;

          if (up)
            {
              tree c;

              for (c = gimple_omp_taskreg_clauses (up->stmt);
                   c; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                    && OMP_CLAUSE_DECL (c) == decl)
                  break;

              if (c)
                goto maybe_mark_addressable_and_ret;
            }
        }

      /* For tasks avoid using copy-in/out.  As tasks can be
         deferred or executed in a different thread, when GOMP_task
         returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
        {
          tree outer;
        maybe_mark_addressable_and_ret:
          outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}

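/* Editorial note (inferred): a false result selects copy-in/copy-out,
   i.e. the sender stores the value into the .omp_data_s record and reads
   it back after the region; a true result means only the variable's
   address is communicated and every access goes through that pointer.  */
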
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  x = omp_build_component_ref (x, field);
  if (by_ref)
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
           && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
         even private vars in its linear etc. clauses.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
        x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
        x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
        x = var;
    }
  else if (ctx->outer)
    x = lookup_decl (var, ctx->outer);
  else if (is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else
    gcc_unreachable ();

  if (is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  tree field = lookup_sfield (var, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;

  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, (splay_tree_key) var));

  type = TREE_TYPE (var);
  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
                      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      DECL_ALIGN (field) = DECL_ALIGN (var);
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    DECL_ALIGN (field) = TYPE_ALIGN (type);

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (DECL_SOURCE_LOCATION (var),
                               FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          DECL_ALIGN (sfield) = DECL_ALIGN (field);
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
            {
              sfield = build_decl (DECL_SOURCE_LOCATION (var),
                                   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, (splay_tree_key) var,
                       (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, (splay_tree_key) var,
                       (splay_tree_value) sfield);
}
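
/* Editorial summary of MASK, inferred from the callers in
   scan_sharing_clauses: bit 0 requests a field in CTX->RECORD_TYPE,
   bit 1 a field in CTX->SRECORD_TYPE (mask == 3 installs it in both),
   and bit 2 (mask & 4, used via mask == 7 for mapped array sections)
   wraps the type in a pointer-to-pointer.  */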

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}


/* Return the parallel region associated with STMT.  */

/* Debugging dumps for parallel regions.  */
void dump_omp_region (FILE *, struct omp_region *, int);
void debug_omp_region (struct omp_region *);
void debug_all_omp_regions (void);

/* Dump the parallel region tree rooted at REGION.  */

void
dump_omp_region (FILE *file, struct omp_region *region, int indent)
{
  fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
           gimple_code_name[region->type]);

  if (region->inner)
    dump_omp_region (file, region->inner, indent + 4);

  if (region->cont)
    {
      fprintf (file, "%*sbb %d: GIMPLE_OMP_CONTINUE\n", indent, "",
               region->cont->index);
    }

  if (region->exit)
    fprintf (file, "%*sbb %d: GIMPLE_OMP_RETURN\n", indent, "",
             region->exit->index);
  else
    fprintf (file, "%*s[no exit marker]\n", indent, "");

  if (region->next)
    dump_omp_region (file, region->next, indent);
}

DEBUG_FUNCTION void
debug_omp_region (struct omp_region *region)
{
  dump_omp_region (stderr, region, 0);
}

DEBUG_FUNCTION void
debug_all_omp_regions (void)
{
  dump_omp_region (stderr, root_omp_region, 0);
}


/* Create a new parallel region starting at STMT inside region PARENT.  */

static struct omp_region *
new_omp_region (basic_block bb, enum gimple_code type,
                struct omp_region *parent)
{
  struct omp_region *region = XCNEW (struct omp_region);

  region->outer = parent;
  region->entry = bb;
  region->type = type;

  if (parent)
    {
      /* This is a nested region.  Add it to the list of inner
         regions in PARENT.  */
      region->next = parent->inner;
      parent->inner = region;
    }
  else
    {
      /* This is a toplevel region.  Add it to the list of toplevel
         regions in ROOT_OMP_REGION.  */
      region->next = root_omp_region;
      root_omp_region = region;
    }

  return region;
}

/* Release the memory associated with the region tree rooted at REGION.  */

static void
free_omp_region_1 (struct omp_region *region)
{
  struct omp_region *i, *n;

  for (i = region->inner; i ; i = n)
    {
      n = i->next;
      free_omp_region_1 (i);
    }

  free (region);
}

/* Release the memory for the entire omp region tree.  */

void
free_omp_regions (void)
{
  struct omp_region *r, *n;
  for (r = root_omp_region; r ; r = n)
    {
      n = r->next;
      free_omp_region_1 (r);
    }
  root_omp_region = NULL;
}


/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_get_node (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = pointer_map_create ();

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gimple task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gimple bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  pointer_map_destroy (ctx->cb.decl_map);

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (ctx->stmt);

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
                         TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          DECL_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
                     &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  TREE_TYPE (ctx->receiver_decl) = build_pointer_type (type);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          /* Ignore shared directives in teams construct.  */
          if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
            break;
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          by_ref = use_pointer_for_field (decl, ctx);
          if (! TREE_READONLY (decl)
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || is_reference (decl))
            {
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                install_var_field (decl, false, 1, ctx);
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || is_reference (decl)))
                {
                  install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE__LOOPTEMP_:
          gcc_assert (is_parallel_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          install_var_field (decl, false, 3, ctx);
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;

        case OMP_CLAUSE_DEFAULT:
          ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
          break;

        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEPEND:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE_MAP:
          if (ctx->outer)
            scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
          decl = OMP_CLAUSE_DECL (c);
          /* Global variables with "omp declare target" attribute
             don't need to be copied, the receiver side will use them
             directly.  */
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && DECL_P (decl)
              && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
              && lookup_attribute ("omp declare target",
                                   DECL_ATTRIBUTES (decl)))
            break;
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
              && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER)
            {
              /* Ignore OMP_CLAUSE_MAP_POINTER kind for arrays in
                 #pragma omp target data, there is nothing to map for
                 those.  */
              if (gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_DATA
                  && !POINTER_TYPE_P (TREE_TYPE (decl)))
                break;
            }
          if (DECL_P (decl))
            {
              if (DECL_SIZE (decl)
                  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
                {
                  tree decl2 = DECL_VALUE_EXPR (decl);
                  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
                  decl2 = TREE_OPERAND (decl2, 0);
                  gcc_assert (DECL_P (decl2));
                  install_var_field (decl2, true, 3, ctx);
                  install_var_local (decl2, ctx);
                  install_var_local (decl, ctx);
                }
              else
                {
                  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
                      && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
                      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
                      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
                    install_var_field (decl, true, 7, ctx);
                  else
                    install_var_field (decl, true, 3, ctx);
                  if (gimple_omp_target_kind (ctx->stmt)
                      == GF_OMP_TARGET_KIND_REGION)
                    install_var_local (decl, ctx);
                }
            }
          else
            {
              tree base = get_base_address (decl);
              tree nc = OMP_CLAUSE_CHAIN (c);
              if (DECL_P (base)
                  && nc != NULL_TREE
                  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
                  && OMP_CLAUSE_DECL (nc) == base
                  && OMP_CLAUSE_MAP_KIND (nc) == OMP_CLAUSE_MAP_POINTER
                  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
                {
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
                  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
                }
              else
                {
                  gcc_assert (!splay_tree_lookup (ctx->field_map,
                                                  (splay_tree_key) decl));
                  tree field
                    = build_decl (OMP_CLAUSE_LOCATION (c),
                                  FIELD_DECL, NULL_TREE, ptr_type_node);
                  DECL_ALIGN (field) = TYPE_ALIGN (ptr_type_node);
                  insert_field_into_struct (ctx->record_type, field);
                  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
                                     (splay_tree_value) field);
                }
            }
          break;

953ff289
DN
1679 case OMP_CLAUSE_NOWAIT:
1680 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
1681 case OMP_CLAUSE_COLLAPSE:
1682 case OMP_CLAUSE_UNTIED:
20906c66 1683 case OMP_CLAUSE_MERGEABLE:
acf0174b 1684 case OMP_CLAUSE_PROC_BIND:
74bf76ed 1685 case OMP_CLAUSE_SAFELEN:
953ff289
DN
1686 break;
1687
acf0174b
JJ
1688 case OMP_CLAUSE_ALIGNED:
1689 decl = OMP_CLAUSE_DECL (c);
1690 if (is_global_var (decl)
1691 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1692 install_var_local (decl, ctx);
1693 break;
1694
1695 default:
1696 gcc_unreachable ();
1697 }
1698 }
1699
1700 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1701 {
1702 switch (OMP_CLAUSE_CODE (c))
1703 {
1704 case OMP_CLAUSE_LASTPRIVATE:
1705 /* Let the corresponding firstprivate clause create
1706 the variable. */
1707 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1708 scan_array_reductions = true;
1709 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1710 break;
1711 /* FALLTHRU */
1712
1713 case OMP_CLAUSE_PRIVATE:
1714 case OMP_CLAUSE_FIRSTPRIVATE:
1715 case OMP_CLAUSE_REDUCTION:
1716 case OMP_CLAUSE_LINEAR:
1717 decl = OMP_CLAUSE_DECL (c);
1718 if (is_variable_sized (decl))
1719 install_var_local (decl, ctx);
1720 fixup_remapped_decl (decl, ctx,
1721 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1722 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1723 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1724 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1725 scan_array_reductions = true;
1726 break;
1727
1728 case OMP_CLAUSE_SHARED:
1729 /* Ignore shared directives in teams construct. */
1730 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1731 break;
1732 decl = OMP_CLAUSE_DECL (c);
1733 if (! is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1734 fixup_remapped_decl (decl, ctx, false);
1735 break;
1736
1737 case OMP_CLAUSE_MAP:
1738 if (gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_DATA)
1739 break;
1740 decl = OMP_CLAUSE_DECL (c);
1741 if (DECL_P (decl)
1742 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1743 && lookup_attribute ("omp declare target",
1744 DECL_ATTRIBUTES (decl)))
1745 break;
1746 if (DECL_P (decl))
1747 {
1748 if (OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
1749 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1750 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1751 {
1752 tree new_decl = lookup_decl (decl, ctx);
1753 TREE_TYPE (new_decl)
1754 = remap_type (TREE_TYPE (decl), &ctx->cb);
1755 }
1756 else if (DECL_SIZE (decl)
1757 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1758 {
1759 tree decl2 = DECL_VALUE_EXPR (decl);
1760 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1761 decl2 = TREE_OPERAND (decl2, 0);
1762 gcc_assert (DECL_P (decl2));
1763 fixup_remapped_decl (decl2, ctx, false);
1764 fixup_remapped_decl (decl, ctx, true);
1765 }
1766 else
1767 fixup_remapped_decl (decl, ctx, false);
1768 }
1769 break;
1770
1771 case OMP_CLAUSE_COPYPRIVATE:
1772 case OMP_CLAUSE_COPYIN:
1773 case OMP_CLAUSE_DEFAULT:
1774 case OMP_CLAUSE_IF:
1775 case OMP_CLAUSE_NUM_THREADS:
1776 case OMP_CLAUSE_NUM_TEAMS:
1777 case OMP_CLAUSE_THREAD_LIMIT:
1778 case OMP_CLAUSE_DEVICE:
1779 case OMP_CLAUSE_SCHEDULE:
1780 case OMP_CLAUSE_DIST_SCHEDULE:
1781 case OMP_CLAUSE_NOWAIT:
1782 case OMP_CLAUSE_ORDERED:
1783 case OMP_CLAUSE_COLLAPSE:
1784 case OMP_CLAUSE_UNTIED:
1785 case OMP_CLAUSE_FINAL:
1786 case OMP_CLAUSE_MERGEABLE:
1787 case OMP_CLAUSE_PROC_BIND:
1788 case OMP_CLAUSE_SAFELEN:
1789 case OMP_CLAUSE_ALIGNED:
1790 case OMP_CLAUSE_DEPEND:
1791 case OMP_CLAUSE__LOOPTEMP_:
1792 case OMP_CLAUSE_TO:
1793 case OMP_CLAUSE_FROM:
1794 break;
1795
1796 default:
1797 gcc_unreachable ();
1798 }
1799 }
1800
1801 if (scan_array_reductions)
1802 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1803 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1804 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1805 {
1806 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1807 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1808 }
1809 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1810 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1811 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1812 }
1813
1814 /* Create a new name for the omp child function. Returns an identifier. */
1815
1816static GTY(()) unsigned int tmp_ompfn_id_num;
1817
1818static tree
1819 create_omp_child_function_name (bool task_copy)
1820 {
1821 return (clone_function_name (current_function_decl,
1822 task_copy ? "_omp_cpyfn" : "_omp_fn"));
1823 }
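/* Added note for illustration (not part of the original source): for a
   containing function foo, clone_function_name produces identifiers of
   the form foo._omp_fn.N (or foo._omp_cpyfn.N for the task copy
   function), with N a per-suffix counter. */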
1824
1825 /* Build a decl for the omp child function. It will not contain a body
1826 yet, just the bare decl. */
1827
1828static void
1829 create_omp_child_function (omp_context *ctx, bool task_copy)
1830{
1831 tree decl, type, name, t;
1832
1833 name = create_omp_child_function_name (task_copy);
1834 if (task_copy)
1835 type = build_function_type_list (void_type_node, ptr_type_node,
1836 ptr_type_node, NULL_TREE);
1837 else
1838 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1839
1840 decl = build_decl (gimple_location (ctx->stmt),
1841 FUNCTION_DECL, name, type);
1842
1843 if (!task_copy)
1844 ctx->cb.dst_fn = decl;
1845 else
1846 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1847
1848 TREE_STATIC (decl) = 1;
1849 TREE_USED (decl) = 1;
1850 DECL_ARTIFICIAL (decl) = 1;
1851 DECL_NAMELESS (decl) = 1;
1852 DECL_IGNORED_P (decl) = 0;
1853 TREE_PUBLIC (decl) = 0;
1854 DECL_UNINLINABLE (decl) = 1;
1855 DECL_EXTERNAL (decl) = 0;
1856 DECL_CONTEXT (decl) = NULL_TREE;
1857 DECL_INITIAL (decl) = make_node (BLOCK);
1858 bool target_p = false;
1859 if (lookup_attribute ("omp declare target",
1860 DECL_ATTRIBUTES (current_function_decl)))
1861 target_p = true;
1862 else
1863 {
1864 omp_context *octx;
1865 for (octx = ctx; octx; octx = octx->outer)
1866 if (gimple_code (octx->stmt) == GIMPLE_OMP_TARGET
1867 && gimple_omp_target_kind (octx->stmt)
1868 == GF_OMP_TARGET_KIND_REGION)
1869 {
1870 target_p = true;
1871 break;
1872 }
1873 }
1874 if (target_p)
1875 DECL_ATTRIBUTES (decl)
1876 = tree_cons (get_identifier ("omp declare target"),
1877 NULL_TREE, DECL_ATTRIBUTES (decl));
1878
1879 t = build_decl (DECL_SOURCE_LOCATION (decl),
1880 RESULT_DECL, NULL_TREE, void_type_node);
1881 DECL_ARTIFICIAL (t) = 1;
1882 DECL_IGNORED_P (t) = 1;
1883 DECL_CONTEXT (t) = decl;
1884 DECL_RESULT (decl) = t;
1885
1886 t = build_decl (DECL_SOURCE_LOCATION (decl),
1887 PARM_DECL, get_identifier (".omp_data_i"), ptr_type_node);
1888 DECL_ARTIFICIAL (t) = 1;
1889 DECL_NAMELESS (t) = 1;
1890 DECL_ARG_TYPE (t) = ptr_type_node;
1891 DECL_CONTEXT (t) = current_function_decl;
1892 TREE_USED (t) = 1;
1893 DECL_ARGUMENTS (decl) = t;
1894 if (!task_copy)
1895 ctx->receiver_decl = t;
1896 else
1897 {
1898 t = build_decl (DECL_SOURCE_LOCATION (decl),
1899 PARM_DECL, get_identifier (".omp_data_o"),
1900 ptr_type_node);
1901 DECL_ARTIFICIAL (t) = 1;
1902 DECL_NAMELESS (t) = 1;
1903 DECL_ARG_TYPE (t) = ptr_type_node;
1904 DECL_CONTEXT (t) = current_function_decl;
1905 TREE_USED (t) = 1;
1906 TREE_ADDRESSABLE (t) = 1;
1907 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1908 DECL_ARGUMENTS (decl) = t;
1909 }
1910
1911 /* Allocate memory for the function structure. The call to
1912 allocate_struct_function clobbers CFUN, so we need to restore
1913 it afterward. */
1914 push_struct_function (decl);
1915 cfun->function_end_locus = gimple_location (ctx->stmt);
1916 pop_cfun ();
1917 }
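/* Illustrative sketch (added comment, not in the original source): for a
   parallel region inside foo, the decl built above corresponds to

     static void foo._omp_fn.0 (void *.omp_data_i);

   while the task_copy variant additionally takes a .omp_data_o pointer
   argument for the data-marshalling copy function.  The function name
   foo._omp_fn.0 is an assumed example. */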
1918
1919/* Callback for walk_gimple_seq. Check if combined parallel
1920 contains gimple_omp_for_combined_into_p OMP_FOR. */
1921
1922static tree
1923find_combined_for (gimple_stmt_iterator *gsi_p,
1924 bool *handled_ops_p,
1925 struct walk_stmt_info *wi)
1926{
1927 gimple stmt = gsi_stmt (*gsi_p);
1928
1929 *handled_ops_p = true;
1930 switch (gimple_code (stmt))
1931 {
1932 WALK_SUBSTMTS;
1933
1934 case GIMPLE_OMP_FOR:
1935 if (gimple_omp_for_combined_into_p (stmt)
1936 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
1937 {
1938 wi->info = stmt;
1939 return integer_zero_node;
1940 }
1941 break;
1942 default:
1943 break;
1944 }
1945 return NULL;
1946 }
1947
1948/* Scan an OpenMP parallel directive. */
1949
1950static void
1951 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1952{
1953 omp_context *ctx;
1954 tree name;
1955 gimple stmt = gsi_stmt (*gsi);
1956
1957 /* Ignore parallel directives with empty bodies, unless there
1958 are copyin clauses. */
1959 if (optimize > 0
1960 && empty_body_p (gimple_omp_body (stmt))
1961 && find_omp_clause (gimple_omp_parallel_clauses (stmt),
1962 OMP_CLAUSE_COPYIN) == NULL)
1963 {
1964 gsi_replace (gsi, gimple_build_nop (), false);
1965 return;
1966 }
1967
1968 if (gimple_omp_parallel_combined_p (stmt))
1969 {
1970 gimple for_stmt;
1971 struct walk_stmt_info wi;
1972
1973 memset (&wi, 0, sizeof (wi));
1974 wi.val_only = true;
1975 walk_gimple_seq (gimple_omp_body (stmt),
1976 find_combined_for, NULL, &wi);
1977 for_stmt = (gimple) wi.info;
1978 if (for_stmt)
1979 {
1980 struct omp_for_data fd;
1981 extract_omp_for_data (for_stmt, &fd, NULL);
1982 /* We need two temporaries with fd.loop.v type (istart/iend)
1983 and then (fd.collapse - 1) temporaries with the same
1984 type for count2 ... countN-1 vars if not constant. */
1985 size_t count = 2, i;
1986 tree type = fd.iter_type;
1987 if (fd.collapse > 1
1988 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1989 count += fd.collapse - 1;
1990 for (i = 0; i < count; i++)
1991 {
1992 tree temp = create_tmp_var (type, NULL);
1993 tree c = build_omp_clause (UNKNOWN_LOCATION,
1994 OMP_CLAUSE__LOOPTEMP_);
1995 OMP_CLAUSE_DECL (c) = temp;
1996 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1997 gimple_omp_parallel_set_clauses (stmt, c);
1998 }
1999 }
2000 }
2001
2002 ctx = new_omp_context (stmt, outer_ctx);
2003 if (taskreg_nesting_level > 1)
2004 ctx->is_nested = true;
2005 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2006 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2007 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2008 name = create_tmp_var_name (".omp_data_s");
2009 name = build_decl (gimple_location (stmt),
2010 TYPE_DECL, name, ctx->record_type);
2011 DECL_ARTIFICIAL (name) = 1;
2012 DECL_NAMELESS (name) = 1;
2013 TYPE_NAME (ctx->record_type) = name;
2014 create_omp_child_function (ctx, false);
2015 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2016
2017 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2018 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2019
2020 if (TYPE_FIELDS (ctx->record_type) == NULL)
2021 ctx->record_type = ctx->receiver_decl = NULL;
2022 else
2023 {
2024 layout_type (ctx->record_type);
2025 fixup_child_record_type (ctx);
2026 }
2027 }
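/* Worked example (added comment): for a combined construct such as

     #pragma omp parallel for
     for (i = 0; i < n; i++) ...

   the combined-parallel handling above adds two _looptemp_ clauses
   (for the istart/iend temporaries), plus one more per extra collapsed
   loop whose bound is not a compile-time constant, before the region
   body is scanned. */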
2028
2029/* Scan an OpenMP task directive. */
2030
2031static void
2032 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2033{
2034 omp_context *ctx;
2035 tree name, t;
2036 gimple stmt = gsi_stmt (*gsi);
2037 location_t loc = gimple_location (stmt);
2038
2039 /* Ignore task directives with empty bodies. */
2040 if (optimize > 0
2041 && empty_body_p (gimple_omp_body (stmt)))
2042 {
2043 gsi_replace (gsi, gimple_build_nop (), false);
2044 return;
2045 }
2046
2047 ctx = new_omp_context (stmt, outer_ctx);
2048 if (taskreg_nesting_level > 1)
2049 ctx->is_nested = true;
2050 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2051 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2052 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2053 name = create_tmp_var_name (".omp_data_s");
2054 name = build_decl (gimple_location (stmt),
2055 TYPE_DECL, name, ctx->record_type);
2056 DECL_ARTIFICIAL (name) = 1;
2057 DECL_NAMELESS (name) = 1;
2058 TYPE_NAME (ctx->record_type) = name;
2059 create_omp_child_function (ctx, false);
2060 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2061
2062 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2063
2064 if (ctx->srecord_type)
2065 {
2066 name = create_tmp_var_name (".omp_data_a");
2067 name = build_decl (gimple_location (stmt),
2068 TYPE_DECL, name, ctx->srecord_type);
2069 DECL_ARTIFICIAL (name) = 1;
2070 DECL_NAMELESS (name) = 1;
2071 TYPE_NAME (ctx->srecord_type) = name;
2072 create_omp_child_function (ctx, true);
2073 }
2074
2075 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2076
2077 if (TYPE_FIELDS (ctx->record_type) == NULL)
2078 {
2079 ctx->record_type = ctx->receiver_decl = NULL;
2080 t = build_int_cst (long_integer_type_node, 0);
2081 gimple_omp_task_set_arg_size (stmt, t);
2082 t = build_int_cst (long_integer_type_node, 1);
2083 gimple_omp_task_set_arg_align (stmt, t);
2084 }
2085 else
2086 {
2087 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2088 /* Move VLA fields to the end. */
2089 p = &TYPE_FIELDS (ctx->record_type);
2090 while (*p)
2091 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2092 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2093 {
2094 *q = *p;
2095 *p = TREE_CHAIN (*p);
2096 TREE_CHAIN (*q) = NULL_TREE;
2097 q = &TREE_CHAIN (*q);
2098 }
2099 else
2100 p = &DECL_CHAIN (*p);
2101 *p = vla_fields;
2102 layout_type (ctx->record_type);
2103 fixup_child_record_type (ctx);
2104 if (ctx->srecord_type)
2105 layout_type (ctx->srecord_type);
2106 t = fold_convert_loc (loc, long_integer_type_node,
2107 TYPE_SIZE_UNIT (ctx->record_type));
2108 gimple_omp_task_set_arg_size (stmt, t);
2109 t = build_int_cst (long_integer_type_node,
2110 TYPE_ALIGN_UNIT (ctx->record_type));
2111 gimple_omp_task_set_arg_align (stmt, t);
2112 }
2113 }
2114
2115
2116 /* Scan an OpenMP loop directive. */
2117
2118static void
2119 scan_omp_for (gimple stmt, omp_context *outer_ctx)
2120 {
2121 omp_context *ctx;
2122 size_t i;
2123
2124 ctx = new_omp_context (stmt, outer_ctx);
2125
2126 scan_sharing_clauses (gimple_omp_for_clauses (stmt), ctx);
2127
2128 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2129 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2130 {
2131 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2132 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2133 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2134 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2135 }
2136 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2137 }
2138
2139/* Scan an OpenMP sections directive. */
2140
2141static void
2142 scan_omp_sections (gimple stmt, omp_context *outer_ctx)
2143 {
2144 omp_context *ctx;
2145
2146 ctx = new_omp_context (stmt, outer_ctx);
2147 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2148 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2149 }
2150
2151/* Scan an OpenMP single directive. */
2152
2153static void
2154 scan_omp_single (gimple stmt, omp_context *outer_ctx)
2155 {
2156 omp_context *ctx;
2157 tree name;
2158
2159 ctx = new_omp_context (stmt, outer_ctx);
2160 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2161 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2162 name = create_tmp_var_name (".omp_copy_s");
2163 name = build_decl (gimple_location (stmt),
2164 TYPE_DECL, name, ctx->record_type);
2165 TYPE_NAME (ctx->record_type) = name;
2166
2167 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2168 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2169
2170 if (TYPE_FIELDS (ctx->record_type) == NULL)
2171 ctx->record_type = NULL;
2172 else
2173 layout_type (ctx->record_type);
2174 }
2175
2176/* Scan an OpenMP target{, data, update} directive. */
2177
2178static void
2179scan_omp_target (gimple stmt, omp_context *outer_ctx)
2180{
2181 omp_context *ctx;
2182 tree name;
2183 int kind = gimple_omp_target_kind (stmt);
2184
2185 ctx = new_omp_context (stmt, outer_ctx);
2186 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2187 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2188 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2189 name = create_tmp_var_name (".omp_data_t");
2190 name = build_decl (gimple_location (stmt),
2191 TYPE_DECL, name, ctx->record_type);
2192 DECL_ARTIFICIAL (name) = 1;
2193 DECL_NAMELESS (name) = 1;
2194 TYPE_NAME (ctx->record_type) = name;
2195 if (kind == GF_OMP_TARGET_KIND_REGION)
2196 {
2197 create_omp_child_function (ctx, false);
2198 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2199 }
2200
2201 scan_sharing_clauses (gimple_omp_target_clauses (stmt), ctx);
2202 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2203
2204 if (TYPE_FIELDS (ctx->record_type) == NULL)
2205 ctx->record_type = ctx->receiver_decl = NULL;
2206 else
2207 {
2208 TYPE_FIELDS (ctx->record_type)
2209 = nreverse (TYPE_FIELDS (ctx->record_type));
2210 #ifdef ENABLE_CHECKING
2211 tree field;
2212 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2213 for (field = TYPE_FIELDS (ctx->record_type);
2214 field;
2215 field = DECL_CHAIN (field))
2216 gcc_assert (DECL_ALIGN (field) == align);
2217 #endif
2218 layout_type (ctx->record_type);
2219 if (kind == GF_OMP_TARGET_KIND_REGION)
2220 fixup_child_record_type (ctx);
2221 }
2222 }
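/* Hedged example (added comment): for a directive such as

     #pragma omp target map(tofrom: x, y)

   the scan above collects a field per mapped object into the
   .omp_data_t record; for a target region (as opposed to target
   data/update) the record is then laid out and the child function's
   receiver type fixed up. */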
2223
2224/* Scan an OpenMP teams directive. */
2225
2226static void
2227scan_omp_teams (gimple stmt, omp_context *outer_ctx)
2228{
2229 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2230 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2231 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2232 }
2233
2234 /* Check OpenMP nesting restrictions. */
2235static bool
2236check_omp_nesting_restrictions (gimple stmt, omp_context *ctx)
a6fc8e21 2237{
2238 if (ctx != NULL)
2239 {
2240 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2241 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
2242 {
2243 error_at (gimple_location (stmt),
2244 "OpenMP constructs may not be nested inside simd region");
2245 return false;
2246 }
2247 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2248 {
2249 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2250 || (gimple_omp_for_kind (stmt)
2251 != GF_OMP_FOR_KIND_DISTRIBUTE))
2252 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2253 {
2254 error_at (gimple_location (stmt),
2255 "only distribute or parallel constructs are allowed to "
2256 "be closely nested inside teams construct");
2257 return false;
2258 }
2259 }
2260 }
2261 switch (gimple_code (stmt))
2262 {
2263 case GIMPLE_OMP_FOR:
2264 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_KIND_SIMD)
2265 return true;
2266 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2267 {
2268 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2269 {
2270 error_at (gimple_location (stmt),
2271 "distribute construct must be closely nested inside "
2272 "teams construct");
2273 return false;
2274 }
2275 return true;
2276 }
2277 /* FALLTHRU */
2278 case GIMPLE_CALL:
2279 if (is_gimple_call (stmt)
2280 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2281 == BUILT_IN_GOMP_CANCEL
2282 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2283 == BUILT_IN_GOMP_CANCELLATION_POINT))
2284 {
2285 const char *bad = NULL;
2286 const char *kind = NULL;
2287 if (ctx == NULL)
2288 {
2289 error_at (gimple_location (stmt), "orphaned %qs construct",
2290 DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2291 == BUILT_IN_GOMP_CANCEL
2292 ? "#pragma omp cancel"
2293 : "#pragma omp cancellation point");
2294 return false;
2295 }
2296 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2297 ? tree_to_shwi (gimple_call_arg (stmt, 0))
2298 : 0)
2299 {
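/* Added note: the first argument of GOMP_cancel and
   GOMP_cancellation_point is a mask selecting the construct kind,
   matching the cases below: 1 = parallel, 2 = for, 4 = sections,
   8 = taskgroup. */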
2300 case 1:
2301 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2302 bad = "#pragma omp parallel";
2303 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2304 == BUILT_IN_GOMP_CANCEL
2305 && !integer_zerop (gimple_call_arg (stmt, 1)))
2306 ctx->cancellable = true;
2307 kind = "parallel";
2308 break;
2309 case 2:
2310 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2311 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2312 bad = "#pragma omp for";
2313 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2314 == BUILT_IN_GOMP_CANCEL
2315 && !integer_zerop (gimple_call_arg (stmt, 1)))
2316 {
2317 ctx->cancellable = true;
2318 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2319 OMP_CLAUSE_NOWAIT))
2320 warning_at (gimple_location (stmt), 0,
2321 "%<#pragma omp cancel for%> inside "
2322 "%<nowait%> for construct");
2323 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2324 OMP_CLAUSE_ORDERED))
2325 warning_at (gimple_location (stmt), 0,
2326 "%<#pragma omp cancel for%> inside "
2327 "%<ordered%> for construct");
2328 }
2329 kind = "for";
2330 break;
2331 case 4:
2332 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2333 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2334 bad = "#pragma omp sections";
2335 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2336 == BUILT_IN_GOMP_CANCEL
2337 && !integer_zerop (gimple_call_arg (stmt, 1)))
2338 {
2339 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2340 {
2341 ctx->cancellable = true;
2342 if (find_omp_clause (gimple_omp_sections_clauses
2343 (ctx->stmt),
2344 OMP_CLAUSE_NOWAIT))
2345 warning_at (gimple_location (stmt), 0,
2346 "%<#pragma omp cancel sections%> inside "
2347 "%<nowait%> sections construct");
2348 }
2349 else
2350 {
2351 gcc_assert (ctx->outer
2352 && gimple_code (ctx->outer->stmt)
2353 == GIMPLE_OMP_SECTIONS);
2354 ctx->outer->cancellable = true;
2355 if (find_omp_clause (gimple_omp_sections_clauses
2356 (ctx->outer->stmt),
2357 OMP_CLAUSE_NOWAIT))
2358 warning_at (gimple_location (stmt), 0,
2359 "%<#pragma omp cancel sections%> inside "
2360 "%<nowait%> sections construct");
2361 }
2362 }
2363 kind = "sections";
2364 break;
2365 case 8:
2366 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2367 bad = "#pragma omp task";
2368 else
2369 ctx->cancellable = true;
2370 kind = "taskgroup";
2371 break;
2372 default:
2373 error_at (gimple_location (stmt), "invalid arguments");
2374 return false;
2375 }
2376 if (bad)
2377 {
2378 error_at (gimple_location (stmt),
2379 "%<%s %s%> construct not closely nested inside of %qs",
2380 DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2381 == BUILT_IN_GOMP_CANCEL
2382 ? "#pragma omp cancel"
2383 : "#pragma omp cancellation point", kind, bad);
2384 return false;
2385 }
2386 }
2387 /* FALLTHRU */
2388 case GIMPLE_OMP_SECTIONS:
2389 case GIMPLE_OMP_SINGLE:
2390 for (; ctx != NULL; ctx = ctx->outer)
2391 switch (gimple_code (ctx->stmt))
2392 {
2393 case GIMPLE_OMP_FOR:
2394 case GIMPLE_OMP_SECTIONS:
2395 case GIMPLE_OMP_SINGLE:
2396 case GIMPLE_OMP_ORDERED:
2397 case GIMPLE_OMP_MASTER:
2398 case GIMPLE_OMP_TASK:
2399 case GIMPLE_OMP_CRITICAL:
2400 if (is_gimple_call (stmt))
2401 {
2402 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2403 != BUILT_IN_GOMP_BARRIER)
2404 return true;
2405 error_at (gimple_location (stmt),
2406 "barrier region may not be closely nested inside "
2407 "of work-sharing, critical, ordered, master or "
2408 "explicit task region");
2409 return false;
2410 }
2411 error_at (gimple_location (stmt),
2412 "work-sharing region may not be closely nested inside "
2413 "of work-sharing, critical, ordered, master or explicit "
2414 "task region");
2415 return false;
2416 case GIMPLE_OMP_PARALLEL:
2417 return true;
2418 default:
2419 break;
2420 }
2421 break;
2422 case GIMPLE_OMP_MASTER:
2423 for (; ctx != NULL; ctx = ctx->outer)
2424 switch (gimple_code (ctx->stmt))
2425 {
2426 case GIMPLE_OMP_FOR:
2427 case GIMPLE_OMP_SECTIONS:
2428 case GIMPLE_OMP_SINGLE:
2429 case GIMPLE_OMP_TASK:
2430 error_at (gimple_location (stmt),
2431 "master region may not be closely nested inside "
2432 "of work-sharing or explicit task region");
2433 return false;
2434 case GIMPLE_OMP_PARALLEL:
2435 return true;
2436 default:
2437 break;
2438 }
2439 break;
2440 case GIMPLE_OMP_ORDERED:
2441 for (; ctx != NULL; ctx = ctx->outer)
2442 switch (gimple_code (ctx->stmt))
2443 {
2444 case GIMPLE_OMP_CRITICAL:
2445 case GIMPLE_OMP_TASK:
2446 error_at (gimple_location (stmt),
2447 "ordered region may not be closely nested inside "
2448 "of critical or explicit task region");
2449 return false;
2450 case GIMPLE_OMP_FOR:
2451 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2452 OMP_CLAUSE_ORDERED) == NULL)
2453 {
2454 error_at (gimple_location (stmt),
2455 "ordered region must be closely nested inside "
2456 "a loop region with an ordered clause");
2457 return false;
2458 }
2459 return true;
2460 case GIMPLE_OMP_PARALLEL:
2461 error_at (gimple_location (stmt),
2462 "ordered region must be closely nested inside "
2463 "a loop region with an ordered clause");
2464 return false;
2465 default:
2466 break;
2467 }
2468 break;
2469 case GIMPLE_OMP_CRITICAL:
2470 for (; ctx != NULL; ctx = ctx->outer)
2471 if (gimple_code (ctx->stmt) == GIMPLE_OMP_CRITICAL
2472 && (gimple_omp_critical_name (stmt)
2473 == gimple_omp_critical_name (ctx->stmt)))
2474 {
2475 error_at (gimple_location (stmt),
2476 "critical region may not be nested inside a critical "
2477 "region with the same name");
2478 return false;
2479 }
2480 break;
2481 case GIMPLE_OMP_TEAMS:
2482 if (ctx == NULL
2483 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2484 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2485 {
2486 error_at (gimple_location (stmt),
2487 "teams construct not closely nested inside of target "
2488 "region");
2489 return false;
2490 }
2491 break;
2492 default:
2493 break;
2494 }
2495 return true;
2496 }
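/* Hedged illustration (added comment): code such as

     #pragma omp for
     for (i = 0; i < n; i++)
       {
       #pragma omp master
         foo ();
       }

   is rejected here with "master region may not be closely nested
   inside of work-sharing or explicit task region". */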
2497
2498
2499/* Helper function scan_omp.
2500
2501 Callback for walk_tree or operators in walk_gimple_stmt used to
2502 scan for OpenMP directives in TP. */
2503
2504static tree
2505 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
2506 {
2507 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2508 omp_context *ctx = (omp_context *) wi->info;
2509 tree t = *tp;
2510
2511 switch (TREE_CODE (t))
2512 {
2513 case VAR_DECL:
2514 case PARM_DECL:
2515 case LABEL_DECL:
2516 case RESULT_DECL:
2517 if (ctx)
2518 *tp = remap_decl (t, &ctx->cb);
2519 break;
2520
2521 default:
2522 if (ctx && TYPE_P (t))
2523 *tp = remap_type (t, &ctx->cb);
2524 else if (!DECL_P (t))
2525 {
2526 *walk_subtrees = 1;
2527 if (ctx)
2528 {
2529 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
2530 if (tem != TREE_TYPE (t))
2531 {
2532 if (TREE_CODE (t) == INTEGER_CST)
2533 *tp = build_int_cst_wide (tem,
2534 TREE_INT_CST_LOW (t),
2535 TREE_INT_CST_HIGH (t));
2536 else
2537 TREE_TYPE (t) = tem;
2538 }
2539 }
2540 }
2541 break;
2542 }
2543
2544 return NULL_TREE;
2545}
2546
2547/* Return true if FNDECL is a setjmp or a longjmp. */
2548
2549static bool
2550setjmp_or_longjmp_p (const_tree fndecl)
2551{
2552 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2553 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
2554 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
2555 return true;
2556
2557 tree declname = DECL_NAME (fndecl);
2558 if (!declname)
2559 return false;
2560 const char *name = IDENTIFIER_POINTER (declname);
2561 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
2562 }
2563
2564
2565/* Helper function for scan_omp.
2566
2567 Callback for walk_gimple_stmt used to scan for OpenMP directives in
2568 the current statement in GSI. */
2569
2570static tree
2571scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2572 struct walk_stmt_info *wi)
2573{
2574 gimple stmt = gsi_stmt (*gsi);
2575 omp_context *ctx = (omp_context *) wi->info;
2576
2577 if (gimple_has_location (stmt))
2578 input_location = gimple_location (stmt);
2579
2580 /* Check the OpenMP nesting restrictions. */
2581 bool remove = false;
2582 if (is_gimple_omp (stmt))
2583 remove = !check_omp_nesting_restrictions (stmt, ctx);
2584 else if (is_gimple_call (stmt))
2585 {
2586 tree fndecl = gimple_call_fndecl (stmt);
2587 if (fndecl)
2588 {
2589 if (setjmp_or_longjmp_p (fndecl)
2590 && ctx
2591 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2592 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
2593 {
2594 remove = true;
2595 error_at (gimple_location (stmt),
2596 "setjmp/longjmp inside simd construct");
2597 }
2598 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2599 switch (DECL_FUNCTION_CODE (fndecl))
2600 {
2601 case BUILT_IN_GOMP_BARRIER:
2602 case BUILT_IN_GOMP_CANCEL:
2603 case BUILT_IN_GOMP_CANCELLATION_POINT:
2604 case BUILT_IN_GOMP_TASKYIELD:
2605 case BUILT_IN_GOMP_TASKWAIT:
2606 case BUILT_IN_GOMP_TASKGROUP_START:
2607 case BUILT_IN_GOMP_TASKGROUP_END:
2608 remove = !check_omp_nesting_restrictions (stmt, ctx);
2609 break;
2610 default:
2611 break;
2612 }
2613 }
2614 }
2615 if (remove)
2616 {
2617 stmt = gimple_build_nop ();
2618 gsi_replace (gsi, stmt, false);
2619 }
2620
2621 *handled_ops_p = true;
2622
2623 switch (gimple_code (stmt))
2624 {
2625 case GIMPLE_OMP_PARALLEL:
2626 taskreg_nesting_level++;
2627 scan_omp_parallel (gsi, ctx);
2628 taskreg_nesting_level--;
2629 break;
2630
2631 case GIMPLE_OMP_TASK:
2632 taskreg_nesting_level++;
2633 scan_omp_task (gsi, ctx);
2634 taskreg_nesting_level--;
2635 break;
2636
2637 case GIMPLE_OMP_FOR:
2638 scan_omp_for (stmt, ctx);
953ff289
DN
2639 break;
2640
726a989a
RB
2641 case GIMPLE_OMP_SECTIONS:
2642 scan_omp_sections (stmt, ctx);
953ff289
DN
2643 break;
2644
726a989a
RB
2645 case GIMPLE_OMP_SINGLE:
2646 scan_omp_single (stmt, ctx);
953ff289
DN
2647 break;
2648
2649 case GIMPLE_OMP_SECTION:
2650 case GIMPLE_OMP_MASTER:
2651 case GIMPLE_OMP_TASKGROUP:
2652 case GIMPLE_OMP_ORDERED:
2653 case GIMPLE_OMP_CRITICAL:
2654 ctx = new_omp_context (stmt, ctx);
2655 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2656 break;
2657
2658 case GIMPLE_OMP_TARGET:
2659 scan_omp_target (stmt, ctx);
2660 break;
2661
2662 case GIMPLE_OMP_TEAMS:
2663 scan_omp_teams (stmt, ctx);
2664 break;
2665
2666 case GIMPLE_BIND:
2667 {
2668 tree var;
953ff289 2669
2670 *handled_ops_p = false;
2671 if (ctx)
2672 for (var = gimple_bind_vars (stmt); var ; var = DECL_CHAIN (var))
2673 insert_decl_map (&ctx->cb, var, var);
2674 }
2675 break;
2676 default:
2677 *handled_ops_p = false;
2678 break;
2679 }
2680
2681 return NULL_TREE;
2682}
2683
2684
2685/* Scan all the statements starting at the current statement. CTX
2686 contains context information about the OpenMP directives and
2687 clauses found during the scan. */
2688
2689static void
2690 scan_omp (gimple_seq *body_p, omp_context *ctx)
2691{
2692 location_t saved_location;
2693 struct walk_stmt_info wi;
2694
2695 memset (&wi, 0, sizeof (wi));
2696 wi.info = ctx;
2697 wi.want_locations = true;
2698
2699 saved_location = input_location;
2700 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
2701 input_location = saved_location;
2702 }
2703
2704/* Re-gimplification and code generation routines. */
2705
2706/* Build a call to GOMP_barrier. */
2707
2708static gimple
2709build_omp_barrier (tree lhs)
2710{
2711 tree fndecl = builtin_decl_explicit (lhs ? BUILT_IN_GOMP_BARRIER_CANCEL
2712 : BUILT_IN_GOMP_BARRIER);
2713 gimple g = gimple_build_call (fndecl, 0);
2714 if (lhs)
2715 gimple_call_set_lhs (g, lhs);
2716 return g;
2717 }
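/* Added sketch of the emitted GIMPLE: either

     GOMP_barrier ();                    // no cancellation pending
     D.1234 = GOMP_barrier_cancel ();    // LHS set when cancellable

   where D.1234 is a hypothetical temporary receiving the
   "was cancelled" result. */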
2718
2719/* If a context was created for STMT when it was scanned, return it. */
2720
2721static omp_context *
2722 maybe_lookup_ctx (gimple stmt)
2723{
2724 splay_tree_node n;
2725 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
2726 return n ? (omp_context *) n->value : NULL;
2727}
2728
2729
2730/* Find the mapping for DECL in CTX or the immediately enclosing
2731 context that has a mapping for DECL.
2732
2733 If CTX is a nested parallel directive, we may have to use the decl
2734 mappings created in CTX's parent context. Suppose that we have the
2735 following parallel nesting (variable UIDs shown for clarity):
2736
2737 iD.1562 = 0;
2738 #omp parallel shared(iD.1562) -> outer parallel
2739 iD.1562 = iD.1562 + 1;
2740
2741 #omp parallel shared (iD.1562) -> inner parallel
2742 iD.1562 = iD.1562 - 1;
2743
2744 Each parallel structure will create a distinct .omp_data_s structure
2745 for copying iD.1562 in/out of the directive:
2746
2747 outer parallel .omp_data_s.1.i -> iD.1562
2748 inner parallel .omp_data_s.2.i -> iD.1562
2749
2750 A shared variable mapping will produce a copy-out operation before
2751 the parallel directive and a copy-in operation after it. So, in
2752 this case we would have:
2753
2754 iD.1562 = 0;
2755 .omp_data_o.1.i = iD.1562;
2756 #omp parallel shared(iD.1562) -> outer parallel
2757 .omp_data_i.1 = &.omp_data_o.1
2758 .omp_data_i.1->i = .omp_data_i.1->i + 1;
2759
2760 .omp_data_o.2.i = iD.1562; -> **
2761 #omp parallel shared(iD.1562) -> inner parallel
2762 .omp_data_i.2 = &.omp_data_o.2
2763 .omp_data_i.2->i = .omp_data_i.2->i - 1;
2764
2765
2766 ** This is a problem. The symbol iD.1562 cannot be referenced
2767 inside the body of the outer parallel region. But since we are
2768 emitting this copy operation while expanding the inner parallel
2769 directive, we need to access the CTX structure of the outer
2770 parallel directive to get the correct mapping:
2771
2772 .omp_data_o.2.i = .omp_data_i.1->i
2773
2774 Since there may be other workshare or parallel directives enclosing
2775 the parallel directive, it may be necessary to walk up the context
2776 parent chain. This is not a problem in general because nested
2777 parallelism happens only rarely. */
2778
2779static tree
2780lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2781{
2782 tree t;
2783 omp_context *up;
2784
2785 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2786 t = maybe_lookup_decl (decl, up);
2787
2788 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
2789
2790 return t ? t : decl;
2791 }
2792
2793
2794/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
2795 in outer contexts. */
2796
2797static tree
2798maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2799{
2800 tree t = NULL;
2801 omp_context *up;
2802
2803 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2804 t = maybe_lookup_decl (decl, up);
2805
2806 return t ? t : decl;
2807 }
2808
2809
2810/* Construct the initialization value for reduction CLAUSE. */
2811
2812tree
2813omp_reduction_init (tree clause, tree type)
2814{
2815 location_t loc = OMP_CLAUSE_LOCATION (clause);
2816 switch (OMP_CLAUSE_REDUCTION_CODE (clause))
2817 {
2818 case PLUS_EXPR:
2819 case MINUS_EXPR:
2820 case BIT_IOR_EXPR:
2821 case BIT_XOR_EXPR:
2822 case TRUTH_OR_EXPR:
2823 case TRUTH_ORIF_EXPR:
2824 case TRUTH_XOR_EXPR:
2825 case NE_EXPR:
2826 return build_zero_cst (type);
2827
2828 case MULT_EXPR:
2829 case TRUTH_AND_EXPR:
2830 case TRUTH_ANDIF_EXPR:
2831 case EQ_EXPR:
2832 return fold_convert_loc (loc, type, integer_one_node);
2833
2834 case BIT_AND_EXPR:
2835 return fold_convert_loc (loc, type, integer_minus_one_node);
2836
2837 case MAX_EXPR:
2838 if (SCALAR_FLOAT_TYPE_P (type))
2839 {
2840 REAL_VALUE_TYPE max, min;
2841 if (HONOR_INFINITIES (TYPE_MODE (type)))
2842 {
2843 real_inf (&max);
2844 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
2845 }
2846 else
2847 real_maxval (&min, 1, TYPE_MODE (type));
2848 return build_real (type, min);
2849 }
2850 else
2851 {
2852 gcc_assert (INTEGRAL_TYPE_P (type));
2853 return TYPE_MIN_VALUE (type);
2854 }
2855
2856 case MIN_EXPR:
2857 if (SCALAR_FLOAT_TYPE_P (type))
2858 {
2859 REAL_VALUE_TYPE max;
2860 if (HONOR_INFINITIES (TYPE_MODE (type)))
2861 real_inf (&max);
2862 else
2863 real_maxval (&max, 0, TYPE_MODE (type));
2864 return build_real (type, max);
2865 }
2866 else
2867 {
2868 gcc_assert (INTEGRAL_TYPE_P (type));
2869 return TYPE_MAX_VALUE (type);
2870 }
2871
2872 default:
2873 gcc_unreachable ();
2874 }
2875 }
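/* Added examples of the initializers chosen above (assuming the usual
   C types):

     reduction(+:x), double  ->  0.0
     reduction(*:x), int     ->  1
     reduction(&:x), int     ->  -1 (all bits set)
     reduction(max:x), int   ->  INT_MIN
     reduction(min:x), float ->  +Inf (or FLT_MAX if the mode does not
                                  honor infinities) */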
2876
2877/* Return alignment to be assumed for var in CLAUSE, which should be
2878 OMP_CLAUSE_ALIGNED. */
2879
2880static tree
2881omp_clause_aligned_alignment (tree clause)
2882{
2883 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2884 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
2885
2886 /* Otherwise return implementation defined alignment. */
2887 unsigned int al = 1;
2888 enum machine_mode mode, vmode;
2889 int vs = targetm.vectorize.autovectorize_vector_sizes ();
2890 if (vs)
2891 vs = 1 << floor_log2 (vs);
2892 static enum mode_class classes[]
2893 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
2894 for (int i = 0; i < 4; i += 2)
2895 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
2896 mode != VOIDmode;
2897 mode = GET_MODE_WIDER_MODE (mode))
2898 {
2899 vmode = targetm.vectorize.preferred_simd_mode (mode);
2900 if (GET_MODE_CLASS (vmode) != classes[i + 1])
2901 continue;
2902 while (vs
2903 && GET_MODE_SIZE (vmode) < vs
2904 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
2905 vmode = GET_MODE_2XWIDER_MODE (vmode);
2906
2907 tree type = lang_hooks.types.type_for_mode (mode, 1);
2908 if (type == NULL_TREE || TYPE_MODE (type) != mode)
2909 continue;
2910 type = build_vector_type (type, GET_MODE_SIZE (vmode)
2911 / GET_MODE_SIZE (mode));
2912 if (TYPE_MODE (type) != vmode)
2913 continue;
2914 if (TYPE_ALIGN_UNIT (type) > al)
2915 al = TYPE_ALIGN_UNIT (type);
2916 }
2917 return build_int_cst (integer_type_node, al);
2918 }
2919
74bf76ed
JJ
2920/* Return maximum possible vectorization factor for the target. */
2921
2922static int
2923omp_max_vf (void)
2924{
2925 if (!optimize
2926 || optimize_debug
2927 || (!flag_tree_loop_vectorize
2928 && (global_options_set.x_flag_tree_loop_vectorize
2929 || global_options_set.x_flag_tree_vectorize)))
2930 return 1;
2931
2932 int vs = targetm.vectorize.autovectorize_vector_sizes ();
2933 if (vs)
2934 {
2935 vs = 1 << floor_log2 (vs);
2936 return vs;
2937 }
2938 enum machine_mode vqimode = targetm.vectorize.preferred_simd_mode (QImode);
2939 if (GET_MODE_CLASS (vqimode) == MODE_VECTOR_INT)
2940 return GET_MODE_NUNITS (vqimode);
2941 return 1;
2942 }
2943
2944/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
2945 privatization. */
2946
2947static bool
2948lower_rec_simd_input_clauses (tree new_var, omp_context *ctx, int &max_vf,
2949 tree &idx, tree &lane, tree &ivar, tree &lvar)
2950{
2951 if (max_vf == 0)
2952 {
2953 max_vf = omp_max_vf ();
2954 if (max_vf > 1)
2955 {
2956 tree c = find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2957 OMP_CLAUSE_SAFELEN);
2958 if (c
2959 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c), max_vf) == -1)
2960 max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
2961 }
2962 if (max_vf > 1)
2963 {
2964 idx = create_tmp_var (unsigned_type_node, NULL);
2965 lane = create_tmp_var (unsigned_type_node, NULL);
2966 }
2967 }
2968 if (max_vf == 1)
2969 return false;
2970
2971 tree atype = build_array_type_nelts (TREE_TYPE (new_var), max_vf);
2972 tree avar = create_tmp_var_raw (atype, NULL);
2973 if (TREE_ADDRESSABLE (new_var))
2974 TREE_ADDRESSABLE (avar) = 1;
2975 DECL_ATTRIBUTES (avar)
2976 = tree_cons (get_identifier ("omp simd array"), NULL,
2977 DECL_ATTRIBUTES (avar));
2978 gimple_add_tmp_var (avar);
2979 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, idx,
2980 NULL_TREE, NULL_TREE);
2981 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, lane,
2982 NULL_TREE, NULL_TREE);
2983 if (DECL_P (new_var))
2984 {
2985 SET_DECL_VALUE_EXPR (new_var, lvar);
2986 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
2987 }
2988 return true;
2989 }
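/* Hedged sketch of the transformation above: a privatized scalar x in
   a simd loop is backed by a hypothetical "omp simd array"

     T D.simd[max_vf] __attribute__((omp simd array));

   with references rewritten (via DECL_VALUE_EXPR) to D.simd[idx]
   inside the vectorizable body and D.simd[lane] elsewhere. */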
2990
2991/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
2992 from the receiver (aka child) side and initializers for REFERENCE_TYPE
2993 private variables. Initialization statements go in ILIST, while calls
2994 to destructors go in DLIST. */
2995
2996static void
2997 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
2998 omp_context *ctx, struct omp_for_data *fd)
2999 {
3000 tree c, dtor, copyin_seq, x, ptr;
3001 bool copyin_by_ref = false;
3002 bool lastprivate_firstprivate = false;
3003 bool reduction_omp_orig_ref = false;
3004 int pass;
3005 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3006 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD);
3007 int max_vf = 0;
3008 tree lane = NULL_TREE, idx = NULL_TREE;
3009 tree ivar = NULL_TREE, lvar = NULL_TREE;
3010 gimple_seq llist[2] = { NULL, NULL };
3011
3012 copyin_seq = NULL;
3013
3014 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3015 with data sharing clauses referencing variable sized vars. That
3016 is unnecessarily hard to support and very unlikely to result in
3017 vectorized code anyway. */
3018 if (is_simd)
3019 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3020 switch (OMP_CLAUSE_CODE (c))
3021 {
3022 case OMP_CLAUSE_REDUCTION:
74bf76ed
JJ
3023 case OMP_CLAUSE_PRIVATE:
3024 case OMP_CLAUSE_FIRSTPRIVATE:
3025 case OMP_CLAUSE_LASTPRIVATE:
3026 case OMP_CLAUSE_LINEAR:
3027 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3028 max_vf = 1;
3029 break;
3030 default:
3031 continue;
3032 }
3033
3034 /* Do all the fixed sized types in the first pass, and the variable sized
3035 types in the second pass. This makes sure that the scalar arguments to
3036 the variable sized types are processed before we use them in the
3037 variable sized operations. */
3038 for (pass = 0; pass < 2; ++pass)
3039 {
3040 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3041 {
3042 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3043 tree var, new_var;
3044 bool by_ref;
3045 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3046
3047 switch (c_kind)
3048 {
3049 case OMP_CLAUSE_PRIVATE:
3050 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3051 continue;
3052 break;
3053 case OMP_CLAUSE_SHARED:
3054 /* Ignore shared directives in teams construct. */
3055 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3056 continue;
3057 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3058 {
3059 gcc_assert (is_global_var (OMP_CLAUSE_DECL (c)));
3060 continue;
3061 }
3062 case OMP_CLAUSE_FIRSTPRIVATE:
3063 case OMP_CLAUSE_COPYIN:
3064 case OMP_CLAUSE_LINEAR:
3065 break;
3066 case OMP_CLAUSE_REDUCTION:
3067 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3068 reduction_omp_orig_ref = true;
3069 break;
3070 case OMP_CLAUSE__LOOPTEMP_:
3071 /* Handle _looptemp_ clauses only on parallel. */
3072 if (fd)
3073 continue;
3074 break;
3075 case OMP_CLAUSE_LASTPRIVATE:
3076 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3077 {
3078 lastprivate_firstprivate = true;
3079 if (pass != 0)
3080 continue;
3081 }
3082 break;
3083 case OMP_CLAUSE_ALIGNED:
3084 if (pass == 0)
3085 continue;
3086 var = OMP_CLAUSE_DECL (c);
3087 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3088 && !is_global_var (var))
3089 {
3090 new_var = maybe_lookup_decl (var, ctx);
3091 if (new_var == NULL_TREE)
3092 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3093 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3094 x = build_call_expr_loc (clause_loc, x, 2, new_var,
3095 omp_clause_aligned_alignment (c));
3096 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3097 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3098 gimplify_and_add (x, ilist);
3099 }
3100 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3101 && is_global_var (var))
3102 {
3103 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3104 new_var = lookup_decl (var, ctx);
3105 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3106 t = build_fold_addr_expr_loc (clause_loc, t);
3107 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3108 t = build_call_expr_loc (clause_loc, t2, 2, t,
3109 omp_clause_aligned_alignment (c));
3110 t = fold_convert_loc (clause_loc, ptype, t);
3111 x = create_tmp_var (ptype, NULL);
3112 t = build2 (MODIFY_EXPR, ptype, x, t);
3113 gimplify_and_add (t, ilist);
3114 t = build_simple_mem_ref_loc (clause_loc, x);
3115 SET_DECL_VALUE_EXPR (new_var, t);
3116 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3117 }
3118 continue;
3119 default:
3120 continue;
3121 }
3122
3123 new_var = var = OMP_CLAUSE_DECL (c);
3124 if (c_kind != OMP_CLAUSE_COPYIN)
3125 new_var = lookup_decl (var, ctx);
3126
3127 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3128 {
3129 if (pass != 0)
3130 continue;
3131 }
3132 else if (is_variable_sized (var))
3133 {
3134 /* For variable sized types, we need to allocate the
3135 actual storage here. Call alloca and store the
3136 result in the pointer decl that we created elsewhere. */
953ff289
DN
3137 if (pass == 0)
3138 continue;
3139
3140 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3141 {
3142 gimple stmt;
3143 tree tmp, atmp;
3144
3145 ptr = DECL_VALUE_EXPR (new_var);
3146 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3147 ptr = TREE_OPERAND (ptr, 0);
3148 gcc_assert (DECL_P (ptr));
3149 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
3150
3151 /* void *tmp = __builtin_alloca */
3152 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA);
3153 stmt = gimple_build_call (atmp, 1, x);
3154 tmp = create_tmp_var_raw (ptr_type_node, NULL);
3155 gimple_add_tmp_var (tmp);
3156 gimple_call_set_lhs (stmt, tmp);
3157
3158 gimple_seq_add_stmt (ilist, stmt);
3159
3160 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
3161 gimplify_assign (ptr, x, ilist);
3162 }
3163 }
3164 else if (is_reference (var))
3165 {
3166 /* For references that are being privatized for Fortran,
3167 allocate new backing storage for the new pointer
3168 variable. This allows us to avoid changing all the
3169 code that expects a pointer to something that expects
3170 a direct variable. */
3171 if (pass == 0)
3172 continue;
3173
3174 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
3175 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
3176 {
3177 x = build_receiver_ref (var, false, ctx);
3178 x = build_fold_addr_expr_loc (clause_loc, x);
3179 }
3180 else if (TREE_CONSTANT (x))
3181 {
3182 const char *name = NULL;
3183 if (DECL_NAME (var))
3184 name = IDENTIFIER_POINTER (DECL_NAME (new_var));
3185
3186 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
3187 name);
3188 gimple_add_tmp_var (x);
3189 TREE_ADDRESSABLE (x) = 1;
3190 x = build_fold_addr_expr_loc (clause_loc, x);
3191 }
3192 else
3193 {
3194 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA);
3195 x = build_call_expr_loc (clause_loc, atmp, 1, x);
3196 }
3197
3198 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3199 gimplify_assign (new_var, x, ilist);
3200
3201 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
3202 }
3203 else if (c_kind == OMP_CLAUSE_REDUCTION
3204 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3205 {
3206 if (pass == 0)
3207 continue;
3208 }
3209 else if (pass != 0)
3210 continue;
3211
3212 switch (OMP_CLAUSE_CODE (c))
3213 {
3214 case OMP_CLAUSE_SHARED:
3215 /* Ignore shared directives in teams construct. */
3216 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3217 continue;
3218 /* Shared global vars are just accessed directly. */
3219 if (is_global_var (new_var))
3220 break;
3221 /* Set up the DECL_VALUE_EXPR for shared variables now. This
3222 needs to be delayed until after fixup_child_record_type so
3223 that we get the correct type during the dereference. */
3224 by_ref = use_pointer_for_field (var, ctx);
3225 x = build_receiver_ref (var, by_ref, ctx);
3226 SET_DECL_VALUE_EXPR (new_var, x);
3227 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3228
3229 /* ??? If VAR is not passed by reference, and the variable
3230 hasn't been initialized yet, then we'll get a warning for
3231 the store into the omp_data_s structure. Ideally, we'd be
3232 able to notice this and not store anything at all, but
3233 we're generating code too early. Suppress the warning. */
3234 if (!by_ref)
3235 TREE_NO_WARNING (var) = 1;
3236 break;
3237
3238 case OMP_CLAUSE_LASTPRIVATE:
3239 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3240 break;
3241 /* FALLTHRU */
3242
3243 case OMP_CLAUSE_PRIVATE:
3244 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
3245 x = build_outer_var_ref (var, ctx);
3246 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
3247 {
3248 if (is_task_ctx (ctx))
3249 x = build_receiver_ref (var, false, ctx);
3250 else
3251 x = build_outer_var_ref (var, ctx);
3252 }
3253 else
3254 x = NULL;
3255 do_private:
3256 tree nx;
3257 nx = lang_hooks.decls.omp_clause_default_ctor (c, new_var, x);
3258 if (is_simd)
3259 {
3260 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
3261 if ((TREE_ADDRESSABLE (new_var) || nx || y
3262 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
3263 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3264 idx, lane, ivar, lvar))
3265 {
3266 if (nx)
3267 x = lang_hooks.decls.omp_clause_default_ctor
3268 (c, unshare_expr (ivar), x);
acf0174b 3269 if (nx && x)
74bf76ed
JJ
3270 gimplify_and_add (x, &llist[0]);
3271 if (y)
3272 {
3273 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
3274 if (y)
3275 {
3276 gimple_seq tseq = NULL;
3277
3278 dtor = y;
3279 gimplify_stmt (&dtor, &tseq);
3280 gimple_seq_add_seq (&llist[1], tseq);
3281 }
3282 }
3283 break;
3284 }
3285 }
3286 if (nx)
3287 gimplify_and_add (nx, ilist);
3288 /* FALLTHRU */
3289
3290 do_dtor:
3291 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
3292 if (x)
3293 {
3294 gimple_seq tseq = NULL;
3295
3296 dtor = x;
3297 gimplify_stmt (&dtor, &tseq);
3298 gimple_seq_add_seq (dlist, tseq);
3299 }
3300 break;
3301
3302 case OMP_CLAUSE_LINEAR:
3303 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
3304 goto do_firstprivate;
3305 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3306 x = NULL;
3307 else
3308 x = build_outer_var_ref (var, ctx);
3309 goto do_private;
3310
3311 case OMP_CLAUSE_FIRSTPRIVATE:
3312 if (is_task_ctx (ctx))
3313 {
3314 if (is_reference (var) || is_variable_sized (var))
3315 goto do_dtor;
3316 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
3317 ctx))
3318 || use_pointer_for_field (var, NULL))
3319 {
3320 x = build_receiver_ref (var, false, ctx);
3321 SET_DECL_VALUE_EXPR (new_var, x);
3322 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3323 goto do_dtor;
3324 }
3325 }
3326 do_firstprivate:
3327 x = build_outer_var_ref (var, ctx);
3328 if (is_simd)
3329 {
3330 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3331 && gimple_omp_for_combined_into_p (ctx->stmt))
3332 {
3333 tree stept = POINTER_TYPE_P (TREE_TYPE (x))
3334 ? sizetype : TREE_TYPE (x);
3335 tree t = fold_convert (stept,
3336 OMP_CLAUSE_LINEAR_STEP (c));
3337 tree c = find_omp_clause (clauses,
3338 OMP_CLAUSE__LOOPTEMP_);
3339 gcc_assert (c);
3340 tree l = OMP_CLAUSE_DECL (c);
3341 if (fd->collapse == 1)
3342 {
3343 tree n1 = fd->loop.n1;
3344 tree step = fd->loop.step;
3345 tree itype = TREE_TYPE (l);
3346 if (POINTER_TYPE_P (itype))
3347 itype = signed_type_for (itype);
3348 l = fold_build2 (MINUS_EXPR, itype, l, n1);
3349 if (TYPE_UNSIGNED (itype)
3350 && fd->loop.cond_code == GT_EXPR)
3351 l = fold_build2 (TRUNC_DIV_EXPR, itype,
3352 fold_build1 (NEGATE_EXPR,
3353 itype, l),
3354 fold_build1 (NEGATE_EXPR,
3355 itype, step));
3356 else
3357 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
3358 }
3359 t = fold_build2 (MULT_EXPR, stept,
3360 fold_convert (stept, l), t);
3361 if (POINTER_TYPE_P (TREE_TYPE (x)))
3362 x = fold_build2 (POINTER_PLUS_EXPR,
3363 TREE_TYPE (x), x, t);
3364 else
3365 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
3366 }
3367
3368 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
3369 || TREE_ADDRESSABLE (new_var))
3370 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3371 idx, lane, ivar, lvar))
3372 {
3373 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
3374 {
3375 tree iv = create_tmp_var (TREE_TYPE (new_var), NULL);
3376 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
3377 gimplify_and_add (x, ilist);
3378 gimple_stmt_iterator gsi
3379 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
3380 gimple g
3381 = gimple_build_assign (unshare_expr (lvar), iv);
3382 gsi_insert_before_without_update (&gsi, g,
3383 GSI_SAME_STMT);
3384 tree stept = POINTER_TYPE_P (TREE_TYPE (x))
3385 ? sizetype : TREE_TYPE (x);
3386 tree t = fold_convert (stept,
3387 OMP_CLAUSE_LINEAR_STEP (c));
3388 enum tree_code code = PLUS_EXPR;
3389 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
3390 code = POINTER_PLUS_EXPR;
3391 g = gimple_build_assign_with_ops (code, iv, iv, t);
3392 gsi_insert_before_without_update (&gsi, g,
3393 GSI_SAME_STMT);
3394 break;
3395 }
3396 x = lang_hooks.decls.omp_clause_copy_ctor
3397 (c, unshare_expr (ivar), x);
3398 gimplify_and_add (x, &llist[0]);
3399 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
3400 if (x)
3401 {
3402 gimple_seq tseq = NULL;
3403
3404 dtor = x;
3405 gimplify_stmt (&dtor, &tseq);
3406 gimple_seq_add_seq (&llist[1], tseq);
3407 }
3408 break;
3409 }
3410 }
3411 x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
3412 gimplify_and_add (x, ilist);
3413 goto do_dtor;
3414
3415 case OMP_CLAUSE__LOOPTEMP_:
3416 gcc_assert (is_parallel_ctx (ctx));
3417 x = build_outer_var_ref (var, ctx);
3418 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3419 gimplify_and_add (x, ilist);
3420 break;
3421
3422 case OMP_CLAUSE_COPYIN:
3423 by_ref = use_pointer_for_field (var, NULL);
3424 x = build_receiver_ref (var, by_ref, ctx);
3425 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
3426 append_to_statement_list (x, &copyin_seq);
3427 copyin_by_ref |= by_ref;
3428 break;
3429
3430 case OMP_CLAUSE_REDUCTION:
3431 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3432 {
a68ab351 3433 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
acf0174b 3434 gimple tseq;
a68ab351
JJ
3435 x = build_outer_var_ref (var, ctx);
3436
acf0174b
JJ
3437 if (is_reference (var)
3438 && !useless_type_conversion_p (TREE_TYPE (placeholder),
3439 TREE_TYPE (x)))
db3927fb 3440 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
3441 SET_DECL_VALUE_EXPR (placeholder, x);
3442 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
acf0174b
JJ
3443 tree new_vard = new_var;
3444 if (is_reference (var))
3445 {
3446 gcc_assert (TREE_CODE (new_var) == MEM_REF);
3447 new_vard = TREE_OPERAND (new_var, 0);
3448 gcc_assert (DECL_P (new_vard));
3449 }
74bf76ed
JJ
3450 if (is_simd
3451 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3452 idx, lane, ivar, lvar))
3453 {
acf0174b
JJ
3454 if (new_vard == new_var)
3455 {
3456 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
3457 SET_DECL_VALUE_EXPR (new_var, ivar);
3458 }
3459 else
3460 {
3461 SET_DECL_VALUE_EXPR (new_vard,
3462 build_fold_addr_expr (ivar));
3463 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
3464 }
3465 x = lang_hooks.decls.omp_clause_default_ctor
3466 (c, unshare_expr (ivar),
3467 build_outer_var_ref (var, ctx));
3468 if (x)
3469 gimplify_and_add (x, &llist[0]);
3470 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3471 {
3472 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3473 lower_omp (&tseq, ctx);
3474 gimple_seq_add_seq (&llist[0], tseq);
3475 }
3476 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3477 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3478 lower_omp (&tseq, ctx);
3479 gimple_seq_add_seq (&llist[1], tseq);
3480 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3481 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3482 if (new_vard == new_var)
3483 SET_DECL_VALUE_EXPR (new_var, lvar);
3484 else
3485 SET_DECL_VALUE_EXPR (new_vard,
3486 build_fold_addr_expr (lvar));
3487 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
3488 if (x)
3489 {
3490 tseq = NULL;
3491 dtor = x;
3492 gimplify_stmt (&dtor, &tseq);
3493 gimple_seq_add_seq (&llist[1], tseq);
3494 }
3495 break;
3496 }
3497 x = lang_hooks.decls.omp_clause_default_ctor
3498 (c, new_var, unshare_expr (x));
3499 if (x)
3500 gimplify_and_add (x, ilist);
3501 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3502 {
3503 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3504 lower_omp (&tseq, ctx);
3505 gimple_seq_add_seq (ilist, tseq);
3506 }
3507 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3508 if (is_simd)
3509 {
3510 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3511 lower_omp (&tseq, ctx);
3512 gimple_seq_add_seq (dlist, tseq);
3513 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3514 }
3515 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3516 goto do_dtor;
3517 }
3518 else
3519 {
3520 x = omp_reduction_init (c, TREE_TYPE (new_var));
3521 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
3522 if (is_simd
3523 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3524 idx, lane, ivar, lvar))
3525 {
3526 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3527 tree ref = build_outer_var_ref (var, ctx);
3528
3529 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
3530
3531 /* reduction(-:var) sums up the partial results, so it
3532 acts identically to reduction(+:var). */
3533 if (code == MINUS_EXPR)
3534 code = PLUS_EXPR;
3535
3536 x = build2 (code, TREE_TYPE (ref), ref, ivar);
74bf76ed
JJ
3537 ref = build_outer_var_ref (var, ctx);
3538 gimplify_assign (ref, x, &llist[1]);
3539 }
3540 else
3541 {
3542 gimplify_assign (new_var, x, ilist);
3543 if (is_simd)
3544 gimplify_assign (build_outer_var_ref (var, ctx),
3545 new_var, dlist);
3546 }
953ff289
DN
3547 }
3548 break;
3549
3550 default:
3551 gcc_unreachable ();
3552 }
3553 }
3554 }
3555
  if (lane)
    {
      tree uid = create_tmp_var (ptr_type_node, "simduid");
      /* Don't want uninit warnings on simduid; it is always uninitialized,
	 but we use it not for the value, only for the DECL_UID.  */
      TREE_NO_WARNING (uid) = 1;
      gimple g
	= gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
      gimple_call_set_lhs (g, lane);
      gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
      gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
      OMP_CLAUSE__SIMDUID__DECL (c) = uid;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
      gimple_omp_for_set_clauses (ctx->stmt, c);
      g = gimple_build_assign_with_ops (INTEGER_CST, lane,
					build_int_cst (unsigned_type_node, 0),
					NULL_TREE);
      gimple_seq_add_stmt (ilist, g);
      for (int i = 0; i < 2; i++)
	if (llist[i])
	  {
	    tree vf = create_tmp_var (unsigned_type_node, NULL);
	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
	    gimple_call_set_lhs (g, vf);
	    gimple_seq *seq = i == 0 ? ilist : dlist;
	    gimple_seq_add_stmt (seq, g);
	    tree t = build_int_cst (unsigned_type_node, 0);
	    g = gimple_build_assign_with_ops (INTEGER_CST, idx, t, NULL_TREE);
	    gimple_seq_add_stmt (seq, g);
	    tree body = create_artificial_label (UNKNOWN_LOCATION);
	    tree header = create_artificial_label (UNKNOWN_LOCATION);
	    tree end = create_artificial_label (UNKNOWN_LOCATION);
	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
	    gimple_seq_add_stmt (seq, gimple_build_label (body));
	    gimple_seq_add_seq (seq, llist[i]);
	    t = build_int_cst (unsigned_type_node, 1);
	    g = gimple_build_assign_with_ops (PLUS_EXPR, idx, idx, t);
	    gimple_seq_add_stmt (seq, g);
	    gimple_seq_add_stmt (seq, gimple_build_label (header));
	    g = gimple_build_cond (LT_EXPR, idx, vf, body, end);
	    gimple_seq_add_stmt (seq, g);
	    gimple_seq_add_stmt (seq, gimple_build_label (end));
	  }
    }

  /* The copyin sequence is not to be executed by the main thread, since
     that would result in self-copies.  Perhaps not visible to scalars,
     but it certainly is to C++ operator=.  */
  if (copyin_seq)
    {
      x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
			   0);
      x = build2 (NE_EXPR, boolean_type_node, x,
		  build_int_cst (TREE_TYPE (x), 0));
      x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
      gimplify_and_add (x, ilist);
    }
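
  /* Concretely, the guard built above has this shape (a sketch, not
     literal output):

	 if (__builtin_omp_get_thread_num () != 0)
	   <copyin assignments>;

     so only non-master threads copy the master's values in.  */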

  /* If any copyin variable is passed by reference, we must ensure the
     master thread doesn't modify it before it is copied over in all
     threads.  Similarly for variables in both firstprivate and
     lastprivate clauses we need to ensure the lastprivate copying
     happens after firstprivate copying in all threads.  And similarly
     for UDRs if initializer expression refers to omp_orig.  */
  if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
    {
      /* Don't add any barrier for #pragma omp simd or
	 #pragma omp distribute.  */
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
	  || gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_FOR)
	gimple_seq_add_stmt (ilist, build_omp_barrier (NULL_TREE));
    }

  /* If max_vf is non-zero, then we can use only a vectorization factor
     up to the max_vf we chose.  So stick it into the safelen clause.  */
  if (max_vf)
    {
      tree c = find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
				OMP_CLAUSE_SAFELEN);
      if (c == NULL_TREE
	  || compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
			       max_vf) == 1)
	{
	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
						       max_vf);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
	  gimple_omp_for_set_clauses (ctx->stmt, c);
	}
    }
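
  /* E.g. (a sketch) with max_vf == 16: a loop with no safelen clause gets
     safelen(16) prepended, and an explicit safelen(64), which exceeds
     max_vf, is likewise capped by prepending safelen(16); an existing
     safelen(4) is already within the limit and is left untouched.  */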
}


/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  */
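
/* For example (a sketch; the variable names are illustrative): for
   #pragma omp for lastprivate(x), PREDICATE tests whether this thread
   executed the sequentially last iteration, and the emitted shape is
   roughly

       if (PREDICATE)
	 x_outer = x_private;

   so the value from the last logical iteration survives the construct.  */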

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  if (predicate)
    {
      gimple stmt;
      tree label_true, arm1, arm2;

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      arm1 = TREE_OPERAND (predicate, 0);
      arm2 = TREE_OPERAND (predicate, 1);
      gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
      gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
      stmt = gimple_build_cond (TREE_CODE (predicate), arm1, arm2,
				label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
    {
      simduid = find_omp_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  new_var = lookup_decl (var, ctx);

	  if (simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node, NULL);
		      gimple g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		}
	    }

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }

	  x = build_outer_var_ref (var, ctx);
	  if (is_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, stmt_list);
	}
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
}


/* Generate code to implement the REDUCTION clauses.  */
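
/* Shape of the output, as a sketch: with a single reduction(+:s) clause
   the merge is emitted as one atomic update,

       #pragma omp atomic
       s_outer += s_private;

   while two or more clauses (or an array/UDR reduction) are merged inside
   a GOMP_atomic_start ()/GOMP_atomic_end () critical region instead.  */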

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple stmt;
  tree x, c;
  int count = 0;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case; otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
	continue;

      var = OMP_CLAUSE_DECL (c);
      new_var = lookup_decl (var, ctx);
      if (is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;
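
      /* E.g. with reduction(-:s) and two threads holding partials p1 and
	 p2 (each accumulated from a private copy starting at 0), the final
	 value s0 - a - b equals s0 + p1 + p2 with p1, p2 already negated,
	 so the partials combine with +, hence PLUS_EXPR.  */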

      if (count == 1)
	{
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}

      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}


/* Generate code to implement the COPYPRIVATE clauses.  */
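
/* Usage sketch: for #pragma omp single copyprivate(x), the thread that
   executed the single region stores x (or &x, when the field is passed by
   reference) into the shared data block via SLIST, and every other thread
   copies it back out of the block via RLIST, broadcasting the value.  */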

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      if (is_reference (var))
	{
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}


/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE__LOOPTEMP_:
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      var = lookup_decl_in_outer_ctx (val, ctx);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var))
	continue;
      if (is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	  do_in = true;
	  do_out = !(by_ref || is_reference (val));
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}

/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive, find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      if (use_pointer_for_field (ovar, ctx))
	{
	  x = build_sender_ref (ovar, ctx);
	  var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
		 or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}


/* A convenience function to build an empty GIMPLE_COND with just the
   condition.  */

static gimple
gimple_build_cond_empty (tree cond)
{
  enum tree_code pred_code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &pred_code, &lhs, &rhs);
  return gimple_build_cond (pred_code, lhs, rhs, NULL_TREE, NULL_TREE);
}


/* Build the function calls to GOMP_parallel_start etc to actually
   generate the parallel operation.  REGION is the parallel region
   being expanded.  BB is the block where to insert the code.  WS_ARGS
   will be set if this is a call to a combined parallel+workshare
   construct; it contains the list of additional arguments needed by
   the workshare construct.  */
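
/* As a sketch, a plain #pragma omp parallel num_threads(4) is expanded
   into roughly

       GOMP_parallel (child_fn, &.omp_data_o, 4, flags);

   while a combined parallel loop with static scheduling instead calls
   GOMP_parallel_loop_static () with the extra WS_ARGS appended before
   FLAGS.  */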

static void
expand_parallel_call (struct omp_region *region, basic_block bb,
		      gimple entry_stmt, vec<tree, va_gc> *ws_args)
{
  tree t, t1, t2, val, cond, c, clauses, flags;
  gimple_stmt_iterator gsi;
  gimple stmt;
  enum built_in_function start_ix;
  int start_ix2;
  location_t clause_loc;
  vec<tree, va_gc> *args;

  clauses = gimple_omp_parallel_clauses (entry_stmt);

  /* Determine what flavor of GOMP_parallel we will be
     emitting.  */
  start_ix = BUILT_IN_GOMP_PARALLEL;
  if (is_combined_parallel (region))
    {
      switch (region->inner->type)
	{
	case GIMPLE_OMP_FOR:
	  gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
	  start_ix2 = ((int)BUILT_IN_GOMP_PARALLEL_LOOP_STATIC
		       + (region->inner->sched_kind
			  == OMP_CLAUSE_SCHEDULE_RUNTIME
			  ? 3 : region->inner->sched_kind));
	  start_ix = (enum built_in_function)start_ix2;
	  break;
	case GIMPLE_OMP_SECTIONS:
	  start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS;
	  break;
	default:
	  gcc_unreachable ();
	}
    }

  /* By default, the value of NUM_THREADS is zero (selected at run time)
     and there is no conditional.  */
  cond = NULL_TREE;
  val = build_int_cst (unsigned_type_node, 0);
  flags = build_int_cst (unsigned_type_node, 0);

  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = OMP_CLAUSE_IF_EXPR (c);

  c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
  if (c)
    {
      val = OMP_CLAUSE_NUM_THREADS_EXPR (c);
      clause_loc = OMP_CLAUSE_LOCATION (c);
    }
  else
    clause_loc = gimple_location (entry_stmt);

  c = find_omp_clause (clauses, OMP_CLAUSE_PROC_BIND);
  if (c)
    flags = build_int_cst (unsigned_type_node, OMP_CLAUSE_PROC_BIND_KIND (c));

  /* Ensure 'val' is of the correct type.  */
  val = fold_convert_loc (clause_loc, unsigned_type_node, val);

  /* If we found the clause 'if (cond)', build either
     (cond != 0) or (cond ? val : 1u).  */
  if (cond)
    {
      gimple_stmt_iterator gsi;

      cond = gimple_boolify (cond);

      if (integer_zerop (val))
	val = fold_build2_loc (clause_loc,
			       EQ_EXPR, unsigned_type_node, cond,
			       build_int_cst (TREE_TYPE (cond), 0));
      else
	{
	  basic_block cond_bb, then_bb, else_bb;
	  edge e, e_then, e_else;
	  tree tmp_then, tmp_else, tmp_join, tmp_var;

	  tmp_var = create_tmp_var (TREE_TYPE (val), NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      tmp_then = make_ssa_name (tmp_var, NULL);
	      tmp_else = make_ssa_name (tmp_var, NULL);
	      tmp_join = make_ssa_name (tmp_var, NULL);
	    }
	  else
	    {
	      tmp_then = tmp_var;
	      tmp_else = tmp_var;
	      tmp_join = tmp_var;
	    }

	  e = split_block (bb, NULL);
	  cond_bb = e->src;
	  bb = e->dest;
	  remove_edge (e);

	  then_bb = create_empty_bb (cond_bb);
	  else_bb = create_empty_bb (then_bb);
	  set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
	  set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);

	  stmt = gimple_build_cond_empty (cond);
	  gsi = gsi_start_bb (cond_bb);
	  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

	  gsi = gsi_start_bb (then_bb);
	  stmt = gimple_build_assign (tmp_then, val);
	  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

	  gsi = gsi_start_bb (else_bb);
	  stmt = gimple_build_assign
	    (tmp_else, build_int_cst (unsigned_type_node, 1));
	  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

	  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
	  make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
	  if (current_loops)
	    {
	      add_bb_to_loop (then_bb, cond_bb->loop_father);
	      add_bb_to_loop (else_bb, cond_bb->loop_father);
	    }
	  e_then = make_edge (then_bb, bb, EDGE_FALLTHRU);
	  e_else = make_edge (else_bb, bb, EDGE_FALLTHRU);

	  if (gimple_in_ssa_p (cfun))
	    {
	      gimple phi = create_phi_node (tmp_join, bb);
	      add_phi_arg (phi, tmp_then, e_then, UNKNOWN_LOCATION);
	      add_phi_arg (phi, tmp_else, e_else, UNKNOWN_LOCATION);
	    }

	  val = tmp_join;
	}

      gsi = gsi_start_bb (bb);
      val = force_gimple_operand_gsi (&gsi, val, true, NULL_TREE,
				      false, GSI_CONTINUE_LINKING);
    }

  gsi = gsi_last_bb (bb);
  t = gimple_omp_parallel_data_arg (entry_stmt);
  if (t == NULL)
    t1 = null_pointer_node;
  else
    t1 = build_fold_addr_expr (t);
  t2 = build_fold_addr_expr (gimple_omp_parallel_child_fn (entry_stmt));

  vec_alloc (args, 4 + vec_safe_length (ws_args));
  args->quick_push (t2);
  args->quick_push (t1);
  args->quick_push (val);
  if (ws_args)
    args->splice (*ws_args);
  args->quick_push (flags);

  t = build_call_expr_loc_vec (UNKNOWN_LOCATION,
			       builtin_decl_explicit (start_ix), args);

  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
			    false, GSI_CONTINUE_LINKING);
}


/* Build the function call to GOMP_task to actually
   generate the task operation.  BB is the block where to insert the code.  */
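
/* The emitted call has this shape (a sketch):

       GOMP_task (child_fn, &.omp_data_o, copy_fn, arg_size, arg_align,
		  if_cond, flags, depend);

   where FLAGS packs the untied (1), mergeable (4) and depend (8) bits,
   plus 2 when a final clause evaluates to true.  */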

static void
expand_task_call (basic_block bb, gimple entry_stmt)
{
  tree t, t1, t2, t3, flags, cond, c, c2, clauses, depend;
  gimple_stmt_iterator gsi;
  location_t loc = gimple_location (entry_stmt);

  clauses = gimple_omp_task_clauses (entry_stmt);

  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = gimple_boolify (OMP_CLAUSE_IF_EXPR (c));
  else
    cond = boolean_true_node;

  c = find_omp_clause (clauses, OMP_CLAUSE_UNTIED);
  c2 = find_omp_clause (clauses, OMP_CLAUSE_MERGEABLE);
  depend = find_omp_clause (clauses, OMP_CLAUSE_DEPEND);
  flags = build_int_cst (unsigned_type_node,
			 (c ? 1 : 0) + (c2 ? 4 : 0) + (depend ? 8 : 0));

  c = find_omp_clause (clauses, OMP_CLAUSE_FINAL);
  if (c)
    {
      c = gimple_boolify (OMP_CLAUSE_FINAL_EXPR (c));
      c = fold_build3_loc (loc, COND_EXPR, unsigned_type_node, c,
			   build_int_cst (unsigned_type_node, 2),
			   build_int_cst (unsigned_type_node, 0));
      flags = fold_build2_loc (loc, PLUS_EXPR, unsigned_type_node, flags, c);
    }
  if (depend)
    depend = OMP_CLAUSE_DECL (depend);
  else
    depend = build_int_cst (ptr_type_node, 0);

  gsi = gsi_last_bb (bb);
  t = gimple_omp_task_data_arg (entry_stmt);
  if (t == NULL)
    t2 = null_pointer_node;
  else
    t2 = build_fold_addr_expr_loc (loc, t);
  t1 = build_fold_addr_expr_loc (loc, gimple_omp_task_child_fn (entry_stmt));
  t = gimple_omp_task_copy_fn (entry_stmt);
  if (t == NULL)
    t3 = null_pointer_node;
  else
    t3 = build_fold_addr_expr_loc (loc, t);

  t = build_call_expr (builtin_decl_explicit (BUILT_IN_GOMP_TASK),
		       8, t1, t2, t3,
		       gimple_omp_task_arg_size (entry_stmt),
		       gimple_omp_task_arg_align (entry_stmt), cond, flags,
		       depend);

  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
			    false, GSI_CONTINUE_LINKING);
}


/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
   catch handler and return it.  This prevents programs from violating the
   structured block semantics with throws.  */
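
/* The result is roughly equivalent to (a C++-flavored sketch):

       try { BODY }
       catch (...) { <terminate via the EH cleanup hook, or __builtin_trap> }

   so an exception can never propagate out of the structured block.  */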

static gimple_seq
maybe_catch_exception (gimple_seq body)
{
  gimple g;
  tree decl;

  if (!flag_exceptions)
    return body;

  if (lang_hooks.eh_protect_cleanup_actions != NULL)
    decl = lang_hooks.eh_protect_cleanup_actions ();
  else
    decl = builtin_decl_explicit (BUILT_IN_TRAP);

  g = gimple_build_eh_must_not_throw (decl);
  g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
			GIMPLE_TRY_CATCH);

  return gimple_seq_alloc_with_stmt (g);
}

/* Chain all the DECLs in V by their TREE_CHAIN fields.  */
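
/* E.g. a vec containing [a, b, c] is returned as the chain a -> b -> c:
   walking the vec in reverse and prepending preserves the original
   order.  */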

static tree
vec2chain (vec<tree, va_gc> *v)
{
  tree chain = NULL_TREE, t;
  unsigned ix;

  FOR_EACH_VEC_SAFE_ELT_REVERSE (v, ix, t)
    {
      DECL_CHAIN (t) = chain;
      chain = t;
    }

  return chain;
}


/* Remove barriers in REGION->EXIT's block.  Note that this is only
   valid for GIMPLE_OMP_PARALLEL regions.  Since the end of a parallel region
   is an implicit barrier, any workshare inside the GIMPLE_OMP_PARALLEL that
   left a barrier at the end of the GIMPLE_OMP_PARALLEL region can now be
   removed.  */
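
/* For instance, in

       #pragma omp parallel
       {
	 #pragma omp for
	 for (...)
	   ...
	 // implicit barrier of the for
       }
       // implicit barrier of the parallel

   the workshare's trailing barrier is immediately followed by the
   parallel's own implicit barrier, so it is redundant and can be dropped
   (unless queued tasks might still need it; see the check below).  */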

static void
remove_exit_barrier (struct omp_region *region)
{
  gimple_stmt_iterator gsi;
  basic_block exit_bb;
  edge_iterator ei;
  edge e;
  gimple stmt;
  int any_addressable_vars = -1;

  exit_bb = region->exit;

  /* If the parallel region doesn't return, we don't have REGION->EXIT
     block at all.  */
  if (! exit_bb)
    return;

  /* The last insn in the block will be the parallel's GIMPLE_OMP_RETURN.  The
     workshare's GIMPLE_OMP_RETURN will be in a preceding block.  The kinds of
     statements that can appear in between are extremely limited -- no
     memory operations at all.  Here, we allow nothing at all, so the
     only thing we allow to precede this GIMPLE_OMP_RETURN is a label.  */
  gsi = gsi_last_bb (exit_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
  gsi_prev (&gsi);
  if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
    return;

  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
	continue;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_OMP_RETURN
	  && !gimple_omp_return_nowait_p (stmt))
	{
	  /* OpenMP 3.0 tasks unfortunately prevent this optimization
	     in many cases.  If there could be tasks queued, the barrier
	     might be needed to let the tasks run before some local
	     variable of the parallel that the task uses as shared
	     runs out of scope.  The task can be spawned either
	     from within the current function (this would be easy to check)
	     or from some function it calls and gets passed an address
	     of such a variable.  */
	  if (any_addressable_vars < 0)
	    {
	      gimple parallel_stmt = last_stmt (region->entry);
	      tree child_fun = gimple_omp_parallel_child_fn (parallel_stmt);
	      tree local_decls, block, decl;
	      unsigned ix;

	      any_addressable_vars = 0;
	      FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (child_fun), ix, decl)
		if (TREE_ADDRESSABLE (decl))
		  {
		    any_addressable_vars = 1;
		    break;
		  }
	      for (block = gimple_block (stmt);
		   !any_addressable_vars
		   && block
		   && TREE_CODE (block) == BLOCK;
		   block = BLOCK_SUPERCONTEXT (block))
		{
		  for (local_decls = BLOCK_VARS (block);
		       local_decls;
		       local_decls = DECL_CHAIN (local_decls))
		    if (TREE_ADDRESSABLE (local_decls))
		      {
			any_addressable_vars = 1;
			break;
		      }
		  if (block == gimple_block (parallel_stmt))
		    break;
		}
	    }
	  if (!any_addressable_vars)
	    gimple_omp_return_set_nowait (stmt);
	}
    }
}

static void
remove_exit_barriers (struct omp_region *region)
{
  if (region->type == GIMPLE_OMP_PARALLEL)
    remove_exit_barrier (region);

  if (region->inner)
    {
      region = region->inner;
      remove_exit_barriers (region);
      while (region->next)
	{
	  region = region->next;
	  remove_exit_barriers (region);
	}
    }
}

/* Optimize omp_get_thread_num () and omp_get_num_threads ()
   calls.  These can't be declared as const functions, but
   within one parallel body they are constant, so they can be
   transformed there into __builtin_omp_get_{thread_num,num_threads} ()
   which are declared const.  Similarly for task body, except
   that in untied task omp_get_thread_num () can change at any task
   scheduling point.  */
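
/* E.g. inside one parallel body

       int a = omp_get_thread_num ();
       ...
       int b = omp_get_thread_num ();

   both calls return the same value, so once rewritten to the const
   builtin the second call can be CSEd away by later passes.  */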

static void
optimize_omp_library_calls (gimple entry_stmt)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  tree thr_num_tree = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  tree thr_num_id = DECL_ASSEMBLER_NAME (thr_num_tree);
  tree num_thr_tree = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
  tree num_thr_id = DECL_ASSEMBLER_NAME (num_thr_tree);
  bool untied_task = (gimple_code (entry_stmt) == GIMPLE_OMP_TASK
		      && find_omp_clause (gimple_omp_task_clauses (entry_stmt),
					  OMP_CLAUSE_UNTIED) != NULL);

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple call = gsi_stmt (gsi);
	tree decl;

	if (is_gimple_call (call)
	    && (decl = gimple_call_fndecl (call))
	    && DECL_EXTERNAL (decl)
	    && TREE_PUBLIC (decl)
	    && DECL_INITIAL (decl) == NULL)
	  {
	    tree built_in;

	    if (DECL_NAME (decl) == thr_num_id)
	      {
		/* In #pragma omp task untied omp_get_thread_num () can change
		   during the execution of the task region.  */
		if (untied_task)
		  continue;
		built_in = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
	      }
	    else if (DECL_NAME (decl) == num_thr_id)
	      built_in = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
	    else
	      continue;

	    if (DECL_ASSEMBLER_NAME (decl) != DECL_ASSEMBLER_NAME (built_in)
		|| gimple_call_num_args (call) != 0)
	      continue;

	    if (flag_exceptions && !TREE_NOTHROW (decl))
	      continue;

	    if (TREE_CODE (TREE_TYPE (decl)) != FUNCTION_TYPE
		|| !types_compatible_p (TREE_TYPE (TREE_TYPE (decl)),
					TREE_TYPE (TREE_TYPE (built_in))))
	      continue;

	    gimple_call_set_fndecl (call, built_in);
	  }
      }
}

/* Callback for expand_omp_build_assign.  Return non-NULL if *tp needs to be
   regimplified.  */

static tree
expand_omp_regimplify_p (tree *tp, int *walk_subtrees, void *)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
  return NULL_TREE;
}

/* Prepend TO = FROM assignment before *GSI_P.  */

static void
expand_omp_build_assign (gimple_stmt_iterator *gsi_p, tree to, tree from)
{
  bool simple_p = DECL_P (to) && TREE_ADDRESSABLE (to);
  from = force_gimple_operand_gsi (gsi_p, from, simple_p, NULL_TREE,
				   true, GSI_SAME_STMT);
  gimple stmt = gimple_build_assign (to, from);
  gsi_insert_before (gsi_p, stmt, GSI_SAME_STMT);
  if (walk_tree (&from, expand_omp_regimplify_p, NULL, NULL)
      || walk_tree (&to, expand_omp_regimplify_p, NULL, NULL))
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gimple_regimplify_operands (stmt, &gsi);
    }
}

/* Expand the OpenMP parallel or task directive starting at REGION.  */

static void
expand_omp_taskreg (struct omp_region *region)
{
  basic_block entry_bb, exit_bb, new_bb;
  struct function *child_cfun;
  tree child_fn, block, t;
  gimple_stmt_iterator gsi;
  gimple entry_stmt, stmt;
  edge e;
  vec<tree, va_gc> *ws_args;

  entry_stmt = last_stmt (region->entry);
  child_fn = gimple_omp_taskreg_child_fn (entry_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  entry_bb = region->entry;
  exit_bb = region->exit;

  if (is_combined_parallel (region))
    ws_args = region->ws_args;
  else
    ws_args = NULL;

  if (child_cfun->cfg)
    {
      /* Due to inlining, it may happen that we have already outlined
	 the region, in which case all we need to do is make the
	 sub-graph unreachable and emit the parallel call.  */
      edge entry_succ_e, exit_succ_e;
      gimple_stmt_iterator gsi;

      entry_succ_e = single_succ_edge (entry_bb);

      gsi = gsi_last_bb (entry_bb);
      gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_PARALLEL
		  || gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_TASK);
      gsi_remove (&gsi, true);

      new_bb = entry_bb;
      if (exit_bb)
	{
	  exit_succ_e = single_succ_edge (exit_bb);
	  make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
	}
      remove_edge_and_dominated_blocks (entry_succ_e);
    }
  else
    {
      unsigned srcidx, dstidx, num;

      /* If the parallel region needs data sent from the parent
	 function, then the very first statement (except possible
	 tree profile counter updates) of the parallel body
	 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O.  Since
	 &.OMP_DATA_O is passed as an argument to the child function,
	 we need to replace it with the argument as seen by the child
	 function.

	 In most cases, this will end up being the identity assignment
	 .OMP_DATA_I = .OMP_DATA_I.  However, if the parallel body had
	 a function call that has been inlined, the original PARM_DECL
	 .OMP_DATA_I may have been converted into a different local
	 variable.  In which case, we need to keep the assignment.  */
      if (gimple_omp_taskreg_data_arg (entry_stmt))
	{
	  basic_block entry_succ_bb = single_succ (entry_bb);
	  gimple_stmt_iterator gsi;
	  tree arg, narg;
	  gimple parcopy_stmt = NULL;

	  for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
	    {
	      gimple stmt;

	      gcc_assert (!gsi_end_p (gsi));
	      stmt = gsi_stmt (gsi);
	      if (gimple_code (stmt) != GIMPLE_ASSIGN)
		continue;

	      if (gimple_num_ops (stmt) == 2)
		{
		  tree arg = gimple_assign_rhs1 (stmt);

		  /* We're ignoring the subcode because we're
		     effectively doing a STRIP_NOPS.  */

		  if (TREE_CODE (arg) == ADDR_EXPR
		      && TREE_OPERAND (arg, 0)
			 == gimple_omp_taskreg_data_arg (entry_stmt))
		    {
		      parcopy_stmt = stmt;
		      break;
		    }
		}
	    }

	  gcc_assert (parcopy_stmt != NULL);
	  arg = DECL_ARGUMENTS (child_fn);

	  if (!gimple_in_ssa_p (cfun))
	    {
	      if (gimple_assign_lhs (parcopy_stmt) == arg)
		gsi_remove (&gsi, true);
	      else
		{
		  /* ?? Is setting the subcode really necessary ??  */
		  gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (arg));
		  gimple_assign_set_rhs1 (parcopy_stmt, arg);
		}
	    }
	  else
	    {
	      /* If we are in ssa form, we must load the value from the default
		 definition of the argument.  That should not be defined now,
		 since the argument is not used uninitialized.  */
	      gcc_assert (ssa_default_def (cfun, arg) == NULL);
	      narg = make_ssa_name (arg, gimple_build_nop ());
	      set_ssa_default_def (cfun, arg, narg);
	      /* ?? Is setting the subcode really necessary ??  */
	      gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (narg));
	      gimple_assign_set_rhs1 (parcopy_stmt, narg);
	      update_stmt (parcopy_stmt);
	    }
	}

      /* Declare local variables needed in CHILD_CFUN.  */
      block = DECL_INITIAL (child_fn);
      BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
      /* The gimplifier could record temporaries in parallel/task block
	 rather than in containing function's local_decls chain,
	 which would mean cgraph missed finalizing them.  Do it now.  */
      for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == VAR_DECL
	    && TREE_STATIC (t)
	    && !DECL_EXTERNAL (t))
	  varpool_finalize_decl (t);
      DECL_SAVED_TREE (child_fn) = NULL;
      /* We'll create a CFG for child_fn, so no gimple body is needed.  */
      gimple_set_body (child_fn, NULL);
      TREE_USED (block) = 1;

      /* Reset DECL_CONTEXT on function arguments.  */
      for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
	DECL_CONTEXT (t) = child_fn;

      /* Split ENTRY_BB at GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK,
	 so that it can be moved to the child function.  */
      gsi = gsi_last_bb (entry_bb);
      stmt = gsi_stmt (gsi);
      gcc_assert (stmt && (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
			   || gimple_code (stmt) == GIMPLE_OMP_TASK));
      gsi_remove (&gsi, true);
      e = split_block (entry_bb, stmt);
      entry_bb = e->dest;
      single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

      /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR.  */
      if (exit_bb)
	{
	  gsi = gsi_last_bb (exit_bb);
	  gcc_assert (!gsi_end_p (gsi)
		      && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
	  stmt = gimple_build_return (NULL);
	  gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
	  gsi_remove (&gsi, true);
	}

      /* Move the parallel region into CHILD_CFUN.  */

      if (gimple_in_ssa_p (cfun))
	{
	  init_tree_ssa (child_cfun);
	  init_ssa_operands (child_cfun);
	  child_cfun->gimple_df->in_ssa_p = true;
	  block = NULL_TREE;
	}
      else
	block = gimple_block (entry_stmt);

      new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
      if (exit_bb)
	single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
      /* When the OMP expansion process cannot guarantee an up-to-date
	 loop tree arrange for the child function to fixup loops.  */
      if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
	child_cfun->x_current_loops->state |= LOOPS_NEED_FIXUP;

      /* Remove non-local VAR_DECLs from child_cfun->local_decls list.  */
      num = vec_safe_length (child_cfun->local_decls);
      for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
	{
	  t = (*child_cfun->local_decls)[srcidx];
	  if (DECL_CONTEXT (t) == cfun->decl)
	    continue;
	  if (srcidx != dstidx)
	    (*child_cfun->local_decls)[dstidx] = t;
	  dstidx++;
	}
      if (dstidx != num)
	vec_safe_truncate (child_cfun->local_decls, dstidx);

      /* Inform the callgraph about the new function.  */
      DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
      cgraph_add_new_function (child_fn, true);

      /* Fix the callgraph edges for child_cfun.  Those for cfun will be
	 fixed in a following pass.  */
      push_cfun (child_cfun);
      if (optimize)
	optimize_omp_library_calls (entry_stmt);
      rebuild_cgraph_edges ();

      /* Some EH regions might become dead, see PR34608.  If
	 pass_cleanup_cfg isn't the first pass to happen with the
	 new child, these dead EH edges might cause problems.
	 Clean them up now.  */
      if (flag_exceptions)
	{
	  basic_block bb;
	  bool changed = false;

	  FOR_EACH_BB (bb)
	    changed |= gimple_purge_dead_eh_edges (bb);
	  if (changed)
	    cleanup_tree_cfg ();
	}
      if (gimple_in_ssa_p (cfun))
	update_ssa (TODO_update_ssa);
      pop_cfun ();
    }

  /* Emit a library call to launch the children threads.  */
  if (gimple_code (entry_stmt) == GIMPLE_OMP_PARALLEL)
    expand_parallel_call (region, new_bb, entry_stmt, ws_args);
  else
    expand_task_call (new_bb, entry_stmt);
  if (gimple_in_ssa_p (cfun))
    update_ssa (TODO_update_ssa_only_virtuals);
}


/* Helper function for expand_omp_{for_*,simd}.  If this is the outermost
   of the combined collapse > 1 loop constructs, generate code like:
	if (__builtin_expect (N32 cond3 N31, 0)) goto ZERO_ITER_BB;
	if (cond3 is <)
	  adj = STEP3 - 1;
	else
	  adj = STEP3 + 1;
	count3 = (adj + N32 - N31) / STEP3;
	if (__builtin_expect (N22 cond2 N21, 0)) goto ZERO_ITER_BB;
	if (cond2 is <)
	  adj = STEP2 - 1;
	else
	  adj = STEP2 + 1;
	count2 = (adj + N22 - N21) / STEP2;
	if (__builtin_expect (N12 cond1 N11, 0)) goto ZERO_ITER_BB;
	if (cond1 is <)
	  adj = STEP1 - 1;
	else
	  adj = STEP1 + 1;
	count1 = (adj + N12 - N11) / STEP1;
	count = count1 * count2 * count3;
   Furthermore, if ZERO_ITER_BB is NULL, create a BB which does:
	count = 0;
   and set ZERO_ITER_BB to that bb.  If this isn't the outermost
   of the combined loop constructs, just initialize COUNTS array
   from the _looptemp_ clauses.  */
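
/* Worked example (a sketch): for the combined loops

       for (i = 0; i < 10; i += 3)
	 for (j = 0; j < 5; j++)

   both conditions are <, so adj1 = 3 - 1 = 2 and adj2 = 1 - 1 = 0, giving
   count1 = (2 + 10 - 0) / 3 = 4 and count2 = (0 + 5 - 0) / 1 = 5, i.e. a
   collapsed iteration space of count = 4 * 5 = 20 logical iterations.  */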

/* NOTE: It *could* be better to moosh all of the BBs together,
   creating one larger BB with all the computation and the unexpected
   jump at the end.  I.e.

   bool zero3, zero2, zero1, zero;

   zero3 = N32 c3 N31;
   count3 = (N32 - N31) /[cl] STEP3;
   zero2 = N22 c2 N21;
   count2 = (N22 - N21) /[cl] STEP2;
   zero1 = N12 c1 N11;
   count1 = (N12 - N11) /[cl] STEP1;
   zero = zero3 || zero2 || zero1;
   count = count1 * count2 * count3;
   if (__builtin_expect(zero, false)) goto zero_iter_bb;

   After all, we expect zero to be false, and thus we expect to have to
   evaluate all of the comparison expressions, so short-circuiting
   oughtn't be a win.  Since the condition isn't protecting a
   denominator, we're not concerned about divide-by-zero, so we can
   fully evaluate count even if a numerator turned out to be wrong.

   It seems like putting this all together would create much better
   scheduling opportunities, and less pressure on the chip's branch
   predictor.  */

static void
expand_omp_for_init_counts (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
			    basic_block &entry_bb, tree *counts,
			    basic_block &zero_iter_bb, int &first_zero_iter,
			    basic_block &l2_dom_bb)
{
  tree t, type = TREE_TYPE (fd->loop.v);
  gimple stmt;
  edge e, ne;
  int i;

  /* Collapsed loops need work for expansion into SSA form.  */
  gcc_assert (!gimple_in_ssa_p (cfun));

  if (gimple_omp_for_combined_into_p (fd->for_stmt)
      && TREE_CODE (fd->loop.n2) != INTEGER_CST)
    {
      /* First two _looptemp_ clauses are for istart/iend, counts[0]
	 isn't supposed to be handled, as the inner loop doesn't
	 use it.  */
      tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
				     OMP_CLAUSE__LOOPTEMP_);
      gcc_assert (innerc);
      for (i = 0; i < fd->collapse; i++)
	{
	  innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
				    OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  if (i)
	    counts[i] = OMP_CLAUSE_DECL (innerc);
	  else
	    counts[0] = NULL_TREE;
	}
      return;
    }

  for (i = 0; i < fd->collapse; i++)
    {
      tree itype = TREE_TYPE (fd->loops[i].v);

      if (SSA_VAR_P (fd->loop.n2)
	  && ((t = fold_binary (fd->loops[i].cond_code, boolean_type_node,
				fold_convert (itype, fd->loops[i].n1),
				fold_convert (itype, fd->loops[i].n2)))
	      == NULL_TREE || !integer_onep (t)))
	{
	  tree n1, n2;
	  n1 = fold_convert (itype, unshare_expr (fd->loops[i].n1));
	  n1 = force_gimple_operand_gsi (gsi, n1, true, NULL_TREE,
					 true, GSI_SAME_STMT);
	  n2 = fold_convert (itype, unshare_expr (fd->loops[i].n2));
	  n2 = force_gimple_operand_gsi (gsi, n2, true, NULL_TREE,
					 true, GSI_SAME_STMT);
	  stmt = gimple_build_cond (fd->loops[i].cond_code, n1, n2,
				    NULL_TREE, NULL_TREE);
	  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  if (walk_tree (gimple_cond_lhs_ptr (stmt),
			 expand_omp_regimplify_p, NULL, NULL)
	      || walk_tree (gimple_cond_rhs_ptr (stmt),
			    expand_omp_regimplify_p, NULL, NULL))
	    {
	      *gsi = gsi_for_stmt (stmt);
	      gimple_regimplify_operands (stmt, gsi);
	    }
	  e = split_block (entry_bb, stmt);
	  if (zero_iter_bb == NULL)
	    {
	      first_zero_iter = i;
	      zero_iter_bb = create_empty_bb (entry_bb);
	      if (current_loops)
		add_bb_to_loop (zero_iter_bb, entry_bb->loop_father);
	      *gsi = gsi_after_labels (zero_iter_bb);
	      stmt = gimple_build_assign (fd->loop.n2,
					  build_zero_cst (type));
	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	      set_immediate_dominator (CDI_DOMINATORS, zero_iter_bb,
				       entry_bb);
	    }
	  ne = make_edge (entry_bb, zero_iter_bb, EDGE_FALSE_VALUE);
	  ne->probability = REG_BR_PROB_BASE / 2000 - 1;
	  e->flags = EDGE_TRUE_VALUE;
	  e->probability = REG_BR_PROB_BASE - ne->probability;
	  if (l2_dom_bb == NULL)
	    l2_dom_bb = entry_bb;
	  entry_bb = e->dest;
	  *gsi = gsi_last_bb (entry_bb);
	}

      if (POINTER_TYPE_P (itype))
	itype = signed_type_for (itype);
      t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
				 ? -1 : 1));
      t = fold_build2 (PLUS_EXPR, itype,
		       fold_convert (itype, fd->loops[i].step), t);
      t = fold_build2 (PLUS_EXPR, itype, t,
		       fold_convert (itype, fd->loops[i].n2));
      t = fold_build2 (MINUS_EXPR, itype, t,
		       fold_convert (itype, fd->loops[i].n1));
      /* ?? We could probably use CEIL_DIV_EXPR instead of
	 TRUNC_DIV_EXPR and adjusting by hand.  Unless we can't
	 generate the same code in the end because generically we
	 don't know that the values involved must be negative for
	 GT??  */
      if (TYPE_UNSIGNED (itype) && fd->loops[i].cond_code == GT_EXPR)
	t = fold_build2 (TRUNC_DIV_EXPR, itype,
			 fold_build1 (NEGATE_EXPR, itype, t),
			 fold_build1 (NEGATE_EXPR, itype,
				      fold_convert (itype,
						    fd->loops[i].step)));
      else
	t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
			 fold_convert (itype, fd->loops[i].step));
      t = fold_convert (type, t);
      if (TREE_CODE (t) == INTEGER_CST)
	counts[i] = t;
      else
	{
	  counts[i] = create_tmp_reg (type, ".count");
	  expand_omp_build_assign (gsi, counts[i], t);
	}
      if (SSA_VAR_P (fd->loop.n2))
	{
	  if (i == 0)
	    t = counts[0];
	  else
	    t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
	  expand_omp_build_assign (gsi, fd->loop.n2, t);
	}
    }
}


/* Helper function for expand_omp_{for_*,simd}.  Generate code like:
	T = V;
	V3 = N31 + (T % count3) * STEP3;
	T = T / count3;
	V2 = N21 + (T % count2) * STEP2;
	T = T / count2;
	V1 = N11 + T * STEP1;
   if this loop doesn't have an inner loop construct combined with it.
   If it does have an inner loop construct combined with it and the
   iteration count isn't known constant, store values from counts array
   into its _looptemp_ temporaries instead.  */
5022
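/* An illustration of the decomposition above, with hypothetical
   counts (not taken from any testcase): for collapse(3) with
   count3 == 5 and count2 == 3, the flattened index T == 22 gives
   V3 the index 22 % 5 == 2, then T = 22 / 5 == 4 gives V2 the
   index 4 % 3 == 1, then T = 4 / 3 == 1 gives V1 the index 1;
   and indeed 1 * (3 * 5) + 1 * 5 + 2 == 22.  */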
5023static void
5024expand_omp_for_init_vars (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
5025 tree *counts, gimple inner_stmt, tree startvar)
5026{
5027 int i;
5028 if (gimple_omp_for_combined_p (fd->for_stmt))
5029 {
5030 /* If fd->loop.n2 is constant, then no propagation of the counts
5031 is needed; they are constant. */
5032 if (TREE_CODE (fd->loop.n2) == INTEGER_CST)
5033 return;
5034
5035 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
5036 ? gimple_omp_parallel_clauses (inner_stmt)
5037 : gimple_omp_for_clauses (inner_stmt);
5038 /* First two _looptemp_ clauses are for istart/iend, counts[0]
5039 isn't supposed to be handled, as the inner loop doesn't
5040 use it. */
5041 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
5042 gcc_assert (innerc);
5043 for (i = 0; i < fd->collapse; i++)
5044 {
5045 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5046 OMP_CLAUSE__LOOPTEMP_);
5047 gcc_assert (innerc);
5048 if (i)
5049 {
5050 tree tem = OMP_CLAUSE_DECL (innerc);
5051 tree t = fold_convert (TREE_TYPE (tem), counts[i]);
5052 t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
5053 false, GSI_CONTINUE_LINKING);
5054 gimple stmt = gimple_build_assign (tem, t);
5055 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5056 }
5057 }
5058 return;
5059 }
5060
5061 tree type = TREE_TYPE (fd->loop.v);
5062 tree tem = create_tmp_reg (type, ".tem");
5063 gimple stmt = gimple_build_assign (tem, startvar);
5064 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5065
5066 for (i = fd->collapse - 1; i >= 0; i--)
5067 {
5068 tree vtype = TREE_TYPE (fd->loops[i].v), itype, t;
5069 itype = vtype;
5070 if (POINTER_TYPE_P (vtype))
5071 itype = signed_type_for (vtype);
5072 if (i != 0)
5073 t = fold_build2 (TRUNC_MOD_EXPR, type, tem, counts[i]);
5074 else
5075 t = tem;
5076 t = fold_convert (itype, t);
5077 t = fold_build2 (MULT_EXPR, itype, t,
5078 fold_convert (itype, fd->loops[i].step));
5079 if (POINTER_TYPE_P (vtype))
5080 t = fold_build_pointer_plus (fd->loops[i].n1, t);
5081 else
5082 t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
5083 t = force_gimple_operand_gsi (gsi, t,
5084 DECL_P (fd->loops[i].v)
5085 && TREE_ADDRESSABLE (fd->loops[i].v),
5086 NULL_TREE, false,
5087 GSI_CONTINUE_LINKING);
5088 stmt = gimple_build_assign (fd->loops[i].v, t);
5089 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5090 if (i != 0)
5091 {
5092 t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
5093 t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
5094 false, GSI_CONTINUE_LINKING);
5095 stmt = gimple_build_assign (tem, t);
5096 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5097 }
5098 }
5099}
5100
5101
5102/* Helper function for expand_omp_for_*. Generate code like:
5103 L10:
5104 V3 += STEP3;
5105 if (V3 cond3 N32) goto BODY_BB; else goto L11;
5106 L11:
5107 V3 = N31;
5108 V2 += STEP2;
5109 if (V2 cond2 N22) goto BODY_BB; else goto L12;
5110 L12:
5111 V2 = N21;
5112 V1 += STEP1;
5113 goto BODY_BB; */
5114
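/* This behaves like an odometer read from the innermost loop
   outwards.  With hypothetical bounds N21 = N31 = 0, N22 = N32 = 2
   and unit steps, the generated cascade visits (V2, V3) in the
   order (0,0) (0,1) (1,0) (1,1): V3 wraps back to N31 exactly on
   the iterations where V2 is stepped.  */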
5115static basic_block
5116extract_omp_for_update_vars (struct omp_for_data *fd, basic_block cont_bb,
5117 basic_block body_bb)
5118{
5119 basic_block last_bb, bb, collapse_bb = NULL;
5120 int i;
5121 gimple_stmt_iterator gsi;
5122 edge e;
5123 tree t;
5124 gimple stmt;
5125
5126 last_bb = cont_bb;
5127 for (i = fd->collapse - 1; i >= 0; i--)
5128 {
5129 tree vtype = TREE_TYPE (fd->loops[i].v);
5130
5131 bb = create_empty_bb (last_bb);
5132 if (current_loops)
5133 add_bb_to_loop (bb, last_bb->loop_father);
5134 gsi = gsi_start_bb (bb);
5135
5136 if (i < fd->collapse - 1)
5137 {
5138 e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
5139 e->probability = REG_BR_PROB_BASE / 8;
5140
5141 t = fd->loops[i + 1].n1;
5142 t = force_gimple_operand_gsi (&gsi, t,
5143 DECL_P (fd->loops[i + 1].v)
5144 && TREE_ADDRESSABLE (fd->loops[i
5145 + 1].v),
5146 NULL_TREE, false,
5147 GSI_CONTINUE_LINKING);
5148 stmt = gimple_build_assign (fd->loops[i + 1].v, t);
5149 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5150 }
5151 else
5152 collapse_bb = bb;
5153
5154 set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);
5155
5156 if (POINTER_TYPE_P (vtype))
5157 t = fold_build_pointer_plus (fd->loops[i].v, fd->loops[i].step);
5158 else
5159 t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v, fd->loops[i].step);
5160 t = force_gimple_operand_gsi (&gsi, t,
5161 DECL_P (fd->loops[i].v)
5162 && TREE_ADDRESSABLE (fd->loops[i].v),
5163 NULL_TREE, false, GSI_CONTINUE_LINKING);
5164 stmt = gimple_build_assign (fd->loops[i].v, t);
5165 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5166
5167 if (i > 0)
5168 {
5169 t = fd->loops[i].n2;
5170 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5171 false, GSI_CONTINUE_LINKING);
5172 tree v = fd->loops[i].v;
5173 if (DECL_P (v) && TREE_ADDRESSABLE (v))
5174 v = force_gimple_operand_gsi (&gsi, v, true, NULL_TREE,
5175 false, GSI_CONTINUE_LINKING);
5176 t = fold_build2 (fd->loops[i].cond_code, boolean_type_node, v, t);
5177 stmt = gimple_build_cond_empty (t);
5178 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5179 e = make_edge (bb, body_bb, EDGE_TRUE_VALUE);
5180 e->probability = REG_BR_PROB_BASE * 7 / 8;
5181 }
5182 else
5183 make_edge (bb, body_bb, EDGE_FALLTHRU);
5184 last_bb = bb;
5185 }
5186
5187 return collapse_bb;
5188}
5189
5190
5191 /* A subroutine of expand_omp_for. Generate code for a parallel
5192 loop with any schedule. Given parameters:
5193
5194 for (V = N1; V cond N2; V += STEP) BODY;
5195
5196 where COND is "<" or ">", we generate pseudocode
5197
5198 more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
5199 if (more) goto L0; else goto L3;
5200 L0:
5201 V = istart0;
5202 iend = iend0;
5203 L1:
5204 BODY;
5205 V += STEP;
5206 if (V cond iend) goto L1; else goto L2;
5207 L2:
5208 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
5209 L3:
5210
5211 If this is a combined omp parallel loop, instead of the call to
5212 GOMP_loop_foo_start, we call GOMP_loop_foo_next.
5213 If this is gimple_omp_for_combined_p loop, then instead of assigning
5214 V and iend in L0 we assign the first two _looptemp_ clause decls of the
5215 inner GIMPLE_OMP_FOR and V += STEP; and
5216 if (V cond iend) goto L1; else goto L2; are removed.
5217
5218 For collapsed loops, given parameters:
5219 collapse(3)
5220 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
5221 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
5222 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
5223 BODY;
5224
5225 we generate pseudocode
5226
5227 if (__builtin_expect (N32 cond3 N31, 0)) goto Z0;
5228 if (cond3 is <)
5229 adj = STEP3 - 1;
5230 else
5231 adj = STEP3 + 1;
5232 count3 = (adj + N32 - N31) / STEP3;
5233 if (__builtin_expect (N22 cond2 N21, 0)) goto Z0;
5234 if (cond2 is <)
5235 adj = STEP2 - 1;
5236 else
5237 adj = STEP2 + 1;
5238 count2 = (adj + N22 - N21) / STEP2;
5239 if (__builtin_expect (N12 cond1 N11, 0)) goto Z0;
5240 if (cond1 is <)
5241 adj = STEP1 - 1;
5242 else
5243 adj = STEP1 + 1;
5244 count1 = (adj + N12 - N11) / STEP1;
5245 count = count1 * count2 * count3;
5246 goto Z1;
5247 Z0:
5248 count = 0;
5249 Z1:
5250 more = GOMP_loop_foo_start (0, count, 1, CHUNK, &istart0, &iend0);
5251 if (more) goto L0; else goto L3;
5252 L0:
5253 V = istart0;
5254 T = V;
5255 V3 = N31 + (T % count3) * STEP3;
5256 T = T / count3;
5257 V2 = N21 + (T % count2) * STEP2;
5258 T = T / count2;
5259 V1 = N11 + T * STEP1;
5260 iend = iend0;
5261 L1:
5262 BODY;
5263 V += 1;
5264 if (V < iend) goto L10; else goto L2;
5265 L10:
5266 V3 += STEP3;
5267 if (V3 cond3 N32) goto L1; else goto L11;
5268 L11:
5269 V3 = N31;
5270 V2 += STEP2;
5271 if (V2 cond2 N22) goto L1; else goto L12;
5272 L12:
5273 V2 = N21;
5274 V1 += STEP1;
5275 goto L1;
5276 L2:
5277 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
5278 L3:
5279
5280 */
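/* As a concrete sketch (assuming the caller, expand_omp_for, picked
   the dynamic flavor of START_FN and NEXT_FN), a loop such as

     #pragma omp for schedule(dynamic, 4)
     for (i = 0; i < n; i++) body (i);

   ends up behaving roughly like

     if (GOMP_loop_dynamic_start (0, n, 1, 4, &istart0, &iend0))
       do
         for (i = istart0; i < iend0; i++) body (i);
       while (GOMP_loop_dynamic_next (&istart0, &iend0));
     GOMP_loop_end ();  */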
5281
5282 static void
5283expand_omp_for_generic (struct omp_region *region,
5284 struct omp_for_data *fd,
5285 enum built_in_function start_fn,
5286 enum built_in_function next_fn,
5287 gimple inner_stmt)
5288 {
5289 tree type, istart0, iend0, iend;
5290 tree t, vmain, vback, bias = NULL_TREE;
5291 basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
5292 basic_block l2_bb = NULL, l3_bb = NULL;
5293 gimple_stmt_iterator gsi;
5294 gimple stmt;
5295 bool in_combined_parallel = is_combined_parallel (region);
5296 bool broken_loop = region->cont == NULL;
5297 edge e, ne;
5298 tree *counts = NULL;
5299 int i;
5300
5301 gcc_assert (!broken_loop || !in_combined_parallel);
5302 gcc_assert (fd->iter_type == long_integer_type_node
5303 || !in_combined_parallel);
5304
5305 type = TREE_TYPE (fd->loop.v);
5306 istart0 = create_tmp_var (fd->iter_type, ".istart0");
5307 iend0 = create_tmp_var (fd->iter_type, ".iend0");
5308 TREE_ADDRESSABLE (istart0) = 1;
5309 TREE_ADDRESSABLE (iend0) = 1;
5310
5311 /* See if we need to bias by LLONG_MIN. */
5312 if (fd->iter_type == long_long_unsigned_type_node
5313 && TREE_CODE (type) == INTEGER_TYPE
5314 && !TYPE_UNSIGNED (type))
5315 {
5316 tree n1, n2;
5317
5318 if (fd->loop.cond_code == LT_EXPR)
5319 {
5320 n1 = fd->loop.n1;
5321 n2 = fold_build2 (PLUS_EXPR, type, fd->loop.n2, fd->loop.step);
5322 }
5323 else
5324 {
5325 n1 = fold_build2 (MINUS_EXPR, type, fd->loop.n2, fd->loop.step);
5326 n2 = fd->loop.n1;
5327 }
5328 if (TREE_CODE (n1) != INTEGER_CST
5329 || TREE_CODE (n2) != INTEGER_CST
5330 || ((tree_int_cst_sgn (n1) < 0) ^ (tree_int_cst_sgn (n2) < 0)))
5331 bias = fold_convert (fd->iter_type, TYPE_MIN_VALUE (type));
5332 }
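/* Illustration of the bias (hypothetical values): for signed long long
   bounds -10 and 10 iterated in an unsigned long long iter_type,
   adding the bias 2^63 yields 2^63 - 10 and 2^63 + 10, so unsigned
   comparisons in the runtime preserve the original signed order.  */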
5333
5334 entry_bb = region->entry;
5335 cont_bb = region->cont;
5336 collapse_bb = NULL;
5337 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
5338 gcc_assert (broken_loop
5339 || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
5340 l0_bb = split_edge (FALLTHRU_EDGE (entry_bb));
5341 l1_bb = single_succ (l0_bb);
5342 if (!broken_loop)
5343 {
5344 l2_bb = create_empty_bb (cont_bb);
5345 gcc_assert (BRANCH_EDGE (cont_bb)->dest == l1_bb);
5346 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
5347 }
5348 else
5349 l2_bb = NULL;
5350 l3_bb = BRANCH_EDGE (entry_bb)->dest;
5351 exit_bb = region->exit;
5352
5353 gsi = gsi_last_bb (entry_bb);
5354
5355 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
5356 if (fd->collapse > 1)
5357 {
5358 int first_zero_iter = -1;
5359 basic_block zero_iter_bb = NULL, l2_dom_bb = NULL;
5360
5361 counts = XALLOCAVEC (tree, fd->collapse);
5362 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
5363 zero_iter_bb, first_zero_iter,
5364 l2_dom_bb);
5365
5366 if (zero_iter_bb)
5367 {
5368 /* Some counts[i] vars might be uninitialized if
5369 some loop has zero iterations. But the body shouldn't
5370 be executed in that case, so just avoid uninit warnings. */
5371 for (i = first_zero_iter; i < fd->collapse; i++)
5372 if (SSA_VAR_P (counts[i]))
5373 TREE_NO_WARNING (counts[i]) = 1;
5374 gsi_prev (&gsi);
5375 e = split_block (entry_bb, gsi_stmt (gsi));
5376 entry_bb = e->dest;
5377 make_edge (zero_iter_bb, entry_bb, EDGE_FALLTHRU);
5378 gsi = gsi_last_bb (entry_bb);
5379 set_immediate_dominator (CDI_DOMINATORS, entry_bb,
5380 get_immediate_dominator (CDI_DOMINATORS,
5381 zero_iter_bb));
5382 }
5383 }
5384 if (in_combined_parallel)
5385 {
5386 /* In a combined parallel loop, emit a call to
5387 GOMP_loop_foo_next. */
5388 t = build_call_expr (builtin_decl_explicit (next_fn), 2,
5389 build_fold_addr_expr (istart0),
5390 build_fold_addr_expr (iend0));
5391 }
5392 else
5393 {
5394 tree t0, t1, t2, t3, t4;
5395 /* If this is not a combined parallel loop, emit a call to
5396 GOMP_loop_foo_start in ENTRY_BB. */
5397 t4 = build_fold_addr_expr (iend0);
5398 t3 = build_fold_addr_expr (istart0);
5399 t2 = fold_convert (fd->iter_type, fd->loop.step);
5400 t1 = fd->loop.n2;
5401 t0 = fd->loop.n1;
5402 if (gimple_omp_for_combined_into_p (fd->for_stmt))
5403 {
5404 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
5405 OMP_CLAUSE__LOOPTEMP_);
5406 gcc_assert (innerc);
5407 t0 = OMP_CLAUSE_DECL (innerc);
5408 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5409 OMP_CLAUSE__LOOPTEMP_);
5410 gcc_assert (innerc);
5411 t1 = OMP_CLAUSE_DECL (innerc);
5412 }
5413 if (POINTER_TYPE_P (TREE_TYPE (t0))
5414 && TYPE_PRECISION (TREE_TYPE (t0))
5415 != TYPE_PRECISION (fd->iter_type))
5416 {
5417 /* Avoid casting pointers to integer of a different size. */
5418 tree itype = signed_type_for (type);
5419 t1 = fold_convert (fd->iter_type, fold_convert (itype, t1));
5420 t0 = fold_convert (fd->iter_type, fold_convert (itype, t0));
5421 }
5422 else
5423 {
5424 t1 = fold_convert (fd->iter_type, t1);
5425 t0 = fold_convert (fd->iter_type, t0);
5426 }
5427 if (bias)
5428 {
5429 t1 = fold_build2 (PLUS_EXPR, fd->iter_type, t1, bias);
5430 t0 = fold_build2 (PLUS_EXPR, fd->iter_type, t0, bias);
5431 }
5432 if (fd->iter_type == long_integer_type_node)
5433 {
5434 if (fd->chunk_size)
5435 {
5436 t = fold_convert (fd->iter_type, fd->chunk_size);
5437 t = build_call_expr (builtin_decl_explicit (start_fn),
5438 6, t0, t1, t2, t, t3, t4);
5439 }
5440 else
5441 t = build_call_expr (builtin_decl_explicit (start_fn),
5442 5, t0, t1, t2, t3, t4);
5443 }
5444 else
5445 {
5446 tree t5;
5447 tree c_bool_type;
5448 tree bfn_decl;
5449
5450 /* The GOMP_loop_ull_*start functions have an additional boolean
5451 argument, true for < loops and false for > loops.
5452 In Fortran, the C bool type can be different from
5453 boolean_type_node. */
5454 bfn_decl = builtin_decl_explicit (start_fn);
5455 c_bool_type = TREE_TYPE (TREE_TYPE (bfn_decl));
5456 t5 = build_int_cst (c_bool_type,
5457 fd->loop.cond_code == LT_EXPR ? 1 : 0);
5458 if (fd->chunk_size)
5459 {
5460 tree bfn_decl = builtin_decl_explicit (start_fn);
5461 t = fold_convert (fd->iter_type, fd->chunk_size);
5462 t = build_call_expr (bfn_decl, 7, t5, t0, t1, t2, t, t3, t4);
5463 }
5464 else
5465 t = build_call_expr (builtin_decl_explicit (start_fn),
5466 6, t5, t0, t1, t2, t3, t4);
5467 }
5468 }
5469 if (TREE_TYPE (t) != boolean_type_node)
5470 t = fold_build2 (NE_EXPR, boolean_type_node,
5471 t, build_int_cst (TREE_TYPE (t), 0));
5472 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5473 true, GSI_SAME_STMT);
5474 gsi_insert_after (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
5475
5476 /* Remove the GIMPLE_OMP_FOR statement. */
5477 gsi_remove (&gsi, true);
5478
5479 /* Iteration setup for sequential loop goes in L0_BB. */
5480 tree startvar = fd->loop.v;
5481 tree endvar = NULL_TREE;
5482
5483 if (gimple_omp_for_combined_p (fd->for_stmt))
5484 {
5485 gcc_assert (gimple_code (inner_stmt) == GIMPLE_OMP_FOR
5486 && gimple_omp_for_kind (inner_stmt)
5487 == GF_OMP_FOR_KIND_SIMD);
5488 tree innerc = find_omp_clause (gimple_omp_for_clauses (inner_stmt),
5489 OMP_CLAUSE__LOOPTEMP_);
5490 gcc_assert (innerc);
5491 startvar = OMP_CLAUSE_DECL (innerc);
5492 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5493 OMP_CLAUSE__LOOPTEMP_);
5494 gcc_assert (innerc);
5495 endvar = OMP_CLAUSE_DECL (innerc);
5496 }
5497
5498 gsi = gsi_start_bb (l0_bb);
5499 t = istart0;
5500 if (bias)
5501 t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
5502 if (POINTER_TYPE_P (TREE_TYPE (startvar)))
5503 t = fold_convert (signed_type_for (TREE_TYPE (startvar)), t);
5504 t = fold_convert (TREE_TYPE (startvar), t);
5505 t = force_gimple_operand_gsi (&gsi, t,
5506 DECL_P (startvar)
5507 && TREE_ADDRESSABLE (startvar),
5508 NULL_TREE, false, GSI_CONTINUE_LINKING);
5509 stmt = gimple_build_assign (startvar, t);
5510 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5511
5512 t = iend0;
5513 if (bias)
5514 t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
5515 if (POINTER_TYPE_P (TREE_TYPE (startvar)))
5516 t = fold_convert (signed_type_for (TREE_TYPE (startvar)), t);
5517 t = fold_convert (TREE_TYPE (startvar), t);
5518 iend = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5519 false, GSI_CONTINUE_LINKING);
5520 if (endvar)
5521 {
5522 stmt = gimple_build_assign (endvar, iend);
5523 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5524 }
5525 if (fd->collapse > 1)
5526 expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);
5527
5528 if (!broken_loop)
5529 {
5530 /* Code to control the increment and predicate for the sequential
5531 loop goes in the CONT_BB. */
5532 gsi = gsi_last_bb (cont_bb);
5533 stmt = gsi_stmt (gsi);
5534 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
5535 vmain = gimple_omp_continue_control_use (stmt);
5536 vback = gimple_omp_continue_control_def (stmt);
5537
5538 if (!gimple_omp_for_combined_p (fd->for_stmt))
5539 {
5540 if (POINTER_TYPE_P (type))
5541 t = fold_build_pointer_plus (vmain, fd->loop.step);
5542 else
5543 t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
5544 t = force_gimple_operand_gsi (&gsi, t,
5545 DECL_P (vback)
5546 && TREE_ADDRESSABLE (vback),
5547 NULL_TREE, true, GSI_SAME_STMT);
5548 stmt = gimple_build_assign (vback, t);
5549 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5550
5551 t = build2 (fd->loop.cond_code, boolean_type_node,
5552 DECL_P (vback) && TREE_ADDRESSABLE (vback) ? t : vback,
5553 iend);
5554 stmt = gimple_build_cond_empty (t);
5555 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5556 }
5557
5558 /* Remove GIMPLE_OMP_CONTINUE. */
5559 gsi_remove (&gsi, true);
5560
5561 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
5562 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, l1_bb);
5563
5564 /* Emit code to get the next parallel iteration in L2_BB. */
5565 gsi = gsi_start_bb (l2_bb);
5566
5567 t = build_call_expr (builtin_decl_explicit (next_fn), 2,
5568 build_fold_addr_expr (istart0),
5569 build_fold_addr_expr (iend0));
5570 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5571 false, GSI_CONTINUE_LINKING);
5572 if (TREE_TYPE (t) != boolean_type_node)
5573 t = fold_build2 (NE_EXPR, boolean_type_node,
5574 t, build_int_cst (TREE_TYPE (t), 0));
5575 stmt = gimple_build_cond_empty (t);
5576 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5577 }
5578
5579 /* Add the loop cleanup function. */
5580 gsi = gsi_last_bb (exit_bb);
5581 if (gimple_omp_return_nowait_p (gsi_stmt (gsi)))
5582 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_NOWAIT);
5583 else if (gimple_omp_return_lhs (gsi_stmt (gsi)))
5584 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_CANCEL);
5585 else
5586 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END);
5587 stmt = gimple_build_call (t, 0);
5588 if (gimple_omp_return_lhs (gsi_stmt (gsi)))
5589 gimple_call_set_lhs (stmt, gimple_omp_return_lhs (gsi_stmt (gsi)));
5590 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
5591 gsi_remove (&gsi, true);
5592
5593 /* Connect the new blocks. */
5594 find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
5595 find_edge (entry_bb, l3_bb)->flags = EDGE_FALSE_VALUE;
5596
5597 if (!broken_loop)
5598 {
5599 gimple_seq phis;
5600
5601 e = find_edge (cont_bb, l3_bb);
5602 ne = make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);
5603
5604 phis = phi_nodes (l3_bb);
5605 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5606 {
5607 gimple phi = gsi_stmt (gsi);
5608 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
5609 PHI_ARG_DEF_FROM_EDGE (phi, e));
5610 }
5611 remove_edge (e);
5612
5613 make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
5614 if (current_loops)
5615 add_bb_to_loop (l2_bb, cont_bb->loop_father);
5616 e = find_edge (cont_bb, l1_bb);
5617 if (gimple_omp_for_combined_p (fd->for_stmt))
5618 {
5619 remove_edge (e);
5620 e = NULL;
5621 }
5622 else if (fd->collapse > 1)
5623 {
5624 remove_edge (e);
5625 e = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
5626 }
5627 else
5628 e->flags = EDGE_TRUE_VALUE;
5629 if (e)
5630 {
5631 e->probability = REG_BR_PROB_BASE * 7 / 8;
5632 find_edge (cont_bb, l2_bb)->probability = REG_BR_PROB_BASE / 8;
5633 }
5634 else
5635 {
5636 e = find_edge (cont_bb, l2_bb);
5637 e->flags = EDGE_FALLTHRU;
5638 }
5639 make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);
5640
5641 set_immediate_dominator (CDI_DOMINATORS, l2_bb,
5642 recompute_dominator (CDI_DOMINATORS, l2_bb));
5643 set_immediate_dominator (CDI_DOMINATORS, l3_bb,
5644 recompute_dominator (CDI_DOMINATORS, l3_bb));
5645 set_immediate_dominator (CDI_DOMINATORS, l0_bb,
5646 recompute_dominator (CDI_DOMINATORS, l0_bb));
5647 set_immediate_dominator (CDI_DOMINATORS, l1_bb,
5648 recompute_dominator (CDI_DOMINATORS, l1_bb));
5649
5650 struct loop *outer_loop = alloc_loop ();
5651 outer_loop->header = l0_bb;
5652 outer_loop->latch = l2_bb;
5653 add_loop (outer_loop, l0_bb->loop_father);
5654
5655 if (!gimple_omp_for_combined_p (fd->for_stmt))
5656 {
5657 struct loop *loop = alloc_loop ();
5658 loop->header = l1_bb;
5659 /* The loop may have multiple latches. */
5660 add_loop (loop, outer_loop);
5661 }
5662 }
5663}
5664
5665
5666/* A subroutine of expand_omp_for. Generate code for a parallel
5667 loop with static schedule and no specified chunk size. Given
5668 parameters:
5669
5670 for (V = N1; V cond N2; V += STEP) BODY;
5671
5672 where COND is "<" or ">", we generate pseudocode
5673
5674 if ((__typeof (V)) -1 > 0 && N2 cond N1) goto L2;
5675 if (cond is <)
5676 adj = STEP - 1;
5677 else
5678 adj = STEP + 1;
5679 if ((__typeof (V)) -1 > 0 && cond is >)
5680 n = -(adj + N2 - N1) / -STEP;
5681 else
5682 n = (adj + N2 - N1) / STEP;
5683 q = n / nthreads;
5684 tt = n % nthreads;
5685 if (threadid < tt) goto L3; else goto L4;
5686 L3:
5687 tt = 0;
5688 q = q + 1;
5689 L4:
5690 s0 = q * threadid + tt;
5691 e0 = s0 + q;
5692 V = s0 * STEP + N1;
5693 if (s0 >= e0) goto L2; else goto L0;
5694 L0:
5695 e = e0 * STEP + N1;
5696 L1:
5697 BODY;
5698 V += STEP;
5699 if (V cond e) goto L1;
5700 L2:
5701*/
5702
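/* A worked example of the pseudocode above, with hypothetical numbers
   (an editorial illustration): for n == 103 and nthreads == 4, we get
   q == 25 and tt == 3, so threads 0..2 each take q + 1 == 26
   iterations and thread 3 takes 25; the [s0, e0) ranges are [0, 26),
   [26, 52), [52, 78) and [78, 103), covering all 103 iterations with
   at most one iteration of imbalance.  */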
5703 static void
5704 expand_omp_for_static_nochunk (struct omp_region *region,
5705 struct omp_for_data *fd,
5706 gimple inner_stmt)
5707 {
5708 tree n, q, s0, e0, e, t, tt, nthreads, threadid;
5709 tree type, itype, vmain, vback;
5710 basic_block entry_bb, second_bb, third_bb, exit_bb, seq_start_bb;
5711 basic_block body_bb, cont_bb, collapse_bb = NULL;
5712 basic_block fin_bb;
5713 gimple_stmt_iterator gsi;
5714 gimple stmt;
5715 edge ep;
5716 enum built_in_function get_num_threads = BUILT_IN_OMP_GET_NUM_THREADS;
5717 enum built_in_function get_thread_num = BUILT_IN_OMP_GET_THREAD_NUM;
5718 bool broken_loop = region->cont == NULL;
5719 tree *counts = NULL;
5720 tree n1, n2, step;
5721
5722 itype = type = TREE_TYPE (fd->loop.v);
5723 if (POINTER_TYPE_P (type))
5724 itype = signed_type_for (type);
5725
5726 entry_bb = region->entry;
5727 cont_bb = region->cont;
5728 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
5729 fin_bb = BRANCH_EDGE (entry_bb)->dest;
5730 gcc_assert (broken_loop
5731 || (fin_bb == FALLTHRU_EDGE (cont_bb)->dest));
5732 seq_start_bb = split_edge (FALLTHRU_EDGE (entry_bb));
5733 body_bb = single_succ (seq_start_bb);
5734 if (!broken_loop)
5735 {
5736 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
5737 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
5738 }
5739 exit_bb = region->exit;
5740
5741 /* Iteration space partitioning goes in ENTRY_BB. */
5742 gsi = gsi_last_bb (entry_bb);
5743 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
5744
5745 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
5746 {
5747 get_num_threads = BUILT_IN_OMP_GET_NUM_TEAMS;
5748 get_thread_num = BUILT_IN_OMP_GET_TEAM_NUM;
5749 }
5750
5751 if (fd->collapse > 1)
5752 {
5753 int first_zero_iter = -1;
5754 basic_block l2_dom_bb = NULL;
5755
5756 counts = XALLOCAVEC (tree, fd->collapse);
5757 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
5758 fin_bb, first_zero_iter,
5759 l2_dom_bb);
5760 t = NULL_TREE;
5761 }
5762 else if (gimple_omp_for_combined_into_p (fd->for_stmt))
5763 t = integer_one_node;
5764 else
5765 t = fold_binary (fd->loop.cond_code, boolean_type_node,
5766 fold_convert (type, fd->loop.n1),
5767 fold_convert (type, fd->loop.n2));
5768 if (fd->collapse == 1
5769 && TYPE_UNSIGNED (type)
5770 && (t == NULL_TREE || !integer_onep (t)))
5771 {
5772 n1 = fold_convert (type, unshare_expr (fd->loop.n1));
5773 n1 = force_gimple_operand_gsi (&gsi, n1, true, NULL_TREE,
5774 true, GSI_SAME_STMT);
5775 n2 = fold_convert (type, unshare_expr (fd->loop.n2));
5776 n2 = force_gimple_operand_gsi (&gsi, n2, true, NULL_TREE,
5777 true, GSI_SAME_STMT);
5778 stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
5779 NULL_TREE, NULL_TREE);
5780 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5781 if (walk_tree (gimple_cond_lhs_ptr (stmt),
5782 expand_omp_regimplify_p, NULL, NULL)
5783 || walk_tree (gimple_cond_rhs_ptr (stmt),
5784 expand_omp_regimplify_p, NULL, NULL))
5785 {
5786 gsi = gsi_for_stmt (stmt);
5787 gimple_regimplify_operands (stmt, &gsi);
5788 }
5789 ep = split_block (entry_bb, stmt);
5790 ep->flags = EDGE_TRUE_VALUE;
5791 entry_bb = ep->dest;
5792 ep->probability = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
5793 ep = make_edge (ep->src, fin_bb, EDGE_FALSE_VALUE);
5794 ep->probability = REG_BR_PROB_BASE / 2000 - 1;
5795 if (gimple_in_ssa_p (cfun))
5796 {
5797 int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
5798 for (gsi = gsi_start_phis (fin_bb);
5799 !gsi_end_p (gsi); gsi_next (&gsi))
5800 {
5801 gimple phi = gsi_stmt (gsi);
5802 add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
5803 ep, UNKNOWN_LOCATION);
5804 }
5805 }
5806 gsi = gsi_last_bb (entry_bb);
5807 }
5808
5809 t = build_call_expr (builtin_decl_explicit (get_num_threads), 0);
5810 t = fold_convert (itype, t);
5811 nthreads = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5812 true, GSI_SAME_STMT);
5813
5814 t = build_call_expr (builtin_decl_explicit (get_thread_num), 0);
5815 t = fold_convert (itype, t);
5816 threadid = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5817 true, GSI_SAME_STMT);
5818
5819 n1 = fd->loop.n1;
5820 n2 = fd->loop.n2;
5821 step = fd->loop.step;
5822 if (gimple_omp_for_combined_into_p (fd->for_stmt))
5823 {
5824 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
5825 OMP_CLAUSE__LOOPTEMP_);
5826 gcc_assert (innerc);
5827 n1 = OMP_CLAUSE_DECL (innerc);
5828 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5829 OMP_CLAUSE__LOOPTEMP_);
5830 gcc_assert (innerc);
5831 n2 = OMP_CLAUSE_DECL (innerc);
5832 }
5833 n1 = force_gimple_operand_gsi (&gsi, fold_convert (type, n1),
5834 true, NULL_TREE, true, GSI_SAME_STMT);
5835 n2 = force_gimple_operand_gsi (&gsi, fold_convert (itype, n2),
5836 true, NULL_TREE, true, GSI_SAME_STMT);
5837 step = force_gimple_operand_gsi (&gsi, fold_convert (itype, step),
5838 true, NULL_TREE, true, GSI_SAME_STMT);
5839
5840 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
5841 t = fold_build2 (PLUS_EXPR, itype, step, t);
5842 t = fold_build2 (PLUS_EXPR, itype, t, n2);
5843 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, n1));
5844 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
5845 t = fold_build2 (TRUNC_DIV_EXPR, itype,
5846 fold_build1 (NEGATE_EXPR, itype, t),
5847 fold_build1 (NEGATE_EXPR, itype, step));
5848 else
5849 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
5850 t = fold_convert (itype, t);
5851 n = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
5852
5853 q = create_tmp_reg (itype, "q");
5854 t = fold_build2 (TRUNC_DIV_EXPR, itype, n, nthreads);
5855 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE, true, GSI_SAME_STMT);
5856 gsi_insert_before (&gsi, gimple_build_assign (q, t), GSI_SAME_STMT);
5857
5858 tt = create_tmp_reg (itype, "tt");
5859 t = fold_build2 (TRUNC_MOD_EXPR, itype, n, nthreads);
5860 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE, true, GSI_SAME_STMT);
5861 gsi_insert_before (&gsi, gimple_build_assign (tt, t), GSI_SAME_STMT);
5862
5863 t = build2 (LT_EXPR, boolean_type_node, threadid, tt);
5864 stmt = gimple_build_cond_empty (t);
5865 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5866
5867 second_bb = split_block (entry_bb, stmt)->dest;
5868 gsi = gsi_last_bb (second_bb);
5869 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
5870
5871 gsi_insert_before (&gsi, gimple_build_assign (tt, build_int_cst (itype, 0)),
5872 GSI_SAME_STMT);
5873 stmt = gimple_build_assign_with_ops (PLUS_EXPR, q, q,
5874 build_int_cst (itype, 1));
5875 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5876
5877 third_bb = split_block (second_bb, stmt)->dest;
5878 gsi = gsi_last_bb (third_bb);
5879 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
5880
5881 t = build2 (MULT_EXPR, itype, q, threadid);
5882 t = build2 (PLUS_EXPR, itype, t, tt);
5883 s0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
5884
5885 t = fold_build2 (PLUS_EXPR, itype, s0, q);
5886 e0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
5887
5888 t = build2 (GE_EXPR, boolean_type_node, s0, e0);
5889 gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
5890
5891 /* Remove the GIMPLE_OMP_FOR statement. */
5892 gsi_remove (&gsi, true);
5893
5894 /* Setup code for sequential iteration goes in SEQ_START_BB. */
5895 gsi = gsi_start_bb (seq_start_bb);
5896
5897 tree startvar = fd->loop.v;
5898 tree endvar = NULL_TREE;
5899
5900 if (gimple_omp_for_combined_p (fd->for_stmt))
5901 {
5902 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
5903 ? gimple_omp_parallel_clauses (inner_stmt)
5904 : gimple_omp_for_clauses (inner_stmt);
5905 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
5906 gcc_assert (innerc);
5907 startvar = OMP_CLAUSE_DECL (innerc);
5908 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5909 OMP_CLAUSE__LOOPTEMP_);
5910 gcc_assert (innerc);
5911 endvar = OMP_CLAUSE_DECL (innerc);
5912 }
5913 t = fold_convert (itype, s0);
5914 t = fold_build2 (MULT_EXPR, itype, t, step);
5915 if (POINTER_TYPE_P (type))
5916 t = fold_build_pointer_plus (n1, t);
5917 else
5918 t = fold_build2 (PLUS_EXPR, type, t, n1);
5919 t = fold_convert (TREE_TYPE (startvar), t);
5920 t = force_gimple_operand_gsi (&gsi, t,
5921 DECL_P (startvar)
5922 && TREE_ADDRESSABLE (startvar),
5923 NULL_TREE, false, GSI_CONTINUE_LINKING);
5924 stmt = gimple_build_assign (startvar, t);
5925 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5926
5927 t = fold_convert (itype, e0);
5928 t = fold_build2 (MULT_EXPR, itype, t, step);
5929 if (POINTER_TYPE_P (type))
5930 t = fold_build_pointer_plus (n1, t);
5931 else
5932 t = fold_build2 (PLUS_EXPR, type, t, n1);
5933 t = fold_convert (TREE_TYPE (startvar), t);
5934 e = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5935 false, GSI_CONTINUE_LINKING);
5936 if (endvar)
5937 {
5938 stmt = gimple_build_assign (endvar, e);
5939 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5940 }
5941 if (fd->collapse > 1)
5942 expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);
5943
5944 if (!broken_loop)
5945 {
5946 /* The code controlling the sequential loop replaces the
5947 GIMPLE_OMP_CONTINUE. */
5948 gsi = gsi_last_bb (cont_bb);
5949 stmt = gsi_stmt (gsi);
5950 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
5951 vmain = gimple_omp_continue_control_use (stmt);
5952 vback = gimple_omp_continue_control_def (stmt);
5953
5954 if (!gimple_omp_for_combined_p (fd->for_stmt))
5955 {
5956 if (POINTER_TYPE_P (type))
5957 t = fold_build_pointer_plus (vmain, step);
5958 else
5959 t = fold_build2 (PLUS_EXPR, type, vmain, step);
5960 t = force_gimple_operand_gsi (&gsi, t,
5961 DECL_P (vback)
5962 && TREE_ADDRESSABLE (vback),
5963 NULL_TREE, true, GSI_SAME_STMT);
5964 stmt = gimple_build_assign (vback, t);
5965 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5966
5967 t = build2 (fd->loop.cond_code, boolean_type_node,
5968 DECL_P (vback) && TREE_ADDRESSABLE (vback)
5969 ? t : vback, e);
5970 gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
5971 }
5972
5973 /* Remove the GIMPLE_OMP_CONTINUE statement. */
5974 gsi_remove (&gsi, true);
5975
5976 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
5977 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, body_bb);
5978 }
5979
5980 /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing. */
5981 gsi = gsi_last_bb (exit_bb);
5982 if (!gimple_omp_return_nowait_p (gsi_stmt (gsi)))
5983 {
5984 t = gimple_omp_return_lhs (gsi_stmt (gsi));
5985 gsi_insert_after (&gsi, build_omp_barrier (t), GSI_SAME_STMT);
5986 }
5987 gsi_remove (&gsi, true);
5988
5989 /* Connect all the blocks. */
5990 ep = make_edge (entry_bb, third_bb, EDGE_FALSE_VALUE);
5991 ep->probability = REG_BR_PROB_BASE / 4 * 3;
5992 ep = find_edge (entry_bb, second_bb);
5993 ep->flags = EDGE_TRUE_VALUE;
5994 ep->probability = REG_BR_PROB_BASE / 4;
5995 find_edge (third_bb, seq_start_bb)->flags = EDGE_FALSE_VALUE;
5996 find_edge (third_bb, fin_bb)->flags = EDGE_TRUE_VALUE;
5997
5998 if (!broken_loop)
5999 {
6000 ep = find_edge (cont_bb, body_bb);
6001 if (gimple_omp_for_combined_p (fd->for_stmt))
6002 {
6003 remove_edge (ep);
6004 ep = NULL;
6005 }
6006 else if (fd->collapse > 1)
6007 {
6008 remove_edge (ep);
6009 ep = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
6010 }
6011 else
6012 ep->flags = EDGE_TRUE_VALUE;
6013 find_edge (cont_bb, fin_bb)->flags
6014 = ep ? EDGE_FALSE_VALUE : EDGE_FALLTHRU;
6015 }
6016
6017 set_immediate_dominator (CDI_DOMINATORS, second_bb, entry_bb);
6018 set_immediate_dominator (CDI_DOMINATORS, third_bb, entry_bb);
6019 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb, third_bb);
6020
6021 set_immediate_dominator (CDI_DOMINATORS, body_bb,
6022 recompute_dominator (CDI_DOMINATORS, body_bb));
6023 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
6024 recompute_dominator (CDI_DOMINATORS, fin_bb));
6025
6026 if (!broken_loop && !gimple_omp_for_combined_p (fd->for_stmt))
6027 {
6028 struct loop *loop = alloc_loop ();
6029 loop->header = body_bb;
6030 if (collapse_bb == NULL)
6031 loop->latch = cont_bb;
6032 add_loop (loop, body_bb->loop_father);
6033 }
6034}
6035
6036
6037/* A subroutine of expand_omp_for. Generate code for a parallel
6038 loop with static schedule and a specified chunk size. Given
6039 parameters:
6040
6041 for (V = N1; V cond N2; V += STEP) BODY;
6042
6043 where COND is "<" or ">", we generate pseudocode
6044
6045 if ((__typeof (V)) -1 > 0 && N2 cond N1) goto L2;
6046 if (cond is <)
6047 adj = STEP - 1;
6048 else
6049 adj = STEP + 1;
6050 if ((__typeof (V)) -1 > 0 && cond is >)
6051 n = -(adj + N2 - N1) / -STEP;
6052 else
6053 n = (adj + N2 - N1) / STEP;
6054 trip = 0;
6055 V = threadid * CHUNK * STEP + N1; -- this extra definition of V is
6056 here so that V is defined
6057 if the loop is not entered
6058 L0:
6059 s0 = (trip * nthreads + threadid) * CHUNK;
6060 e0 = min(s0 + CHUNK, n);
6061 if (s0 < n) goto L1; else goto L4;
6062 L1:
6063 V = s0 * STEP + N1;
6064 e = e0 * STEP + N1;
6065 L2:
6066 BODY;
6067 V += STEP;
6068 if (V cond e) goto L2; else goto L3;
6069 L3:
6070 trip += 1;
6071 goto L0;
6072 L4:
6073*/
6074
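/* A worked example of the pseudocode above, with hypothetical numbers:
   for nthreads == 4, CHUNK == 4 and threadid == 1, successive trips
   give s0 == (0 * 4 + 1) * 4 == 4, then (4 + 1) * 4 == 20, then
   (8 + 1) * 4 == 36, i.e. thread 1 executes the slices [4, 8),
   [20, 24), [36, 40), ... until s0 >= n.  */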
6075 static void
6076expand_omp_for_static_chunk (struct omp_region *region,
6077 struct omp_for_data *fd, gimple inner_stmt)
6078 {
6079 tree n, s0, e0, e, t;
6080 tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
6081 tree type, itype, v_main, v_back, v_extra;
6082 basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
6083 basic_block trip_update_bb = NULL, cont_bb, collapse_bb = NULL, fin_bb;
6084 gimple_stmt_iterator si;
6085 gimple stmt;
6086 edge se;
6087 enum built_in_function get_num_threads = BUILT_IN_OMP_GET_NUM_THREADS;
6088 enum built_in_function get_thread_num = BUILT_IN_OMP_GET_THREAD_NUM;
6089 bool broken_loop = region->cont == NULL;
6090 tree *counts = NULL;
6091 tree n1, n2, step;
953ff289 6092
6093 itype = type = TREE_TYPE (fd->loop.v);
6094 if (POINTER_TYPE_P (type))
6095 itype = signed_type_for (type);
6096
6097 entry_bb = region->entry;
6098 se = split_block (entry_bb, last_stmt (entry_bb));
6099 entry_bb = se->src;
6100 iter_part_bb = se->dest;
6101 cont_bb = region->cont;
6102 gcc_assert (EDGE_COUNT (iter_part_bb->succs) == 2);
6103 fin_bb = BRANCH_EDGE (iter_part_bb)->dest;
6104 gcc_assert (broken_loop
6105 || fin_bb == FALLTHRU_EDGE (cont_bb)->dest);
6106 seq_start_bb = split_edge (FALLTHRU_EDGE (iter_part_bb));
6107 body_bb = single_succ (seq_start_bb);
6108 if (!broken_loop)
6109 {
6110 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
6111 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
6112 trip_update_bb = split_edge (FALLTHRU_EDGE (cont_bb));
6113 }
6114 exit_bb = region->exit;
6115
6116 /* Trip and adjustment setup goes in ENTRY_BB. */
6117 si = gsi_last_bb (entry_bb);
6118 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_FOR);
6119
6120 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
6121 {
6122 get_num_threads = BUILT_IN_OMP_GET_NUM_TEAMS;
6123 get_thread_num = BUILT_IN_OMP_GET_TEAM_NUM;
6124 }
6125
6126 if (fd->collapse > 1)
6127 {
6128 int first_zero_iter = -1;
6129 basic_block l2_dom_bb = NULL;
6130
6131 counts = XALLOCAVEC (tree, fd->collapse);
6132 expand_omp_for_init_counts (fd, &si, entry_bb, counts,
6133 fin_bb, first_zero_iter,
6134 l2_dom_bb);
6135 t = NULL_TREE;
6136 }
6137 else if (gimple_omp_for_combined_into_p (fd->for_stmt))
6138 t = integer_one_node;
6139 else
6140 t = fold_binary (fd->loop.cond_code, boolean_type_node,
6141 fold_convert (type, fd->loop.n1),
6142 fold_convert (type, fd->loop.n2));
6143 if (fd->collapse == 1
6144 && TYPE_UNSIGNED (type)
6145 && (t == NULL_TREE || !integer_onep (t)))
6146 {
6147 n1 = fold_convert (type, unshare_expr (fd->loop.n1));
6148 n1 = force_gimple_operand_gsi (&si, n1, true, NULL_TREE,
6149 true, GSI_SAME_STMT);
6150 n2 = fold_convert (type, unshare_expr (fd->loop.n2));
6151 n2 = force_gimple_operand_gsi (&si, n2, true, NULL_TREE,
6152 true, GSI_SAME_STMT);
6153 stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
6154 NULL_TREE, NULL_TREE);
6155 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
6156 if (walk_tree (gimple_cond_lhs_ptr (stmt),
6157 expand_omp_regimplify_p, NULL, NULL)
6158 || walk_tree (gimple_cond_rhs_ptr (stmt),
6159 expand_omp_regimplify_p, NULL, NULL))
6160 {
6161 si = gsi_for_stmt (stmt);
6162 gimple_regimplify_operands (stmt, &si);
6163 }
6164 se = split_block (entry_bb, stmt);
6165 se->flags = EDGE_TRUE_VALUE;
6166 entry_bb = se->dest;
6167 se->probability = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
6168 se = make_edge (se->src, fin_bb, EDGE_FALSE_VALUE);
6169 se->probability = REG_BR_PROB_BASE / 2000 - 1;
6170 if (gimple_in_ssa_p (cfun))
6171 {
6172 int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
6173 for (si = gsi_start_phis (fin_bb);
6174 !gsi_end_p (si); gsi_next (&si))
6175 {
6176 gimple phi = gsi_stmt (si);
6177 add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
6178 se, UNKNOWN_LOCATION);
6179 }
6180 }
6181 si = gsi_last_bb (entry_bb);
6182 }
6183
6184 t = build_call_expr (builtin_decl_explicit (get_num_threads), 0);
6185 t = fold_convert (itype, t);
6186 nthreads = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6187 true, GSI_SAME_STMT);
6188
6189 t = build_call_expr (builtin_decl_explicit (get_thread_num), 0);
6190 t = fold_convert (itype, t);
6191 threadid = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6192 true, GSI_SAME_STMT);
6193
6194 n1 = fd->loop.n1;
6195 n2 = fd->loop.n2;
6196 step = fd->loop.step;
6197 if (gimple_omp_for_combined_into_p (fd->for_stmt))
6198 {
6199 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6200 OMP_CLAUSE__LOOPTEMP_);
6201 gcc_assert (innerc);
6202 n1 = OMP_CLAUSE_DECL (innerc);
6203 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6204 OMP_CLAUSE__LOOPTEMP_);
6205 gcc_assert (innerc);
6206 n2 = OMP_CLAUSE_DECL (innerc);
6207 }
6208 n1 = force_gimple_operand_gsi (&si, fold_convert (type, n1),
6209 true, NULL_TREE, true, GSI_SAME_STMT);
6210 n2 = force_gimple_operand_gsi (&si, fold_convert (itype, n2),
6211 true, NULL_TREE, true, GSI_SAME_STMT);
6212 step = force_gimple_operand_gsi (&si, fold_convert (itype, step),
6213 true, NULL_TREE, true, GSI_SAME_STMT);
6214 fd->chunk_size
6215 = force_gimple_operand_gsi (&si, fold_convert (itype, fd->chunk_size),
6216 true, NULL_TREE, true, GSI_SAME_STMT);
6217
6218 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
6219 t = fold_build2 (PLUS_EXPR, itype, step, t);
6220 t = fold_build2 (PLUS_EXPR, itype, t, n2);
6221 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, n1));
6222 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
6223 t = fold_build2 (TRUNC_DIV_EXPR, itype,
6224 fold_build1 (NEGATE_EXPR, itype, t),
6225 fold_build1 (NEGATE_EXPR, itype, step));
6226 else
6227 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
6228 t = fold_convert (itype, t);
6229 n = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6230 true, GSI_SAME_STMT);
6231
6232 trip_var = create_tmp_reg (itype, ".trip");
6233 if (gimple_in_ssa_p (cfun))
6234 {
6235 trip_init = make_ssa_name (trip_var, NULL);
6236 trip_main = make_ssa_name (trip_var, NULL);
6237 trip_back = make_ssa_name (trip_var, NULL);
6238 }
6239 else
6240 {
6241 trip_init = trip_var;
6242 trip_main = trip_var;
6243 trip_back = trip_var;
6244 }
6245
6246 stmt = gimple_build_assign (trip_init, build_int_cst (itype, 0));
6247 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
6248
6249 t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
6250 t = fold_build2 (MULT_EXPR, itype, t, step);
6251 if (POINTER_TYPE_P (type))
6252 t = fold_build_pointer_plus (n1, t);
6253 else
6254 t = fold_build2 (PLUS_EXPR, type, t, n1);
6255 v_extra = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6256 true, GSI_SAME_STMT);
6257
6258 /* Remove the GIMPLE_OMP_FOR. */
6259 gsi_remove (&si, true);
6260
6261 /* Iteration space partitioning goes in ITER_PART_BB. */
6262 si = gsi_last_bb (iter_part_bb);
6263
6264 t = fold_build2 (MULT_EXPR, itype, trip_main, nthreads);
6265 t = fold_build2 (PLUS_EXPR, itype, t, threadid);
6266 t = fold_build2 (MULT_EXPR, itype, t, fd->chunk_size);
6267 s0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6268 false, GSI_CONTINUE_LINKING);
6269
6270 t = fold_build2 (PLUS_EXPR, itype, s0, fd->chunk_size);
6271 t = fold_build2 (MIN_EXPR, itype, t, n);
6272 e0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6273 false, GSI_CONTINUE_LINKING);
6274
6275 t = build2 (LT_EXPR, boolean_type_node, s0, n);
6276 gsi_insert_after (&si, gimple_build_cond_empty (t), GSI_CONTINUE_LINKING);
6277
6278 /* Setup code for sequential iteration goes in SEQ_START_BB. */
6279 si = gsi_start_bb (seq_start_bb);
6280
6281 tree startvar = fd->loop.v;
6282 tree endvar = NULL_TREE;
6283
6284 if (gimple_omp_for_combined_p (fd->for_stmt))
6285 {
6286 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
6287 ? gimple_omp_parallel_clauses (inner_stmt)
6288 : gimple_omp_for_clauses (inner_stmt);
6289 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
6290 gcc_assert (innerc);
6291 startvar = OMP_CLAUSE_DECL (innerc);
6292 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6293 OMP_CLAUSE__LOOPTEMP_);
6294 gcc_assert (innerc);
6295 endvar = OMP_CLAUSE_DECL (innerc);
6296 }
6297
6298 t = fold_convert (itype, s0);
6299 t = fold_build2 (MULT_EXPR, itype, t, step);
6300 if (POINTER_TYPE_P (type))
6301 t = fold_build_pointer_plus (n1, t);
6302 else
6303 t = fold_build2 (PLUS_EXPR, type, t, n1);
6304 t = fold_convert (TREE_TYPE (startvar), t);
6305 t = force_gimple_operand_gsi (&si, t,
6306 DECL_P (startvar)
6307 && TREE_ADDRESSABLE (startvar),
6308 NULL_TREE, false, GSI_CONTINUE_LINKING);
6309 stmt = gimple_build_assign (startvar, t);
6310 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
6311
6312 t = fold_convert (itype, e0);
6313 t = fold_build2 (MULT_EXPR, itype, t, step);
6314 if (POINTER_TYPE_P (type))
6315 t = fold_build_pointer_plus (n1, t);
6316 else
6317 t = fold_build2 (PLUS_EXPR, type, t, n1);
6318 t = fold_convert (TREE_TYPE (startvar), t);
6319 e = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6320 false, GSI_CONTINUE_LINKING);
6321 if (endvar)
6322 {
6323 stmt = gimple_build_assign (endvar, e);
6324 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
6325 }
6326 if (fd->collapse > 1)
6327 expand_omp_for_init_vars (fd, &si, counts, inner_stmt, startvar);
6328
6329 if (!broken_loop)
6330 {
6331 /* The code controlling the sequential loop goes in CONT_BB,
6332 replacing the GIMPLE_OMP_CONTINUE. */
6333 si = gsi_last_bb (cont_bb);
6334 stmt = gsi_stmt (si);
6335 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
6336 v_main = gimple_omp_continue_control_use (stmt);
6337 v_back = gimple_omp_continue_control_def (stmt);
953ff289 6338
acf0174b
JJ
6339 if (!gimple_omp_for_combined_p (fd->for_stmt))
6340 {
6341 if (POINTER_TYPE_P (type))
6342 t = fold_build_pointer_plus (v_main, step);
6343 else
6344 t = fold_build2 (PLUS_EXPR, type, v_main, step);
6345 if (DECL_P (v_back) && TREE_ADDRESSABLE (v_back))
6346 t = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6347 true, GSI_SAME_STMT);
6348 stmt = gimple_build_assign (v_back, t);
6349 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
917948d3 6350
acf0174b
JJ
6351 t = build2 (fd->loop.cond_code, boolean_type_node,
6352 DECL_P (v_back) && TREE_ADDRESSABLE (v_back)
6353 ? t : v_back, e);
6354 gsi_insert_before (&si, gimple_build_cond_empty (t), GSI_SAME_STMT);
6355 }
917948d3 6356
acf0174b
JJ
6357 /* Remove GIMPLE_OMP_CONTINUE. */
6358 gsi_remove (&si, true);
b8698a0f 6359
acf0174b
JJ
6360 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
6361 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, body_bb);
50674e96 6362
acf0174b
JJ
6363 /* Trip update code goes into TRIP_UPDATE_BB. */
6364 si = gsi_start_bb (trip_update_bb);
953ff289 6365
acf0174b
JJ
6366 t = build_int_cst (itype, 1);
6367 t = build2 (PLUS_EXPR, itype, trip_main, t);
6368 stmt = gimple_build_assign (trip_back, t);
6369 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
6370 }
953ff289 6371
726a989a
RB
6372 /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing. */
6373 si = gsi_last_bb (exit_bb);
6374 if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
acf0174b
JJ
6375 {
6376 t = gimple_omp_return_lhs (gsi_stmt (si));
6377 gsi_insert_after (&si, build_omp_barrier (t), GSI_SAME_STMT);
6378 }
726a989a 6379 gsi_remove (&si, true);
953ff289 6380
50674e96 6381 /* Connect the new blocks. */
e5c95afe
ZD
6382 find_edge (iter_part_bb, seq_start_bb)->flags = EDGE_TRUE_VALUE;
6383 find_edge (iter_part_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
917948d3 6384
acf0174b
JJ
6385 if (!broken_loop)
6386 {
6387 se = find_edge (cont_bb, body_bb);
6388 if (gimple_omp_for_combined_p (fd->for_stmt))
6389 {
6390 remove_edge (se);
6391 se = NULL;
6392 }
6393 else if (fd->collapse > 1)
6394 {
6395 remove_edge (se);
6396 se = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
6397 }
6398 else
6399 se->flags = EDGE_TRUE_VALUE;
6400 find_edge (cont_bb, trip_update_bb)->flags
6401 = se ? EDGE_FALSE_VALUE : EDGE_FALLTHRU;
917948d3 6402
acf0174b
JJ
6403 redirect_edge_and_branch (single_succ_edge (trip_update_bb), iter_part_bb);
6404 }
917948d3
ZD
6405
6406 if (gimple_in_ssa_p (cfun))
6407 {
726a989a
RB
6408 gimple_stmt_iterator psi;
6409 gimple phi;
6410 edge re, ene;
9771b263 6411 edge_var_map_vector *head;
726a989a
RB
6412 edge_var_map *vm;
6413 size_t i;
6414
acf0174b
JJ
6415 gcc_assert (fd->collapse == 1 && !broken_loop);
6416
917948d3
ZD
6417 /* When we redirect the edge from trip_update_bb to iter_part_bb, we
6418 remove arguments of the phi nodes in fin_bb. We need to create
6419 appropriate phi nodes in iter_part_bb instead. */
6420 se = single_pred_edge (fin_bb);
6421 re = single_succ_edge (trip_update_bb);
726a989a 6422 head = redirect_edge_var_map_vector (re);
917948d3
ZD
6423 ene = single_succ_edge (entry_bb);
6424
726a989a 6425 psi = gsi_start_phis (fin_bb);
9771b263 6426 for (i = 0; !gsi_end_p (psi) && head->iterate (i, &vm);
726a989a 6427 gsi_next (&psi), ++i)
917948d3 6428 {
726a989a 6429 gimple nphi;
f5045c96 6430 source_location locus;
726a989a
RB
6431
6432 phi = gsi_stmt (psi);
6433 t = gimple_phi_result (phi);
6434 gcc_assert (t == redirect_edge_var_map_result (vm));
917948d3 6435 nphi = create_phi_node (t, iter_part_bb);
917948d3
ZD
6436
6437 t = PHI_ARG_DEF_FROM_EDGE (phi, se);
f5045c96
AM
6438 locus = gimple_phi_arg_location_from_edge (phi, se);
6439
a68ab351
JJ
6440 /* A special case -- fd->loop.v is not yet computed in
6441 iter_part_bb, we need to use v_extra instead. */
6442 if (t == fd->loop.v)
917948d3 6443 t = v_extra;
9e227d60 6444 add_phi_arg (nphi, t, ene, locus);
f5045c96 6445 locus = redirect_edge_var_map_location (vm);
9e227d60 6446 add_phi_arg (nphi, redirect_edge_var_map_def (vm), re, locus);
726a989a 6447 }
9771b263 6448 gcc_assert (!gsi_end_p (psi) && i == head->length ());
726a989a
RB
6449 redirect_edge_var_map_clear (re);
6450 while (1)
6451 {
6452 psi = gsi_start_phis (fin_bb);
6453 if (gsi_end_p (psi))
6454 break;
6455 remove_phi_node (&psi, false);
917948d3 6456 }
917948d3
ZD
6457
6458 /* Make phi node for trip. */
6459 phi = create_phi_node (trip_main, iter_part_bb);
f5045c96 6460 add_phi_arg (phi, trip_back, single_succ_edge (trip_update_bb),
9e227d60 6461 UNKNOWN_LOCATION);
f5045c96 6462 add_phi_arg (phi, trip_init, single_succ_edge (entry_bb),
9e227d60 6463 UNKNOWN_LOCATION);
917948d3
ZD
6464 }
6465
acf0174b
JJ
6466 if (!broken_loop)
6467 set_immediate_dominator (CDI_DOMINATORS, trip_update_bb, cont_bb);
917948d3
ZD
6468 set_immediate_dominator (CDI_DOMINATORS, iter_part_bb,
6469 recompute_dominator (CDI_DOMINATORS, iter_part_bb));
6470 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
6471 recompute_dominator (CDI_DOMINATORS, fin_bb));
6472 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb,
6473 recompute_dominator (CDI_DOMINATORS, seq_start_bb));
6474 set_immediate_dominator (CDI_DOMINATORS, body_bb,
6475 recompute_dominator (CDI_DOMINATORS, body_bb));
6093bc06 6476
acf0174b
JJ
6477 if (!broken_loop)
6478 {
6479 struct loop *trip_loop = alloc_loop ();
6480 trip_loop->header = iter_part_bb;
6481 trip_loop->latch = trip_update_bb;
6482 add_loop (trip_loop, iter_part_bb->loop_father);
6093bc06 6483
acf0174b
JJ
6484 if (!gimple_omp_for_combined_p (fd->for_stmt))
6485 {
6486 struct loop *loop = alloc_loop ();
6487 loop->header = body_bb;
6488 loop->latch = cont_bb;
6489 add_loop (loop, trip_loop);
6490 }
6491 }
953ff289
DN
6492}
6493
acf0174b 6494
74bf76ed
JJ
6495/* A subroutine of expand_omp_for. Generate code for a simd non-worksharing
6496 loop. Given parameters:
6497
6498 for (V = N1; V cond N2; V += STEP) BODY;
6499
6500 where COND is "<" or ">", we generate pseudocode
6501
6502 V = N1;
6503 goto L1;
6504 L0:
6505 BODY;
6506 V += STEP;
6507 L1:
6508 if (V cond N2) goto L0; else goto L2;
6509 L2:
6510
6511 For collapsed loops, given parameters:
6512 collapse(3)
6513 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
6514 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
6515 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
6516 BODY;
6517
6518 we generate pseudocode
6519
6520 if (cond3 is <)
6521 adj = STEP3 - 1;
6522 else
6523 adj = STEP3 + 1;
6524 count3 = (adj + N32 - N31) / STEP3;
6525 if (cond2 is <)
6526 adj = STEP2 - 1;
6527 else
6528 adj = STEP2 + 1;
6529 count2 = (adj + N22 - N21) / STEP2;
6530 if (cond1 is <)
6531 adj = STEP1 - 1;
6532 else
6533 adj = STEP1 + 1;
6534 count1 = (adj + N12 - N11) / STEP1;
6535 count = count1 * count2 * count3;
6536 V = 0;
6537 V1 = N11;
6538 V2 = N21;
6539 V3 = N31;
6540 goto L1;
6541 L0:
6542 BODY;
6543 V += 1;
6544 V3 += STEP3;
6545 V2 += (V3 cond3 N32) ? 0 : STEP2;
6546 V3 = (V3 cond3 N32) ? V3 : N31;
6547 V1 += (V2 cond2 N22) ? 0 : STEP1;
6548 V2 = (V2 cond2 N22) ? V2 : N21;
6549 L1:
6550 if (V < count) goto L0; else goto L2;
6551 L2:
6552
6553 */
6554
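/* Note that, unlike the generic collapse expansion, the inner indices
   in the pseudocode above are maintained with conditional updates
   rather than % and /, which keeps the loop body vectorizable.  With
   a hypothetical count3 == 2, V3 alternates N31, N31 + STEP3, N31, ...
   and V2 advances by STEP2 exactly on the steps where V3 wraps.  */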
6555static void
6556expand_omp_simd (struct omp_region *region, struct omp_for_data *fd)
6557{
6558 tree type, t;
6559 basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, l2_bb, l2_dom_bb;
6560 gimple_stmt_iterator gsi;
6561 gimple stmt;
6562 bool broken_loop = region->cont == NULL;
6563 edge e, ne;
6564 tree *counts = NULL;
6565 int i;
6566 tree safelen = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6567 OMP_CLAUSE_SAFELEN);
6568 tree simduid = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6569 OMP_CLAUSE__SIMDUID_);
acf0174b 6570 tree n1, n2;
74bf76ed
JJ
6571
6572 type = TREE_TYPE (fd->loop.v);
6573 entry_bb = region->entry;
6574 cont_bb = region->cont;
6575 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
6576 gcc_assert (broken_loop
6577 || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
6578 l0_bb = FALLTHRU_EDGE (entry_bb)->dest;
6579 if (!broken_loop)
6580 {
6581 gcc_assert (BRANCH_EDGE (cont_bb)->dest == l0_bb);
6582 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
6583 l1_bb = split_block (cont_bb, last_stmt (cont_bb))->dest;
6584 l2_bb = BRANCH_EDGE (entry_bb)->dest;
6585 }
6586 else
6587 {
6588 BRANCH_EDGE (entry_bb)->flags &= ~EDGE_ABNORMAL;
6589 l1_bb = split_edge (BRANCH_EDGE (entry_bb));
6590 l2_bb = single_succ (l1_bb);
6591 }
6592 exit_bb = region->exit;
6593 l2_dom_bb = NULL;
6594
6595 gsi = gsi_last_bb (entry_bb);
6596
6597 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
6598 /* Not needed in SSA form right now. */
6599 gcc_assert (!gimple_in_ssa_p (cfun));
6600 if (fd->collapse > 1)
6601 {
6602 int first_zero_iter = -1;
6603 basic_block zero_iter_bb = l2_bb;
6604
6605 counts = XALLOCAVEC (tree, fd->collapse);
6606 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
6607 zero_iter_bb, first_zero_iter,
6608 l2_dom_bb);
6609 }
6610 if (l2_dom_bb == NULL)
6611 l2_dom_bb = l1_bb;
6612
acf0174b 6613 n1 = fd->loop.n1;
74bf76ed 6614 n2 = fd->loop.n2;
acf0174b
JJ
6615 if (gimple_omp_for_combined_into_p (fd->for_stmt))
6616 {
6617 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6618 OMP_CLAUSE__LOOPTEMP_);
6619 gcc_assert (innerc);
6620 n1 = OMP_CLAUSE_DECL (innerc);
6621 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6622 OMP_CLAUSE__LOOPTEMP_);
6623 gcc_assert (innerc);
6624 n2 = OMP_CLAUSE_DECL (innerc);
6625 expand_omp_build_assign (&gsi, fd->loop.v,
6626 fold_convert (type, n1));
6627 if (fd->collapse > 1)
6628 {
6629 gsi_prev (&gsi);
6630 expand_omp_for_init_vars (fd, &gsi, counts, NULL, n1);
6631 gsi_next (&gsi);
6632 }
6633 }
74bf76ed
JJ
6634 else
6635 {
6636 expand_omp_build_assign (&gsi, fd->loop.v,
6637 fold_convert (type, fd->loop.n1));
6638 if (fd->collapse > 1)
6639 for (i = 0; i < fd->collapse; i++)
6640 {
6641 tree itype = TREE_TYPE (fd->loops[i].v);
6642 if (POINTER_TYPE_P (itype))
6643 itype = signed_type_for (itype);
6644 t = fold_convert (TREE_TYPE (fd->loops[i].v), fd->loops[i].n1);
6645 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
6646 }
6647 }
6648
6649 /* Remove the GIMPLE_OMP_FOR statement. */
6650 gsi_remove (&gsi, true);
6651
6652 if (!broken_loop)
6653 {
6654 /* Code to control the increment goes in the CONT_BB. */
6655 gsi = gsi_last_bb (cont_bb);
6656 stmt = gsi_stmt (gsi);
6657 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
6658
6659 if (POINTER_TYPE_P (type))
6660 t = fold_build_pointer_plus (fd->loop.v, fd->loop.step);
6661 else
6662 t = fold_build2 (PLUS_EXPR, type, fd->loop.v, fd->loop.step);
6663 expand_omp_build_assign (&gsi, fd->loop.v, t);
6664
6665 if (fd->collapse > 1)
6666 {
6667 i = fd->collapse - 1;
6668 if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i].v)))
6669 {
6670 t = fold_convert (sizetype, fd->loops[i].step);
6671 t = fold_build_pointer_plus (fd->loops[i].v, t);
6672 }
6673 else
6674 {
6675 t = fold_convert (TREE_TYPE (fd->loops[i].v),
6676 fd->loops[i].step);
6677 t = fold_build2 (PLUS_EXPR, TREE_TYPE (fd->loops[i].v),
6678 fd->loops[i].v, t);
6679 }
6680 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
6681
6682 for (i = fd->collapse - 1; i > 0; i--)
6683 {
6684 tree itype = TREE_TYPE (fd->loops[i].v);
6685 tree itype2 = TREE_TYPE (fd->loops[i - 1].v);
6686 if (POINTER_TYPE_P (itype2))
6687 itype2 = signed_type_for (itype2);
6688 t = build3 (COND_EXPR, itype2,
6689 build2 (fd->loops[i].cond_code, boolean_type_node,
6690 fd->loops[i].v,
6691 fold_convert (itype, fd->loops[i].n2)),
6692 build_int_cst (itype2, 0),
6693 fold_convert (itype2, fd->loops[i - 1].step));
6694 if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i - 1].v)))
6695 t = fold_build_pointer_plus (fd->loops[i - 1].v, t);
6696 else
6697 t = fold_build2 (PLUS_EXPR, itype2, fd->loops[i - 1].v, t);
6698 expand_omp_build_assign (&gsi, fd->loops[i - 1].v, t);
6699
6700 t = build3 (COND_EXPR, itype,
6701 build2 (fd->loops[i].cond_code, boolean_type_node,
6702 fd->loops[i].v,
6703 fold_convert (itype, fd->loops[i].n2)),
6704 fd->loops[i].v,
6705 fold_convert (itype, fd->loops[i].n1));
6706 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
6707 }
6708 }
6709
6710 /* Remove GIMPLE_OMP_CONTINUE. */
6711 gsi_remove (&gsi, true);
6712 }
6713
6714 /* Emit the condition in L1_BB. */
6715 gsi = gsi_start_bb (l1_bb);
6716
6717 t = fold_convert (type, n2);
6718 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
6719 false, GSI_CONTINUE_LINKING);
6720 t = build2 (fd->loop.cond_code, boolean_type_node, fd->loop.v, t);
6721 stmt = gimple_build_cond_empty (t);
6722 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
6723 if (walk_tree (gimple_cond_lhs_ptr (stmt), expand_omp_regimplify_p,
6724 NULL, NULL)
6725 || walk_tree (gimple_cond_rhs_ptr (stmt), expand_omp_regimplify_p,
6726 NULL, NULL))
6727 {
6728 gsi = gsi_for_stmt (stmt);
6729 gimple_regimplify_operands (stmt, &gsi);
6730 }
6731
6732 /* Remove GIMPLE_OMP_RETURN. */
6733 gsi = gsi_last_bb (exit_bb);
6734 gsi_remove (&gsi, true);
6735
6736 /* Connect the new blocks. */
6737 remove_edge (FALLTHRU_EDGE (entry_bb));
6738
6739 if (!broken_loop)
6740 {
6741 remove_edge (BRANCH_EDGE (entry_bb));
6742 make_edge (entry_bb, l1_bb, EDGE_FALLTHRU);
6743
6744 e = BRANCH_EDGE (l1_bb);
6745 ne = FALLTHRU_EDGE (l1_bb);
6746 e->flags = EDGE_TRUE_VALUE;
6747 }
6748 else
6749 {
6750 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
6751
6752 ne = single_succ_edge (l1_bb);
6753 e = make_edge (l1_bb, l0_bb, EDGE_TRUE_VALUE);
6754
6755 }
6756 ne->flags = EDGE_FALSE_VALUE;
6757 e->probability = REG_BR_PROB_BASE * 7 / 8;
6758 ne->probability = REG_BR_PROB_BASE / 8;
6759
6760 set_immediate_dominator (CDI_DOMINATORS, l1_bb, entry_bb);
6761 set_immediate_dominator (CDI_DOMINATORS, l2_bb, l2_dom_bb);
6762 set_immediate_dominator (CDI_DOMINATORS, l0_bb, l1_bb);
6763
6764 if (!broken_loop)
6765 {
6766 struct loop *loop = alloc_loop ();
6767 loop->header = l1_bb;
6768 loop->latch = e->dest;
6769 add_loop (loop, l1_bb->loop_father);
6770 if (safelen == NULL_TREE)
6771 loop->safelen = INT_MAX;
6772 else
6773 {
6774 safelen = OMP_CLAUSE_SAFELEN_EXPR (safelen);
cc269bb6 6775 if (!tree_fits_uhwi_p (safelen)
7d362f6c 6776 || tree_to_uhwi (safelen) > INT_MAX)
74bf76ed
JJ
6777 loop->safelen = INT_MAX;
6778 else
ae7e9ddd 6779 loop->safelen = tree_to_uhwi (safelen);
74bf76ed
JJ
6780 if (loop->safelen == 1)
6781 loop->safelen = 0;
6782 }
6783 if (simduid)
6784 {
6785 loop->simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6786 cfun->has_simduid_loops = true;
6787 }
ea0f3e87 6788 /* Unless -fno-tree-loop-vectorize was given, hint that we want to
74bf76ed 6789 vectorize the loop. */
ea0f3e87
XDL
6790 if ((flag_tree_loop_vectorize
6791 || (!global_options_set.x_flag_tree_loop_vectorize
6792 && !global_options_set.x_flag_tree_vectorize))
74bf76ed
JJ
6793 && loop->safelen > 1)
6794 {
6795 loop->force_vect = true;
6796 cfun->has_force_vect_loops = true;
6797 }
6798 }
6799}
6800
953ff289 6801
50674e96 6802/* Expand the OpenMP loop defined by REGION. */
953ff289 6803
50674e96 6804static void
acf0174b 6805expand_omp_for (struct omp_region *region, gimple inner_stmt)
50674e96
DN
6806{
6807 struct omp_for_data fd;
a68ab351 6808 struct omp_for_data_loop *loops;
953ff289 6809
a68ab351
JJ
6810 loops
6811 = (struct omp_for_data_loop *)
726a989a 6812 alloca (gimple_omp_for_collapse (last_stmt (region->entry))
a68ab351 6813 * sizeof (struct omp_for_data_loop));
a68ab351 6814 extract_omp_for_data (last_stmt (region->entry), &fd, loops);
21a66e91 6815 region->sched_kind = fd.sched_kind;
953ff289 6816
135a171d
JJ
6817 gcc_assert (EDGE_COUNT (region->entry->succs) == 2);
6818 BRANCH_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
6819 FALLTHRU_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
6820 if (region->cont)
6821 {
6822 gcc_assert (EDGE_COUNT (region->cont->succs) == 2);
6823 BRANCH_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
6824 FALLTHRU_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
6825 }
6093bc06 6826 else
1aa95df7 6827 /* If there isn't a continue, then this is a degenerate case where
6093bc06
RB
6828 the introduction of abnormal edges during lowering will prevent
6829 original loops from being detected. Fix that up. */
6830 loops_state_set (LOOPS_NEED_FIXUP);
135a171d 6831
c02065fc 6832 if (gimple_omp_for_kind (fd.for_stmt) & GF_OMP_FOR_KIND_SIMD)
74bf76ed
JJ
6833 expand_omp_simd (region, &fd);
6834 else if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
acf0174b 6835 && !fd.have_ordered)
953ff289
DN
6836 {
6837 if (fd.chunk_size == NULL)
acf0174b 6838 expand_omp_for_static_nochunk (region, &fd, inner_stmt);
953ff289 6839 else
acf0174b 6840 expand_omp_for_static_chunk (region, &fd, inner_stmt);
953ff289
DN
6841 }
6842 else
6843 {
a68ab351
JJ
6844 int fn_index, start_ix, next_ix;
6845
74bf76ed
JJ
6846 gcc_assert (gimple_omp_for_kind (fd.for_stmt)
6847 == GF_OMP_FOR_KIND_FOR);
9abd5ed9
JJ
6848 if (fd.chunk_size == NULL
6849 && fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
6850 fd.chunk_size = integer_zero_node;
a68ab351
JJ
6851 gcc_assert (fd.sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
6852 fn_index = (fd.sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
726a989a 6853 ? 3 : fd.sched_kind;
a68ab351 6854 fn_index += fd.have_ordered * 4;
e79983f4
MM
6855 start_ix = ((int)BUILT_IN_GOMP_LOOP_STATIC_START) + fn_index;
6856 next_ix = ((int)BUILT_IN_GOMP_LOOP_STATIC_NEXT) + fn_index;
a68ab351
JJ
6857 if (fd.iter_type == long_long_unsigned_type_node)
6858 {
e79983f4
MM
6859 start_ix += ((int)BUILT_IN_GOMP_LOOP_ULL_STATIC_START
6860 - (int)BUILT_IN_GOMP_LOOP_STATIC_START);
6861 next_ix += ((int)BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT
6862 - (int)BUILT_IN_GOMP_LOOP_STATIC_NEXT);
a68ab351 6863 }
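 /* For instance (illustrative; the ordered variants follow the unordered
    ones at a stride of 4, as the "* 4" above implies): schedule(dynamic)
    gives fn_index = 1, and with an ordered clause fn_index = 1 + 4 = 5,
    selecting BUILT_IN_GOMP_LOOP_ORDERED_DYNAMIC_START and _NEXT.  */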
bbbbb16a 6864 expand_omp_for_generic (region, &fd, (enum built_in_function) start_ix,
acf0174b 6865 (enum built_in_function) next_ix, inner_stmt);
953ff289 6866 }
5f40b3cb 6867
a5efada7
RG
6868 if (gimple_in_ssa_p (cfun))
6869 update_ssa (TODO_update_ssa_only_virtuals);
953ff289
DN
6870}
6871
953ff289
DN
6872
6873/* Expand code for an OpenMP sections directive. In pseudo code, we generate
6874
953ff289
DN
6875 v = GOMP_sections_start (n);
6876 L0:
6877 switch (v)
6878 {
6879 case 0:
6880 goto L2;
6881 case 1:
6882 section 1;
6883 goto L1;
6884 case 2:
6885 ...
6886 case n:
6887 ...
953ff289
DN
6888 default:
6889 abort ();
6890 }
6891 L1:
6892 v = GOMP_sections_next ();
6893 goto L0;
6894 L2:
6895 reduction;
6896
50674e96 6897 If this is a combined parallel sections region, replace the call to
917948d3 6898 GOMP_sections_start with a call to GOMP_sections_next. */
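/* Illustrative input (not part of the original sources):

     #pragma omp sections
     {
     #pragma omp section
       foo ();
     #pragma omp section
       bar ();
     }

   yields v = GOMP_sections_start (2) and a switch in which case 1 runs
   foo (), case 2 runs bar (), and case 0 exits the loop.  */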
953ff289
DN
6899
6900static void
50674e96 6901expand_omp_sections (struct omp_region *region)
953ff289 6902{
0f900dfa 6903 tree t, u, vin = NULL, vmain, vnext, l2;
726a989a 6904 unsigned len;
e5c95afe 6905 basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
726a989a
RB
6906 gimple_stmt_iterator si, switch_si;
6907 gimple sections_stmt, stmt, cont;
c34938a8
JJ
6908 edge_iterator ei;
6909 edge e;
777f7f9a 6910 struct omp_region *inner;
726a989a 6911 unsigned i, casei;
e5c95afe 6912 bool exit_reachable = region->cont != NULL;
953ff289 6913
65e7bfe3 6914 gcc_assert (region->exit != NULL);
777f7f9a 6915 entry_bb = region->entry;
e5c95afe 6916 l0_bb = single_succ (entry_bb);
777f7f9a 6917 l1_bb = region->cont;
e5c95afe 6918 l2_bb = region->exit;
65e7bfe3
JJ
6919 if (single_pred_p (l2_bb) && single_pred (l2_bb) == l0_bb)
6920 l2 = gimple_block_label (l2_bb);
6921 else
d3c673c7 6922 {
65e7bfe3
JJ
6923 /* This can happen if there are reductions. */
6924 len = EDGE_COUNT (l0_bb->succs);
6925 gcc_assert (len > 0);
6926 e = EDGE_SUCC (l0_bb, len - 1);
6927 si = gsi_last_bb (e->dest);
6928 l2 = NULL_TREE;
6929 if (gsi_end_p (si)
6930 || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
6931 l2 = gimple_block_label (e->dest);
c34938a8 6932 else
65e7bfe3
JJ
6933 FOR_EACH_EDGE (e, ei, l0_bb->succs)
6934 {
6935 si = gsi_last_bb (e->dest);
6936 if (gsi_end_p (si)
6937 || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
c34938a8 6938 {
65e7bfe3
JJ
6939 l2 = gimple_block_label (e->dest);
6940 break;
c34938a8 6941 }
65e7bfe3 6942 }
d3c673c7 6943 }
65e7bfe3
JJ
6944 if (exit_reachable)
6945 default_bb = create_empty_bb (l1_bb->prev_bb);
d3c673c7 6946 else
65e7bfe3 6947 default_bb = create_empty_bb (l0_bb);
50674e96
DN
6948
6949 /* We will build a switch() with enough cases for all the
726a989a 6950 GIMPLE_OMP_SECTION regions, a '0' case to handle when there is no more
50674e96 6951 work, and a default case to abort if something goes wrong. */
e5c95afe 6952 len = EDGE_COUNT (l0_bb->succs);
726a989a 6953
9771b263 6954 /* Use vec::quick_push on label_vec throughout, since we know the size
726a989a 6955 in advance. */
ef062b13 6956 auto_vec<tree> label_vec (len);
953ff289 6957
777f7f9a 6958 /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
726a989a
RB
6959 GIMPLE_OMP_SECTIONS statement. */
6960 si = gsi_last_bb (entry_bb);
6961 sections_stmt = gsi_stmt (si);
6962 gcc_assert (gimple_code (sections_stmt) == GIMPLE_OMP_SECTIONS);
6963 vin = gimple_omp_sections_control (sections_stmt);
50674e96 6964 if (!is_combined_parallel (region))
953ff289 6965 {
50674e96
DN
6966 /* If we are not inside a combined parallel+sections region,
6967 call GOMP_sections_start. */
4befd127 6968 t = build_int_cst (unsigned_type_node, len - 1);
e79983f4 6969 u = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_START);
726a989a 6970 stmt = gimple_build_call (u, 1, t);
953ff289 6971 }
917948d3
ZD
6972 else
6973 {
6974 /* Otherwise, call GOMP_sections_next. */
e79983f4 6975 u = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_NEXT);
726a989a 6976 stmt = gimple_build_call (u, 0);
917948d3 6977 }
726a989a
RB
6978 gimple_call_set_lhs (stmt, vin);
6979 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
6980 gsi_remove (&si, true);
6981
6982 /* The switch() statement replacing GIMPLE_OMP_SECTIONS_SWITCH goes in
6983 L0_BB. */
6984 switch_si = gsi_last_bb (l0_bb);
6985 gcc_assert (gimple_code (gsi_stmt (switch_si)) == GIMPLE_OMP_SECTIONS_SWITCH);
917948d3
ZD
6986 if (exit_reachable)
6987 {
6988 cont = last_stmt (l1_bb);
726a989a
RB
6989 gcc_assert (gimple_code (cont) == GIMPLE_OMP_CONTINUE);
6990 vmain = gimple_omp_continue_control_use (cont);
6991 vnext = gimple_omp_continue_control_def (cont);
917948d3
ZD
6992 }
6993 else
6994 {
6995 vmain = vin;
6996 vnext = NULL_TREE;
6997 }
953ff289 6998
65e7bfe3 6999 t = build_case_label (build_int_cst (unsigned_type_node, 0), NULL, l2);
9771b263 7000 label_vec.quick_push (t);
65e7bfe3 7001 i = 1;
d3c673c7 7002
726a989a 7003 /* Convert each GIMPLE_OMP_SECTION into a CASE_LABEL_EXPR. */
e5c95afe
ZD
7004 for (inner = region->inner, casei = 1;
7005 inner;
7006 inner = inner->next, i++, casei++)
953ff289 7007 {
50674e96
DN
7008 basic_block s_entry_bb, s_exit_bb;
7009
c34938a8 7010 /* Skip optional reduction region. */
726a989a 7011 if (inner->type == GIMPLE_OMP_ATOMIC_LOAD)
c34938a8
JJ
7012 {
7013 --i;
7014 --casei;
7015 continue;
7016 }
7017
777f7f9a
RH
7018 s_entry_bb = inner->entry;
7019 s_exit_bb = inner->exit;
953ff289 7020
726a989a 7021 t = gimple_block_label (s_entry_bb);
e5c95afe 7022 u = build_int_cst (unsigned_type_node, casei);
3d528853 7023 u = build_case_label (u, NULL, t);
9771b263 7024 label_vec.quick_push (u);
777f7f9a 7025
726a989a
RB
7026 si = gsi_last_bb (s_entry_bb);
7027 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SECTION);
7028 gcc_assert (i < len || gimple_omp_section_last_p (gsi_stmt (si)));
7029 gsi_remove (&si, true);
777f7f9a 7030 single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;
d3c673c7
JJ
7031
7032 if (s_exit_bb == NULL)
7033 continue;
7034
726a989a
RB
7035 si = gsi_last_bb (s_exit_bb);
7036 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
7037 gsi_remove (&si, true);
d3c673c7 7038
50674e96 7039 single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
953ff289
DN
7040 }
7041
50674e96 7042 /* Error handling code goes in DEFAULT_BB. */
726a989a 7043 t = gimple_block_label (default_bb);
3d528853 7044 u = build_case_label (NULL, NULL, t);
777f7f9a 7045 make_edge (l0_bb, default_bb, 0);
a9e0d843 7046 if (current_loops)
6093bc06 7047 add_bb_to_loop (default_bb, current_loops->tree_root);
953ff289 7048
fd8d363e 7049 stmt = gimple_build_switch (vmain, u, label_vec);
726a989a
RB
7050 gsi_insert_after (&switch_si, stmt, GSI_SAME_STMT);
7051 gsi_remove (&switch_si, true);
726a989a
RB
7052
7053 si = gsi_start_bb (default_bb);
e79983f4 7054 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
726a989a 7055 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
50674e96 7056
e5c95afe 7057 if (exit_reachable)
d3c673c7 7058 {
e79983f4
MM
7059 tree bfn_decl;
7060
e5c95afe 7061 /* Code to get the next section goes in L1_BB. */
726a989a
RB
7062 si = gsi_last_bb (l1_bb);
7063 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CONTINUE);
953ff289 7064
e79983f4
MM
7065 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_NEXT);
7066 stmt = gimple_build_call (bfn_decl, 0);
726a989a
RB
7067 gimple_call_set_lhs (stmt, vnext);
7068 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
7069 gsi_remove (&si, true);
50674e96 7070
e5c95afe 7071 single_succ_edge (l1_bb)->flags = EDGE_FALLTHRU;
d3c673c7 7072 }
50674e96 7073
65e7bfe3
JJ
7074 /* Cleanup function replaces GIMPLE_OMP_RETURN in EXIT_BB. */
7075 si = gsi_last_bb (l2_bb);
7076 if (gimple_omp_return_nowait_p (gsi_stmt (si)))
7077 t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END_NOWAIT);
acf0174b
JJ
7078 else if (gimple_omp_return_lhs (gsi_stmt (si)))
7079 t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END_CANCEL);
65e7bfe3
JJ
7080 else
7081 t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END);
7082 stmt = gimple_build_call (t, 0);
acf0174b
JJ
7083 if (gimple_omp_return_lhs (gsi_stmt (si)))
7084 gimple_call_set_lhs (stmt, gimple_omp_return_lhs (gsi_stmt (si)));
65e7bfe3
JJ
7085 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
7086 gsi_remove (&si, true);
7087
917948d3 7088 set_immediate_dominator (CDI_DOMINATORS, default_bb, l0_bb);
50674e96 7089}
953ff289 7090
953ff289 7091
777f7f9a
RH
7092/* Expand code for an OpenMP single directive. We've already expanded
7093 much of the code; here we simply place the GOMP_barrier call. */
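/* Illustrative example (not part of the original sources):

     #pragma omp single
       foo ();

   lets exactly one thread run foo (); unless a nowait clause was given,
   the barrier placed below makes the remaining threads wait for it.  */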
7094
7095static void
7096expand_omp_single (struct omp_region *region)
7097{
7098 basic_block entry_bb, exit_bb;
726a989a 7099 gimple_stmt_iterator si;
777f7f9a
RH
7100
7101 entry_bb = region->entry;
7102 exit_bb = region->exit;
7103
726a989a 7104 si = gsi_last_bb (entry_bb);
726a989a
RB
7105 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE);
7106 gsi_remove (&si, true);
777f7f9a
RH
7107 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7108
726a989a 7109 si = gsi_last_bb (exit_bb);
acf0174b
JJ
7110 if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
7111 {
7112 tree t = gimple_omp_return_lhs (gsi_stmt (si));
7113 gsi_insert_after (&si, build_omp_barrier (t), GSI_SAME_STMT);
7114 }
726a989a 7115 gsi_remove (&si, true);
777f7f9a
RH
7116 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
7117}
7118
7119
7120/* Generic expansion for OpenMP synchronization directives: master,
7121 taskgroup, ordered, critical and teams. All we need to do here is
7122 remove the entry and exit markers for REGION. */
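/* Illustrative example (not part of the original sources):

     #pragma omp critical
       counter++;

   reaches this point with the body already bracketed by the appropriate
   runtime calls emitted during lowering, so only the region markers
   remain to be removed.  */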
50674e96
DN
7123
7124static void
7125expand_omp_synch (struct omp_region *region)
7126{
7127 basic_block entry_bb, exit_bb;
726a989a 7128 gimple_stmt_iterator si;
50674e96 7129
777f7f9a
RH
7130 entry_bb = region->entry;
7131 exit_bb = region->exit;
50674e96 7132
726a989a
RB
7133 si = gsi_last_bb (entry_bb);
7134 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE
7135 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_MASTER
acf0174b 7136 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_TASKGROUP
726a989a 7137 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ORDERED
acf0174b
JJ
7138 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CRITICAL
7139 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_TEAMS);
726a989a 7140 gsi_remove (&si, true);
50674e96
DN
7141 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7142
d3c673c7
JJ
7143 if (exit_bb)
7144 {
726a989a
RB
7145 si = gsi_last_bb (exit_bb);
7146 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
7147 gsi_remove (&si, true);
d3c673c7
JJ
7148 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
7149 }
50674e96 7150}
953ff289 7151
20906c66
JJ
7152/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
7153 operation as a normal volatile load. */
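/* Illustrative mapping (not part of the original sources): for a
   32-bit scalar,

     #pragma omp atomic read
     v = x;

   can be emitted as v = __atomic_load_4 (&x, MEMMODEL_RELAXED), or with
   MEMMODEL_SEQ_CST when the seq_cst clause is present; the builtin is
   picked from BUILT_IN_ATOMIC_LOAD_N using INDEX as below.  */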
7154
7155static bool
05409788
RH
7156expand_omp_atomic_load (basic_block load_bb, tree addr,
7157 tree loaded_val, int index)
20906c66 7158{
05409788
RH
7159 enum built_in_function tmpbase;
7160 gimple_stmt_iterator gsi;
7161 basic_block store_bb;
7162 location_t loc;
7163 gimple stmt;
7164 tree decl, call, type, itype;
7165
7166 gsi = gsi_last_bb (load_bb);
7167 stmt = gsi_stmt (gsi);
7168 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD);
7169 loc = gimple_location (stmt);
7170
7171 /* ??? If the target does not implement atomic_load_optab[mode], and mode
7172 is smaller than word size, then expand_atomic_load assumes that the load
7173 is atomic. We could avoid the builtin entirely in this case. */
7174
7175 tmpbase = (enum built_in_function) (BUILT_IN_ATOMIC_LOAD_N + index + 1);
7176 decl = builtin_decl_explicit (tmpbase);
7177 if (decl == NULL_TREE)
7178 return false;
7179
7180 type = TREE_TYPE (loaded_val);
7181 itype = TREE_TYPE (TREE_TYPE (decl));
7182
7183 call = build_call_expr_loc (loc, decl, 2, addr,
acf0174b
JJ
7184 build_int_cst (NULL,
7185 gimple_omp_atomic_seq_cst_p (stmt)
7186 ? MEMMODEL_SEQ_CST
7187 : MEMMODEL_RELAXED));
05409788
RH
7188 if (!useless_type_conversion_p (type, itype))
7189 call = fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, call);
7190 call = build2_loc (loc, MODIFY_EXPR, void_type_node, loaded_val, call);
7191
7192 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7193 gsi_remove (&gsi, true);
7194
7195 store_bb = single_succ (load_bb);
7196 gsi = gsi_last_bb (store_bb);
7197 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
7198 gsi_remove (&gsi, true);
7199
7200 if (gimple_in_ssa_p (cfun))
7201 update_ssa (TODO_update_ssa_no_phi);
7202
7203 return true;
20906c66
JJ
7204}
7205
7206/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
7207 operation as a normal volatile store. */
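/* Illustrative mapping (not part of the original sources):

     #pragma omp atomic write
     x = expr;

   becomes __atomic_store_N (&x, expr, ...), while a capture such as
   { v = x; x = expr; } needs the old value and becomes
   v = __atomic_exchange_N (&x, expr, ...), the "exchange" case tested
   below.  */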
7208
7209static bool
05409788
RH
7210expand_omp_atomic_store (basic_block load_bb, tree addr,
7211 tree loaded_val, tree stored_val, int index)
20906c66 7212{
05409788
RH
7213 enum built_in_function tmpbase;
7214 gimple_stmt_iterator gsi;
7215 basic_block store_bb = single_succ (load_bb);
7216 location_t loc;
7217 gimple stmt;
7218 tree decl, call, type, itype;
7219 enum machine_mode imode;
7220 bool exchange;
7221
7222 gsi = gsi_last_bb (load_bb);
7223 stmt = gsi_stmt (gsi);
7224 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD);
7225
7226 /* If the load value is needed, then this isn't a store but an exchange. */
7227 exchange = gimple_omp_atomic_need_value_p (stmt);
7228
7229 gsi = gsi_last_bb (store_bb);
7230 stmt = gsi_stmt (gsi);
7231 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE);
7232 loc = gimple_location (stmt);
7233
7234 /* ??? If the target does not implement atomic_store_optab[mode], and mode
7235 is smaller than word size, then expand_atomic_store assumes that the store
7236 is atomic. We could avoid the builtin entirely in this case. */
7237
7238 tmpbase = (exchange ? BUILT_IN_ATOMIC_EXCHANGE_N : BUILT_IN_ATOMIC_STORE_N);
7239 tmpbase = (enum built_in_function) ((int) tmpbase + index + 1);
7240 decl = builtin_decl_explicit (tmpbase);
7241 if (decl == NULL_TREE)
7242 return false;
7243
7244 type = TREE_TYPE (stored_val);
7245
7246 /* Dig out the type of the function's second argument. */
7247 itype = TREE_TYPE (decl);
7248 itype = TYPE_ARG_TYPES (itype);
7249 itype = TREE_CHAIN (itype);
7250 itype = TREE_VALUE (itype);
7251 imode = TYPE_MODE (itype);
7252
7253 if (exchange && !can_atomic_exchange_p (imode, true))
7254 return false;
7255
7256 if (!useless_type_conversion_p (itype, type))
7257 stored_val = fold_build1_loc (loc, VIEW_CONVERT_EXPR, itype, stored_val);
7258 call = build_call_expr_loc (loc, decl, 3, addr, stored_val,
acf0174b
JJ
7259 build_int_cst (NULL,
7260 gimple_omp_atomic_seq_cst_p (stmt)
7261 ? MEMMODEL_SEQ_CST
7262 : MEMMODEL_RELAXED));
05409788
RH
7263 if (exchange)
7264 {
7265 if (!useless_type_conversion_p (type, itype))
7266 call = build1_loc (loc, VIEW_CONVERT_EXPR, type, call);
7267 call = build2_loc (loc, MODIFY_EXPR, void_type_node, loaded_val, call);
7268 }
7269
7270 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7271 gsi_remove (&gsi, true);
7272
7273 /* Remove the GIMPLE_OMP_ATOMIC_LOAD that we verified above. */
7274 gsi = gsi_last_bb (load_bb);
7275 gsi_remove (&gsi, true);
7276
7277 if (gimple_in_ssa_p (cfun))
7278 update_ssa (TODO_update_ssa_no_phi);
7279
7280 return true;
20906c66
JJ
7281}
7282
a509ebb5 7283/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
86951993 7284 operation as a __atomic_fetch_op builtin. INDEX is log2 of the
a509ebb5
RL
7285 size of the data type, and thus usable to find the index of the builtin
7286 decl. Returns false if the expression is not of the proper form. */
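/* Illustrative mapping (not part of the original sources): for a
   32-bit int,

     #pragma omp atomic
     x += n;

   matches the PLUS_EXPR case below and can be emitted as
   __atomic_fetch_add_4 (&x, n, MEMMODEL_RELAXED) with the result
   discarded; capture forms needing the old value use the FETCH_ADD
   variant, those needing the new value use ADD_FETCH.  */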
7287
7288static bool
7289expand_omp_atomic_fetch_op (basic_block load_bb,
7290 tree addr, tree loaded_val,
7291 tree stored_val, int index)
7292{
e79983f4 7293 enum built_in_function oldbase, newbase, tmpbase;
a509ebb5 7294 tree decl, itype, call;
20906c66 7295 tree lhs, rhs;
a509ebb5 7296 basic_block store_bb = single_succ (load_bb);
726a989a
RB
7297 gimple_stmt_iterator gsi;
7298 gimple stmt;
db3927fb 7299 location_t loc;
86951993 7300 enum tree_code code;
20906c66 7301 bool need_old, need_new;
86951993 7302 enum machine_mode imode;
acf0174b 7303 bool seq_cst;
a509ebb5
RL
7304
7305 /* We expect to find the following sequences:
b8698a0f 7306
a509ebb5 7307 load_bb:
726a989a 7308 GIMPLE_OMP_ATOMIC_LOAD (tmp, mem)
a509ebb5
RL
7309
7310 store_bb:
7311 val = tmp OP something; (or: something OP tmp)
b8698a0f 7312 GIMPLE_OMP_ATOMIC_STORE (val)
a509ebb5 7313
b8698a0f 7314 ???FIXME: Allow a more flexible sequence.
a509ebb5 7315 Perhaps use data flow to pick the statements.
b8698a0f 7316
a509ebb5
RL
7317 */
7318
726a989a
RB
7319 gsi = gsi_after_labels (store_bb);
7320 stmt = gsi_stmt (gsi);
db3927fb 7321 loc = gimple_location (stmt);
726a989a 7322 if (!is_gimple_assign (stmt))
a509ebb5 7323 return false;
726a989a
RB
7324 gsi_next (&gsi);
7325 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_OMP_ATOMIC_STORE)
a509ebb5 7326 return false;
20906c66
JJ
7327 need_new = gimple_omp_atomic_need_value_p (gsi_stmt (gsi));
7328 need_old = gimple_omp_atomic_need_value_p (last_stmt (load_bb));
acf0174b 7329 seq_cst = gimple_omp_atomic_seq_cst_p (last_stmt (load_bb));
20906c66 7330 gcc_checking_assert (!need_old || !need_new);
a509ebb5 7331
726a989a 7332 if (!operand_equal_p (gimple_assign_lhs (stmt), stored_val, 0))
a509ebb5
RL
7333 return false;
7334
a509ebb5 7335 /* Check for one of the supported fetch-op operations. */
86951993
AM
7336 code = gimple_assign_rhs_code (stmt);
7337 switch (code)
a509ebb5
RL
7338 {
7339 case PLUS_EXPR:
7340 case POINTER_PLUS_EXPR:
86951993
AM
7341 oldbase = BUILT_IN_ATOMIC_FETCH_ADD_N;
7342 newbase = BUILT_IN_ATOMIC_ADD_FETCH_N;
a509ebb5
RL
7343 break;
7344 case MINUS_EXPR:
86951993
AM
7345 oldbase = BUILT_IN_ATOMIC_FETCH_SUB_N;
7346 newbase = BUILT_IN_ATOMIC_SUB_FETCH_N;
a509ebb5
RL
7347 break;
7348 case BIT_AND_EXPR:
86951993
AM
7349 oldbase = BUILT_IN_ATOMIC_FETCH_AND_N;
7350 newbase = BUILT_IN_ATOMIC_AND_FETCH_N;
a509ebb5
RL
7351 break;
7352 case BIT_IOR_EXPR:
86951993
AM
7353 oldbase = BUILT_IN_ATOMIC_FETCH_OR_N;
7354 newbase = BUILT_IN_ATOMIC_OR_FETCH_N;
a509ebb5
RL
7355 break;
7356 case BIT_XOR_EXPR:
86951993
AM
7357 oldbase = BUILT_IN_ATOMIC_FETCH_XOR_N;
7358 newbase = BUILT_IN_ATOMIC_XOR_FETCH_N;
a509ebb5
RL
7359 break;
7360 default:
7361 return false;
7362 }
86951993 7363
a509ebb5 7364 /* Make sure the expression is of the proper form. */
726a989a
RB
7365 if (operand_equal_p (gimple_assign_rhs1 (stmt), loaded_val, 0))
7366 rhs = gimple_assign_rhs2 (stmt);
7367 else if (commutative_tree_code (gimple_assign_rhs_code (stmt))
7368 && operand_equal_p (gimple_assign_rhs2 (stmt), loaded_val, 0))
7369 rhs = gimple_assign_rhs1 (stmt);
a509ebb5
RL
7370 else
7371 return false;
7372
e79983f4
MM
7373 tmpbase = ((enum built_in_function)
7374 ((need_new ? newbase : oldbase) + index + 1));
7375 decl = builtin_decl_explicit (tmpbase);
20790697
JJ
7376 if (decl == NULL_TREE)
7377 return false;
a509ebb5 7378 itype = TREE_TYPE (TREE_TYPE (decl));
86951993 7379 imode = TYPE_MODE (itype);
a509ebb5 7380
86951993
AM
7381 /* We could test all of the various optabs involved, but the fact of the
7382 matter is that (with the exception of i486 vs i586 and xadd) all targets
7383 that support any atomic operation optab also implement compare-and-swap.
7384 Let optabs.c take care of expanding any compare-and-swap loop. */
cedb4a1a 7385 if (!can_compare_and_swap_p (imode, true))
a509ebb5
RL
7386 return false;
7387
726a989a
RB
7388 gsi = gsi_last_bb (load_bb);
7389 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_LOAD);
86951993
AM
7390
7391 /* OpenMP does not imply any barrier-like semantics on its atomic ops.
7392 It only requires that the operation happen atomically. Thus we can
7393 use the RELAXED memory model. */
7394 call = build_call_expr_loc (loc, decl, 3, addr,
7395 fold_convert_loc (loc, itype, rhs),
acf0174b
JJ
7396 build_int_cst (NULL,
7397 seq_cst ? MEMMODEL_SEQ_CST
7398 : MEMMODEL_RELAXED));
86951993 7399
20906c66
JJ
7400 if (need_old || need_new)
7401 {
7402 lhs = need_old ? loaded_val : stored_val;
7403 call = fold_convert_loc (loc, TREE_TYPE (lhs), call);
7404 call = build2_loc (loc, MODIFY_EXPR, void_type_node, lhs, call);
7405 }
7406 else
7407 call = fold_convert_loc (loc, void_type_node, call);
726a989a
RB
7408 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7409 gsi_remove (&gsi, true);
a509ebb5 7410
726a989a
RB
7411 gsi = gsi_last_bb (store_bb);
7412 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
7413 gsi_remove (&gsi, true);
7414 gsi = gsi_last_bb (store_bb);
7415 gsi_remove (&gsi, true);
a509ebb5
RL
7416
7417 if (gimple_in_ssa_p (cfun))
7418 update_ssa (TODO_update_ssa_no_phi);
7419
7420 return true;
7421}
7422
7423/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
7424
7425 oldval = *addr;
7426 repeat:
7427 newval = rhs; // with oldval replacing *addr in rhs
7428 actual = __sync_val_compare_and_swap (addr, oldval, newval);
7429 if (actual != oldval)
7430 { oldval = actual; goto repeat; }
7431
7432 INDEX is log2 of the size of the data type, and thus usable to find the
7433 index of the builtin decl. */
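/* Illustrative sketch (not part of the original sources) of the loop
   above for a float, where the value is view-converted so that the
   compare and swap is done on a same-sized integer:

     unsigned int expected, prev = *(unsigned int *) addr;
     do
       {
         float tmp;
         expected = prev;
         memcpy (&tmp, &expected, sizeof tmp);  // VIEW_CONVERT_EXPR
         tmp += rhs;
         unsigned int newval;
         memcpy (&newval, &tmp, sizeof newval);
         prev = __sync_val_compare_and_swap ((unsigned int *) addr,
                                             expected, newval);
       }
     while (prev != expected);

   Comparing as integers is what makes the loop terminate even for NaNs
   and -0.0, as noted further below.  */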
7434
7435static bool
7436expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
7437 tree addr, tree loaded_val, tree stored_val,
7438 int index)
7439{
c18c98c0 7440 tree loadedi, storedi, initial, new_storedi, old_vali;
a509ebb5 7441 tree type, itype, cmpxchg, iaddr;
726a989a 7442 gimple_stmt_iterator si;
a509ebb5 7443 basic_block loop_header = single_succ (load_bb);
726a989a 7444 gimple phi, stmt;
a509ebb5 7445 edge e;
e79983f4 7446 enum built_in_function fncode;
a509ebb5 7447
86951993
AM
7448 /* ??? We need a non-pointer interface to __atomic_compare_exchange in
7449 order to use the RELAXED memory model effectively. */
e79983f4
MM
7450 fncode = (enum built_in_function)((int)BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_N
7451 + index + 1);
7452 cmpxchg = builtin_decl_explicit (fncode);
20790697
JJ
7453 if (cmpxchg == NULL_TREE)
7454 return false;
a509ebb5
RL
7455 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7456 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
7457
cedb4a1a 7458 if (!can_compare_and_swap_p (TYPE_MODE (itype), true))
a509ebb5
RL
7459 return false;
7460
726a989a
RB
7461 /* Load the initial value, replacing the GIMPLE_OMP_ATOMIC_LOAD. */
7462 si = gsi_last_bb (load_bb);
7463 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
7464
c18c98c0
JJ
7465 /* For floating-point values, we'll need to view-convert them to integers
7466 so that we can perform the atomic compare and swap. Simplify the
7467 following code by always setting up the "i"ntegral variables. */
7468 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
7469 {
726a989a
RB
7470 tree iaddr_val;
7471
7cc434a3
RG
7472 iaddr = create_tmp_reg (build_pointer_type_for_mode (itype, ptr_mode,
7473 true), NULL);
726a989a
RB
7474 iaddr_val
7475 = force_gimple_operand_gsi (&si,
7476 fold_convert (TREE_TYPE (iaddr), addr),
7477 false, NULL_TREE, true, GSI_SAME_STMT);
7478 stmt = gimple_build_assign (iaddr, iaddr_val);
7479 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
c18c98c0
JJ
7480 loadedi = create_tmp_var (itype, NULL);
7481 if (gimple_in_ssa_p (cfun))
46eb666a 7482 loadedi = make_ssa_name (loadedi, NULL);
c18c98c0
JJ
7483 }
7484 else
7485 {
7486 iaddr = addr;
7487 loadedi = loaded_val;
7488 }
726a989a 7489
70f34814
RG
7490 initial
7491 = force_gimple_operand_gsi (&si,
7492 build2 (MEM_REF, TREE_TYPE (TREE_TYPE (iaddr)),
7493 iaddr,
7494 build_int_cst (TREE_TYPE (iaddr), 0)),
7495 true, NULL_TREE, true, GSI_SAME_STMT);
c18c98c0
JJ
7496
7497 /* Move the value to the LOADEDI temporary. */
a509ebb5
RL
7498 if (gimple_in_ssa_p (cfun))
7499 {
726a989a 7500 gcc_assert (gimple_seq_empty_p (phi_nodes (loop_header)));
c18c98c0 7501 phi = create_phi_node (loadedi, loop_header);
a509ebb5
RL
7502 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (load_bb)),
7503 initial);
7504 }
7505 else
726a989a
RB
7506 gsi_insert_before (&si,
7507 gimple_build_assign (loadedi, initial),
7508 GSI_SAME_STMT);
c18c98c0
JJ
7509 if (loadedi != loaded_val)
7510 {
726a989a
RB
7511 gimple_stmt_iterator gsi2;
7512 tree x;
c18c98c0
JJ
7513
7514 x = build1 (VIEW_CONVERT_EXPR, type, loadedi);
726a989a 7515 gsi2 = gsi_start_bb (loop_header);
c18c98c0
JJ
7516 if (gimple_in_ssa_p (cfun))
7517 {
726a989a
RB
7518 gimple stmt;
7519 x = force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
7520 true, GSI_SAME_STMT);
7521 stmt = gimple_build_assign (loaded_val, x);
7522 gsi_insert_before (&gsi2, stmt, GSI_SAME_STMT);
c18c98c0
JJ
7523 }
7524 else
7525 {
726a989a
RB
7526 x = build2 (MODIFY_EXPR, TREE_TYPE (loaded_val), loaded_val, x);
7527 force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
7528 true, GSI_SAME_STMT);
c18c98c0
JJ
7529 }
7530 }
726a989a 7531 gsi_remove (&si, true);
a509ebb5 7532
726a989a
RB
7533 si = gsi_last_bb (store_bb);
7534 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
a509ebb5 7535
c18c98c0
JJ
7536 if (iaddr == addr)
7537 storedi = stored_val;
a509ebb5 7538 else
c18c98c0 7539 storedi =
726a989a 7540 force_gimple_operand_gsi (&si,
c18c98c0
JJ
7541 build1 (VIEW_CONVERT_EXPR, itype,
7542 stored_val), true, NULL_TREE, true,
726a989a 7543 GSI_SAME_STMT);
a509ebb5
RL
7544
7545 /* Build the compare&swap statement. */
7546 new_storedi = build_call_expr (cmpxchg, 3, iaddr, loadedi, storedi);
726a989a 7547 new_storedi = force_gimple_operand_gsi (&si,
587aa063
RG
7548 fold_convert (TREE_TYPE (loadedi),
7549 new_storedi),
a509ebb5 7550 true, NULL_TREE,
726a989a 7551 true, GSI_SAME_STMT);
a509ebb5
RL
7552
7553 if (gimple_in_ssa_p (cfun))
7554 old_vali = loadedi;
7555 else
7556 {
587aa063 7557 old_vali = create_tmp_var (TREE_TYPE (loadedi), NULL);
726a989a
RB
7558 stmt = gimple_build_assign (old_vali, loadedi);
7559 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
a509ebb5 7560
726a989a
RB
7561 stmt = gimple_build_assign (loadedi, new_storedi);
7562 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
a509ebb5
RL
7563 }
7564
7565 /* Note that we always perform the comparison as an integer, even for
b8698a0f 7566 floating point. This allows the atomic operation to properly
a509ebb5 7567 succeed even with NaNs and -0.0. */
726a989a
RB
7568 stmt = gimple_build_cond_empty
7569 (build2 (NE_EXPR, boolean_type_node,
7570 new_storedi, old_vali));
7571 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
a509ebb5
RL
7572
7573 /* Update cfg. */
7574 e = single_succ_edge (store_bb);
7575 e->flags &= ~EDGE_FALLTHRU;
7576 e->flags |= EDGE_FALSE_VALUE;
7577
7578 e = make_edge (store_bb, loop_header, EDGE_TRUE_VALUE);
7579
c18c98c0 7580 /* Copy the new value to loadedi (we already did that before the condition
a509ebb5
RL
7581 if we are not in SSA). */
7582 if (gimple_in_ssa_p (cfun))
7583 {
726a989a 7584 phi = gimple_seq_first_stmt (phi_nodes (loop_header));
c18c98c0 7585 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), new_storedi);
a509ebb5
RL
7586 }
7587
726a989a
RB
7588 /* Remove GIMPLE_OMP_ATOMIC_STORE. */
7589 gsi_remove (&si, true);
a509ebb5 7590
6093bc06
RB
7591 struct loop *loop = alloc_loop ();
7592 loop->header = loop_header;
a1756c0a 7593 loop->latch = store_bb;
6093bc06
RB
7594 add_loop (loop, loop_header->loop_father);
7595
a509ebb5
RL
7596 if (gimple_in_ssa_p (cfun))
7597 update_ssa (TODO_update_ssa_no_phi);
7598
7599 return true;
7600}
7601
7602/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
7603
7604 GOMP_atomic_start ();
7605 *addr = rhs;
7606 GOMP_atomic_end ();
7607
7608 The result is not globally atomic, but works so long as all parallel
7609 references are within #pragma omp atomic directives. According to
7610 responses received from omp@openmp.org, this appears to be within the
7611 spec, which makes sense given that several other compilers handle
b8698a0f 7612 this situation the same way.
726a989a
RB
7613 LOADED_VAL and ADDR are the operands of GIMPLE_OMP_ATOMIC_LOAD we're
7614 expanding. STORED_VAL is the operand of the matching
7615 GIMPLE_OMP_ATOMIC_STORE.
a509ebb5 7616
b8698a0f
L
7617 We replace
7618 GIMPLE_OMP_ATOMIC_LOAD (loaded_val, addr) with
a509ebb5
RL
7619 loaded_val = *addr;
7620
7621 and replace
05409788 7622 GIMPLE_OMP_ATOMIC_STORE (stored_val) with
b8698a0f 7623 *addr = stored_val;
a509ebb5
RL
7624*/
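/* Illustrative fallback (not part of the original sources): for a type
   the compare-and-swap builtins cannot handle, e.g. a 16-byte long
   double,

     #pragma omp atomic
     x += 1.0L;

   ends up as

     GOMP_atomic_start ();
     x = x + 1.0L;
     GOMP_atomic_end ();  */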
7625
7626static bool
7627expand_omp_atomic_mutex (basic_block load_bb, basic_block store_bb,
7628 tree addr, tree loaded_val, tree stored_val)
7629{
726a989a
RB
7630 gimple_stmt_iterator si;
7631 gimple stmt;
a509ebb5
RL
7632 tree t;
7633
726a989a
RB
7634 si = gsi_last_bb (load_bb);
7635 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
a509ebb5 7636
e79983f4 7637 t = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
3bb06db4 7638 t = build_call_expr (t, 0);
726a989a 7639 force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
a509ebb5 7640
70f34814 7641 stmt = gimple_build_assign (loaded_val, build_simple_mem_ref (addr));
726a989a
RB
7642 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
7643 gsi_remove (&si, true);
a509ebb5 7644
726a989a
RB
7645 si = gsi_last_bb (store_bb);
7646 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
a509ebb5 7647
70f34814
RG
7648 stmt = gimple_build_assign (build_simple_mem_ref (unshare_expr (addr)),
7649 stored_val);
726a989a 7650 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
a509ebb5 7651
e79983f4 7652 t = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
3bb06db4 7653 t = build_call_expr (t, 0);
726a989a
RB
7654 force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
7655 gsi_remove (&si, true);
a509ebb5
RL
7656
7657 if (gimple_in_ssa_p (cfun))
7658 update_ssa (TODO_update_ssa_no_phi);
7659 return true;
7660}
7661
b8698a0f
L
7662/* Expand a GIMPLE_OMP_ATOMIC statement. We try to expand
7663 using expand_omp_atomic_fetch_op. If that fails, we try to
a509ebb5
RL
7664 call expand_omp_atomic_pipeline, and if it fails too, the
7665 ultimate fallback is wrapping the operation in a mutex
b8698a0f
L
7666 (expand_omp_atomic_mutex). REGION is the atomic region built
7667 by build_omp_regions_1(). */
a509ebb5
RL
7668
7669static void
7670expand_omp_atomic (struct omp_region *region)
7671{
7672 basic_block load_bb = region->entry, store_bb = region->exit;
726a989a
RB
7673 gimple load = last_stmt (load_bb), store = last_stmt (store_bb);
7674 tree loaded_val = gimple_omp_atomic_load_lhs (load);
7675 tree addr = gimple_omp_atomic_load_rhs (load);
7676 tree stored_val = gimple_omp_atomic_store_val (store);
a509ebb5
RL
7677 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7678 HOST_WIDE_INT index;
7679
7680 /* Make sure the type is one of the supported sizes. */
ae7e9ddd 7681 index = tree_to_uhwi (TYPE_SIZE_UNIT (type));
a509ebb5
RL
7682 index = exact_log2 (index);
7683 if (index >= 0 && index <= 4)
7684 {
7685 unsigned int align = TYPE_ALIGN_UNIT (type);
7686
7687 /* __sync builtins require strict data alignment. */
4999c62c 7688 if (exact_log2 (align) >= index)
a509ebb5 7689 {
05409788 7690 /* Atomic load. */
20906c66
JJ
7691 if (loaded_val == stored_val
7692 && (GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7693 || GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT)
7694 && GET_MODE_BITSIZE (TYPE_MODE (type)) <= BITS_PER_WORD
05409788 7695 && expand_omp_atomic_load (load_bb, addr, loaded_val, index))
20906c66
JJ
7696 return;
7697
05409788 7698 /* Atomic store. */
20906c66
JJ
7699 if ((GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7700 || GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT)
7701 && GET_MODE_BITSIZE (TYPE_MODE (type)) <= BITS_PER_WORD
7702 && store_bb == single_succ (load_bb)
7703 && first_stmt (store_bb) == store
05409788
RH
7704 && expand_omp_atomic_store (load_bb, addr, loaded_val,
7705 stored_val, index))
20906c66
JJ
7706 return;
7707
a509ebb5
RL
7708 /* When possible, use specialized atomic update functions. */
7709 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
05409788
RH
7710 && store_bb == single_succ (load_bb)
7711 && expand_omp_atomic_fetch_op (load_bb, addr,
7712 loaded_val, stored_val, index))
7713 return;
a509ebb5
RL
7714
7715 /* If we don't have specialized __sync builtins, try and implement
7716 as a compare and swap loop. */
7717 if (expand_omp_atomic_pipeline (load_bb, store_bb, addr,
7718 loaded_val, stored_val, index))
7719 return;
7720 }
7721 }
7722
7723 /* The ultimate fallback is wrapping the operation in a mutex. */
7724 expand_omp_atomic_mutex (load_bb, store_bb, addr, loaded_val, stored_val);
7725}
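/* Worked example (illustrative): a naturally aligned 4-byte int gives
   index = exact_log2 (4) = 2, so e.g. BUILT_IN_ATOMIC_FETCH_ADD_N + 2 + 1
   resolves to BUILT_IN_ATOMIC_FETCH_ADD_4; an 8-byte type with only
   4-byte alignment fails the exact_log2 (align) >= index test above and
   takes the mutex fallback.  */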
7726
953ff289 7727
acf0174b
JJ
7728/* Expand the OpenMP target{, data, update} directive starting at REGION. */
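/* Illustrative example (not part of the original sources):

     #pragma omp target map(tofrom: a[0:n])
     for (int i = 0; i < n; i++)
       a[i] += 1;

   is outlined into a child function launched through GOMP_target,
   whereas "target data" and "target update" regions only produce the
   corresponding GOMP_target_data or GOMP_target_update call.  */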
7729
7730static void
7731expand_omp_target (struct omp_region *region)
7732{
7733 basic_block entry_bb, exit_bb, new_bb;
7734 struct function *child_cfun = NULL;
7735 tree child_fn = NULL_TREE, block, t;
7736 gimple_stmt_iterator gsi;
7737 gimple entry_stmt, stmt;
7738 edge e;
7739
7740 entry_stmt = last_stmt (region->entry);
7741 new_bb = region->entry;
7742 int kind = gimple_omp_target_kind (entry_stmt);
7743 if (kind == GF_OMP_TARGET_KIND_REGION)
7744 {
7745 child_fn = gimple_omp_target_child_fn (entry_stmt);
7746 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7747 }
7748
7749 entry_bb = region->entry;
7750 exit_bb = region->exit;
7751
7752 if (kind == GF_OMP_TARGET_KIND_REGION)
7753 {
7754 unsigned srcidx, dstidx, num;
7755
7756 /* If the target region needs data sent from the parent
7757 function, then the very first statement (except possible
7758 tree profile counter updates) of the parallel body
7759 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O. Since
7760 &.OMP_DATA_O is passed as an argument to the child function,
7761 we need to replace it with the argument as seen by the child
7762 function.
7763
7764 In most cases, this will end up being the identity assignment
7765 .OMP_DATA_I = .OMP_DATA_I. However, if the parallel body had
7766 a function call that has been inlined, the original PARM_DECL
7767 .OMP_DATA_I may have been converted into a different local
7768 variable. In which case, we need to keep the assignment. */
7769 if (gimple_omp_target_data_arg (entry_stmt))
7770 {
7771 basic_block entry_succ_bb = single_succ (entry_bb);
7772 gimple_stmt_iterator gsi;
7773 tree arg;
7774 gimple tgtcopy_stmt = NULL;
7775 tree sender
7776 = TREE_VEC_ELT (gimple_omp_target_data_arg (entry_stmt), 0);
7777
7778 for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
7779 {
7780 gcc_assert (!gsi_end_p (gsi));
7781 stmt = gsi_stmt (gsi);
7782 if (gimple_code (stmt) != GIMPLE_ASSIGN)
7783 continue;
7784
7785 if (gimple_num_ops (stmt) == 2)
7786 {
7787 tree arg = gimple_assign_rhs1 (stmt);
7788
7789 /* We're ignoring the subcode because we're
7790 effectively doing a STRIP_NOPS. */
7791
7792 if (TREE_CODE (arg) == ADDR_EXPR
7793 && TREE_OPERAND (arg, 0) == sender)
7794 {
7795 tgtcopy_stmt = stmt;
7796 break;
7797 }
7798 }
7799 }
7800
7801 gcc_assert (tgtcopy_stmt != NULL);
7802 arg = DECL_ARGUMENTS (child_fn);
7803
7804 gcc_assert (gimple_assign_lhs (tgtcopy_stmt) == arg);
7805 gsi_remove (&gsi, true);
7806 }
7807
7808 /* Declare local variables needed in CHILD_CFUN. */
7809 block = DECL_INITIAL (child_fn);
7810 BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
7811 /* The gimplifier could record temporaries in the target block
7812 rather than in the containing function's local_decls chain,
7813 which would mean cgraph missed finalizing them. Do it now. */
7814 for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
7815 if (TREE_CODE (t) == VAR_DECL
7816 && TREE_STATIC (t)
7817 && !DECL_EXTERNAL (t))
7818 varpool_finalize_decl (t);
7819 DECL_SAVED_TREE (child_fn) = NULL;
7820 /* We'll create a CFG for child_fn, so no gimple body is needed. */
7821 gimple_set_body (child_fn, NULL);
7822 TREE_USED (block) = 1;
7823
7824 /* Reset DECL_CONTEXT on function arguments. */
7825 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7826 DECL_CONTEXT (t) = child_fn;
7827
7828 /* Split ENTRY_BB at GIMPLE_OMP_TARGET,
7829 so that it can be moved to the child function. */
7830 gsi = gsi_last_bb (entry_bb);
7831 stmt = gsi_stmt (gsi);
7832 gcc_assert (stmt && gimple_code (stmt) == GIMPLE_OMP_TARGET
7833 && gimple_omp_target_kind (stmt)
7834 == GF_OMP_TARGET_KIND_REGION);
7835 gsi_remove (&gsi, true);
7836 e = split_block (entry_bb, stmt);
7837 entry_bb = e->dest;
7838 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7839
7840 /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR. */
7841 if (exit_bb)
7842 {
7843 gsi = gsi_last_bb (exit_bb);
7844 gcc_assert (!gsi_end_p (gsi)
7845 && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
7846 stmt = gimple_build_return (NULL);
7847 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
7848 gsi_remove (&gsi, true);
7849 }
7850
7851 /* Move the target region into CHILD_CFUN. */
7852
7853 block = gimple_block (entry_stmt);
7854
7855 new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
7856 if (exit_bb)
7857 single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
7858 /* When the OMP expansion process cannot guarantee an up-to-date
7859 loop tree, arrange for the child function to fix up loops. */
7860 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
7861 child_cfun->x_current_loops->state |= LOOPS_NEED_FIXUP;
7862
7863 /* Remove non-local VAR_DECLs from child_cfun->local_decls list. */
7864 num = vec_safe_length (child_cfun->local_decls);
7865 for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
7866 {
7867 t = (*child_cfun->local_decls)[srcidx];
7868 if (DECL_CONTEXT (t) == cfun->decl)
7869 continue;
7870 if (srcidx != dstidx)
7871 (*child_cfun->local_decls)[dstidx] = t;
7872 dstidx++;
7873 }
7874 if (dstidx != num)
7875 vec_safe_truncate (child_cfun->local_decls, dstidx);
7876
7877 /* Inform the callgraph about the new function. */
7878 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
7879 cgraph_add_new_function (child_fn, true);
7880
7881 /* Fix the callgraph edges for child_cfun. Those for cfun will be
7882 fixed in a following pass. */
7883 push_cfun (child_cfun);
7884 rebuild_cgraph_edges ();
7885
7886 /* Some EH regions might become dead, see PR34608. If
7887 pass_cleanup_cfg isn't the first pass to happen with the
7888 new child, these dead EH edges might cause problems.
7889 Clean them up now. */
7890 if (flag_exceptions)
7891 {
7892 basic_block bb;
7893 bool changed = false;
7894
7895 FOR_EACH_BB (bb)
7896 changed |= gimple_purge_dead_eh_edges (bb);
7897 if (changed)
7898 cleanup_tree_cfg ();
7899 }
7900 pop_cfun ();
7901 }
7902
7903 /* Emit a library call to launch the target region, or do data
7904 transfers. */
7905 tree t1, t2, t3, t4, device, cond, c, clauses;
7906 enum built_in_function start_ix;
7907 location_t clause_loc;
7908
7909 clauses = gimple_omp_target_clauses (entry_stmt);
7910
7911 if (kind == GF_OMP_TARGET_KIND_REGION)
7912 start_ix = BUILT_IN_GOMP_TARGET;
7913 else if (kind == GF_OMP_TARGET_KIND_DATA)
7914 start_ix = BUILT_IN_GOMP_TARGET_DATA;
7915 else
7916 start_ix = BUILT_IN_GOMP_TARGET_UPDATE;
7917
7918 /* By default, the value of DEVICE is -1 (let runtime library choose)
7919 and there is no conditional. */
7920 cond = NULL_TREE;
7921 device = build_int_cst (integer_type_node, -1);
7922
7923 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
7924 if (c)
7925 cond = OMP_CLAUSE_IF_EXPR (c);
7926
7927 c = find_omp_clause (clauses, OMP_CLAUSE_DEVICE);
7928 if (c)
7929 {
7930 device = OMP_CLAUSE_DEVICE_ID (c);
7931 clause_loc = OMP_CLAUSE_LOCATION (c);
7932 }
7933 else
7934 clause_loc = gimple_location (entry_stmt);
7935
7936 /* Ensure 'device' is of the correct type. */
7937 device = fold_convert_loc (clause_loc, integer_type_node, device);
7938
7939 /* If we found the clause 'if (cond)', build
7940 (cond ? device : -2). */
7941 if (cond)
7942 {
7943 cond = gimple_boolify (cond);
7944
7945 basic_block cond_bb, then_bb, else_bb;
7946 edge e;
7947 tree tmp_var;
7948
7949 tmp_var = create_tmp_var (TREE_TYPE (device), NULL);
7950 if (kind != GF_OMP_TARGET_KIND_REGION)
7951 {
7952 gsi = gsi_last_bb (new_bb);
7953 gsi_prev (&gsi);
7954 e = split_block (new_bb, gsi_stmt (gsi));
7955 }
7956 else
7957 e = split_block (new_bb, NULL);
7958 cond_bb = e->src;
7959 new_bb = e->dest;
7960 remove_edge (e);
7961
7962 then_bb = create_empty_bb (cond_bb);
7963 else_bb = create_empty_bb (then_bb);
7964 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
7965 set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);
7966
7967 stmt = gimple_build_cond_empty (cond);
7968 gsi = gsi_last_bb (cond_bb);
7969 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
7970
7971 gsi = gsi_start_bb (then_bb);
7972 stmt = gimple_build_assign (tmp_var, device);
7973 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
7974
7975 gsi = gsi_start_bb (else_bb);
7976 stmt = gimple_build_assign (tmp_var,
7977 build_int_cst (integer_type_node, -2));
7978 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
7979
7980 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
7981 make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
7982 if (current_loops)
7983 {
7984 add_bb_to_loop (then_bb, cond_bb->loop_father);
7985 add_bb_to_loop (else_bb, cond_bb->loop_father);
7986 }
7987 make_edge (then_bb, new_bb, EDGE_FALLTHRU);
7988 make_edge (else_bb, new_bb, EDGE_FALLTHRU);
7989
7990 device = tmp_var;
7991 }
7992
7993 gsi = gsi_last_bb (new_bb);
7994 t = gimple_omp_target_data_arg (entry_stmt);
7995 if (t == NULL)
7996 {
7997 t1 = size_zero_node;
7998 t2 = build_zero_cst (ptr_type_node);
7999 t3 = t2;
8000 t4 = t2;
8001 }
8002 else
8003 {
8004 t1 = TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (TREE_VEC_ELT (t, 1))));
8005 t1 = size_binop (PLUS_EXPR, t1, size_int (1));
8006 t2 = build_fold_addr_expr (TREE_VEC_ELT (t, 0));
8007 t3 = build_fold_addr_expr (TREE_VEC_ELT (t, 1));
8008 t4 = build_fold_addr_expr (TREE_VEC_ELT (t, 2));
8009 }
8010
8011 gimple g;
8012 /* FIXME: This will be address of
8013 extern char __OPENMP_TARGET__[] __attribute__((visibility ("hidden")))
8014 symbol, as soon as the linker plugin is able to create it for us. */
8015 tree openmp_target = build_zero_cst (ptr_type_node);
8016 if (kind == GF_OMP_TARGET_KIND_REGION)
8017 {
8018 tree fnaddr = build_fold_addr_expr (child_fn);
8019 g = gimple_build_call (builtin_decl_explicit (start_ix), 7,
8020 device, fnaddr, openmp_target, t1, t2, t3, t4);
8021 }
8022 else
8023 g = gimple_build_call (builtin_decl_explicit (start_ix), 6,
8024 device, openmp_target, t1, t2, t3, t4);
8025 gimple_set_location (g, gimple_location (entry_stmt));
8026 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
8027 if (kind != GF_OMP_TARGET_KIND_REGION)
8028 {
8029 g = gsi_stmt (gsi);
8030 gcc_assert (g && gimple_code (g) == GIMPLE_OMP_TARGET);
8031 gsi_remove (&gsi, true);
8032 }
8033 if (kind == GF_OMP_TARGET_KIND_DATA && region->exit)
8034 {
8035 gsi = gsi_last_bb (region->exit);
8036 g = gsi_stmt (gsi);
8037 gcc_assert (g && gimple_code (g) == GIMPLE_OMP_RETURN);
8038 gsi_remove (&gsi, true);
8039 }
8040}
8041
8042
8043/* Expand the parallel region tree rooted at REGION. Expansion
8044 proceeds in depth-first order. Innermost regions are expanded
8045 first. This way, parallel regions that require a new function to
726a989a 8046 be created (e.g., GIMPLE_OMP_PARALLEL) can be expanded without having any
50674e96
DN
8047 internal dependencies in their body. */
8048
8049static void
8050expand_omp (struct omp_region *region)
8051{
8052 while (region)
8053 {
b357f682 8054 location_t saved_location;
acf0174b 8055 gimple inner_stmt = NULL;
b357f682 8056
068e1875
ZD
8057 /* First, determine whether this is a combined parallel+workshare
8058 region. */
726a989a 8059 if (region->type == GIMPLE_OMP_PARALLEL)
068e1875
ZD
8060 determine_parallel_type (region);
8061
acf0174b
JJ
8062 if (region->type == GIMPLE_OMP_FOR
8063 && gimple_omp_for_combined_p (last_stmt (region->entry)))
8064 inner_stmt = last_stmt (region->inner->entry);
8065
50674e96
DN
8066 if (region->inner)
8067 expand_omp (region->inner);
8068
b357f682 8069 saved_location = input_location;
726a989a
RB
8070 if (gimple_has_location (last_stmt (region->entry)))
8071 input_location = gimple_location (last_stmt (region->entry));
b357f682 8072
777f7f9a 8073 switch (region->type)
50674e96 8074 {
726a989a
RB
8075 case GIMPLE_OMP_PARALLEL:
8076 case GIMPLE_OMP_TASK:
a68ab351
JJ
8077 expand_omp_taskreg (region);
8078 break;
8079
726a989a 8080 case GIMPLE_OMP_FOR:
acf0174b 8081 expand_omp_for (region, inner_stmt);
777f7f9a 8082 break;
50674e96 8083
726a989a 8084 case GIMPLE_OMP_SECTIONS:
777f7f9a
RH
8085 expand_omp_sections (region);
8086 break;
50674e96 8087
726a989a 8088 case GIMPLE_OMP_SECTION:
777f7f9a 8089 /* Individual omp sections are handled together with their
726a989a 8090 parent GIMPLE_OMP_SECTIONS region. */
777f7f9a 8091 break;
50674e96 8092
726a989a 8093 case GIMPLE_OMP_SINGLE:
777f7f9a
RH
8094 expand_omp_single (region);
8095 break;
50674e96 8096
726a989a 8097 case GIMPLE_OMP_MASTER:
acf0174b 8098 case GIMPLE_OMP_TASKGROUP:
726a989a
RB
8099 case GIMPLE_OMP_ORDERED:
8100 case GIMPLE_OMP_CRITICAL:
acf0174b 8101 case GIMPLE_OMP_TEAMS:
777f7f9a
RH
8102 expand_omp_synch (region);
8103 break;
50674e96 8104
726a989a 8105 case GIMPLE_OMP_ATOMIC_LOAD:
a509ebb5
RL
8106 expand_omp_atomic (region);
8107 break;
8108
acf0174b
JJ
8109 case GIMPLE_OMP_TARGET:
8110 expand_omp_target (region);
8111 break;
8112
777f7f9a
RH
8113 default:
8114 gcc_unreachable ();
8115 }
8d9c1aec 8116
b357f682 8117 input_location = saved_location;
50674e96
DN
8118 region = region->next;
8119 }
8120}
8121
8122
8123/* Helper for build_omp_regions. Scan the dominator tree starting at
5f40b3cb
ZD
8124 block BB. PARENT is the region that contains BB. If SINGLE_TREE is
8125 true, the function ends once a single tree is built (otherwise, a
8126 whole forest of OMP constructs may be built). */
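/* Illustrative example (not part of the original sources): for

     #pragma omp parallel
     {
     #pragma omp for
       for (i = 0; i < n; i++)
         body (i);
     }

   the scan builds a GIMPLE_OMP_PARALLEL region whose "inner" field
   points to a GIMPLE_OMP_FOR region, mirroring the nesting of the
   directives.  */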
50674e96
DN
8127
8128static void
5f40b3cb
ZD
8129build_omp_regions_1 (basic_block bb, struct omp_region *parent,
8130 bool single_tree)
50674e96 8131{
726a989a
RB
8132 gimple_stmt_iterator gsi;
8133 gimple stmt;
50674e96
DN
8134 basic_block son;
8135
726a989a
RB
8136 gsi = gsi_last_bb (bb);
8137 if (!gsi_end_p (gsi) && is_gimple_omp (gsi_stmt (gsi)))
50674e96
DN
8138 {
8139 struct omp_region *region;
726a989a 8140 enum gimple_code code;
50674e96 8141
726a989a
RB
8142 stmt = gsi_stmt (gsi);
8143 code = gimple_code (stmt);
8144 if (code == GIMPLE_OMP_RETURN)
50674e96
DN
8145 {
8146 /* STMT is the return point out of region PARENT. Mark it
8147 as the exit point and make PARENT the immediately
8148 enclosing region. */
8149 gcc_assert (parent);
8150 region = parent;
777f7f9a 8151 region->exit = bb;
50674e96 8152 parent = parent->outer;
50674e96 8153 }
726a989a 8154 else if (code == GIMPLE_OMP_ATOMIC_STORE)
a509ebb5 8155 {
726a989a
RB
8156 /* GIMPLE_OMP_ATOMIC_STORE is analogous to
8157 GIMPLE_OMP_RETURN, but matches with
8158 GIMPLE_OMP_ATOMIC_LOAD. */
a509ebb5 8159 gcc_assert (parent);
726a989a 8160 gcc_assert (parent->type == GIMPLE_OMP_ATOMIC_LOAD);
a509ebb5
RL
8161 region = parent;
8162 region->exit = bb;
8163 parent = parent->outer;
8164 }
8165
726a989a 8166 else if (code == GIMPLE_OMP_CONTINUE)
777f7f9a
RH
8167 {
8168 gcc_assert (parent);
8169 parent->cont = bb;
8170 }
726a989a 8171 else if (code == GIMPLE_OMP_SECTIONS_SWITCH)
e5c95afe 8172 {
726a989a
RB
8173 /* GIMPLE_OMP_SECTIONS_SWITCH is part of
8174 GIMPLE_OMP_SECTIONS, and we do nothing for it. */
8175 ;
e5c95afe 8176 }
acf0174b
JJ
8177 else if (code == GIMPLE_OMP_TARGET
8178 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_UPDATE)
8179 new_omp_region (bb, code, parent);
50674e96
DN
8180 else
8181 {
8182 /* Otherwise, this directive becomes the parent for a new
8183 region. */
777f7f9a 8184 region = new_omp_region (bb, code, parent);
50674e96
DN
8185 parent = region;
8186 }
50674e96
DN
8187 }
8188
5f40b3cb
ZD
8189 if (single_tree && !parent)
8190 return;
8191
50674e96
DN
8192 for (son = first_dom_son (CDI_DOMINATORS, bb);
8193 son;
8194 son = next_dom_son (CDI_DOMINATORS, son))
5f40b3cb
ZD
8195 build_omp_regions_1 (son, parent, single_tree);
8196}
8197
8198/* Build the tree of OMP regions rooted at ROOT, storing it in
8199   root_omp_region.  */
8200
8201static void
8202build_omp_regions_root (basic_block root)
8203{
8204 gcc_assert (root_omp_region == NULL);
8205 build_omp_regions_1 (root, NULL, true);
8206 gcc_assert (root_omp_region != NULL);
50674e96
DN
8207}
8208
5f40b3cb
ZD
8209/* Expand the OMP construct (and its subconstructs) starting in HEAD.  */
8210
8211void
8212omp_expand_local (basic_block head)
8213{
8214 build_omp_regions_root (head);
8215 if (dump_file && (dump_flags & TDF_DETAILS))
8216 {
8217 fprintf (dump_file, "\nOMP region tree\n\n");
8218 dump_omp_region (dump_file, root_omp_region, 0);
8219 fprintf (dump_file, "\n");
8220 }
8221
8222 remove_exit_barriers (root_omp_region);
8223 expand_omp (root_omp_region);
8224
8225 free_omp_regions ();
8226}
50674e96
DN
8227
8228/* Scan the CFG and build a tree of OMP regions, storing the root of
8229   the tree in root_omp_region.  */
8230
8231static void
8232build_omp_regions (void)
8233{
777f7f9a 8234 gcc_assert (root_omp_region == NULL);
50674e96 8235 calculate_dominance_info (CDI_DOMINATORS);
fefa31b5 8236 build_omp_regions_1 (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, false);
50674e96
DN
8237}
8238
50674e96
DN
8239/* Main entry point for expanding OMP-GIMPLE into runtime calls. */
8240
c2924966 8241static unsigned int
50674e96
DN
8242execute_expand_omp (void)
8243{
8244 build_omp_regions ();
8245
777f7f9a
RH
8246 if (!root_omp_region)
8247 return 0;
50674e96 8248
777f7f9a
RH
8249 if (dump_file)
8250 {
8251 fprintf (dump_file, "\nOMP region tree\n\n");
8252 dump_omp_region (dump_file, root_omp_region, 0);
8253 fprintf (dump_file, "\n");
50674e96 8254 }
777f7f9a
RH
8255
8256 remove_exit_barriers (root_omp_region);
8257
8258 expand_omp (root_omp_region);
8259
777f7f9a
RH
8260 cleanup_tree_cfg ();
8261
8262 free_omp_regions ();
8263
c2924966 8264 return 0;
50674e96
DN
8265}
8266
917948d3
ZD
8267/* OMP expansion -- the default pass, run before creation of SSA form. */
8268
50674e96
DN
8269static bool
8270gate_expand_omp (void)
8271{
c02065fc
AH
8272 return ((flag_openmp != 0 || flag_openmp_simd != 0
8273 || flag_enable_cilkplus != 0) && !seen_error ());
50674e96
DN
8274}
8275
27a4cd48
DM
8276namespace {
8277
8278const pass_data pass_data_expand_omp =
8279{
8280 GIMPLE_PASS, /* type */
8281 "ompexp", /* name */
8282 OPTGROUP_NONE, /* optinfo_flags */
8283 true, /* has_gate */
8284 true, /* has_execute */
8285 TV_NONE, /* tv_id */
8286 PROP_gimple_any, /* properties_required */
8287 0, /* properties_provided */
8288 0, /* properties_destroyed */
8289 0, /* todo_flags_start */
8290 0, /* todo_flags_finish */
50674e96 8291};
27a4cd48
DM
8292
8293class pass_expand_omp : public gimple_opt_pass
8294{
8295public:
c3284718
RS
8296 pass_expand_omp (gcc::context *ctxt)
8297 : gimple_opt_pass (pass_data_expand_omp, ctxt)
27a4cd48
DM
8298 {}
8299
8300 /* opt_pass methods: */
8301 bool gate () { return gate_expand_omp (); }
8302 unsigned int execute () { return execute_expand_omp (); }
8303
8304}; // class pass_expand_omp
8305
8306} // anon namespace
8307
8308gimple_opt_pass *
8309make_pass_expand_omp (gcc::context *ctxt)
8310{
8311 return new pass_expand_omp (ctxt);
8312}
50674e96
DN
8313\f
8314/* Routines to lower OpenMP directives into OMP-GIMPLE. */
8315
acf0174b
JJ
8316/* If CTX is a worksharing context inside a cancellable parallel
8317   region and it isn't nowait, add a LHS to its GIMPLE_OMP_RETURN
8318   and a conditional branch to the parallel's cancel_label to handle
8319 cancellation in the implicit barrier. */
8320
8321static void
8322maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
8323{
8324 gimple omp_return = gimple_seq_last_stmt (*body);
8325 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8326 if (gimple_omp_return_nowait_p (omp_return))
8327 return;
8328 if (ctx->outer
8329 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
8330 && ctx->outer->cancellable)
8331 {
8332 tree lhs = create_tmp_var (boolean_type_node, NULL);
8333 gimple_omp_return_set_lhs (omp_return, lhs);
8334 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8335 gimple g = gimple_build_cond (NE_EXPR, lhs, boolean_false_node,
8336 ctx->outer->cancel_label, fallthru_label);
8337 gimple_seq_add_stmt (body, g);
8338 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8339 }
8340}
8341
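/* For illustration (schematic; the temporary name is invented): for a
   cancellable region the code added above shapes the tail of the body as

	GIMPLE_OMP_RETURN (lhs = D.tmp)
	if (D.tmp != 0) goto <cancel_label>; else goto <fallthru>;
	<fallthru>:

   so that an implicit barrier which observes a pending cancellation
   transfers control to the enclosing parallel's cancel_label.  */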
726a989a
RB
8342/* Lower the OpenMP sections directive in the current statement in GSI_P.
8343 CTX is the enclosing OMP context for the current statement. */
50674e96
DN
8344
8345static void
726a989a 8346lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
50674e96 8347{
726a989a
RB
8348 tree block, control;
8349 gimple_stmt_iterator tgsi;
726a989a 8350 gimple stmt, new_stmt, bind, t;
355a7673 8351 gimple_seq ilist, dlist, olist, new_body;
50674e96 8352
726a989a 8353 stmt = gsi_stmt (*gsi_p);
50674e96 8354
45852dcc 8355 push_gimplify_context ();
50674e96
DN
8356
8357 dlist = NULL;
8358 ilist = NULL;
726a989a 8359 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
acf0174b 8360 &ilist, &dlist, ctx, NULL);
50674e96 8361
355a7673
MM
8362 new_body = gimple_omp_body (stmt);
8363 gimple_omp_set_body (stmt, NULL);
8364 tgsi = gsi_start (new_body);
8365 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
50674e96
DN
8366 {
8367 omp_context *sctx;
726a989a 8368 gimple sec_start;
50674e96 8369
726a989a 8370 sec_start = gsi_stmt (tgsi);
50674e96
DN
8371 sctx = maybe_lookup_ctx (sec_start);
8372 gcc_assert (sctx);
8373
355a7673
MM
8374 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8375 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8376 GSI_CONTINUE_LINKING);
726a989a 8377 gimple_omp_set_body (sec_start, NULL);
50674e96 8378
355a7673 8379 if (gsi_one_before_end_p (tgsi))
50674e96 8380 {
726a989a
RB
8381 gimple_seq l = NULL;
8382 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
50674e96 8383 &l, ctx);
355a7673 8384 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
726a989a 8385 gimple_omp_section_set_last (sec_start);
50674e96 8386 }
b8698a0f 8387
355a7673
MM
8388 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8389 GSI_CONTINUE_LINKING);
50674e96 8390 }
953ff289
DN
8391
8392 block = make_node (BLOCK);
355a7673 8393 bind = gimple_build_bind (NULL, new_body, block);
953ff289 8394
726a989a
RB
8395 olist = NULL;
8396 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
50674e96 8397
b357f682 8398 block = make_node (BLOCK);
726a989a 8399 new_stmt = gimple_build_bind (NULL, NULL, block);
355a7673 8400 gsi_replace (gsi_p, new_stmt, true);
50674e96 8401
b357f682 8402 pop_gimplify_context (new_stmt);
726a989a
RB
8403 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8404 BLOCK_VARS (block) = gimple_bind_vars (bind);
b357f682
JJ
8405 if (BLOCK_VARS (block))
8406 TREE_USED (block) = 1;
8407
726a989a
RB
8408 new_body = NULL;
8409 gimple_seq_add_seq (&new_body, ilist);
8410 gimple_seq_add_stmt (&new_body, stmt);
8411 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8412 gimple_seq_add_stmt (&new_body, bind);
777f7f9a 8413
e5c95afe 8414 control = create_tmp_var (unsigned_type_node, ".section");
726a989a
RB
8415 t = gimple_build_omp_continue (control, control);
8416 gimple_omp_sections_set_control (stmt, control);
8417 gimple_seq_add_stmt (&new_body, t);
777f7f9a 8418
726a989a 8419 gimple_seq_add_seq (&new_body, olist);
acf0174b
JJ
8420 if (ctx->cancellable)
8421 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
726a989a 8422 gimple_seq_add_seq (&new_body, dlist);
50674e96 8423
726a989a 8424 new_body = maybe_catch_exception (new_body);
4a31b7ee 8425
726a989a
RB
8426 t = gimple_build_omp_return
8427 (!!find_omp_clause (gimple_omp_sections_clauses (stmt),
8428 OMP_CLAUSE_NOWAIT));
8429 gimple_seq_add_stmt (&new_body, t);
acf0174b 8430 maybe_add_implicit_barrier_cancel (ctx, &new_body);
777f7f9a 8431
726a989a 8432 gimple_bind_set_body (new_stmt, new_body);
953ff289
DN
8433}
8434
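/* For illustration, the sequence assembled above lays a sections construct
   out roughly as (schematic):

	<ilist: privatization setup>
	GIMPLE_OMP_SECTIONS (control = .section)
	GIMPLE_OMP_SECTIONS_SWITCH
	{
	  <section body> GIMPLE_OMP_RETURN (nowait)	<- one per section
	  <last section body + lastprivate code> GIMPLE_OMP_RETURN (nowait)
	}
	GIMPLE_OMP_CONTINUE (.section, .section)
	<olist: reductions>  <dlist: destructors>
	GIMPLE_OMP_RETURN (nowait iff OMP_CLAUSE_NOWAIT is present)  */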
8435
50674e96 8436/* A subroutine of lower_omp_single. Expand the simple form of
726a989a 8437 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
953ff289
DN
8438
8439 if (GOMP_single_start ())
8440 BODY;
8441 [ GOMP_barrier (); ] -> unless 'nowait' is present.
50674e96
DN
8442
8443 FIXME. It may be better to delay expanding the logic of this until
8444   pass_expand_omp.  The expanded logic may make the job more difficult
8445   for a synchronization analysis pass.  */
953ff289
DN
8446
8447static void
726a989a 8448lower_omp_single_simple (gimple single_stmt, gimple_seq *pre_p)
953ff289 8449{
c2255bc4
AH
8450 location_t loc = gimple_location (single_stmt);
8451 tree tlabel = create_artificial_label (loc);
8452 tree flabel = create_artificial_label (loc);
726a989a
RB
8453 gimple call, cond;
8454 tree lhs, decl;
8455
e79983f4 8456 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
726a989a
RB
8457 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)), NULL);
8458 call = gimple_build_call (decl, 0);
8459 gimple_call_set_lhs (call, lhs);
8460 gimple_seq_add_stmt (pre_p, call);
8461
8462 cond = gimple_build_cond (EQ_EXPR, lhs,
db3927fb
AH
8463 fold_convert_loc (loc, TREE_TYPE (lhs),
8464 boolean_true_node),
726a989a
RB
8465 tlabel, flabel);
8466 gimple_seq_add_stmt (pre_p, cond);
8467 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8468 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8469 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
953ff289
DN
8470}
8471
50674e96
DN
8472
8473/* A subroutine of lower_omp_single. Expand the simple form of
726a989a 8474 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
953ff289
DN
8475
8476 #pragma omp single copyprivate (a, b, c)
8477
8478 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8479
8480 {
8481 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8482 {
8483 BODY;
8484 copyout.a = a;
8485 copyout.b = b;
8486 copyout.c = c;
8487 GOMP_single_copy_end (&copyout);
8488 }
8489 else
8490 {
8491 a = copyout_p->a;
8492 b = copyout_p->b;
8493 c = copyout_p->c;
8494 }
8495 GOMP_barrier ();
8496 }
50674e96
DN
8497
8498 FIXME. It may be better to delay expanding the logic of this until
8499   pass_expand_omp.  The expanded logic may make the job more difficult
8500   for a synchronization analysis pass.  */
953ff289
DN
8501
8502static void
726a989a 8503lower_omp_single_copy (gimple single_stmt, gimple_seq *pre_p, omp_context *ctx)
953ff289 8504{
e79983f4 8505 tree ptr_type, t, l0, l1, l2, bfn_decl;
726a989a 8506 gimple_seq copyin_seq;
c2255bc4 8507 location_t loc = gimple_location (single_stmt);
953ff289
DN
8508
8509 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8510
8511 ptr_type = build_pointer_type (ctx->record_type);
8512 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8513
c2255bc4
AH
8514 l0 = create_artificial_label (loc);
8515 l1 = create_artificial_label (loc);
8516 l2 = create_artificial_label (loc);
953ff289 8517
e79983f4
MM
8518 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8519 t = build_call_expr_loc (loc, bfn_decl, 0);
db3927fb 8520 t = fold_convert_loc (loc, ptr_type, t);
726a989a 8521 gimplify_assign (ctx->receiver_decl, t, pre_p);
953ff289
DN
8522
8523 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8524 build_int_cst (ptr_type, 0));
8525 t = build3 (COND_EXPR, void_type_node, t,
8526 build_and_jump (&l0), build_and_jump (&l1));
8527 gimplify_and_add (t, pre_p);
8528
726a989a 8529 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
953ff289 8530
726a989a 8531 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
953ff289
DN
8532
8533 copyin_seq = NULL;
726a989a 8534 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
953ff289
DN
8535 &copyin_seq, ctx);
8536
db3927fb 8537 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
e79983f4
MM
8538 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8539 t = build_call_expr_loc (loc, bfn_decl, 1, t);
953ff289
DN
8540 gimplify_and_add (t, pre_p);
8541
8542 t = build_and_jump (&l2);
8543 gimplify_and_add (t, pre_p);
8544
726a989a 8545 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
953ff289 8546
726a989a 8547 gimple_seq_add_seq (pre_p, copyin_seq);
953ff289 8548
726a989a 8549 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
953ff289
DN
8550}
8551
50674e96 8552
953ff289
DN
8553/* Lower code for an OpenMP single directive.  */
8554
8555static void
726a989a 8556lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 8557{
726a989a
RB
8558 tree block;
8559 gimple t, bind, single_stmt = gsi_stmt (*gsi_p);
acf0174b 8560 gimple_seq bind_body, bind_body_tail = NULL, dlist;
953ff289 8561
45852dcc 8562 push_gimplify_context ();
953ff289 8563
355a7673
MM
8564 block = make_node (BLOCK);
8565 bind = gimple_build_bind (NULL, NULL, block);
8566 gsi_replace (gsi_p, bind, true);
726a989a 8567 bind_body = NULL;
355a7673 8568 dlist = NULL;
726a989a 8569 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
acf0174b 8570 &bind_body, &dlist, ctx, NULL);
355a7673 8571 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
953ff289 8572
726a989a 8573 gimple_seq_add_stmt (&bind_body, single_stmt);
953ff289
DN
8574
8575 if (ctx->record_type)
726a989a 8576 lower_omp_single_copy (single_stmt, &bind_body, ctx);
953ff289 8577 else
726a989a
RB
8578 lower_omp_single_simple (single_stmt, &bind_body);
8579
8580 gimple_omp_set_body (single_stmt, NULL);
953ff289 8581
726a989a 8582 gimple_seq_add_seq (&bind_body, dlist);
777f7f9a 8583
726a989a 8584 bind_body = maybe_catch_exception (bind_body);
777f7f9a 8585
b8698a0f 8586 t = gimple_build_omp_return
726a989a
RB
8587 (!!find_omp_clause (gimple_omp_single_clauses (single_stmt),
8588 OMP_CLAUSE_NOWAIT));
acf0174b
JJ
8589 gimple_seq_add_stmt (&bind_body_tail, t);
8590 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
8591 if (ctx->record_type)
8592 {
8593 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8594 tree clobber = build_constructor (ctx->record_type, NULL);
8595 TREE_THIS_VOLATILE (clobber) = 1;
8596 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8597 clobber), GSI_SAME_STMT);
8598 }
8599 gimple_seq_add_seq (&bind_body, bind_body_tail);
355a7673 8600 gimple_bind_set_body (bind, bind_body);
777f7f9a 8601
953ff289 8602 pop_gimplify_context (bind);
50674e96 8603
726a989a
RB
8604 gimple_bind_append_vars (bind, ctx->block_vars);
8605 BLOCK_VARS (block) = ctx->block_vars;
b357f682
JJ
8606 if (BLOCK_VARS (block))
8607 TREE_USED (block) = 1;
953ff289
DN
8608}
8609
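/* Note, for illustration: the ".omp_copy_o = {CLOBBER};" assignment added
   above is only an end-of-life marker; it tells later optimizers that the
   copy-out record is dead once the copyprivate broadcast is over.  */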
50674e96 8610
953ff289
DN
8611/* Lower code for an OpenMP master directive.  */
8612
8613static void
726a989a 8614lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 8615{
e79983f4 8616 tree block, lab = NULL, x, bfn_decl;
726a989a 8617 gimple stmt = gsi_stmt (*gsi_p), bind;
db3927fb 8618 location_t loc = gimple_location (stmt);
726a989a 8619 gimple_seq tseq;
953ff289 8620
45852dcc 8621 push_gimplify_context ();
953ff289
DN
8622
8623 block = make_node (BLOCK);
355a7673
MM
8624 bind = gimple_build_bind (NULL, NULL, block);
8625 gsi_replace (gsi_p, bind, true);
8626 gimple_bind_add_stmt (bind, stmt);
777f7f9a 8627
e79983f4
MM
8628 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8629 x = build_call_expr_loc (loc, bfn_decl, 0);
953ff289
DN
8630 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8631 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
726a989a
RB
8632 tseq = NULL;
8633 gimplify_and_add (x, &tseq);
8634 gimple_bind_add_seq (bind, tseq);
953ff289 8635
355a7673 8636 lower_omp (gimple_omp_body_ptr (stmt), ctx);
726a989a
RB
8637 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8638 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8639 gimple_omp_set_body (stmt, NULL);
953ff289 8640
726a989a 8641 gimple_bind_add_stmt (bind, gimple_build_label (lab));
777f7f9a 8642
726a989a 8643 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
777f7f9a 8644
953ff289 8645 pop_gimplify_context (bind);
50674e96 8646
726a989a
RB
8647 gimple_bind_append_vars (bind, ctx->block_vars);
8648 BLOCK_VARS (block) = ctx->block_vars;
953ff289
DN
8649}
8650
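/* For illustration (schematic): the master construct lowered above becomes

	if (omp_get_thread_num () == 0)
	  BODY;

   with no implied barrier; the GIMPLE_OMP_RETURN is built with nowait
   set.  */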
50674e96 8651
acf0174b
JJ
8652/* Lower code for an OpenMP taskgroup directive.  */
8653
8654static void
8655lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8656{
8657 gimple stmt = gsi_stmt (*gsi_p), bind, x;
8658 tree block = make_node (BLOCK);
8659
8660 bind = gimple_build_bind (NULL, NULL, block);
8661 gsi_replace (gsi_p, bind, true);
8662 gimple_bind_add_stmt (bind, stmt);
8663
8664 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8665 0);
8666 gimple_bind_add_stmt (bind, x);
8667
8668 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8669 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8670 gimple_omp_set_body (stmt, NULL);
8671
8672 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8673
8674 gimple_bind_append_vars (bind, ctx->block_vars);
8675 BLOCK_VARS (block) = ctx->block_vars;
8676}
8677
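/* For illustration (schematic): the taskgroup construct lowered above
   becomes

	GOMP_taskgroup_start ();
	BODY;
	GIMPLE_OMP_RETURN (nowait)

   with a matching GOMP_taskgroup_end () supplied for the region exit.  */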
8678
953ff289
DN
8679/* Lower code for an OpenMP ordered directive.  */
8680
8681static void
726a989a 8682lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 8683{
726a989a
RB
8684 tree block;
8685 gimple stmt = gsi_stmt (*gsi_p), bind, x;
953ff289 8686
45852dcc 8687 push_gimplify_context ();
953ff289
DN
8688
8689 block = make_node (BLOCK);
355a7673
MM
8690 bind = gimple_build_bind (NULL, NULL, block);
8691 gsi_replace (gsi_p, bind, true);
8692 gimple_bind_add_stmt (bind, stmt);
777f7f9a 8693
e79983f4
MM
8694 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8695 0);
726a989a 8696 gimple_bind_add_stmt (bind, x);
953ff289 8697
355a7673 8698 lower_omp (gimple_omp_body_ptr (stmt), ctx);
726a989a
RB
8699 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8700 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8701 gimple_omp_set_body (stmt, NULL);
953ff289 8702
e79983f4 8703 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END), 0);
726a989a 8704 gimple_bind_add_stmt (bind, x);
777f7f9a 8705
726a989a 8706 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
777f7f9a 8707
953ff289 8708 pop_gimplify_context (bind);
50674e96 8709
726a989a
RB
8710 gimple_bind_append_vars (bind, ctx->block_vars);
8711 BLOCK_VARS (block) = gimple_bind_vars (bind);
953ff289
DN
8712}
8713
953ff289 8714
726a989a 8715/* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
953ff289
DN
8716   substitution of a couple of function calls.  But in the NAMED case,
8717   it requires that the languages coordinate on a symbol name.  It is therefore
8718 best put here in common code. */
8719
8720static GTY((param1_is (tree), param2_is (tree)))
8721 splay_tree critical_name_mutexes;
8722
8723static void
726a989a 8724lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 8725{
726a989a
RB
8726 tree block;
8727 tree name, lock, unlock;
8728 gimple stmt = gsi_stmt (*gsi_p), bind;
db3927fb 8729 location_t loc = gimple_location (stmt);
726a989a 8730 gimple_seq tbody;
953ff289 8731
726a989a 8732 name = gimple_omp_critical_name (stmt);
953ff289
DN
8733 if (name)
8734 {
5039610b 8735 tree decl;
953ff289
DN
8736 splay_tree_node n;
8737
8738 if (!critical_name_mutexes)
8739 critical_name_mutexes
a9429e29
LB
8740 = splay_tree_new_ggc (splay_tree_compare_pointers,
8741 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_s,
8742 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_node_s);
953ff289
DN
8743
8744 n = splay_tree_lookup (critical_name_mutexes, (splay_tree_key) name);
8745 if (n == NULL)
8746 {
8747 char *new_str;
8748
8749 decl = create_tmp_var_raw (ptr_type_node, NULL);
8750
8751 new_str = ACONCAT ((".gomp_critical_user_",
8752 IDENTIFIER_POINTER (name), NULL));
8753 DECL_NAME (decl) = get_identifier (new_str);
8754 TREE_PUBLIC (decl) = 1;
8755 TREE_STATIC (decl) = 1;
8756 DECL_COMMON (decl) = 1;
8757 DECL_ARTIFICIAL (decl) = 1;
8758 DECL_IGNORED_P (decl) = 1;
8a4a83ed 8759 varpool_finalize_decl (decl);
953ff289
DN
8760
8761 splay_tree_insert (critical_name_mutexes, (splay_tree_key) name,
8762 (splay_tree_value) decl);
8763 }
8764 else
8765 decl = (tree) n->value;
8766
e79983f4 8767 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
db3927fb 8768 lock = build_call_expr_loc (loc, lock, 1, build_fold_addr_expr_loc (loc, decl));
953ff289 8769
e79983f4 8770 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
db3927fb
AH
8771 unlock = build_call_expr_loc (loc, unlock, 1,
8772 build_fold_addr_expr_loc (loc, decl));
953ff289
DN
8773 }
8774 else
8775 {
e79983f4 8776 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
db3927fb 8777 lock = build_call_expr_loc (loc, lock, 0);
953ff289 8778
e79983f4 8779 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
db3927fb 8780 unlock = build_call_expr_loc (loc, unlock, 0);
953ff289
DN
8781 }
8782
45852dcc 8783 push_gimplify_context ();
953ff289
DN
8784
8785 block = make_node (BLOCK);
355a7673
MM
8786 bind = gimple_build_bind (NULL, NULL, block);
8787 gsi_replace (gsi_p, bind, true);
8788 gimple_bind_add_stmt (bind, stmt);
777f7f9a 8789
726a989a
RB
8790 tbody = gimple_bind_body (bind);
8791 gimplify_and_add (lock, &tbody);
8792 gimple_bind_set_body (bind, tbody);
953ff289 8793
355a7673 8794 lower_omp (gimple_omp_body_ptr (stmt), ctx);
726a989a
RB
8795 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8796 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8797 gimple_omp_set_body (stmt, NULL);
953ff289 8798
726a989a
RB
8799 tbody = gimple_bind_body (bind);
8800 gimplify_and_add (unlock, &tbody);
8801 gimple_bind_set_body (bind, tbody);
777f7f9a 8802
726a989a 8803 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
953ff289
DN
8804
8805 pop_gimplify_context (bind);
726a989a
RB
8806 gimple_bind_append_vars (bind, ctx->block_vars);
8807 BLOCK_VARS (block) = gimple_bind_vars (bind);
50674e96
DN
8808}
8809
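/* For illustration: "#pragma omp critical (foo)" lowers roughly to

	GOMP_critical_name_start (&.gomp_critical_user_foo);
	BODY;
	GOMP_critical_name_end (&.gomp_critical_user_foo);

   where .gomp_critical_user_foo is the common symbol created above, so
   every translation unit naming the same critical region shares one
   mutex; the unnamed form calls GOMP_critical_start/GOMP_critical_end
   with no argument (schematic).  */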
8810
8811/* A subroutine of lower_omp_for. Generate code to emit the predicate
8812 for a lastprivate clause. Given a loop control predicate of (V
8813 cond N2), we gate the clause on (!(V cond N2)). The lowered form
3d55c64b
JJ
8814   is appended to *DLIST; iterator initialization is appended to
8815 *BODY_P. */
50674e96
DN
8816
8817static void
726a989a
RB
8818lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
8819 gimple_seq *dlist, struct omp_context *ctx)
50674e96 8820{
726a989a 8821 tree clauses, cond, vinit;
50674e96 8822 enum tree_code cond_code;
726a989a 8823 gimple_seq stmts;
b8698a0f 8824
a68ab351 8825 cond_code = fd->loop.cond_code;
50674e96
DN
8826 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
8827
8828 /* When possible, use a strict equality expression. This can let VRP
8829 type optimizations deduce the value and remove a copy. */
9541ffee 8830 if (tree_fits_shwi_p (fd->loop.step))
50674e96 8831 {
eb1ce453 8832 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
50674e96
DN
8833 if (step == 1 || step == -1)
8834 cond_code = EQ_EXPR;
8835 }
8836
a68ab351 8837 cond = build2 (cond_code, boolean_type_node, fd->loop.v, fd->loop.n2);
50674e96 8838
726a989a 8839 clauses = gimple_omp_for_clauses (fd->for_stmt);
3d55c64b
JJ
8840 stmts = NULL;
8841 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
726a989a 8842 if (!gimple_seq_empty_p (stmts))
3d55c64b 8843 {
726a989a 8844 gimple_seq_add_seq (&stmts, *dlist);
a68ab351 8845 *dlist = stmts;
3d55c64b
JJ
8846
8847 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
a68ab351 8848 vinit = fd->loop.n1;
3d55c64b 8849 if (cond_code == EQ_EXPR
9541ffee 8850 && tree_fits_shwi_p (fd->loop.n2)
a68ab351
JJ
8851 && ! integer_zerop (fd->loop.n2))
8852 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
74bf76ed
JJ
8853 else
8854 vinit = unshare_expr (vinit);
3d55c64b
JJ
8855
8856 /* Initialize the iterator variable, so that threads that don't execute
8857 any iterations don't execute the lastprivate clauses by accident. */
726a989a 8858 gimplify_assign (fd->loop.v, vinit, body_p);
3d55c64b 8859 }
50674e96
DN
8860}
8861
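/* For illustration (schematic): given "for (V = 0; V < 10; V++)" with a
   unit step, the gate built below becomes the strict test "V == 10",
   which VRP can fold, and V is pre-initialized to 0 so that a thread
   which receives no iterations never satisfies the predicate and never
   runs the lastprivate copy-out by accident.  */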
8862
8863/* Lower code for an OpenMP loop directive. */
8864
8865static void
726a989a 8866lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
50674e96 8867{
726a989a 8868 tree *rhs_p, block;
acf0174b 8869 struct omp_for_data fd, *fdp = NULL;
726a989a 8870 gimple stmt = gsi_stmt (*gsi_p), new_stmt;
0f900dfa 8871 gimple_seq omp_for_body, body, dlist;
726a989a 8872 size_t i;
50674e96 8873
45852dcc 8874 push_gimplify_context ();
50674e96 8875
355a7673 8876 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
50674e96 8877
b357f682 8878 block = make_node (BLOCK);
726a989a 8879 new_stmt = gimple_build_bind (NULL, NULL, block);
355a7673
MM
8880  /* Replace at gsi right away, so that 'stmt' is no longer a member
8881     of a sequence, as we're going to add it to a different
8882     one below.  */
8883 gsi_replace (gsi_p, new_stmt, true);
b357f682 8884
50674e96
DN
8885 /* Move declaration of temporaries in the loop body before we make
8886 it go away. */
726a989a
RB
8887 omp_for_body = gimple_omp_body (stmt);
8888 if (!gimple_seq_empty_p (omp_for_body)
8889 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
8890 {
8891 tree vars = gimple_bind_vars (gimple_seq_first_stmt (omp_for_body));
8892 gimple_bind_append_vars (new_stmt, vars);
8893 }
50674e96 8894
acf0174b
JJ
8895 if (gimple_omp_for_combined_into_p (stmt))
8896 {
8897 extract_omp_for_data (stmt, &fd, NULL);
8898 fdp = &fd;
8899
8900 /* We need two temporaries with fd.loop.v type (istart/iend)
8901 and then (fd.collapse - 1) temporaries with the same
8902 type for count2 ... countN-1 vars if not constant. */
8903 size_t count = 2;
8904 tree type = fd.iter_type;
8905 if (fd.collapse > 1
8906 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
8907 count += fd.collapse - 1;
8908 bool parallel_for = gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR;
8909 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
8910 tree clauses = *pc;
8911 if (parallel_for)
8912 outerc
8913 = find_omp_clause (gimple_omp_parallel_clauses (ctx->outer->stmt),
8914 OMP_CLAUSE__LOOPTEMP_);
8915 for (i = 0; i < count; i++)
8916 {
8917 tree temp;
8918 if (parallel_for)
8919 {
8920 gcc_assert (outerc);
8921 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
8922 outerc = find_omp_clause (OMP_CLAUSE_CHAIN (outerc),
8923 OMP_CLAUSE__LOOPTEMP_);
8924 }
8925 else
8926 temp = create_tmp_var (type, NULL);
8927 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
8928 OMP_CLAUSE_DECL (*pc) = temp;
8929 pc = &OMP_CLAUSE_CHAIN (*pc);
8930 }
8931 *pc = clauses;
8932 }
8933
726a989a 8934 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
50674e96 8935 dlist = NULL;
726a989a 8936 body = NULL;
acf0174b
JJ
8937 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
8938 fdp);
726a989a 8939 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
50674e96 8940
74bf76ed
JJ
8941 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8942
50674e96
DN
8943 /* Lower the header expressions. At this point, we can assume that
8944 the header is of the form:
8945
8946 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
8947
8948 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
8949 using the .omp_data_s mapping, if needed. */
726a989a 8950 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
a68ab351 8951 {
726a989a 8952 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
a68ab351 8953 if (!is_gimple_min_invariant (*rhs_p))
726a989a 8954 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
a68ab351 8955
726a989a 8956 rhs_p = gimple_omp_for_final_ptr (stmt, i);
a68ab351 8957 if (!is_gimple_min_invariant (*rhs_p))
726a989a 8958 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
a68ab351 8959
726a989a 8960 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
a68ab351 8961 if (!is_gimple_min_invariant (*rhs_p))
726a989a 8962 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
a68ab351 8963 }
50674e96
DN
8964
8965 /* Once lowered, extract the bounds and clauses. */
a68ab351 8966 extract_omp_for_data (stmt, &fd, NULL);
50674e96 8967
726a989a 8968 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
50674e96 8969
726a989a
RB
8970 gimple_seq_add_stmt (&body, stmt);
8971 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
777f7f9a 8972
726a989a
RB
8973 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
8974 fd.loop.v));
777f7f9a 8975
50674e96 8976 /* After the loop, add exit clauses. */
726a989a 8977 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
acf0174b
JJ
8978
8979 if (ctx->cancellable)
8980 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
8981
726a989a 8982 gimple_seq_add_seq (&body, dlist);
50674e96 8983
726a989a 8984 body = maybe_catch_exception (body);
4a31b7ee 8985
777f7f9a 8986 /* Region exit marker goes at the end of the loop body. */
726a989a 8987 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
acf0174b 8988 maybe_add_implicit_barrier_cancel (ctx, &body);
b357f682 8989 pop_gimplify_context (new_stmt);
726a989a
RB
8990
8991 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8992 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
b357f682
JJ
8993 if (BLOCK_VARS (block))
8994 TREE_USED (block) = 1;
50674e96 8995
726a989a
RB
8996 gimple_bind_set_body (new_stmt, body);
8997 gimple_omp_set_body (stmt, NULL);
8998 gimple_omp_for_set_pre_body (stmt, NULL);
953ff289
DN
8999}
9000
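/* For illustration, the body assembled above lays a loop directive out
   roughly as (schematic):

	<ilist: privatization setup>
	<pre-body and gimplified bound/step temporaries>
	<lastprivate iterator pre-initialization>
	GIMPLE_OMP_FOR
	<loop body>
	GIMPLE_OMP_CONTINUE (V, V)
	<reductions>  <dlist: lastprivate/destructors>
	GIMPLE_OMP_RETURN (nowait iff fd.have_nowait)  */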
b8698a0f 9001/* Callback for walk_stmts. Check if the current statement only contains
726a989a 9002   GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */
69f1837b
JJ
9003
9004static tree
726a989a
RB
9005check_combined_parallel (gimple_stmt_iterator *gsi_p,
9006 bool *handled_ops_p,
9007 struct walk_stmt_info *wi)
69f1837b 9008{
d3bfe4de 9009 int *info = (int *) wi->info;
726a989a 9010 gimple stmt = gsi_stmt (*gsi_p);
69f1837b 9011
726a989a
RB
9012 *handled_ops_p = true;
9013 switch (gimple_code (stmt))
69f1837b 9014 {
726a989a
RB
9015 WALK_SUBSTMTS;
9016
9017 case GIMPLE_OMP_FOR:
9018 case GIMPLE_OMP_SECTIONS:
69f1837b
JJ
9019 *info = *info == 0 ? 1 : -1;
9020 break;
9021 default:
9022 *info = -1;
9023 break;
9024 }
9025 return NULL;
9026}
50674e96 9027
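/* For illustration: the walk above feeds gimple_omp_parallel_set_combined_p,
   so that e.g.

	#pragma omp parallel
	#pragma omp for
	for (...) ...

   (a parallel whose body is exactly one worksharing construct, ws_num == 1)
   can later be expanded through the combined GOMP_parallel_loop_* or
   GOMP_parallel_sections entry points instead of a separate parallel plus
   worksharing setup (schematic).  */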
a68ab351
JJ
9028struct omp_taskcopy_context
9029{
9030 /* This field must be at the beginning, as we do "inheritance": Some
9031     callback functions for tree-inline.c (e.g., task_copyfn_copy_decl)
9032     receive a copy_body_data pointer that is up-cast to an
9033     omp_taskcopy_context pointer.  */
9034 copy_body_data cb;
9035 omp_context *ctx;
9036};
9037
9038static tree
9039task_copyfn_copy_decl (tree var, copy_body_data *cb)
9040{
9041 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
9042
9043 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
9044 return create_tmp_var (TREE_TYPE (var), NULL);
9045
9046 return var;
9047}
9048
9049static tree
9050task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
9051{
9052 tree name, new_fields = NULL, type, f;
9053
9054 type = lang_hooks.types.make_type (RECORD_TYPE);
9055 name = DECL_NAME (TYPE_NAME (orig_type));
c2255bc4
AH
9056 name = build_decl (gimple_location (tcctx->ctx->stmt),
9057 TYPE_DECL, name, type);
a68ab351
JJ
9058 TYPE_NAME (type) = name;
9059
9060 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
9061 {
9062 tree new_f = copy_node (f);
9063 DECL_CONTEXT (new_f) = type;
9064 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
9065 TREE_CHAIN (new_f) = new_fields;
726a989a
RB
9066 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9067 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9068 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
9069 &tcctx->cb, NULL);
a68ab351
JJ
9070 new_fields = new_f;
9071 *pointer_map_insert (tcctx->cb.decl_map, f) = new_f;
9072 }
9073 TYPE_FIELDS (type) = nreverse (new_fields);
9074 layout_type (type);
9075 return type;
9076}
9077
9078/* Create task copyfn. */
9079
9080static void
726a989a 9081create_task_copyfn (gimple task_stmt, omp_context *ctx)
a68ab351
JJ
9082{
9083 struct function *child_cfun;
9084 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
9085 tree record_type, srecord_type, bind, list;
9086 bool record_needs_remap = false, srecord_needs_remap = false;
9087 splay_tree_node n;
9088 struct omp_taskcopy_context tcctx;
db3927fb 9089 location_t loc = gimple_location (task_stmt);
a68ab351 9090
726a989a 9091 child_fn = gimple_omp_task_copy_fn (task_stmt);
a68ab351
JJ
9092 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
9093 gcc_assert (child_cfun->cfg == NULL);
a68ab351
JJ
9094 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
9095
9096 /* Reset DECL_CONTEXT on function arguments. */
910ad8de 9097 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
a68ab351
JJ
9098 DECL_CONTEXT (t) = child_fn;
9099
9100 /* Populate the function. */
45852dcc 9101 push_gimplify_context ();
af16bc76 9102 push_cfun (child_cfun);
a68ab351
JJ
9103
9104 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
9105 TREE_SIDE_EFFECTS (bind) = 1;
9106 list = NULL;
9107 DECL_SAVED_TREE (child_fn) = bind;
726a989a 9108 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
a68ab351
JJ
9109
9110 /* Remap src and dst argument types if needed. */
9111 record_type = ctx->record_type;
9112 srecord_type = ctx->srecord_type;
910ad8de 9113 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
a68ab351
JJ
9114 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9115 {
9116 record_needs_remap = true;
9117 break;
9118 }
910ad8de 9119 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
a68ab351
JJ
9120 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9121 {
9122 srecord_needs_remap = true;
9123 break;
9124 }
9125
9126 if (record_needs_remap || srecord_needs_remap)
9127 {
9128 memset (&tcctx, '\0', sizeof (tcctx));
9129 tcctx.cb.src_fn = ctx->cb.src_fn;
9130 tcctx.cb.dst_fn = child_fn;
fe660d7b
MJ
9131 tcctx.cb.src_node = cgraph_get_node (tcctx.cb.src_fn);
9132 gcc_checking_assert (tcctx.cb.src_node);
a68ab351
JJ
9133 tcctx.cb.dst_node = tcctx.cb.src_node;
9134 tcctx.cb.src_cfun = ctx->cb.src_cfun;
9135 tcctx.cb.copy_decl = task_copyfn_copy_decl;
1d65f45c 9136 tcctx.cb.eh_lp_nr = 0;
a68ab351
JJ
9137 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
9138 tcctx.cb.decl_map = pointer_map_create ();
9139 tcctx.ctx = ctx;
9140
9141 if (record_needs_remap)
9142 record_type = task_copyfn_remap_type (&tcctx, record_type);
9143 if (srecord_needs_remap)
9144 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
9145 }
9146 else
9147 tcctx.cb.decl_map = NULL;
9148
a68ab351
JJ
9149 arg = DECL_ARGUMENTS (child_fn);
9150 TREE_TYPE (arg) = build_pointer_type (record_type);
910ad8de 9151 sarg = DECL_CHAIN (arg);
a68ab351
JJ
9152 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
9153
9154 /* First pass: initialize temporaries used in record_type and srecord_type
9155 sizes and field offsets. */
9156 if (tcctx.cb.decl_map)
726a989a 9157 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
9158 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9159 {
9160 tree *p;
9161
9162 decl = OMP_CLAUSE_DECL (c);
9163 p = (tree *) pointer_map_contains (tcctx.cb.decl_map, decl);
9164 if (p == NULL)
9165 continue;
9166 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9167 sf = (tree) n->value;
9168 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9169 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9170 src = omp_build_component_ref (src, sf);
726a989a 9171 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
a68ab351
JJ
9172 append_to_statement_list (t, &list);
9173 }
9174
9175 /* Second pass: copy shared var pointers and copy construct non-VLA
9176 firstprivate vars. */
726a989a 9177 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
9178 switch (OMP_CLAUSE_CODE (c))
9179 {
9180 case OMP_CLAUSE_SHARED:
9181 decl = OMP_CLAUSE_DECL (c);
9182 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9183 if (n == NULL)
9184 break;
9185 f = (tree) n->value;
9186 if (tcctx.cb.decl_map)
9187 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9188 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9189 sf = (tree) n->value;
9190 if (tcctx.cb.decl_map)
9191 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9192 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9193 src = omp_build_component_ref (src, sf);
70f34814 9194 dst = build_simple_mem_ref_loc (loc, arg);
a9a58711 9195 dst = omp_build_component_ref (dst, f);
726a989a 9196 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
a68ab351
JJ
9197 append_to_statement_list (t, &list);
9198 break;
9199 case OMP_CLAUSE_FIRSTPRIVATE:
9200 decl = OMP_CLAUSE_DECL (c);
9201 if (is_variable_sized (decl))
9202 break;
9203 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9204 if (n == NULL)
9205 break;
9206 f = (tree) n->value;
9207 if (tcctx.cb.decl_map)
9208 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9209 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9210 if (n != NULL)
9211 {
9212 sf = (tree) n->value;
9213 if (tcctx.cb.decl_map)
9214 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9215 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9216 src = omp_build_component_ref (src, sf);
a68ab351 9217 if (use_pointer_for_field (decl, NULL) || is_reference (decl))
70f34814 9218 src = build_simple_mem_ref_loc (loc, src);
a68ab351
JJ
9219 }
9220 else
9221 src = decl;
70f34814 9222 dst = build_simple_mem_ref_loc (loc, arg);
a9a58711 9223 dst = omp_build_component_ref (dst, f);
a68ab351
JJ
9224 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9225 append_to_statement_list (t, &list);
9226 break;
9227 case OMP_CLAUSE_PRIVATE:
9228 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
9229 break;
9230 decl = OMP_CLAUSE_DECL (c);
9231 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9232 f = (tree) n->value;
9233 if (tcctx.cb.decl_map)
9234 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9235 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9236 if (n != NULL)
9237 {
9238 sf = (tree) n->value;
9239 if (tcctx.cb.decl_map)
9240 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9241 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9242 src = omp_build_component_ref (src, sf);
a68ab351 9243 if (use_pointer_for_field (decl, NULL))
70f34814 9244 src = build_simple_mem_ref_loc (loc, src);
a68ab351
JJ
9245 }
9246 else
9247 src = decl;
70f34814 9248 dst = build_simple_mem_ref_loc (loc, arg);
a9a58711 9249 dst = omp_build_component_ref (dst, f);
726a989a 9250 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
a68ab351
JJ
9251 append_to_statement_list (t, &list);
9252 break;
9253 default:
9254 break;
9255 }
9256
9257 /* Last pass: handle VLA firstprivates. */
9258 if (tcctx.cb.decl_map)
726a989a 9259 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
9260 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9261 {
9262 tree ind, ptr, df;
9263
9264 decl = OMP_CLAUSE_DECL (c);
9265 if (!is_variable_sized (decl))
9266 continue;
9267 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9268 if (n == NULL)
9269 continue;
9270 f = (tree) n->value;
9271 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9272 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
9273 ind = DECL_VALUE_EXPR (decl);
9274 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
9275 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
9276 n = splay_tree_lookup (ctx->sfield_map,
9277 (splay_tree_key) TREE_OPERAND (ind, 0));
9278 sf = (tree) n->value;
9279 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9280 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9281 src = omp_build_component_ref (src, sf);
70f34814
RG
9282 src = build_simple_mem_ref_loc (loc, src);
9283 dst = build_simple_mem_ref_loc (loc, arg);
a9a58711 9284 dst = omp_build_component_ref (dst, f);
a68ab351
JJ
9285 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9286 append_to_statement_list (t, &list);
9287 n = splay_tree_lookup (ctx->field_map,
9288 (splay_tree_key) TREE_OPERAND (ind, 0));
9289 df = (tree) n->value;
9290 df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
70f34814 9291 ptr = build_simple_mem_ref_loc (loc, arg);
a9a58711 9292 ptr = omp_build_component_ref (ptr, df);
726a989a 9293 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
db3927fb 9294 build_fold_addr_expr_loc (loc, dst));
a68ab351
JJ
9295 append_to_statement_list (t, &list);
9296 }
9297
9298 t = build1 (RETURN_EXPR, void_type_node, NULL);
9299 append_to_statement_list (t, &list);
9300
9301 if (tcctx.cb.decl_map)
9302 pointer_map_destroy (tcctx.cb.decl_map);
9303 pop_gimplify_context (NULL);
9304 BIND_EXPR_BODY (bind) = list;
9305 pop_cfun ();
a68ab351
JJ
9306}
9307
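/* For illustration (schematic; type and field names simplified): for
   "#pragma omp task firstprivate (v)" the copyfn built above is roughly

	void task_copyfn (struct .omp_data_t *dst, struct .omp_data_s *src)
	{
	  dst->v = src->v;	/* or the copy constructor in C++ */
	}

   with the passes above also wiring up shared-variable pointers and
   VLA firstprivates whose descriptors live in the sender record.  */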
acf0174b
JJ
9308static void
9309lower_depend_clauses (gimple stmt, gimple_seq *iseq, gimple_seq *oseq)
9310{
9311 tree c, clauses;
9312 gimple g;
9313 size_t n_in = 0, n_out = 0, idx = 2, i;
9314
9315 clauses = find_omp_clause (gimple_omp_task_clauses (stmt),
9316 OMP_CLAUSE_DEPEND);
9317 gcc_assert (clauses);
9318 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9319 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
9320 switch (OMP_CLAUSE_DEPEND_KIND (c))
9321 {
9322 case OMP_CLAUSE_DEPEND_IN:
9323 n_in++;
9324 break;
9325 case OMP_CLAUSE_DEPEND_OUT:
9326 case OMP_CLAUSE_DEPEND_INOUT:
9327 n_out++;
9328 break;
9329 default:
9330 gcc_unreachable ();
9331 }
9332 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
9333 tree array = create_tmp_var (type, NULL);
9334 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
9335 NULL_TREE);
9336 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
9337 gimple_seq_add_stmt (iseq, g);
9338 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
9339 NULL_TREE);
9340 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
9341 gimple_seq_add_stmt (iseq, g);
9342 for (i = 0; i < 2; i++)
9343 {
9344 if ((i ? n_in : n_out) == 0)
9345 continue;
9346 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9347 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9348 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
9349 {
9350 tree t = OMP_CLAUSE_DECL (c);
9351 t = fold_convert (ptr_type_node, t);
9352 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
9353 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
9354 NULL_TREE, NULL_TREE);
9355 g = gimple_build_assign (r, t);
9356 gimple_seq_add_stmt (iseq, g);
9357 }
9358 }
9359 tree *p = gimple_omp_task_clauses_ptr (stmt);
9360 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
9361 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9362 OMP_CLAUSE_CHAIN (c) = *p;
9363 *p = c;
9364 tree clobber = build_constructor (type, NULL);
9365 TREE_THIS_VOLATILE (clobber) = 1;
9366 g = gimple_build_assign (array, clobber);
9367 gimple_seq_add_stmt (oseq, g);
9368}
9369
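/* For illustration: for "#pragma omp task depend(out: a) depend(in: b, c)"
   the array built above is laid out as (schematic)

	depend[0] = 3;		/* total number of depend operands */
	depend[1] = 1;		/* out/inout operands, which come first */
	depend[2] = &a;
	depend[3] = &b;
	depend[4] = &c;

   its address is chained into the clause list for the runtime, and the
   trailing clobber marks the array dead after the task is queued.  */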
726a989a
RB
9370/* Lower the OpenMP parallel or task directive in the current statement
9371 in GSI_P. CTX holds context information for the directive. */
50674e96
DN
9372
9373static void
726a989a 9374lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
50674e96 9375{
726a989a
RB
9376 tree clauses;
9377 tree child_fn, t;
9378 gimple stmt = gsi_stmt (*gsi_p);
acf0174b
JJ
9379 gimple par_bind, bind, dep_bind = NULL;
9380 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
db3927fb 9381 location_t loc = gimple_location (stmt);
50674e96 9382
726a989a
RB
9383 clauses = gimple_omp_taskreg_clauses (stmt);
9384 par_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
9385 par_body = gimple_bind_body (par_bind);
50674e96 9386 child_fn = ctx->cb.dst_fn;
726a989a
RB
9387 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
9388 && !gimple_omp_parallel_combined_p (stmt))
69f1837b
JJ
9389 {
9390 struct walk_stmt_info wi;
9391 int ws_num = 0;
9392
9393 memset (&wi, 0, sizeof (wi));
69f1837b
JJ
9394 wi.info = &ws_num;
9395 wi.val_only = true;
726a989a 9396 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
69f1837b 9397 if (ws_num == 1)
726a989a 9398 gimple_omp_parallel_set_combined_p (stmt, true);
69f1837b 9399 }
acf0174b
JJ
9400 gimple_seq dep_ilist = NULL;
9401 gimple_seq dep_olist = NULL;
9402 if (gimple_code (stmt) == GIMPLE_OMP_TASK
9403 && find_omp_clause (clauses, OMP_CLAUSE_DEPEND))
9404 {
45852dcc 9405 push_gimplify_context ();
acf0174b
JJ
9406 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9407 lower_depend_clauses (stmt, &dep_ilist, &dep_olist);
9408 }
9409
a68ab351
JJ
9410 if (ctx->srecord_type)
9411 create_task_copyfn (stmt, ctx);
50674e96 9412
45852dcc 9413 push_gimplify_context ();
50674e96 9414
726a989a
RB
9415 par_olist = NULL;
9416 par_ilist = NULL;
acf0174b
JJ
9417 par_rlist = NULL;
9418 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
355a7673 9419 lower_omp (&par_body, ctx);
726a989a 9420 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
acf0174b 9421 lower_reduction_clauses (clauses, &par_rlist, ctx);
50674e96
DN
9422
9423 /* Declare all the variables created by mapping and the variables
9424 declared in the scope of the parallel body. */
9425 record_vars_into (ctx->block_vars, child_fn);
726a989a 9426 record_vars_into (gimple_bind_vars (par_bind), child_fn);
50674e96
DN
9427
9428 if (ctx->record_type)
9429 {
a68ab351
JJ
9430 ctx->sender_decl
9431 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
9432 : ctx->record_type, ".omp_data_o");
cd3f04c8 9433 DECL_NAMELESS (ctx->sender_decl) = 1;
628c189e 9434 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
726a989a 9435 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
50674e96
DN
9436 }
9437
726a989a
RB
9438 olist = NULL;
9439 ilist = NULL;
50674e96
DN
9440 lower_send_clauses (clauses, &ilist, &olist, ctx);
9441 lower_send_shared_vars (&ilist, &olist, ctx);
9442
acf0174b
JJ
9443 if (ctx->record_type)
9444 {
9445 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
9446 TREE_THIS_VOLATILE (clobber) = 1;
9447 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9448 clobber));
9449 }
9450
50674e96 9451 /* Once all the expansions are done, sequence all the different
726a989a 9452 fragments inside gimple_omp_body. */
50674e96 9453
726a989a 9454 new_body = NULL;
50674e96
DN
9455
9456 if (ctx->record_type)
9457 {
db3927fb 9458 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
018b899b 9459 /* fixup_child_record_type might have changed receiver_decl's type. */
db3927fb 9460 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
726a989a
RB
9461 gimple_seq_add_stmt (&new_body,
9462 gimple_build_assign (ctx->receiver_decl, t));
50674e96
DN
9463 }
9464
726a989a
RB
9465 gimple_seq_add_seq (&new_body, par_ilist);
9466 gimple_seq_add_seq (&new_body, par_body);
acf0174b
JJ
9467 gimple_seq_add_seq (&new_body, par_rlist);
9468 if (ctx->cancellable)
9469 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
726a989a
RB
9470 gimple_seq_add_seq (&new_body, par_olist);
9471 new_body = maybe_catch_exception (new_body);
9472 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9473 gimple_omp_set_body (stmt, new_body);
50674e96 9474
726a989a 9475 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
acf0174b
JJ
9476 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
9477 gimple_bind_add_seq (bind, ilist);
9478 gimple_bind_add_stmt (bind, stmt);
9479 gimple_bind_add_seq (bind, olist);
9480
9481 pop_gimplify_context (NULL);
9482
9483 if (dep_bind)
9484 {
9485 gimple_bind_add_seq (dep_bind, dep_ilist);
9486 gimple_bind_add_stmt (dep_bind, bind);
9487 gimple_bind_add_seq (dep_bind, dep_olist);
9488 pop_gimplify_context (dep_bind);
9489 }
9490}
9491
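/* For illustration (schematic; whether a shared variable is sent by
   reference depends on use_pointer_for_field): for
   "#pragma omp parallel shared (x) firstprivate (y)" the directive is
   bracketed roughly by

	.omp_data_o.x = &x;		/* ilist: marshal shared variable */
	.omp_data_o.y = y;		/* ilist: copy in firstprivate value */
	GIMPLE_OMP_PARALLEL (child fn, data_arg = &.omp_data_o)
	.omp_data_o = {CLOBBER};	/* olist: sender record is dead */

   while the lowered child body first rebinds the receiver:

	.omp_data_i = (struct .omp_data_s *) &.omp_data_o;  */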
9492/* Lower the OpenMP target directive in the current statement
9493 in GSI_P. CTX holds context information for the directive. */
9494
9495static void
9496lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9497{
9498 tree clauses;
9499 tree child_fn, t, c;
9500 gimple stmt = gsi_stmt (*gsi_p);
9501 gimple tgt_bind = NULL, bind;
9502 gimple_seq tgt_body = NULL, olist, ilist, new_body;
acf0174b
JJ
9503 location_t loc = gimple_location (stmt);
9504 int kind = gimple_omp_target_kind (stmt);
9505 unsigned int map_cnt = 0;
9506
9507 clauses = gimple_omp_target_clauses (stmt);
9508 if (kind == GF_OMP_TARGET_KIND_REGION)
9509 {
9510 tgt_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
9511 tgt_body = gimple_bind_body (tgt_bind);
9512 }
9513 else if (kind == GF_OMP_TARGET_KIND_DATA)
9514 tgt_body = gimple_omp_body (stmt);
9515 child_fn = ctx->cb.dst_fn;
9516
45852dcc 9517 push_gimplify_context ();
acf0174b
JJ
9518
9519 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9520 switch (OMP_CLAUSE_CODE (c))
9521 {
9522 tree var, x;
9523
9524 default:
9525 break;
9526 case OMP_CLAUSE_MAP:
9527 case OMP_CLAUSE_TO:
9528 case OMP_CLAUSE_FROM:
9529 var = OMP_CLAUSE_DECL (c);
9530 if (!DECL_P (var))
9531 {
9532 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
9533 || !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9534 map_cnt++;
9535 continue;
9536 }
9537
9538 if (DECL_SIZE (var)
9539 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9540 {
9541 tree var2 = DECL_VALUE_EXPR (var);
9542 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9543 var2 = TREE_OPERAND (var2, 0);
9544 gcc_assert (DECL_P (var2));
9545 var = var2;
9546 }
9547
9548 if (!maybe_lookup_field (var, ctx))
9549 continue;
9550
9551 if (kind == GF_OMP_TARGET_KIND_REGION)
9552 {
9553 x = build_receiver_ref (var, true, ctx);
9554 tree new_var = lookup_decl (var, ctx);
9555 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9556 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
9557 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9558 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9559 x = build_simple_mem_ref (x);
9560 SET_DECL_VALUE_EXPR (new_var, x);
9561 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9562 }
9563 map_cnt++;
9564 }
9565
9566 if (kind == GF_OMP_TARGET_KIND_REGION)
9567 {
9568 target_nesting_level++;
9569 lower_omp (&tgt_body, ctx);
9570 target_nesting_level--;
9571 }
9572 else if (kind == GF_OMP_TARGET_KIND_DATA)
9573 lower_omp (&tgt_body, ctx);
9574
9575 if (kind == GF_OMP_TARGET_KIND_REGION)
9576 {
9577 /* Declare all the variables created by mapping and the variables
9578 declared in the scope of the target body. */
9579 record_vars_into (ctx->block_vars, child_fn);
9580 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
9581 }
9582
9583 olist = NULL;
9584 ilist = NULL;
9585 if (ctx->record_type)
9586 {
9587 ctx->sender_decl
9588 = create_tmp_var (ctx->record_type, ".omp_data_arr");
9589 DECL_NAMELESS (ctx->sender_decl) = 1;
9590 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9591 t = make_tree_vec (3);
9592 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
9593 TREE_VEC_ELT (t, 1)
9594 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
9595 ".omp_data_sizes");
9596 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
9597 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
9598 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
9599 TREE_VEC_ELT (t, 2)
9600 = create_tmp_var (build_array_type_nelts (unsigned_char_type_node,
9601 map_cnt),
9602 ".omp_data_kinds");
9603 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
9604 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
9605 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
9606 gimple_omp_target_set_data_arg (stmt, t);
9607
9608 vec<constructor_elt, va_gc> *vsize;
9609 vec<constructor_elt, va_gc> *vkind;
9610 vec_alloc (vsize, map_cnt);
9611 vec_alloc (vkind, map_cnt);
      unsigned int map_idx = 0;

      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree ovar, nc;

	  default:
	    break;
	  case OMP_CLAUSE_MAP:
	  case OMP_CLAUSE_TO:
	  case OMP_CLAUSE_FROM:
	    nc = c;
	    ovar = OMP_CLAUSE_DECL (c);
	    if (!DECL_P (ovar))
	      {
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		  {
		    gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
					 == get_base_address (ovar));
		    nc = OMP_CLAUSE_CHAIN (c);
		    ovar = OMP_CLAUSE_DECL (nc);
		  }
		else
		  {
		    tree x = build_sender_ref (ovar, ctx);
		    tree v
		      = build_fold_addr_expr_with_type (ovar, ptr_type_node);
		    gimplify_assign (x, v, &ilist);
		    nc = NULL_TREE;
		  }
	      }
	    else
	      {
		if (DECL_SIZE (ovar)
		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
		  {
		    tree ovar2 = DECL_VALUE_EXPR (ovar);
		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
		    ovar2 = TREE_OPERAND (ovar2, 0);
		    gcc_assert (DECL_P (ovar2));
		    ovar = ovar2;
		  }
		if (!maybe_lookup_field (ovar, ctx))
		  continue;
	      }

	    if (nc)
	      {
		tree var = lookup_decl_in_outer_ctx (ovar, ctx);
		tree x = build_sender_ref (ovar, ctx);
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		    && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
		  {
		    gcc_assert (kind == GF_OMP_TARGET_KIND_REGION);
		    tree avar
		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)), NULL);
		    mark_addressable (avar);
		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
		    avar = build_fold_addr_expr (avar);
		    gimplify_assign (x, avar, &ilist);
		  }
		else if (is_gimple_reg (var))
		  {
		    gcc_assert (kind == GF_OMP_TARGET_KIND_REGION);
		    tree avar = create_tmp_var (TREE_TYPE (var), NULL);
		    mark_addressable (avar);
		    if (OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_ALLOC
			&& OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_FROM)
		      gimplify_assign (avar, var, &ilist);
		    avar = build_fold_addr_expr (avar);
		    gimplify_assign (x, avar, &ilist);
		    if ((OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_FROM
			 || OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_TOFROM)
			&& !TYPE_READONLY (TREE_TYPE (var)))
		      {
			x = build_sender_ref (ovar, ctx);
			x = build_simple_mem_ref (x);
			gimplify_assign (var, x, &olist);
		      }
		  }
		else
		  {
		    var = build_fold_addr_expr (var);
		    gimplify_assign (x, var, &ilist);
		  }
	      }
	    tree s = OMP_CLAUSE_SIZE (c);
	    if (s == NULL_TREE)
	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	    s = fold_convert (size_type_node, s);
	    tree purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    if (TREE_CODE (s) != INTEGER_CST)
	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

	    unsigned char tkind = 0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_MAP:
		tkind = OMP_CLAUSE_MAP_KIND (c);
		break;
	      case OMP_CLAUSE_TO:
		tkind = OMP_CLAUSE_MAP_TO;
		break;
	      case OMP_CLAUSE_FROM:
		tkind = OMP_CLAUSE_MAP_FROM;
		break;
	      default:
		gcc_unreachable ();
	      }
	    unsigned int talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
	      talign = DECL_ALIGN_UNIT (ovar);
	    talign = ceil_log2 (talign);
	    tkind |= talign << 3;
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				    build_int_cst (unsigned_char_type_node,
						   tkind));
	    if (nc && nc != c)
	      c = nc;
	  }

      gcc_assert (map_idx == map_cnt);

      DECL_INITIAL (TREE_VEC_ELT (t, 1))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
      DECL_INITIAL (TREE_VEC_ELT (t, 2))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
      if (!TREE_STATIC (TREE_VEC_ELT (t, 1)))
	{
	  gimple_seq initlist = NULL;
	  force_gimple_operand (build1 (DECL_EXPR, void_type_node,
					TREE_VEC_ELT (t, 1)),
				&initlist, true, NULL_TREE);
	  gimple_seq_add_seq (&ilist, initlist);
	}

      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }
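
  /* Illustrative sketch (not literal compiler output): for a clause like
     map(tofrom: a) on a target construct, where A is addressable in the
     outer context, the loop above emits roughly

	 .omp_data_arr.a = &a;		// appended to ilist

     and records sizeof (a) at slot MAP_IDX of the sizes vector and
     OMP_CLAUSE_MAP_TOFROM | (ceil_log2 (align) << 3) at the same slot
     of the kinds vector.  */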

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (ctx->record_type && kind == GF_OMP_TARGET_KIND_REGION)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  if (kind == GF_OMP_TARGET_KIND_REGION)
    {
      gimple_seq_add_seq (&new_body, tgt_body);
      new_body = maybe_catch_exception (new_body);
    }
  else if (kind == GF_OMP_TARGET_KIND_DATA)
    new_body = tgt_body;
  if (kind != GF_OMP_TARGET_KIND_UPDATE)
    {
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  bind = gimple_build_bind (NULL, NULL,
			    tgt_bind ? gimple_bind_block (tgt_bind)
				     : NULL_TREE);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);
}
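
/* Illustrative sketch (assuming a target region, GF_OMP_TARGET_KIND_REGION,
   with one mapped variable X): lower_omp_target leaves behind roughly

       {
	 .omp_data_arr.x = &x;			// ilist
	 #pragma omp target			// stmt; its new body begins
	   .omp_data_i = &.omp_data_arr;	// with the receiver setup and
	   ... lowered target body ...		// ends with OMP_RETURN
	 ... copy-back, if any ...		// olist
	 .omp_data_arr = {CLOBBER};		// olist
       }

   The call to the GOMP_target runtime entry point is not created here;
   that happens later, when the region is expanded.  */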

/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple teams_stmt = gsi_stmt (*gsi_p);
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gimple bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = find_omp_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  tree thread_limit = find_omp_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
  gimple call = gimple_build_call (decl, 2, num_teams, thread_limit);
  gimple_set_location (call, loc);
  gimple_seq_add_stmt (&bind_body, call);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
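
/* Illustrative sketch (not literal compiler output): for

       #pragma omp teams num_teams (4) thread_limit (8)
	 body;

   lower_omp_teams produces, inside a new GIMPLE_BIND, roughly

       #pragma omp teams ...		<- teams_stmt, its body moved out
       GOMP_teams (4, 8);
       body;
       OMP_RETURN

   with the reduction (olist) and destructor (dlist) sequences appended
   between the body and the return.  */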


/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is being called
   outside of an OpenMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (TREE_CODE (t) == VAR_DECL && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
  return NULL_TREE;
}
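
/* Example of what lower_omp_regimplify_p catches: once a shared variable
   X has gained a DECL_VALUE_EXPR such as .omp_data_i->x, a statement like

       if (x > 10) goto <D.1234>; else goto <D.1235>;

   is no longer valid GIMPLE after X is replaced by the component
   reference, so the walk flags it and lower_omp_1 regimplifies the
   operands.  */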

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OpenMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      if ((ctx || task_shared_vars)
	  && (walk_tree (gimple_cond_lhs_ptr (stmt), lower_omp_regimplify_p,
			 ctx ? NULL : &wi, NULL)
	      || walk_tree (gimple_cond_rhs_ptr (stmt), lower_omp_regimplify_p,
			    ctx ? NULL : &wi, NULL)))
	gimple_regimplify_operands (stmt, gsi_p);
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (stmt), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (stmt), ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (stmt), ctx);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	gimple_regimplify_operands (stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    tree lhs;
	    lhs = create_tmp_var (boolean_type_node, NULL);
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (stmt, fndecl);
		gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
	      }
	    gimple_call_set_lhs (stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs, boolean_false_node,
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	gimple_regimplify_operands (stmt, gsi_p);
      break;
    }
}
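
/* A sketch of the cancellation handling above: inside a cancellable
   parallel region, a call

       __builtin_GOMP_barrier ();

   is rewritten into approximately

       D.1234 = __builtin_GOMP_barrier_cancel ();
       if (D.1234 != 0) goto <cancel_label>; else goto <fallthru_label>;
       <fallthru_label>:

   so a barrier that observes a pending cancellation branches to the
   region's cancellation label.  */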

static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* Inside a target region we haven't called fold_stmt during
     gimplification, because it can break code by adding decl references
     that weren't in the source.  Call fold_stmt now.  */
  if (target_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}

/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But there is nothing to do unless -fopenmp, -fopenmp-simd
     or -fcilkplus is given.  */
  if (flag_openmp == 0 && flag_openmp_simd == 0 && flag_enable_cilkplus == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  return 0;
}
10111
27a4cd48
DM
10112namespace {
10113
10114const pass_data pass_data_lower_omp =
10115{
10116 GIMPLE_PASS, /* type */
10117 "omplower", /* name */
10118 OPTGROUP_NONE, /* optinfo_flags */
10119 false, /* has_gate */
10120 true, /* has_execute */
10121 TV_NONE, /* tv_id */
10122 PROP_gimple_any, /* properties_required */
10123 PROP_gimple_lomp, /* properties_provided */
10124 0, /* properties_destroyed */
10125 0, /* todo_flags_start */
10126 0, /* todo_flags_finish */
953ff289 10127};
27a4cd48
DM
10128
10129class pass_lower_omp : public gimple_opt_pass
10130{
10131public:
c3284718
RS
10132 pass_lower_omp (gcc::context *ctxt)
10133 : gimple_opt_pass (pass_data_lower_omp, ctxt)
27a4cd48
DM
10134 {}
10135
10136 /* opt_pass methods: */
10137 unsigned int execute () { return execute_lower_omp (); }
10138
10139}; // class pass_lower_omp
10140
10141} // anon namespace
10142
10143gimple_opt_pass *
10144make_pass_lower_omp (gcc::context *ctxt)
10145{
10146 return new pass_lower_omp (ctxt);
10147}

/* The following is a utility to diagnose OpenMP structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple branch_ctx, gimple label_ctx)
{
  if (label_ctx == branch_ctx)
    return false;

  /*
     Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.
   */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from OpenMP structured block");
  else
    error ("invalid entry to OpenMP structured block");
#endif

  bool cilkplus_block = false;
  if (flag_enable_cilkplus)
    {
      if ((branch_ctx
	   && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
	   && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
	  || (gimple_code (label_ctx) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
	cilkplus_block = true;
    }

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    {
      if (cilkplus_block)
	error ("invalid entry to Cilk Plus structured block");
      else
	error ("invalid entry to OpenMP structured block");
    }
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      if (cilkplus_block)
	error ("invalid branch to/from a Cilk Plus structured block");
      else
	error ("invalid branch to/from an OpenMP structured block");
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
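
/* For instance, given

       #pragma omp parallel
       {
	 ...
	 goto bad;
       }
       bad:;

   the GIMPLE_GOTO is walked by diagnose_sb_2 with the parallel as its
   BRANCH_CTX while the label was recorded with a NULL context, so
   diagnose_sb_0 reports "invalid branch to/from an OpenMP structured
   block" and replaces the goto with a nop.  */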

/* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple context = (gimple) wi->info;
  gimple inner_context;
  gimple stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels, (splay_tree_key) gimple_label_label (stmt),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple context = (gimple) wi->info;
  splay_tree_node n;
  gimple stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	tree lab = gimple_cond_true_label (stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple) n->value : NULL);
	  }
	lab = gimple_cond_false_label (stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Called from tree-cfg.c::make_edges to create cfg edges for all GIMPLE_OMP
   codes.  */
bool
make_gimple_omp_edges (basic_block bb, struct omp_region **region)
{
  gimple last = last_stmt (bb);
  enum gimple_code code = gimple_code (last);
  struct omp_region *cur_region = *region;
  bool fallthru = false;

  switch (code)
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_SECTION:
      cur_region = new_omp_region (bb, code, cur_region);
      fallthru = true;
      break;

    case GIMPLE_OMP_TARGET:
      cur_region = new_omp_region (bb, code, cur_region);
      fallthru = true;
      if (gimple_omp_target_kind (last) == GF_OMP_TARGET_KIND_UPDATE)
	cur_region = cur_region->outer;
      break;

    case GIMPLE_OMP_SECTIONS:
      cur_region = new_omp_region (bb, code, cur_region);
      fallthru = true;
      break;

    case GIMPLE_OMP_SECTIONS_SWITCH:
      fallthru = false;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
      fallthru = true;
      break;

    case GIMPLE_OMP_RETURN:
      /* In the case of a GIMPLE_OMP_SECTION, the edge will go
	 somewhere other than the next block.  This will be
	 created later.  */
      cur_region->exit = bb;
      fallthru = cur_region->type != GIMPLE_OMP_SECTION;
      cur_region = cur_region->outer;
      break;

    case GIMPLE_OMP_CONTINUE:
      cur_region->cont = bb;
      switch (cur_region->type)
	{
	case GIMPLE_OMP_FOR:
	  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
	     succs edges as abnormal to prevent splitting
	     them.  */
	  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
	  /* Make the loopback edge.  */
	  make_edge (bb, single_succ (cur_region->entry),
		     EDGE_ABNORMAL);

	  /* Create an edge from GIMPLE_OMP_FOR to exit, which
	     corresponds to the case that the body of the loop
	     is not executed at all.  */
	  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
	  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
	  fallthru = false;
	  break;

	case GIMPLE_OMP_SECTIONS:
	  /* Wire up the edges into and out of the nested sections.  */
	  {
	    basic_block switch_bb = single_succ (cur_region->entry);

	    struct omp_region *i;
	    for (i = cur_region->inner; i ; i = i->next)
	      {
		gcc_assert (i->type == GIMPLE_OMP_SECTION);
		make_edge (switch_bb, i->entry, 0);
		make_edge (i->exit, bb, EDGE_FALLTHRU);
	      }

	    /* Make the loopback edge to the block with
	       GIMPLE_OMP_SECTIONS_SWITCH.  */
	    make_edge (bb, switch_bb, 0);

	    /* Make the edge from the switch to exit.  */
	    make_edge (switch_bb, bb->next_bb, 0);
	    fallthru = false;
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
      break;

    default:
      gcc_unreachable ();
    }

  if (*region != cur_region)
    *region = cur_region;

  return fallthru;
}
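
/* A sketch of the edges built above for a GIMPLE_OMP_FOR region, where
   ENTRY ends in the GIMPLE_OMP_FOR and CONT in the GIMPLE_OMP_CONTINUE:

       ENTRY -> body		(existing single succ, marked EDGE_ABNORMAL)
       CONT  -> body		(abnormal loopback edge)
       ENTRY -> CONT->next_bb	(abnormal; loop body not executed at all)
       CONT  -> CONT->next_bb	(fallthru | abnormal)

   This is only a reading aid; the authoritative logic is the
   GIMPLE_OMP_CONTINUE case above.  */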

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}

static bool
gate_diagnose_omp_blocks (void)
{
  return flag_openmp || flag_enable_cilkplus;
}

namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_diagnose_omp_blocks (); }
  unsigned int execute ()
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}

#include "gt-omp-low.h"