]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/omp-low.c
gcc/ada/
[thirdparty/gcc.git] / gcc / omp-low.c
CommitLineData
1e8e9920 1/* Lowering pass for OpenMP directives. Converts OpenMP directives
2 into explicit calls to the runtime library (libgomp) and data
3 marshalling to implement data sharing and copying clauses.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
711789cc 6 Copyright (C) 2005-2013 Free Software Foundation, Inc.
1e8e9920 7
8This file is part of GCC.
9
10GCC is free software; you can redistribute it and/or modify it under
11the terms of the GNU General Public License as published by the Free
8c4c00c1 12Software Foundation; either version 3, or (at your option) any later
1e8e9920 13version.
14
15GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16WARRANTY; without even the implied warranty of MERCHANTABILITY or
17FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18for more details.
19
20You should have received a copy of the GNU General Public License
8c4c00c1 21along with GCC; see the file COPYING3. If not see
22<http://www.gnu.org/licenses/>. */
1e8e9920 23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
27#include "tm.h"
28#include "tree.h"
9ed99284 29#include "stringpool.h"
30#include "stor-layout.h"
1e8e9920 31#include "rtl.h"
e795d6e1 32#include "gimple.h"
a8783bee 33#include "gimplify.h"
dcf1a1ec 34#include "gimple-iterator.h"
e795d6e1 35#include "gimplify-me.h"
dcf1a1ec 36#include "gimple-walk.h"
75a70cf9 37#include "tree-iterator.h"
1e8e9920 38#include "tree-inline.h"
39#include "langhooks.h"
852f689e 40#include "diagnostic-core.h"
073c1fd5 41#include "gimple-ssa.h"
42#include "cgraph.h"
43#include "tree-cfg.h"
44#include "tree-phinodes.h"
45#include "ssa-iterators.h"
46#include "tree-ssanames.h"
47#include "tree-into-ssa.h"
9ed99284 48#include "expr.h"
073c1fd5 49#include "tree-dfa.h"
69ee5dbb 50#include "tree-ssa.h"
1e8e9920 51#include "flags.h"
52#include "function.h"
53#include "expr.h"
1e8e9920 54#include "tree-pass.h"
55#include "ggc.h"
56#include "except.h"
e3022db7 57#include "splay-tree.h"
cb7f680b 58#include "optabs.h"
59#include "cfgloop.h"
3d483a94 60#include "target.h"
7740abd8 61#include "omp-low.h"
424a4a92 62#include "gimple-low.h"
63#include "tree-cfgcleanup.h"
e797f49f 64#include "tree-nested.h"
1e8e9920 65
75a70cf9 66
48e1416a 67/* Lowering of OpenMP parallel and workshare constructs proceeds in two
1e8e9920 68 phases. The first phase scans the function looking for OMP statements
69 and then for variables that must be replaced to satisfy data sharing
70 clauses. The second phase expands code for the constructs, as well as
334ec2d8 71 re-gimplifying things when variables have been replaced with complex
1e8e9920 72 expressions.
73
d134bccc 74 Final code generation is done by pass_expand_omp. The flowgraph is
75 scanned for parallel regions which are then moved to a new
76 function, to be invoked by the thread library. */
1e8e9920 77
/* Parallel region information.  Every parallel and workshare
   directive is enclosed between two markers, the OMP_* directive
   and a corresponding OMP_RETURN statement.  */

struct omp_region
{
  /* The enclosing region.  */
  struct omp_region *outer;

  /* First child region.  */
  struct omp_region *inner;

  /* Next peer region.  */
  struct omp_region *next;

  /* Block containing the omp directive as its last stmt.  */
  basic_block entry;

  /* Block containing the OMP_RETURN as its last stmt.  */
  basic_block exit;

  /* Block containing the OMP_CONTINUE as its last stmt.  */
  basic_block cont;

  /* If this is a combined parallel+workshare region, this is a list
     of additional arguments needed by the combined parallel+workshare
     library call (filled in by get_ws_args_for).  */
  vec<tree, va_gc> *ws_args;

  /* The code for the omp directive of this region.  */
  enum gimple_code type;

  /* Schedule kind, only used for OMP_FOR type regions.  */
  enum omp_clause_schedule_kind sched_kind;

  /* True if this is a combined parallel+workshare region
     (set by determine_parallel_type).  */
  bool is_combined_parallel;
};
116
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

typedef struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  /* The OMP statement (parallel, task, for, ...) this context is for.  */
  gimple stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
} omp_context;
170
171
/* Description of a single loop in an OMP loop nest: the iteration
   variable V, initial value N1, final bound N2, the STEP, and the
   comparison code against N2 (canonicalized to LT_EXPR/GT_EXPR by
   extract_omp_for_data; NE_EXPR only for Cilk Plus simd loops).  */

struct omp_for_data_loop
{
  tree v, n1, n2, step;
  enum tree_code cond_code;
};
177
/* A structure describing the main elements of a parallel loop.
   Filled in by extract_omp_for_data.  */

struct omp_for_data
{
  /* The single loop, or the logical collapsed loop when collapse > 1.  */
  struct omp_for_data_loop loop;
  tree chunk_size;
  gimple for_stmt;
  tree pre, iter_type;
  int collapse;
  bool have_nowait, have_ordered;
  enum omp_clause_schedule_kind sched_kind;
  /* Array of COLLAPSE individual loops, or &loop when not collapsed.  */
  struct omp_for_data_loop *loops;
};
191
773c5ba7 192
/* All omp contexts created for the current function.  */
static splay_tree all_contexts;
/* Nesting depth of parallel/task ("taskreg") constructs; presumably
   maintained while scanning -- confirm against the scan routines.  */
static int taskreg_nesting_level;
/* Nesting depth of target constructs.  */
static int target_nesting_level;
/* Root of the omp_region tree (see struct omp_region above).  */
static struct omp_region *root_omp_region;
/* DECL_UIDs of variables made addressable because they are shared
   with a task; set in use_pointer_for_field.  */
static bitmap task_shared_vars;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

/* Case labels for statement codes whose sub-statements should simply
   be walked.  For use inside walk_gimple_stmt callback switches,
   where HANDLED_OPS_P is in scope.  */
#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
  case GIMPLE_TRANSACTION: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;
211
212/* Convenience function for calling scan_omp_1_op on tree operands. */
213
214static inline tree
215scan_omp_op (tree *tp, omp_context *ctx)
216{
217 struct walk_stmt_info wi;
218
219 memset (&wi, 0, sizeof (wi));
220 wi.info = ctx;
221 wi.want_locations = true;
222
223 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
224}
225
/* Forward declarations for lowering and lookup helpers defined later.  */
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
1e8e9920 229
230/* Find an OpenMP clause of type KIND within CLAUSES. */
231
79acaae1 232tree
590c3166 233find_omp_clause (tree clauses, enum omp_clause_code kind)
1e8e9920 234{
235 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
55d6e7cd 236 if (OMP_CLAUSE_CODE (clauses) == kind)
1e8e9920 237 return clauses;
238
239 return NULL_TREE;
240}
241
/* Return true if CTX describes a #pragma omp parallel construct.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}
249
773c5ba7 250
/* Return true if CTX describes a #pragma omp task construct.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}
258
259
260/* Return true if CTX is for an omp parallel or omp task. */
261
262static inline bool
263is_taskreg_ctx (omp_context *ctx)
264{
75a70cf9 265 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL
266 || gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
fd6481cf 267}
268
269
/* Return true if REGION is a combined parallel+workshare region
   (the flag is computed by determine_parallel_type).  */

static inline bool
is_combined_parallel (struct omp_region *region)
{
  return region->is_combined_parallel;
}
277
278
/* Extract the header elements of parallel loop FOR_STMT and store
   them into *FD.  LOOPS, if non-NULL, provides storage for the
   individual members of a collapsed loop nest.  */

static void
extract_omp_for_data (gimple for_stmt, struct omp_for_data *fd,
		      struct omp_for_data_loop *loops)
{
  tree t, var, *collapse_iter, *collapse_count;
  tree count = NULL_TREE, iter_type = long_integer_type_node;
  struct omp_for_data_loop *loop;
  int i;
  struct omp_for_data_loop dummy_loop;
  location_t loc = gimple_location (for_stmt);
  bool simd = gimple_omp_for_kind (for_stmt) & GF_OMP_FOR_KIND_SIMD;
  bool distribute = gimple_omp_for_kind (for_stmt)
		    == GF_OMP_FOR_KIND_DISTRIBUTE;

  fd->for_stmt = for_stmt;
  fd->pre = NULL;
  fd->collapse = gimple_omp_for_collapse (for_stmt);
  /* A collapsed nest uses the caller-supplied array; otherwise
     fd->loop doubles as the single loop description.  */
  if (fd->collapse > 1)
    fd->loops = loops;
  else
    fd->loops = &fd->loop;

  /* simd and distribute loops have no implicit barrier.  */
  fd->have_nowait = distribute || simd;
  fd->have_ordered = false;
  fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
  fd->chunk_size = NULL_TREE;
  collapse_iter = NULL;
  collapse_count = NULL;

  /* Walk the clause chain, recording scheduling details.  */
  for (t = gimple_omp_for_clauses (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
    switch (OMP_CLAUSE_CODE (t))
      {
      case OMP_CLAUSE_NOWAIT:
	fd->have_nowait = true;
	break;
      case OMP_CLAUSE_ORDERED:
	fd->have_ordered = true;
	break;
      case OMP_CLAUSE_SCHEDULE:
	/* schedule is invalid on distribute; dist_schedule is used there.  */
	gcc_assert (!distribute);
	fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
	fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
	break;
      case OMP_CLAUSE_DIST_SCHEDULE:
	gcc_assert (distribute);
	fd->chunk_size = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (t);
	break;
      case OMP_CLAUSE_COLLAPSE:
	if (fd->collapse > 1)
	  {
	    collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
	    collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
	  }
	/* FALLTHRU */
      default:
	break;
      }

  /* FIXME: for now map schedule(auto) to schedule(static).
     There should be analysis to determine whether all iterations
     are approximately the same amount of work (then schedule(static)
     is best) or if it varies (then schedule(dynamic,N) is better).  */
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
    {
      fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
      gcc_assert (fd->chunk_size == NULL);
    }
  gcc_assert (fd->collapse == 1 || collapse_iter != NULL);
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
    gcc_assert (fd->chunk_size == NULL);
  else if (fd->chunk_size == NULL)
    {
      /* We only need to compute a default chunk size for ordered
	 static loops and dynamic loops.  */
      if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
	  || fd->have_ordered)
	fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
			 ? integer_zero_node : integer_one_node;
    }

  for (i = 0; i < fd->collapse; i++)
    {
      if (fd->collapse == 1)
	loop = &fd->loop;
      else if (loops != NULL)
	loop = loops + i;
      else
	loop = &dummy_loop;

      loop->v = gimple_omp_for_index (for_stmt, i);
      gcc_assert (SSA_VAR_P (loop->v));
      gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
		  || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
      var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
      loop->n1 = gimple_omp_for_initial (for_stmt, i);

      loop->cond_code = gimple_omp_for_cond (for_stmt, i);
      loop->n2 = gimple_omp_for_final (for_stmt, i);
      /* Canonicalize LE/GE into LT/GT by adjusting the bound by one.  */
      switch (loop->cond_code)
	{
	case LT_EXPR:
	case GT_EXPR:
	  break;
	case NE_EXPR:
	  /* != conditions only appear in Cilk Plus simd loops.  */
	  gcc_assert (gimple_omp_for_kind (for_stmt)
		      == GF_OMP_FOR_KIND_CILKSIMD);
	  break;
	case LE_EXPR:
	  if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
	    loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, 1);
	  else
	    loop->n2 = fold_build2_loc (loc,
					PLUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
					build_int_cst (TREE_TYPE (loop->n2), 1));
	  loop->cond_code = LT_EXPR;
	  break;
	case GE_EXPR:
	  if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
	    loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, -1);
	  else
	    loop->n2 = fold_build2_loc (loc,
					MINUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
					build_int_cst (TREE_TYPE (loop->n2), 1));
	  loop->cond_code = GT_EXPR;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Normalize the increment expression into a signed step value,
	 negating it for MINUS_EXPR.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (TREE_OPERAND (t, 0) == var);
      switch (TREE_CODE (t))
	{
	case PLUS_EXPR:
	  loop->step = TREE_OPERAND (t, 1);
	  break;
	case POINTER_PLUS_EXPR:
	  loop->step = fold_convert (ssizetype, TREE_OPERAND (t, 1));
	  break;
	case MINUS_EXPR:
	  loop->step = TREE_OPERAND (t, 1);
	  loop->step = fold_build1_loc (loc,
					NEGATE_EXPR, TREE_TYPE (loop->step),
					loop->step);
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Pick an iteration type wide enough for the loop.  */
      if (simd
	  || (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
	      && !fd->have_ordered))
	{
	  if (fd->collapse == 1)
	    iter_type = TREE_TYPE (loop->v);
	  else if (i == 0
		   || TYPE_PRECISION (iter_type)
		      < TYPE_PRECISION (TREE_TYPE (loop->v)))
	    iter_type
	      = build_nonstandard_integer_type
		  (TYPE_PRECISION (TREE_TYPE (loop->v)), 1);
	}
      else if (iter_type != long_long_unsigned_type_node)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
	    iter_type = long_long_unsigned_type_node;
	  else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
		   && TYPE_PRECISION (TREE_TYPE (loop->v))
		      >= TYPE_PRECISION (iter_type))
	    {
	      tree n;

	      if (loop->cond_code == LT_EXPR)
		n = fold_build2_loc (loc,
				     PLUS_EXPR, TREE_TYPE (loop->v),
				     loop->n2, loop->step);
	      else
		n = loop->n1;
	      if (TREE_CODE (n) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
		iter_type = long_long_unsigned_type_node;
	    }
	  else if (TYPE_PRECISION (TREE_TYPE (loop->v))
		   > TYPE_PRECISION (iter_type))
	    {
	      tree n1, n2;

	      if (loop->cond_code == LT_EXPR)
		{
		  n1 = loop->n1;
		  n2 = fold_build2_loc (loc,
					PLUS_EXPR, TREE_TYPE (loop->v),
					loop->n2, loop->step);
		}
	      else
		{
		  n1 = fold_build2_loc (loc,
					MINUS_EXPR, TREE_TYPE (loop->v),
					loop->n2, loop->step);
		  n2 = loop->n1;
		}
	      if (TREE_CODE (n1) != INTEGER_CST
		  || TREE_CODE (n2) != INTEGER_CST
		  || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
		  || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
		iter_type = long_long_unsigned_type_node;
	    }
	}

      /* Try to fold a constant total iteration count for the collapsed
	 nest; COUNT stays NULL_TREE when it cannot be computed.  */
      if (collapse_count && *collapse_count == NULL)
	{
	  t = fold_binary (loop->cond_code, boolean_type_node,
			   fold_convert (TREE_TYPE (loop->v), loop->n1),
			   fold_convert (TREE_TYPE (loop->v), loop->n2));
	  if (t && integer_zerop (t))
	    count = build_zero_cst (long_long_unsigned_type_node);
	  else if ((i == 0 || count != NULL_TREE)
		   && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
		   && TREE_CONSTANT (loop->n1)
		   && TREE_CONSTANT (loop->n2)
		   && TREE_CODE (loop->step) == INTEGER_CST)
	    {
	      tree itype = TREE_TYPE (loop->v);

	      if (POINTER_TYPE_P (itype))
		itype = signed_type_for (itype);
	      /* count = (n2 - n1 + step +- 1) / step, rounded toward
		 zero via TRUNC_DIV on suitably negated operands.  */
	      t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
	      t = fold_build2_loc (loc,
				   PLUS_EXPR, itype,
				   fold_convert_loc (loc, itype, loop->step), t);
	      t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
				   fold_convert_loc (loc, itype, loop->n2));
	      t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
				   fold_convert_loc (loc, itype, loop->n1));
	      if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
		t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
				     fold_build1_loc (loc, NEGATE_EXPR, itype, t),
				     fold_build1_loc (loc, NEGATE_EXPR, itype,
						      fold_convert_loc (loc, itype,
									loop->step)));
	      else
		t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
				     fold_convert_loc (loc, itype, loop->step));
	      t = fold_convert_loc (loc, long_long_unsigned_type_node, t);
	      if (count != NULL_TREE)
		count = fold_build2_loc (loc,
					 MULT_EXPR, long_long_unsigned_type_node,
					 count, t);
	      else
		count = t;
	      if (TREE_CODE (count) != INTEGER_CST)
		count = NULL_TREE;
	    }
	  else if (count && !integer_zerop (count))
	    count = NULL_TREE;
	}
    }

  if (count
      && !simd
      && (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
	  || fd->have_ordered))
    {
      if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
	iter_type = long_long_unsigned_type_node;
      else
	iter_type = long_integer_type_node;
    }
  else if (collapse_iter && *collapse_iter != NULL)
    iter_type = TREE_TYPE (*collapse_iter);
  fd->iter_type = iter_type;
  if (collapse_iter && *collapse_iter == NULL)
    *collapse_iter = create_tmp_var (iter_type, ".iter");
  if (collapse_count && *collapse_count == NULL)
    {
      if (count)
	*collapse_count = fold_convert_loc (loc, iter_type, count);
      else
	*collapse_count = create_tmp_var (iter_type, ".count");
    }

  /* A collapsed nest is iterated as one logical loop running from 0
     to the collapsed iteration count with step 1.  */
  if (fd->collapse > 1)
    {
      fd->loop.v = *collapse_iter;
      fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      fd->loop.n2 = *collapse_count;
      fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
      fd->loop.cond_code = LT_EXPR;
    }
}
571
572
/* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
   is the immediate dominator of PAR_ENTRY_BB, return true if there
   are no data dependencies that would prevent expanding the parallel
   directive at PAR_ENTRY_BB as a combined parallel+workshare region.

   When expanding a combined parallel+workshare region, the call to
   the child function may need additional arguments in the case of
   GIMPLE_OMP_FOR regions.  In some cases, these arguments are
   computed out of variables passed in from the parent to the child
   via 'struct .omp_data_s'.  For instance:

	#pragma omp parallel for schedule (guided, i * 4)
	for (j ...)

   Is lowered into:

	# BLOCK 2 (PAR_ENTRY_BB)
	.omp_data_o.i = i;
	#pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)

	# BLOCK 3 (WS_ENTRY_BB)
	.omp_data_i = &.omp_data_o;
	D.1667 = .omp_data_i->i;
	D.1598 = D.1667 * 4;
	#pragma omp for schedule (guided, D.1598)

   When we outline the parallel region, the call to the child function
   'bar.omp_fn.0' will need the value D.1598 in its argument list, but
   that value is computed *after* the call site.  So, in principle we
   cannot do the transformation.

   To see whether the code in WS_ENTRY_BB blocks the combined
   parallel+workshare call, we collect all the variables used in the
   GIMPLE_OMP_FOR header and check whether they appear on the LHS of
   any statement in WS_ENTRY_BB.  If so, then we cannot emit the
   combined call.

   FIXME.  If we had the SSA form built at this point, we could merely
   hoist the code in block 3 into block 2 and be done with it.  But at
   this point we don't have dataflow information and though we could
   hack something up here, it is really not worth the aggravation.  */

static bool
workshare_safe_to_combine_p (basic_block ws_entry_bb)
{
  struct omp_for_data fd;
  gimple ws_stmt = last_stmt (ws_entry_bb);

  /* Sections need no extra arguments, so they are always safe.  */
  if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
    return true;

  gcc_assert (gimple_code (ws_stmt) == GIMPLE_OMP_FOR);

  extract_omp_for_data (ws_stmt, &fd, NULL);

  /* A collapsed nest with a non-constant total count can't combine.  */
  if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
    return false;
  /* The combined call passes bounds as 'long' (see get_ws_args_for).  */
  if (fd.iter_type != long_integer_type_node)
    return false;

  /* FIXME.  We give up too easily here.  If any of these arguments
     are not constants, they will likely involve variables that have
     been mapped into fields of .omp_data_s for sharing with the child
     function.  With appropriate data flow, it would be possible to
     see through this.  */
  if (!is_gimple_min_invariant (fd.loop.n1)
      || !is_gimple_min_invariant (fd.loop.n2)
      || !is_gimple_min_invariant (fd.loop.step)
      || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
    return false;

  return true;
}
646
647
/* Collect additional arguments needed to emit a combined
   parallel+workshare call.  WS_STMT is the workshare directive being
   expanded; PAR_STMT is the enclosing parallel directive.  */

static vec<tree, va_gc> *
get_ws_args_for (gimple par_stmt, gimple ws_stmt)
{
  tree t;
  location_t loc = gimple_location (ws_stmt);
  vec<tree, va_gc> *ws_args;

  if (gimple_code (ws_stmt) == GIMPLE_OMP_FOR)
    {
      struct omp_for_data fd;
      tree n1, n2;

      extract_omp_for_data (ws_stmt, &fd, NULL);
      n1 = fd.loop.n1;
      n2 = fd.loop.n2;

      if (gimple_omp_for_combined_into_p (ws_stmt))
	{
	  /* For a loop combined into the parallel, the bounds live in
	     a pair of _looptemp_ clauses on the parallel; use those.  */
	  tree innerc
	    = find_omp_clause (gimple_omp_parallel_clauses (par_stmt),
			       OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  n1 = OMP_CLAUSE_DECL (innerc);
	  innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
				    OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  n2 = OMP_CLAUSE_DECL (innerc);
	}

      /* Push n1, n2, step and (optionally) the chunk size, all
	 converted to 'long'.  */
      vec_alloc (ws_args, 3 + (fd.chunk_size != 0));

      t = fold_convert_loc (loc, long_integer_type_node, n1);
      ws_args->quick_push (t);

      t = fold_convert_loc (loc, long_integer_type_node, n2);
      ws_args->quick_push (t);

      t = fold_convert_loc (loc, long_integer_type_node, fd.loop.step);
      ws_args->quick_push (t);

      if (fd.chunk_size)
	{
	  t = fold_convert_loc (loc, long_integer_type_node, fd.chunk_size);
	  ws_args->quick_push (t);
	}

      return ws_args;
    }
  else if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
    {
      /* Number of sections is equal to the number of edges from the
	 GIMPLE_OMP_SECTIONS_SWITCH statement, except for the one to
	 the exit of the sections region.  */
      basic_block bb = single_succ (gimple_bb (ws_stmt));
      t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
      vec_alloc (ws_args, 1);
      ws_args->quick_push (t);
      return ws_args;
    }

  gcc_unreachable ();
}
714
715
/* Discover whether REGION is a combined parallel+workshare region and,
   if so, mark it and record the extra library-call arguments.  */

static void
determine_parallel_type (struct omp_region *region)
{
  basic_block par_entry_bb, par_exit_bb;
  basic_block ws_entry_bb, ws_exit_bb;

  /* Need a parallel region with a complete inner workshare region.  */
  if (region == NULL || region->inner == NULL
      || region->exit == NULL || region->inner->exit == NULL
      || region->inner->cont == NULL)
    return;

  /* We only support parallel+for and parallel+sections.  */
  if (region->type != GIMPLE_OMP_PARALLEL
      || (region->inner->type != GIMPLE_OMP_FOR
	  && region->inner->type != GIMPLE_OMP_SECTIONS))
    return;

  /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
     WS_EXIT_BB -> PAR_EXIT_BB.  */
  par_entry_bb = region->entry;
  par_exit_bb = region->exit;
  ws_entry_bb = region->inner->entry;
  ws_exit_bb = region->inner->exit;

  if (single_succ (par_entry_bb) == ws_entry_bb
      && single_succ (ws_exit_bb) == par_exit_bb
      && workshare_safe_to_combine_p (ws_entry_bb)
      && (gimple_omp_parallel_combined_p (last_stmt (par_entry_bb))
	  || (last_and_only_stmt (ws_entry_bb)
	      && last_and_only_stmt (par_exit_bb))))
    {
      gimple par_stmt = last_stmt (par_entry_bb);
      gimple ws_stmt = last_stmt (ws_entry_bb);

      if (region->inner->type == GIMPLE_OMP_FOR)
	{
	  /* If this is a combined parallel loop, we need to determine
	     whether or not to use the combined library calls.  There
	     are two cases where we do not apply the transformation:
	     static loops and any kind of ordered loop.  In the first
	     case, we already open code the loop so there is no need
	     to do anything else.  In the latter case, the combined
	     parallel loop call would still need extra synchronization
	     to implement ordered semantics, so there would not be any
	     gain in using the combined call.  */
	  tree clauses = gimple_omp_for_clauses (ws_stmt);
	  tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
	  if (c == NULL
	      || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
	      || find_omp_clause (clauses, OMP_CLAUSE_ORDERED))
	    {
	      region->is_combined_parallel = false;
	      region->inner->is_combined_parallel = false;
	      return;
	    }
	}

      region->is_combined_parallel = true;
      region->inner->is_combined_parallel = true;
      region->ws_args = get_ws_args_for (par_stmt, ws_stmt);
    }
}
780
773c5ba7 781
/* Return true if EXPR is variable sized, i.e. its type's size is not a
   compile-time constant.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
789
/* Return true if the front end wants DECL privatized by reference
   (answered by the language hook).  */

static inline bool
is_reference (tree decl)
{
  return lang_hooks.decls.omp_privatize_by_reference (decl);
}
797
798/* Lookup variables in the decl or field splay trees. The "maybe" form
799 allows for the variable form to not have been entered, otherwise we
800 assert that the variable must have been entered. */
801
802static inline tree
803lookup_decl (tree var, omp_context *ctx)
804{
e3022db7 805 tree *n;
806 n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
807 return *n;
1e8e9920 808}
809
810static inline tree
e8a588af 811maybe_lookup_decl (const_tree var, omp_context *ctx)
1e8e9920 812{
e3022db7 813 tree *n;
814 n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
815 return n ? *n : NULL_TREE;
1e8e9920 816}
817
818static inline tree
819lookup_field (tree var, omp_context *ctx)
820{
821 splay_tree_node n;
822 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
823 return (tree) n->value;
824}
825
fd6481cf 826static inline tree
827lookup_sfield (tree var, omp_context *ctx)
828{
829 splay_tree_node n;
830 n = splay_tree_lookup (ctx->sfield_map
831 ? ctx->sfield_map : ctx->field_map,
832 (splay_tree_key) var);
833 return (tree) n->value;
834}
835
1e8e9920 836static inline tree
837maybe_lookup_field (tree var, omp_context *ctx)
838{
839 splay_tree_node n;
840 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
841 return n ? (tree) n->value : NULL_TREE;
842}
843
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  May mark the outer
   copy of DECL addressable (and record it in task_shared_vars) as a
   side effect -- see the task case below.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates are never copied in/out; always pass them by pointer.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		/* Shared in an outer taskreg: jump into the task
		   handling below to mark the outer copy addressable.  */
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
936
/* Construct a new automatic decl similar to VAR, with the given NAME
   and TYPE, and chain it onto CTX's block_vars list.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  /* The copy belongs to the function currently being built.  */
  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  ctx->block_vars = copy;

  return copy;
}
950
951static tree
952omp_copy_decl_1 (tree var, omp_context *ctx)
953{
954 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
955}
956
445d06b6 957/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
958 as appropriate. */
959static tree
960omp_build_component_ref (tree obj, tree field)
961{
962 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
963 if (TREE_THIS_VOLATILE (field))
964 TREE_THIS_VOLATILE (ret) |= 1;
965 if (TREE_READONLY (field))
966 TREE_READONLY (ret) |= 1;
967 return ret;
968}
969
1e8e9920 970/* Build tree nodes to access the field for VAR on the receiver side. */
971
972static tree
973build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
974{
975 tree x, field = lookup_field (var, ctx);
976
977 /* If the receiver record type was remapped in the child function,
978 remap the field into the new record type. */
979 x = maybe_lookup_field (field, ctx);
980 if (x != NULL)
981 field = x;
982
182cf5a9 983 x = build_simple_mem_ref (ctx->receiver_decl);
445d06b6 984 x = omp_build_component_ref (x, field);
1e8e9920 985 if (by_ref)
182cf5a9 986 x = build_simple_mem_ref (x);
1e8e9920 987
988 return x;
989}
990
991/* Build tree nodes to access VAR in the scope outer to CTX. In the case
992 of a parallel, this is a component reference; for workshare constructs
993 this is some variable. */
994
995static tree
996build_outer_var_ref (tree var, omp_context *ctx)
997{
998 tree x;
999
f49d7bb5 1000 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
1e8e9920 1001 x = var;
1002 else if (is_variable_sized (var))
1003 {
1004 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
1005 x = build_outer_var_ref (x, ctx);
182cf5a9 1006 x = build_simple_mem_ref (x);
1e8e9920 1007 }
fd6481cf 1008 else if (is_taskreg_ctx (ctx))
1e8e9920 1009 {
e8a588af 1010 bool by_ref = use_pointer_for_field (var, NULL);
1e8e9920 1011 x = build_receiver_ref (var, by_ref, ctx);
1012 }
3d483a94 1013 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
f2697631 1014 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
3d483a94 1015 {
1016 /* #pragma omp simd isn't a worksharing construct, and can reference even
1017 private vars in its linear etc. clauses. */
1018 x = NULL_TREE;
1019 if (ctx->outer && is_taskreg_ctx (ctx))
1020 x = lookup_decl (var, ctx->outer);
1021 else if (ctx->outer)
84cb1020 1022 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
3d483a94 1023 if (x == NULL_TREE)
1024 x = var;
1025 }
1e8e9920 1026 else if (ctx->outer)
1027 x = lookup_decl (var, ctx->outer);
9438af57 1028 else if (is_reference (var))
1029 /* This can happen with orphaned constructs. If var is reference, it is
1030 possible it is shared and as such valid. */
1031 x = var;
1e8e9920 1032 else
1033 gcc_unreachable ();
1034
1035 if (is_reference (var))
182cf5a9 1036 x = build_simple_mem_ref (x);
1e8e9920 1037
1038 return x;
1039}
1040
1041/* Build tree nodes to access the field for VAR on the sender side. */
1042
1043static tree
1044build_sender_ref (tree var, omp_context *ctx)
1045{
fd6481cf 1046 tree field = lookup_sfield (var, ctx);
445d06b6 1047 return omp_build_component_ref (ctx->sender_decl, field);
1e8e9920 1048}
1049
1050/* Add a new field for VAR inside the structure CTX->SENDER_DECL. */
1051
1052static void
fd6481cf 1053install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
1e8e9920 1054{
fd6481cf 1055 tree field, type, sfield = NULL_TREE;
1e8e9920 1056
fd6481cf 1057 gcc_assert ((mask & 1) == 0
1058 || !splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
1059 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
1060 || !splay_tree_lookup (ctx->sfield_map, (splay_tree_key) var));
1e8e9920 1061
1062 type = TREE_TYPE (var);
bc7bff74 1063 if (mask & 4)
1064 {
1065 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
1066 type = build_pointer_type (build_pointer_type (type));
1067 }
1068 else if (by_ref)
1e8e9920 1069 type = build_pointer_type (type);
fd6481cf 1070 else if ((mask & 3) == 1 && is_reference (var))
1071 type = TREE_TYPE (type);
1e8e9920 1072
e60a6f7b 1073 field = build_decl (DECL_SOURCE_LOCATION (var),
1074 FIELD_DECL, DECL_NAME (var), type);
1e8e9920 1075
1076 /* Remember what variable this field was created for. This does have a
1077 side effect of making dwarf2out ignore this member, so for helpful
1078 debugging we clear it later in delete_omp_context. */
1079 DECL_ABSTRACT_ORIGIN (field) = var;
fd6481cf 1080 if (type == TREE_TYPE (var))
1081 {
1082 DECL_ALIGN (field) = DECL_ALIGN (var);
1083 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
1084 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
1085 }
1086 else
1087 DECL_ALIGN (field) = TYPE_ALIGN (type);
1e8e9920 1088
fd6481cf 1089 if ((mask & 3) == 3)
1090 {
1091 insert_field_into_struct (ctx->record_type, field);
1092 if (ctx->srecord_type)
1093 {
e60a6f7b 1094 sfield = build_decl (DECL_SOURCE_LOCATION (var),
1095 FIELD_DECL, DECL_NAME (var), type);
fd6481cf 1096 DECL_ABSTRACT_ORIGIN (sfield) = var;
1097 DECL_ALIGN (sfield) = DECL_ALIGN (field);
1098 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
1099 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
1100 insert_field_into_struct (ctx->srecord_type, sfield);
1101 }
1102 }
1103 else
1104 {
1105 if (ctx->srecord_type == NULL_TREE)
1106 {
1107 tree t;
1108
1109 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
1110 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1111 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
1112 {
e60a6f7b 1113 sfield = build_decl (DECL_SOURCE_LOCATION (var),
1114 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
fd6481cf 1115 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
1116 insert_field_into_struct (ctx->srecord_type, sfield);
1117 splay_tree_insert (ctx->sfield_map,
1118 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
1119 (splay_tree_value) sfield);
1120 }
1121 }
1122 sfield = field;
1123 insert_field_into_struct ((mask & 1) ? ctx->record_type
1124 : ctx->srecord_type, field);
1125 }
1e8e9920 1126
fd6481cf 1127 if (mask & 1)
1128 splay_tree_insert (ctx->field_map, (splay_tree_key) var,
1129 (splay_tree_value) field);
1130 if ((mask & 2) && ctx->sfield_map)
1131 splay_tree_insert (ctx->sfield_map, (splay_tree_key) var,
1132 (splay_tree_value) sfield);
1e8e9920 1133}
1134
1135static tree
1136install_var_local (tree var, omp_context *ctx)
1137{
1138 tree new_var = omp_copy_decl_1 (var, ctx);
1139 insert_decl_map (&ctx->cb, var, new_var);
1140 return new_var;
1141}
1142
1143/* Adjust the replacement for DECL in CTX for the new context. This means
1144 copying the DECL_VALUE_EXPR, and fixing up the type. */
1145
1146static void
1147fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
1148{
1149 tree new_decl, size;
1150
1151 new_decl = lookup_decl (decl, ctx);
1152
1153 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
1154
1155 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
1156 && DECL_HAS_VALUE_EXPR_P (decl))
1157 {
1158 tree ve = DECL_VALUE_EXPR (decl);
75a70cf9 1159 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
1e8e9920 1160 SET_DECL_VALUE_EXPR (new_decl, ve);
1161 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1162 }
1163
1164 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
1165 {
1166 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
1167 if (size == error_mark_node)
1168 size = TYPE_SIZE (TREE_TYPE (new_decl));
1169 DECL_SIZE (new_decl) = size;
1170
1171 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
1172 if (size == error_mark_node)
1173 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
1174 DECL_SIZE_UNIT (new_decl) = size;
1175 }
1176}
1177
1178/* The callback for remap_decl. Search all containing contexts for a
1179 mapping of the variable; this avoids having to duplicate the splay
1180 tree ahead of time. We know a mapping doesn't already exist in the
1181 given context. Create new mappings to implement default semantics. */
1182
1183static tree
1184omp_copy_decl (tree var, copy_body_data *cb)
1185{
1186 omp_context *ctx = (omp_context *) cb;
1187 tree new_var;
1188
1e8e9920 1189 if (TREE_CODE (var) == LABEL_DECL)
1190 {
e60a6f7b 1191 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
773c5ba7 1192 DECL_CONTEXT (new_var) = current_function_decl;
1e8e9920 1193 insert_decl_map (&ctx->cb, var, new_var);
1194 return new_var;
1195 }
1196
fd6481cf 1197 while (!is_taskreg_ctx (ctx))
1e8e9920 1198 {
1199 ctx = ctx->outer;
1200 if (ctx == NULL)
1201 return var;
1202 new_var = maybe_lookup_decl (var, ctx);
1203 if (new_var)
1204 return new_var;
1205 }
1206
f49d7bb5 1207 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
1208 return var;
1209
1e8e9920 1210 return error_mark_node;
1211}
1212
773c5ba7 1213
1214/* Return the parallel region associated with STMT. */
1215
773c5ba7 1216/* Debugging dumps for parallel regions. */
1217void dump_omp_region (FILE *, struct omp_region *, int);
1218void debug_omp_region (struct omp_region *);
1219void debug_all_omp_regions (void);
1220
1221/* Dump the parallel region tree rooted at REGION. */
1222
1223void
1224dump_omp_region (FILE *file, struct omp_region *region, int indent)
1225{
61e47ac8 1226 fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
75a70cf9 1227 gimple_code_name[region->type]);
773c5ba7 1228
1229 if (region->inner)
1230 dump_omp_region (file, region->inner, indent + 4);
1231
61e47ac8 1232 if (region->cont)
1233 {
75a70cf9 1234 fprintf (file, "%*sbb %d: GIMPLE_OMP_CONTINUE\n", indent, "",
61e47ac8 1235 region->cont->index);
1236 }
48e1416a 1237
773c5ba7 1238 if (region->exit)
75a70cf9 1239 fprintf (file, "%*sbb %d: GIMPLE_OMP_RETURN\n", indent, "",
61e47ac8 1240 region->exit->index);
773c5ba7 1241 else
61e47ac8 1242 fprintf (file, "%*s[no exit marker]\n", indent, "");
773c5ba7 1243
1244 if (region->next)
61e47ac8 1245 dump_omp_region (file, region->next, indent);
773c5ba7 1246}
1247
4b987fac 1248DEBUG_FUNCTION void
773c5ba7 1249debug_omp_region (struct omp_region *region)
1250{
1251 dump_omp_region (stderr, region, 0);
1252}
1253
4b987fac 1254DEBUG_FUNCTION void
773c5ba7 1255debug_all_omp_regions (void)
1256{
1257 dump_omp_region (stderr, root_omp_region, 0);
1258}
1259
1260
1261/* Create a new parallel region starting at STMT inside region PARENT. */
1262
7740abd8 1263static struct omp_region *
75a70cf9 1264new_omp_region (basic_block bb, enum gimple_code type,
1265 struct omp_region *parent)
773c5ba7 1266{
4077bf7a 1267 struct omp_region *region = XCNEW (struct omp_region);
773c5ba7 1268
1269 region->outer = parent;
61e47ac8 1270 region->entry = bb;
1271 region->type = type;
773c5ba7 1272
1273 if (parent)
1274 {
1275 /* This is a nested region. Add it to the list of inner
1276 regions in PARENT. */
1277 region->next = parent->inner;
1278 parent->inner = region;
1279 }
61e47ac8 1280 else
773c5ba7 1281 {
1282 /* This is a toplevel region. Add it to the list of toplevel
1283 regions in ROOT_OMP_REGION. */
1284 region->next = root_omp_region;
1285 root_omp_region = region;
1286 }
61e47ac8 1287
1288 return region;
1289}
1290
1291/* Release the memory associated with the region tree rooted at REGION. */
1292
1293static void
1294free_omp_region_1 (struct omp_region *region)
1295{
1296 struct omp_region *i, *n;
1297
1298 for (i = region->inner; i ; i = n)
773c5ba7 1299 {
61e47ac8 1300 n = i->next;
1301 free_omp_region_1 (i);
773c5ba7 1302 }
1303
61e47ac8 1304 free (region);
1305}
773c5ba7 1306
61e47ac8 1307/* Release the memory for the entire omp region tree. */
1308
1309void
1310free_omp_regions (void)
1311{
1312 struct omp_region *r, *n;
1313 for (r = root_omp_region; r ; r = n)
1314 {
1315 n = r->next;
1316 free_omp_region_1 (r);
1317 }
1318 root_omp_region = NULL;
773c5ba7 1319}
1320
1321
1e8e9920 1322/* Create a new context, with OUTER_CTX being the surrounding context. */
1323
1324static omp_context *
75a70cf9 1325new_omp_context (gimple stmt, omp_context *outer_ctx)
1e8e9920 1326{
1327 omp_context *ctx = XCNEW (omp_context);
1328
1329 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
1330 (splay_tree_value) ctx);
1331 ctx->stmt = stmt;
1332
1333 if (outer_ctx)
1334 {
1335 ctx->outer = outer_ctx;
1336 ctx->cb = outer_ctx->cb;
1337 ctx->cb.block = NULL;
1338 ctx->depth = outer_ctx->depth + 1;
1339 }
1340 else
1341 {
1342 ctx->cb.src_fn = current_function_decl;
1343 ctx->cb.dst_fn = current_function_decl;
53f79206 1344 ctx->cb.src_node = cgraph_get_node (current_function_decl);
1345 gcc_checking_assert (ctx->cb.src_node);
1e8e9920 1346 ctx->cb.dst_node = ctx->cb.src_node;
1347 ctx->cb.src_cfun = cfun;
1348 ctx->cb.copy_decl = omp_copy_decl;
e38def9c 1349 ctx->cb.eh_lp_nr = 0;
1e8e9920 1350 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
1351 ctx->depth = 1;
1352 }
1353
e3022db7 1354 ctx->cb.decl_map = pointer_map_create ();
1e8e9920 1355
1356 return ctx;
1357}
1358
75a70cf9 1359static gimple_seq maybe_catch_exception (gimple_seq);
f6430caa 1360
1361/* Finalize task copyfn. */
1362
1363static void
75a70cf9 1364finalize_task_copyfn (gimple task_stmt)
f6430caa 1365{
1366 struct function *child_cfun;
9078126c 1367 tree child_fn;
e3a19533 1368 gimple_seq seq = NULL, new_seq;
75a70cf9 1369 gimple bind;
f6430caa 1370
75a70cf9 1371 child_fn = gimple_omp_task_copy_fn (task_stmt);
f6430caa 1372 if (child_fn == NULL_TREE)
1373 return;
1374
1375 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
82b40354 1376 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
f6430caa 1377
f6430caa 1378 push_cfun (child_cfun);
7e3aae05 1379 bind = gimplify_body (child_fn, false);
75a70cf9 1380 gimple_seq_add_stmt (&seq, bind);
1381 new_seq = maybe_catch_exception (seq);
1382 if (new_seq != seq)
1383 {
1384 bind = gimple_build_bind (NULL, new_seq, NULL);
e3a19533 1385 seq = NULL;
75a70cf9 1386 gimple_seq_add_stmt (&seq, bind);
1387 }
1388 gimple_set_body (child_fn, seq);
f6430caa 1389 pop_cfun ();
f6430caa 1390
82b40354 1391 /* Inform the callgraph about the new function. */
f6430caa 1392 cgraph_add_new_function (child_fn, false);
1393}
1394
1e8e9920 1395/* Destroy a omp_context data structures. Called through the splay tree
1396 value delete callback. */
1397
1398static void
1399delete_omp_context (splay_tree_value value)
1400{
1401 omp_context *ctx = (omp_context *) value;
1402
e3022db7 1403 pointer_map_destroy (ctx->cb.decl_map);
1e8e9920 1404
1405 if (ctx->field_map)
1406 splay_tree_delete (ctx->field_map);
fd6481cf 1407 if (ctx->sfield_map)
1408 splay_tree_delete (ctx->sfield_map);
1e8e9920 1409
1410 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1411 it produces corrupt debug information. */
1412 if (ctx->record_type)
1413 {
1414 tree t;
1767a056 1415 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1e8e9920 1416 DECL_ABSTRACT_ORIGIN (t) = NULL;
1417 }
fd6481cf 1418 if (ctx->srecord_type)
1419 {
1420 tree t;
1767a056 1421 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
fd6481cf 1422 DECL_ABSTRACT_ORIGIN (t) = NULL;
1423 }
1e8e9920 1424
f6430caa 1425 if (is_task_ctx (ctx))
1426 finalize_task_copyfn (ctx->stmt);
1427
1e8e9920 1428 XDELETE (ctx);
1429}
1430
1431/* Fix up RECEIVER_DECL with a type that has been remapped to the child
1432 context. */
1433
1434static void
1435fixup_child_record_type (omp_context *ctx)
1436{
1437 tree f, type = ctx->record_type;
1438
1439 /* ??? It isn't sufficient to just call remap_type here, because
1440 variably_modified_type_p doesn't work the way we expect for
1441 record types. Testing each field for whether it needs remapping
1442 and creating a new record by hand works, however. */
1767a056 1443 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1e8e9920 1444 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1445 break;
1446 if (f)
1447 {
1448 tree name, new_fields = NULL;
1449
1450 type = lang_hooks.types.make_type (RECORD_TYPE);
1451 name = DECL_NAME (TYPE_NAME (ctx->record_type));
e60a6f7b 1452 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1453 TYPE_DECL, name, type);
1e8e9920 1454 TYPE_NAME (type) = name;
1455
1767a056 1456 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1e8e9920 1457 {
1458 tree new_f = copy_node (f);
1459 DECL_CONTEXT (new_f) = type;
1460 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1767a056 1461 DECL_CHAIN (new_f) = new_fields;
75a70cf9 1462 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1463 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1464 &ctx->cb, NULL);
1465 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1466 &ctx->cb, NULL);
1e8e9920 1467 new_fields = new_f;
1468
1469 /* Arrange to be able to look up the receiver field
1470 given the sender field. */
1471 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1472 (splay_tree_value) new_f);
1473 }
1474 TYPE_FIELDS (type) = nreverse (new_fields);
1475 layout_type (type);
1476 }
1477
1478 TREE_TYPE (ctx->receiver_decl) = build_pointer_type (type);
1479}
1480
1481/* Instantiate decls as necessary in CTX to satisfy the data sharing
1482 specified by CLAUSES. */
1483
1484static void
1485scan_sharing_clauses (tree clauses, omp_context *ctx)
1486{
1487 tree c, decl;
1488 bool scan_array_reductions = false;
1489
1490 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1491 {
1492 bool by_ref;
1493
55d6e7cd 1494 switch (OMP_CLAUSE_CODE (c))
1e8e9920 1495 {
1496 case OMP_CLAUSE_PRIVATE:
1497 decl = OMP_CLAUSE_DECL (c);
fd6481cf 1498 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1499 goto do_private;
1500 else if (!is_variable_sized (decl))
1e8e9920 1501 install_var_local (decl, ctx);
1502 break;
1503
1504 case OMP_CLAUSE_SHARED:
bc7bff74 1505 /* Ignore shared directives in teams construct. */
1506 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1507 break;
fd6481cf 1508 gcc_assert (is_taskreg_ctx (ctx));
1e8e9920 1509 decl = OMP_CLAUSE_DECL (c);
e7327393 1510 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1511 || !is_variable_sized (decl));
f49d7bb5 1512 /* Global variables don't need to be copied,
1513 the receiver side will use them directly. */
1514 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1515 break;
fd6481cf 1516 by_ref = use_pointer_for_field (decl, ctx);
1e8e9920 1517 if (! TREE_READONLY (decl)
1518 || TREE_ADDRESSABLE (decl)
1519 || by_ref
1520 || is_reference (decl))
1521 {
fd6481cf 1522 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1523 install_var_local (decl, ctx);
1524 break;
1525 }
1526 /* We don't need to copy const scalar vars back. */
55d6e7cd 1527 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1e8e9920 1528 goto do_private;
1529
1530 case OMP_CLAUSE_LASTPRIVATE:
1531 /* Let the corresponding firstprivate clause create
1532 the variable. */
1533 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1534 break;
1535 /* FALLTHRU */
1536
1537 case OMP_CLAUSE_FIRSTPRIVATE:
1538 case OMP_CLAUSE_REDUCTION:
3d483a94 1539 case OMP_CLAUSE_LINEAR:
1e8e9920 1540 decl = OMP_CLAUSE_DECL (c);
1541 do_private:
1542 if (is_variable_sized (decl))
1e8e9920 1543 {
fd6481cf 1544 if (is_task_ctx (ctx))
1545 install_var_field (decl, false, 1, ctx);
1546 break;
1547 }
1548 else if (is_taskreg_ctx (ctx))
1549 {
1550 bool global
1551 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
e8a588af 1552 by_ref = use_pointer_for_field (decl, NULL);
fd6481cf 1553
1554 if (is_task_ctx (ctx)
1555 && (global || by_ref || is_reference (decl)))
1556 {
1557 install_var_field (decl, false, 1, ctx);
1558 if (!global)
1559 install_var_field (decl, by_ref, 2, ctx);
1560 }
1561 else if (!global)
1562 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1563 }
1564 install_var_local (decl, ctx);
1565 break;
1566
bc7bff74 1567 case OMP_CLAUSE__LOOPTEMP_:
1568 gcc_assert (is_parallel_ctx (ctx));
1569 decl = OMP_CLAUSE_DECL (c);
1570 install_var_field (decl, false, 3, ctx);
1571 install_var_local (decl, ctx);
1572 break;
1573
1e8e9920 1574 case OMP_CLAUSE_COPYPRIVATE:
1e8e9920 1575 case OMP_CLAUSE_COPYIN:
1576 decl = OMP_CLAUSE_DECL (c);
e8a588af 1577 by_ref = use_pointer_for_field (decl, NULL);
fd6481cf 1578 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1579 break;
1580
1581 case OMP_CLAUSE_DEFAULT:
1582 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
1583 break;
1584
2169f33b 1585 case OMP_CLAUSE_FINAL:
1e8e9920 1586 case OMP_CLAUSE_IF:
1587 case OMP_CLAUSE_NUM_THREADS:
bc7bff74 1588 case OMP_CLAUSE_NUM_TEAMS:
1589 case OMP_CLAUSE_THREAD_LIMIT:
1590 case OMP_CLAUSE_DEVICE:
1e8e9920 1591 case OMP_CLAUSE_SCHEDULE:
bc7bff74 1592 case OMP_CLAUSE_DIST_SCHEDULE:
1593 case OMP_CLAUSE_DEPEND:
1e8e9920 1594 if (ctx->outer)
75a70cf9 1595 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1e8e9920 1596 break;
1597
bc7bff74 1598 case OMP_CLAUSE_TO:
1599 case OMP_CLAUSE_FROM:
1600 case OMP_CLAUSE_MAP:
1601 if (ctx->outer)
1602 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1603 decl = OMP_CLAUSE_DECL (c);
1604 /* Global variables with "omp declare target" attribute
1605 don't need to be copied, the receiver side will use them
1606 directly. */
1607 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1608 && DECL_P (decl)
1609 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1610 && lookup_attribute ("omp declare target",
1611 DECL_ATTRIBUTES (decl)))
1612 break;
1613 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1614 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER)
1615 {
1616 /* Ignore OMP_CLAUSE_MAP_POINTER kind for arrays in
1617 #pragma omp target data, there is nothing to map for
1618 those. */
1619 if (gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_DATA
1620 && !POINTER_TYPE_P (TREE_TYPE (decl)))
1621 break;
1622 }
1623 if (DECL_P (decl))
1624 {
1625 if (DECL_SIZE (decl)
1626 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1627 {
1628 tree decl2 = DECL_VALUE_EXPR (decl);
1629 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1630 decl2 = TREE_OPERAND (decl2, 0);
1631 gcc_assert (DECL_P (decl2));
1632 install_var_field (decl2, true, 3, ctx);
1633 install_var_local (decl2, ctx);
1634 install_var_local (decl, ctx);
1635 }
1636 else
1637 {
1638 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1639 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
1640 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1641 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1642 install_var_field (decl, true, 7, ctx);
1643 else
1644 install_var_field (decl, true, 3, ctx);
1645 if (gimple_omp_target_kind (ctx->stmt)
1646 == GF_OMP_TARGET_KIND_REGION)
1647 install_var_local (decl, ctx);
1648 }
1649 }
1650 else
1651 {
1652 tree base = get_base_address (decl);
1653 tree nc = OMP_CLAUSE_CHAIN (c);
1654 if (DECL_P (base)
1655 && nc != NULL_TREE
1656 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1657 && OMP_CLAUSE_DECL (nc) == base
1658 && OMP_CLAUSE_MAP_KIND (nc) == OMP_CLAUSE_MAP_POINTER
1659 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1660 {
1661 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1662 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1663 }
1664 else
1665 {
1666 gcc_assert (!splay_tree_lookup (ctx->field_map,
1667 (splay_tree_key) decl));
1668 tree field
1669 = build_decl (OMP_CLAUSE_LOCATION (c),
1670 FIELD_DECL, NULL_TREE, ptr_type_node);
1671 DECL_ALIGN (field) = TYPE_ALIGN (ptr_type_node);
1672 insert_field_into_struct (ctx->record_type, field);
1673 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1674 (splay_tree_value) field);
1675 }
1676 }
1677 break;
1678
1e8e9920 1679 case OMP_CLAUSE_NOWAIT:
1680 case OMP_CLAUSE_ORDERED:
fd6481cf 1681 case OMP_CLAUSE_COLLAPSE:
1682 case OMP_CLAUSE_UNTIED:
2169f33b 1683 case OMP_CLAUSE_MERGEABLE:
bc7bff74 1684 case OMP_CLAUSE_PROC_BIND:
3d483a94 1685 case OMP_CLAUSE_SAFELEN:
1e8e9920 1686 break;
1687
bc7bff74 1688 case OMP_CLAUSE_ALIGNED:
1689 decl = OMP_CLAUSE_DECL (c);
1690 if (is_global_var (decl)
1691 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1692 install_var_local (decl, ctx);
1693 break;
1694
1e8e9920 1695 default:
1696 gcc_unreachable ();
1697 }
1698 }
1699
1700 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1701 {
55d6e7cd 1702 switch (OMP_CLAUSE_CODE (c))
1e8e9920 1703 {
1704 case OMP_CLAUSE_LASTPRIVATE:
1705 /* Let the corresponding firstprivate clause create
1706 the variable. */
75a70cf9 1707 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
fd6481cf 1708 scan_array_reductions = true;
1e8e9920 1709 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1710 break;
1711 /* FALLTHRU */
1712
1713 case OMP_CLAUSE_PRIVATE:
1714 case OMP_CLAUSE_FIRSTPRIVATE:
1715 case OMP_CLAUSE_REDUCTION:
3d483a94 1716 case OMP_CLAUSE_LINEAR:
1e8e9920 1717 decl = OMP_CLAUSE_DECL (c);
1718 if (is_variable_sized (decl))
1719 install_var_local (decl, ctx);
1720 fixup_remapped_decl (decl, ctx,
55d6e7cd 1721 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1e8e9920 1722 && OMP_CLAUSE_PRIVATE_DEBUG (c));
55d6e7cd 1723 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1e8e9920 1724 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1725 scan_array_reductions = true;
1726 break;
1727
1728 case OMP_CLAUSE_SHARED:
bc7bff74 1729 /* Ignore shared directives in teams construct. */
1730 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1731 break;
1e8e9920 1732 decl = OMP_CLAUSE_DECL (c);
f49d7bb5 1733 if (! is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1734 fixup_remapped_decl (decl, ctx, false);
1e8e9920 1735 break;
1736
bc7bff74 1737 case OMP_CLAUSE_MAP:
1738 if (gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_DATA)
1739 break;
1740 decl = OMP_CLAUSE_DECL (c);
1741 if (DECL_P (decl)
1742 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1743 && lookup_attribute ("omp declare target",
1744 DECL_ATTRIBUTES (decl)))
1745 break;
1746 if (DECL_P (decl))
1747 {
1748 if (OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
1749 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1750 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1751 {
1752 tree new_decl = lookup_decl (decl, ctx);
1753 TREE_TYPE (new_decl)
1754 = remap_type (TREE_TYPE (decl), &ctx->cb);
1755 }
1756 else if (DECL_SIZE (decl)
1757 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1758 {
1759 tree decl2 = DECL_VALUE_EXPR (decl);
1760 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1761 decl2 = TREE_OPERAND (decl2, 0);
1762 gcc_assert (DECL_P (decl2));
1763 fixup_remapped_decl (decl2, ctx, false);
1764 fixup_remapped_decl (decl, ctx, true);
1765 }
1766 else
1767 fixup_remapped_decl (decl, ctx, false);
1768 }
1769 break;
1770
1e8e9920 1771 case OMP_CLAUSE_COPYPRIVATE:
1772 case OMP_CLAUSE_COPYIN:
1773 case OMP_CLAUSE_DEFAULT:
1774 case OMP_CLAUSE_IF:
1775 case OMP_CLAUSE_NUM_THREADS:
bc7bff74 1776 case OMP_CLAUSE_NUM_TEAMS:
1777 case OMP_CLAUSE_THREAD_LIMIT:
1778 case OMP_CLAUSE_DEVICE:
1e8e9920 1779 case OMP_CLAUSE_SCHEDULE:
bc7bff74 1780 case OMP_CLAUSE_DIST_SCHEDULE:
1e8e9920 1781 case OMP_CLAUSE_NOWAIT:
1782 case OMP_CLAUSE_ORDERED:
fd6481cf 1783 case OMP_CLAUSE_COLLAPSE:
1784 case OMP_CLAUSE_UNTIED:
2169f33b 1785 case OMP_CLAUSE_FINAL:
1786 case OMP_CLAUSE_MERGEABLE:
bc7bff74 1787 case OMP_CLAUSE_PROC_BIND:
3d483a94 1788 case OMP_CLAUSE_SAFELEN:
bc7bff74 1789 case OMP_CLAUSE_ALIGNED:
1790 case OMP_CLAUSE_DEPEND:
1791 case OMP_CLAUSE__LOOPTEMP_:
1792 case OMP_CLAUSE_TO:
1793 case OMP_CLAUSE_FROM:
1e8e9920 1794 break;
1795
1796 default:
1797 gcc_unreachable ();
1798 }
1799 }
1800
1801 if (scan_array_reductions)
1802 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
55d6e7cd 1803 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1e8e9920 1804 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1805 {
ab129075 1806 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1807 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1e8e9920 1808 }
fd6481cf 1809 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
75a70cf9 1810 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
ab129075 1811 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1e8e9920 1812}
1813
1814/* Create a new name for omp child function. Returns an identifier. */
1815
1816static GTY(()) unsigned int tmp_ompfn_id_num;
1817
1818static tree
fd6481cf 1819create_omp_child_function_name (bool task_copy)
1e8e9920 1820{
a70a5e2c 1821 return (clone_function_name (current_function_decl,
1822 task_copy ? "_omp_cpyfn" : "_omp_fn"));
1e8e9920 1823}
1824
1825/* Build a decl for the omp child function. It'll not contain a body
1826 yet, just the bare decl. */
1827
1828static void
fd6481cf 1829create_omp_child_function (omp_context *ctx, bool task_copy)
1e8e9920 1830{
1831 tree decl, type, name, t;
1832
fd6481cf 1833 name = create_omp_child_function_name (task_copy);
1834 if (task_copy)
1835 type = build_function_type_list (void_type_node, ptr_type_node,
1836 ptr_type_node, NULL_TREE);
1837 else
1838 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1e8e9920 1839
e60a6f7b 1840 decl = build_decl (gimple_location (ctx->stmt),
1841 FUNCTION_DECL, name, type);
1e8e9920 1842
fd6481cf 1843 if (!task_copy)
1844 ctx->cb.dst_fn = decl;
1845 else
75a70cf9 1846 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1e8e9920 1847
1848 TREE_STATIC (decl) = 1;
1849 TREE_USED (decl) = 1;
1850 DECL_ARTIFICIAL (decl) = 1;
84bfaaeb 1851 DECL_NAMELESS (decl) = 1;
1e8e9920 1852 DECL_IGNORED_P (decl) = 0;
1853 TREE_PUBLIC (decl) = 0;
1854 DECL_UNINLINABLE (decl) = 1;
1855 DECL_EXTERNAL (decl) = 0;
1856 DECL_CONTEXT (decl) = NULL_TREE;
773c5ba7 1857 DECL_INITIAL (decl) = make_node (BLOCK);
bc7bff74 1858 bool target_p = false;
1859 if (lookup_attribute ("omp declare target",
1860 DECL_ATTRIBUTES (current_function_decl)))
1861 target_p = true;
1862 else
1863 {
1864 omp_context *octx;
1865 for (octx = ctx; octx; octx = octx->outer)
1866 if (gimple_code (octx->stmt) == GIMPLE_OMP_TARGET
1867 && gimple_omp_target_kind (octx->stmt)
1868 == GF_OMP_TARGET_KIND_REGION)
1869 {
1870 target_p = true;
1871 break;
1872 }
1873 }
1874 if (target_p)
1875 DECL_ATTRIBUTES (decl)
1876 = tree_cons (get_identifier ("omp declare target"),
1877 NULL_TREE, DECL_ATTRIBUTES (decl));
1e8e9920 1878
e60a6f7b 1879 t = build_decl (DECL_SOURCE_LOCATION (decl),
1880 RESULT_DECL, NULL_TREE, void_type_node);
1e8e9920 1881 DECL_ARTIFICIAL (t) = 1;
1882 DECL_IGNORED_P (t) = 1;
8e5b4ed6 1883 DECL_CONTEXT (t) = decl;
1e8e9920 1884 DECL_RESULT (decl) = t;
1885
e60a6f7b 1886 t = build_decl (DECL_SOURCE_LOCATION (decl),
1887 PARM_DECL, get_identifier (".omp_data_i"), ptr_type_node);
1e8e9920 1888 DECL_ARTIFICIAL (t) = 1;
84bfaaeb 1889 DECL_NAMELESS (t) = 1;
1e8e9920 1890 DECL_ARG_TYPE (t) = ptr_type_node;
773c5ba7 1891 DECL_CONTEXT (t) = current_function_decl;
1e8e9920 1892 TREE_USED (t) = 1;
1893 DECL_ARGUMENTS (decl) = t;
fd6481cf 1894 if (!task_copy)
1895 ctx->receiver_decl = t;
1896 else
1897 {
e60a6f7b 1898 t = build_decl (DECL_SOURCE_LOCATION (decl),
1899 PARM_DECL, get_identifier (".omp_data_o"),
fd6481cf 1900 ptr_type_node);
1901 DECL_ARTIFICIAL (t) = 1;
84bfaaeb 1902 DECL_NAMELESS (t) = 1;
fd6481cf 1903 DECL_ARG_TYPE (t) = ptr_type_node;
1904 DECL_CONTEXT (t) = current_function_decl;
1905 TREE_USED (t) = 1;
86f2ad37 1906 TREE_ADDRESSABLE (t) = 1;
1767a056 1907 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
fd6481cf 1908 DECL_ARGUMENTS (decl) = t;
1909 }
1e8e9920 1910
48e1416a 1911 /* Allocate memory for the function structure. The call to
773c5ba7 1912 allocate_struct_function clobbers CFUN, so we need to restore
1e8e9920 1913 it afterward. */
87d4aa85 1914 push_struct_function (decl);
75a70cf9 1915 cfun->function_end_locus = gimple_location (ctx->stmt);
87d4aa85 1916 pop_cfun ();
1e8e9920 1917}
1918
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.  */

static tree
find_combined_for (gimple_stmt_iterator *gsi_p,
		   bool *handled_ops_p,
		   struct walk_stmt_info *wi)
{
  gimple stmt = gsi_stmt (*gsi_p);

  /* We handle every statement ourselves; the walker should not
     descend into operands.  */
  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    /* WALK_SUBSTMTS expands to the cases that recurse into nested
       statement sequences (binds, try blocks, etc.).  */
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
	{
	  /* Found it.  Record the loop in WI->INFO and return a
	     non-NULL value to terminate the walk.  */
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
1947
/* Scan an OpenMP parallel directive.  Builds the omp_context for the
   region, creates the outlined child function, and scans the clauses
   and body.  GSI points at the GIMPLE_OMP_PARALLEL statement;
   OUTER_CTX is the enclosing OpenMP context or NULL.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gimple stmt = gsi_stmt (*gsi);

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && find_omp_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    {
      gimple for_stmt;
      struct walk_stmt_info wi;

      /* Locate the workshare loop that was combined into this
	 parallel, if any.  */
      memset (&wi, 0, sizeof (wi));
      wi.val_only = true;
      walk_gimple_seq (gimple_omp_body (stmt),
		       find_combined_for, NULL, &wi);
      for_stmt = (gimple) wi.info;
      if (for_stmt)
	{
	  struct omp_for_data fd;
	  extract_omp_for_data (for_stmt, &fd, NULL);
	  /* We need two temporaries with fd.loop.v type (istart/iend)
	     and then (fd.collapse - 1) temporaries with the same
	     type for count2 ... countN-1 vars if not constant.  */
	  size_t count = 2, i;
	  tree type = fd.iter_type;
	  if (fd.collapse > 1
	      && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	    count += fd.collapse - 1;
	  /* Prepend one _looptemp_ clause per required temporary.  */
	  for (i = 0; i < count; i++)
	    {
	      tree temp = create_tmp_var (type, NULL);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__LOOPTEMP_);
	      OMP_CLAUSE_DECL (c) = temp;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	      gimple_omp_parallel_set_clauses (stmt, c);
	    }
	}
    }

  ctx = new_omp_context (stmt, outer_ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  /* Build the .omp_data_s record type that will carry shared/copied
     variables into the child function.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* If no variables ended up in the data-sharing record, drop it;
     otherwise finish laying it out for the child function.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
}
2028
/* Scan an OpenMP task directive.  Like scan_omp_parallel, but tasks
   additionally need a sender-side record (srecord_type) when
   firstprivate copy constructors must run, and must communicate the
   size/alignment of the data block to the runtime.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  /* Ignore task directives with empty bodies.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt)))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  ctx = new_omp_context (stmt, outer_ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  /* Receiver-side data record seen by the task child function.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  /* scan_sharing_clauses may have created a sender-side record; if
     so, name it and build the task-copy function for it.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      /* Nothing to pass: tell the runtime a zero-sized block.  */
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
  else
    {
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    /* Unlink the variable-sized field from *P and append it
	       to the VLA_FIELDS chain via Q.  */
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Record the data block's size and alignment on the task stmt
	 so expansion can pass them to GOMP_task.  */
      t = fold_convert_loc (loc, long_integer_type_node,
			    TYPE_SIZE_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
2114
1e8e9920 2115
/* Scan an OpenMP loop directive.  Creates a context for STMT and
   scans its clauses, pre-body, the per-collapse-level index /
   initial / final / increment operands, and finally the body.  */

static void
scan_omp_for (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;

  ctx = new_omp_context (stmt, outer_ctx);

  scan_sharing_clauses (gimple_omp_for_clauses (stmt), ctx);

  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  /* Remap the loop-control operands of each collapsed level.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
}
2138
2139/* Scan an OpenMP sections directive. */
2140
2141static void
75a70cf9 2142scan_omp_sections (gimple stmt, omp_context *outer_ctx)
1e8e9920 2143{
1e8e9920 2144 omp_context *ctx;
2145
2146 ctx = new_omp_context (stmt, outer_ctx);
75a70cf9 2147 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
ab129075 2148 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 2149}
2150
/* Scan an OpenMP single directive.  A record type (.omp_copy_s) is
   built to hold any copyprivate variables; it is dropped again if no
   fields were added during clause scanning.  */

static void
scan_omp_single (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Only keep the record if clause scanning put something in it.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}
2175
/* Scan an OpenMP target{, data, update} directive.  Only the
   GF_OMP_TARGET_KIND_REGION kind gets an outlined child function;
   data/update kinds only need the mapping record.  */

static void
scan_omp_target (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  int kind = gimple_omp_target_kind (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  if (kind == GF_OMP_TARGET_KIND_REGION)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_target_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were prepended during scanning; restore source order
	 before layout.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
#ifdef ENABLE_CHECKING
      /* All map fields are expected to share one alignment; verify
	 that in checking builds.  */
      tree field;
      unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
      for (field = TYPE_FIELDS (ctx->record_type);
	   field;
	   field = DECL_CHAIN (field))
	gcc_assert (DECL_ALIGN (field) == align);
#endif
      layout_type (ctx->record_type);
      if (kind == GF_OMP_TARGET_KIND_REGION)
	fixup_child_record_type (ctx);
    }
}
2223
2224/* Scan an OpenMP teams directive. */
2225
2226static void
2227scan_omp_teams (gimple stmt, omp_context *outer_ctx)
2228{
2229 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2230 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2231 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2232}
1e8e9920 2233
/* Check OpenMP nesting restrictions.  STMT is the construct (or a
   GOMP_* builtin call such as barrier/cancel) being checked; CTX is
   the innermost enclosing OpenMP context, or NULL at toplevel.
   Returns true if the nesting is valid; otherwise emits a diagnostic
   and returns false (the caller then deletes STMT).  */
static bool
check_omp_nesting_restrictions (gimple stmt, omp_context *ctx)
{
  if (ctx != NULL)
    {
      /* Nothing may be nested inside a simd region.  */
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
	{
	  error_at (gimple_location (stmt),
		    "OpenMP constructs may not be nested inside simd region");
	  return false;
	}
      /* Inside teams, only distribute and parallel are allowed.  */
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (stmt)
		   != GF_OMP_FOR_KIND_DISTRIBUTE))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only distribute or parallel constructs are allowed to "
			"be closely nested inside teams construct");
	      return false;
	    }
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      /* simd loops are exempt from the worksharing-nesting checks.  */
      if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_KIND_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"distribute construct must be closely nested inside "
			"teams construct");
	      return false;
	    }
	  return true;
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      /* Handle #pragma omp cancel / cancellation point, which reach
	 here as calls to the corresponding GOMP builtins.  */
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
			== BUILT_IN_GOMP_CANCEL
			? "#pragma omp cancel"
			: "#pragma omp cancellation point");
	      return false;
	    }
	  /* The first argument encodes the construct kind being
	     cancelled: 1 parallel, 2 for, 4 sections, 8 taskgroup.  */
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "#pragma omp parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "#pragma omp for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  /* Cancellation interacts badly with nowait/ordered
		     loops; warn but do not reject.  */
		  if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<nowait%> for construct");
		  if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<#pragma omp cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "#pragma omp sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (find_omp_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      /* Inside an individual section, the cancellable
			 flag belongs on the enclosing sections ctx.  */
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (find_omp_clause (gimple_omp_sections_clauses
								(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<#pragma omp cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
		bad = "#pragma omp task";
	      else
		ctx->cancellable = true;
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
			== BUILT_IN_GOMP_CANCEL
			? "#pragma omp cancel"
			: "#pragma omp cancellation point", kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      /* Worksharing regions (and barriers, which arrive as calls)
	 may not be closely nested in other worksharing or similar
	 regions; walk outward until a parallel is reached.  */
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, critical, ordered, master or "
			  "explicit task region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, critical, ordered, master or explicit "
		      "task region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	    return true;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "master region may not be closely nested inside "
		      "of work-sharing or explicit task region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	    return true;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "ordered region may not be closely nested inside "
		      "of critical or explicit task region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    /* ordered requires the binding loop to carry an ordered
	       clause.  */
	    if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED) == NULL)
	      {
		error_at (gimple_location (stmt),
			  "ordered region must be closely nested inside "
			  "a loop region with an ordered clause");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_PARALLEL:
	    /* Reached a parallel without passing a loop: ordered is
	       not bound to any loop region.  */
	    error_at (gimple_location (stmt),
		      "ordered region must be closely nested inside "
		      "a loop region with an ordered clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      /* Reject recursive entry into a critical region of the same
	 name (would self-deadlock).  */
      for (; ctx != NULL; ctx = ctx->outer)
	if (gimple_code (ctx->stmt) == GIMPLE_OMP_CRITICAL
	    && (gimple_omp_critical_name (stmt)
		== gimple_omp_critical_name (ctx->stmt)))
	  {
	    error_at (gimple_location (stmt),
		      "critical region may not be nested inside a critical "
		      "region with the same name");
	    return false;
	  }
      break;
    case GIMPLE_OMP_TEAMS:
      /* teams must be the direct child of a target region.  */
      if (ctx == NULL
	  || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	  || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
	{
	  error_at (gimple_location (stmt),
		    "teams construct not closely nested inside of target "
		    "region");
	  return false;
	}
      break;
    default:
      break;
    }
  return true;
}
2497
2498
/* Helper function scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OpenMP directives in TP.  Remaps decls and types through
   the context's copy_body callback data so that references inside a
   region point at the region's own copies.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	*tp = remap_decl (t, &ctx->cb);
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      /* The node's type may itself need remapping (e.g. a
		 variable-sized type referencing remapped decls).  */
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  /* INTEGER_CSTs are shared, so build a fresh one
		     rather than mutating the type in place.  */
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = build_int_cst_wide (tem,
					      TREE_INT_CST_LOW (t),
					      TREE_INT_CST_HIGH (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
2546
f2697631 2547/* Return true if FNDECL is a setjmp or a longjmp. */
2548
2549static bool
2550setjmp_or_longjmp_p (const_tree fndecl)
2551{
2552 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2553 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
2554 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
2555 return true;
2556
2557 tree declname = DECL_NAME (fndecl);
2558 if (!declname)
2559 return false;
2560 const char *name = IDENTIFIER_POINTER (declname);
2561 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
2562}
2563
75a70cf9 2564
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OpenMP directives in
   the current statement in GSI.  Validates nesting (deleting invalid
   constructs), then dispatches to the per-construct scanners.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the OpenMP nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp cannot appear inside simd regions.  */
	  if (setjmp_or_longjmp_p (fndecl)
	      && ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside simd construct");
	    }
	  /* Synchronization builtins are subject to the same nesting
	     checks as the pragmas they implement.  */
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	}
    }
  if (remove)
    {
      /* Invalid construct: replace with a no-op so compilation can
	 continue after the diagnostic.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      scan_omp_for (stmt, ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (stmt, ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (stmt, ctx);
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      /* These need no dedicated scanner; just open a context and
	 recurse into the body.  */
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      scan_omp_target (stmt, ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      scan_omp_teams (stmt, ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Let the walker descend into the bind body, but record the
	   bind's own vars as identity mappings first so remapping
	   leaves them alone.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (stmt); var ; var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
2683
2684
75a70cf9 2685/* Scan all the statements starting at the current statement. CTX
2686 contains context information about the OpenMP directives and
2687 clauses found during the scan. */
1e8e9920 2688
2689static void
ab129075 2690scan_omp (gimple_seq *body_p, omp_context *ctx)
1e8e9920 2691{
2692 location_t saved_location;
2693 struct walk_stmt_info wi;
2694
2695 memset (&wi, 0, sizeof (wi));
1e8e9920 2696 wi.info = ctx;
1e8e9920 2697 wi.want_locations = true;
2698
2699 saved_location = input_location;
ab129075 2700 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
1e8e9920 2701 input_location = saved_location;
2702}
2703\f
2704/* Re-gimplification and code generation routines. */
2705
2706/* Build a call to GOMP_barrier. */
2707
bc7bff74 2708static gimple
2709build_omp_barrier (tree lhs)
2710{
2711 tree fndecl = builtin_decl_explicit (lhs ? BUILT_IN_GOMP_BARRIER_CANCEL
2712 : BUILT_IN_GOMP_BARRIER);
2713 gimple g = gimple_build_call (fndecl, 0);
2714 if (lhs)
2715 gimple_call_set_lhs (g, lhs);
2716 return g;
1e8e9920 2717}
2718
2719/* If a context was created for STMT when it was scanned, return it. */
2720
2721static omp_context *
75a70cf9 2722maybe_lookup_ctx (gimple stmt)
1e8e9920 2723{
2724 splay_tree_node n;
2725 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
2726 return n ? (omp_context *) n->value : NULL;
2727}
2728
773c5ba7 2729
/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs showed for clarity):

	iD.1562 = 0;
	#omp parallel shared(iD.1562)		-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	     iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

  	outer parallel		.omp_data_s.1.i -> iD.1562
	inner parallel		.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

  	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)		-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;		-> **
	  #omp parallel shared(iD.1562)		-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;


    ** This is a problem.  The symbol iD.1562 cannot be referenced
       inside the body of the outer parallel region.  But since we are
       emitting this copy operation while expanding the inner parallel
       directive, we need to access the CTX structure of the outer
       parallel directive to get the correct mapping:

	  .omp_data_o.2.i = .omp_data_i.1->i

    Since there may be other workshare or parallel directives enclosing
    the parallel directive, it may be necessary to walk up the context
    parent chain.  This is not a problem in general because nested
    parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  /* Walk outward until some enclosing context has a mapping.  */
  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  /* A missing mapping is only legitimate for globals (or outside a
     nested region); otherwise it's a scan-phase bug.  */
  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}
2792
2793
f49d7bb5 2794/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
2795 in outer contexts. */
2796
2797static tree
2798maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2799{
2800 tree t = NULL;
2801 omp_context *up;
2802
87b31375 2803 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2804 t = maybe_lookup_decl (decl, up);
f49d7bb5 2805
2806 return t ? t : decl;
2807}
2808
2809
/* Construct the initialization value for reduction CLAUSE of type
   TYPE, i.e. the identity element of the clause's reduction operator
   (0 for +, 1 for *, all-ones for &, +/-infinity or the type's
   extreme value for min/max, etc.).  */

tree
omp_reduction_init (tree clause, tree type)
{
  location_t loc = OMP_CLAUSE_LOCATION (clause);
  switch (OMP_CLAUSE_REDUCTION_CODE (clause))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      /* Identity is zero.  */
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      /* Identity is one.  */
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      /* Identity is all bits set.  */
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  /* Use -inf when available, otherwise the most negative
	     finite value of the type.  */
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (TYPE_MODE (type)))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  /* Use +inf when available, otherwise the largest finite
	     value of the type.  */
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (TYPE_MODE (type)))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
2876
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  If the clause names an explicit alignment,
   return it; otherwise compute the largest alignment of any vector
   type the target's preferred SIMD modes would use.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  enum machine_mode mode, vmode;
  int vs = targetm.vectorize.autovectorize_vector_sizes ();
  if (vs)
    /* Keep only the largest supported vector size (highest set bit).  */
    vs = 1 << floor_log2 (vs);
  /* Scalar-class / vector-class pairs: loop i=0 handles integers,
     i=2 handles floats.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
	 mode != VOIDmode;
	 mode = GET_MODE_WIDER_MODE (mode))
      {
	vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Widen the vector mode up to the target's preferred size.  */
	while (vs
	       && GET_MODE_SIZE (vmode) < vs
	       && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
	  vmode = GET_MODE_2XWIDER_MODE (vmode);

	/* Build the corresponding vector type and skip modes the
	   frontend cannot represent faithfully.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type (type, GET_MODE_SIZE (vmode)
				  / GET_MODE_SIZE (mode));
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
2919
/* Return maximum possible vectorization factor for the target.
   Returns 1 when vectorization is disabled (not optimizing, -Og, or
   loop vectorization explicitly turned off).  */

static int
omp_max_vf (void)
{
  if (!optimize
      || optimize_debug
      /* Treat vectorization as off only when the user explicitly
	 disabled it; the bare default is handled elsewhere.  */
      || (!flag_tree_loop_vectorize
	  && (global_options_set.x_flag_tree_loop_vectorize
	      || global_options_set.x_flag_tree_vectorize)))
    return 1;

  int vs = targetm.vectorize.autovectorize_vector_sizes ();
  if (vs)
    {
      /* Use the largest supported vector size in bytes.  */
      vs = 1 << floor_log2 (vs);
      return vs;
    }
  /* Fall back to the element count of the preferred QImode vector.  */
  enum machine_mode vqimode = targetm.vectorize.preferred_simd_mode (QImode);
  if (GET_MODE_CLASS (vqimode) == MODE_VECTOR_INT)
    return GET_MODE_NUNITS (vqimode);
  return 1;
}
2943
2944/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
2945 privatization. */
2946
2947static bool
2948lower_rec_simd_input_clauses (tree new_var, omp_context *ctx, int &max_vf,
2949 tree &idx, tree &lane, tree &ivar, tree &lvar)
2950{
2951 if (max_vf == 0)
2952 {
2953 max_vf = omp_max_vf ();
2954 if (max_vf > 1)
2955 {
2956 tree c = find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2957 OMP_CLAUSE_SAFELEN);
2958 if (c
2959 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c), max_vf) == -1)
fcb97e84 2960 max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3d483a94 2961 }
2962 if (max_vf > 1)
2963 {
2964 idx = create_tmp_var (unsigned_type_node, NULL);
2965 lane = create_tmp_var (unsigned_type_node, NULL);
2966 }
2967 }
2968 if (max_vf == 1)
2969 return false;
2970
2971 tree atype = build_array_type_nelts (TREE_TYPE (new_var), max_vf);
2972 tree avar = create_tmp_var_raw (atype, NULL);
2973 if (TREE_ADDRESSABLE (new_var))
2974 TREE_ADDRESSABLE (avar) = 1;
2975 DECL_ATTRIBUTES (avar)
2976 = tree_cons (get_identifier ("omp simd array"), NULL,
2977 DECL_ATTRIBUTES (avar));
2978 gimple_add_tmp_var (avar);
2979 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, idx,
2980 NULL_TREE, NULL_TREE);
2981 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, lane,
2982 NULL_TREE, NULL_TREE);
bc7bff74 2983 if (DECL_P (new_var))
2984 {
2985 SET_DECL_VALUE_EXPR (new_var, lvar);
2986 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
2987 }
3d483a94 2988 return true;
2989}
2990
/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  CTX is the lowering context of the
   construct; FD, when non-NULL, describes the associated omp for loop.  */

static void
lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
			 omp_context *ctx, struct omp_for_data *fd)
{
  tree c, dtor, copyin_seq, x, ptr;
  bool copyin_by_ref = false;
  bool lastprivate_firstprivate = false;
  bool reduction_omp_orig_ref = false;
  int pass;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD);
  int max_vf = 0;
  tree lane = NULL_TREE, idx = NULL_TREE;
  tree ivar = NULL_TREE, lvar = NULL_TREE;
  /* llist[0] collects per-lane initialization, llist[1] per-lane
     destruction/merge code for simd array privatization.  */
  gimple_seq llist[2] = { NULL, NULL };

  copyin_seq = NULL;

  /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
     with data sharing clauses referencing variable sized vars.  That
     is unnecessarily hard to support and very unlikely to result in
     vectorized code anyway.  */
  if (is_simd)
    for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
	    max_vf = 1;
	  break;
	default:
	  continue;
	}

  /* Do all the fixed sized types in the first pass, and the variable sized
     types in the second pass.  This makes sure that the scalar arguments to
     the variable sized types are processed before we use them in the
     variable sized operations.  */
  for (pass = 0; pass < 2; ++pass)
    {
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	{
	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
	  tree var, new_var;
	  bool by_ref;
	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);

	  /* First switch: filter out clauses not handled here or not
	     handled in this pass; ALIGNED is handled entirely here.  */
	  switch (c_kind)
	    {
	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
		continue;
	      break;
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
		continue;
	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
		{
		  gcc_assert (is_global_var (OMP_CLAUSE_DECL (c)));
		  continue;
		}
	      /* FALLTHRU */
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_COPYIN:
	    case OMP_CLAUSE_LINEAR:
	      break;
	    case OMP_CLAUSE_REDUCTION:
	      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
		reduction_omp_orig_ref = true;
	      break;
	    case OMP_CLAUSE__LOOPTEMP_:
	      /* Handle _looptemp_ clauses only on parallel.  */
	      if (fd)
		continue;
	      break;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		{
		  lastprivate_firstprivate = true;
		  if (pass != 0)
		    continue;
		}
	      break;
	    case OMP_CLAUSE_ALIGNED:
	      if (pass == 0)
		continue;
	      var = OMP_CLAUSE_DECL (c);
	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
		  && !is_global_var (var))
		{
		  /* Local pointer: new_var = __builtin_assume_aligned
		     (new_var, alignment).  */
		  new_var = maybe_lookup_decl (var, ctx);
		  if (new_var == NULL_TREE)
		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  x = build_call_expr_loc (clause_loc, x, 2, new_var,
					   omp_clause_aligned_alignment (c));
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
		  gimplify_and_add (x, ilist);
		}
	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		       && is_global_var (var))
		{
		  /* Global array: take its address through
		     __builtin_assume_aligned, store it in a pointer
		     temporary and make new_var dereference that.  */
		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
		  new_var = lookup_decl (var, ctx);
		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
		  t = build_fold_addr_expr_loc (clause_loc, t);
		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
		  t = build_call_expr_loc (clause_loc, t2, 2, t,
					   omp_clause_aligned_alignment (c));
		  t = fold_convert_loc (clause_loc, ptype, t);
		  x = create_tmp_var (ptype, NULL);
		  t = build2 (MODIFY_EXPR, ptype, x, t);
		  gimplify_and_add (t, ilist);
		  t = build_simple_mem_ref_loc (clause_loc, x);
		  SET_DECL_VALUE_EXPR (new_var, t);
		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		}
	      continue;
	    default:
	      continue;
	    }

	  new_var = var = OMP_CLAUSE_DECL (c);
	  if (c_kind != OMP_CLAUSE_COPYIN)
	    new_var = lookup_decl (var, ctx);

	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
	    {
	      if (pass != 0)
		continue;
	    }
	  else if (is_variable_sized (var))
	    {
	      /* For variable sized types, we need to allocate the
		 actual storage here.  Call alloca and store the
		 result in the pointer decl that we created elsewhere.  */
	      if (pass == 0)
		continue;

	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
		{
		  gimple stmt;
		  tree tmp, atmp;

		  ptr = DECL_VALUE_EXPR (new_var);
		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
		  ptr = TREE_OPERAND (ptr, 0);
		  gcc_assert (DECL_P (ptr));
		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));

		  /* void *tmp = __builtin_alloca */
		  atmp = builtin_decl_explicit (BUILT_IN_ALLOCA);
		  stmt = gimple_build_call (atmp, 1, x);
		  tmp = create_tmp_var_raw (ptr_type_node, NULL);
		  gimple_add_tmp_var (tmp);
		  gimple_call_set_lhs (stmt, tmp);

		  gimple_seq_add_stmt (ilist, stmt);

		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
		  gimplify_assign (ptr, x, ilist);
		}
	    }
	  else if (is_reference (var))
	    {
	      /* For references that are being privatized for Fortran,
		 allocate new backing storage for the new pointer
		 variable.  This allows us to avoid changing all the
		 code that expects a pointer to something that expects
		 a direct variable.  */
	      if (pass == 0)
		continue;

	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
		{
		  x = build_receiver_ref (var, false, ctx);
		  x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else if (TREE_CONSTANT (x))
		{
		  /* Constant size: back the reference with a stack
		     temporary.  */
		  const char *name = NULL;
		  if (DECL_NAME (var))
		    name = IDENTIFIER_POINTER (DECL_NAME (new_var));

		  x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					  name);
		  gimple_add_tmp_var (x);
		  TREE_ADDRESSABLE (x) = 1;
		  x = build_fold_addr_expr_loc (clause_loc, x);
		}
	      else
		{
		  /* Non-constant size: alloca the backing storage.  */
		  tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA);
		  x = build_call_expr_loc (clause_loc, atmp, 1, x);
		}

	      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
	      gimplify_assign (new_var, x, ilist);

	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	    }
	  else if (c_kind == OMP_CLAUSE_REDUCTION
		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      if (pass == 0)
		continue;
	    }
	  else if (pass != 0)
	    continue;

	  /* Second switch: emit the actual initialization/destruction
	     code for the clause.  */
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	      /* Ignore shared directives in teams construct.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
		continue;
	      /* Shared global vars are just accessed directly.  */
	      if (is_global_var (new_var))
		break;
	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
		 needs to be delayed until after fixup_child_record_type so
		 that we get the correct type during the dereference.  */
	      by_ref = use_pointer_for_field (var, ctx);
	      x = build_receiver_ref (var, by_ref, ctx);
	      SET_DECL_VALUE_EXPR (new_var, x);
	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;

	      /* ??? If VAR is not passed by reference, and the variable
		 hasn't been initialized yet, then we'll get a warning for
		 the store into the omp_data_s structure.  Ideally, we'd be
		 able to notice this and not store anything at all, but
		 we're generating code too early.  Suppress the warning.  */
	      if (!by_ref)
		TREE_NO_WARNING (var) = 1;
	      break;

	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		break;
	      /* FALLTHRU */

	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
		x = build_outer_var_ref (var, ctx);
	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
		{
		  if (is_task_ctx (ctx))
		    x = build_receiver_ref (var, false, ctx);
		  else
		    x = build_outer_var_ref (var, ctx);
		}
	      else
		x = NULL;
	    do_private:
	      tree nx;
	      nx = lang_hooks.decls.omp_clause_default_ctor (c, new_var, x);
	      if (is_simd)
		{
		  /* For simd, privatize into an "omp simd array" when a
		     ctor/dtor is needed or for lastprivate.  */
		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
		  if ((TREE_ADDRESSABLE (new_var) || nx || y
		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
		      && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
						       idx, lane, ivar, lvar))
		    {
		      if (nx)
			x = lang_hooks.decls.omp_clause_default_ctor
						(c, unshare_expr (ivar), x);
		      if (nx && x)
			gimplify_and_add (x, &llist[0]);
		      if (y)
			{
			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
			  if (y)
			    {
			      gimple_seq tseq = NULL;

			      dtor = y;
			      gimplify_stmt (&dtor, &tseq);
			      gimple_seq_add_seq (&llist[1], tseq);
			    }
			}
		      break;
		    }
		}
	      if (nx)
		gimplify_and_add (nx, ilist);
	      /* FALLTHRU */

	    do_dtor:
	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;

		  dtor = x;
		  gimplify_stmt (&dtor, &tseq);
		  gimple_seq_add_seq (dlist, tseq);
		}
	      break;

	    case OMP_CLAUSE_LINEAR:
	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		goto do_firstprivate;
	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		x = NULL;
	      else
		x = build_outer_var_ref (var, ctx);
	      goto do_private;

	    case OMP_CLAUSE_FIRSTPRIVATE:
	      if (is_task_ctx (ctx))
		{
		  if (is_reference (var) || is_variable_sized (var))
		    goto do_dtor;
		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
									  ctx))
			   || use_pointer_for_field (var, NULL))
		    {
		      x = build_receiver_ref (var, false, ctx);
		      SET_DECL_VALUE_EXPR (new_var, x);
		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		      goto do_dtor;
		    }
		}
	    do_firstprivate:
	      x = build_outer_var_ref (var, ctx);
	      if (is_simd)
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		      && gimple_omp_for_combined_into_p (ctx->stmt))
		    {
		      /* Combined distribute/for simd: adjust the linear
			 start value by the _looptemp_ iteration count
			 times the linear step.  */
		      tree stept = POINTER_TYPE_P (TREE_TYPE (x))
				   ? sizetype : TREE_TYPE (x);
		      tree t = fold_convert (stept,
					     OMP_CLAUSE_LINEAR_STEP (c));
		      tree c = find_omp_clause (clauses,
						OMP_CLAUSE__LOOPTEMP_);
		      gcc_assert (c);
		      tree l = OMP_CLAUSE_DECL (c);
		      if (fd->collapse == 1)
			{
			  /* Convert the logical iteration variable into
			     an iteration count: (l - n1) / step.  */
			  tree n1 = fd->loop.n1;
			  tree step = fd->loop.step;
			  tree itype = TREE_TYPE (l);
			  if (POINTER_TYPE_P (itype))
			    itype = signed_type_for (itype);
			  l = fold_build2 (MINUS_EXPR, itype, l, n1);
			  if (TYPE_UNSIGNED (itype)
			      && fd->loop.cond_code == GT_EXPR)
			    l = fold_build2 (TRUNC_DIV_EXPR, itype,
					     fold_build1 (NEGATE_EXPR,
							  itype, l),
					     fold_build1 (NEGATE_EXPR,
							  itype, step));
			  else
			    l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
			}
		      t = fold_build2 (MULT_EXPR, stept,
				       fold_convert (stept, l), t);
		      if (POINTER_TYPE_P (TREE_TYPE (x)))
			x = fold_build2 (POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, t);
		      else
			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
		    }

		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
		       || TREE_ADDRESSABLE (new_var))
		      && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
						       idx, lane, ivar, lvar))
		    {
		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
			{
			  /* Addressable linear var: keep a scalar
			     induction variable IV, copy it into the
			     last-lane slot at the top of the body and
			     step it there too.  */
			  tree iv = create_tmp_var (TREE_TYPE (new_var), NULL);
			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
			  gimplify_and_add (x, ilist);
			  gimple_stmt_iterator gsi
			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
			  gimple g
			    = gimple_build_assign (unshare_expr (lvar), iv);
			  gsi_insert_before_without_update (&gsi, g,
							    GSI_SAME_STMT);
			  tree stept = POINTER_TYPE_P (TREE_TYPE (x))
				       ? sizetype : TREE_TYPE (x);
			  tree t = fold_convert (stept,
						 OMP_CLAUSE_LINEAR_STEP (c));
			  enum tree_code code = PLUS_EXPR;
			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
			    code = POINTER_PLUS_EXPR;
			  g = gimple_build_assign_with_ops (code, iv, iv, t);
			  gsi_insert_before_without_update (&gsi, g,
							    GSI_SAME_STMT);
			  break;
			}
		      x = lang_hooks.decls.omp_clause_copy_ctor
						(c, unshare_expr (ivar), x);
		      gimplify_and_add (x, &llist[0]);
		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		      if (x)
			{
			  gimple_seq tseq = NULL;

			  dtor = x;
			  gimplify_stmt (&dtor, &tseq);
			  gimple_seq_add_seq (&llist[1], tseq);
			}
		      break;
		    }
		}
	      x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
	      gimplify_and_add (x, ilist);
	      goto do_dtor;

	    case OMP_CLAUSE__LOOPTEMP_:
	      /* Copy the outer loop temporary into the child.  */
	      gcc_assert (is_parallel_ctx (ctx));
	      x = build_outer_var_ref (var, ctx);
	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
	      gimplify_and_add (x, ilist);
	      break;

	    case OMP_CLAUSE_COPYIN:
	      by_ref = use_pointer_for_field (var, NULL);
	      x = build_receiver_ref (var, by_ref, ctx);
	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
	      append_to_statement_list (x, &copyin_seq);
	      copyin_by_ref |= by_ref;
	      break;

	    case OMP_CLAUSE_REDUCTION:
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* User defined reduction: lower the clause's recorded
		     init and merge sequences with the placeholder bound
		     to the outer variable.  */
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  gimple tseq;
		  x = build_outer_var_ref (var, ctx);

		  if (is_reference (var)
		      && !useless_type_conversion_p (TREE_TYPE (placeholder),
						     TREE_TYPE (x)))
		    x = build_fold_addr_expr_loc (clause_loc, x);
		  SET_DECL_VALUE_EXPR (placeholder, x);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  tree new_vard = new_var;
		  if (is_reference (var))
		    {
		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
		      new_vard = TREE_OPERAND (new_var, 0);
		      gcc_assert (DECL_P (new_vard));
		    }
		  if (is_simd
		      && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
						       idx, lane, ivar, lvar))
		    {
		      /* Temporarily point the privatized decl at the
			 per-lane element while lowering init/merge.  */
		      if (new_vard == new_var)
			{
			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
			  SET_DECL_VALUE_EXPR (new_var, ivar);
			}
		      else
			{
			  SET_DECL_VALUE_EXPR (new_vard,
					       build_fold_addr_expr (ivar));
			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
			}
		      x = lang_hooks.decls.omp_clause_default_ctor
				(c, unshare_expr (ivar),
				 build_outer_var_ref (var, ctx));
		      if (x)
			gimplify_and_add (x, &llist[0]);
		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
			{
			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			  lower_omp (&tseq, ctx);
			  gimple_seq_add_seq (&llist[0], tseq);
			}
		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (&llist[1], tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		      if (new_vard == new_var)
			SET_DECL_VALUE_EXPR (new_var, lvar);
		      else
			SET_DECL_VALUE_EXPR (new_vard,
					     build_fold_addr_expr (lvar));
		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
		      if (x)
			{
			  tseq = NULL;
			  dtor = x;
			  gimplify_stmt (&dtor, &tseq);
			  gimple_seq_add_seq (&llist[1], tseq);
			}
		      break;
		    }
		  x = lang_hooks.decls.omp_clause_default_ctor
				(c, new_var, unshare_expr (x));
		  if (x)
		    gimplify_and_add (x, ilist);
		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		    {
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (ilist, tseq);
		    }
		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		  if (is_simd)
		    {
		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		      lower_omp (&tseq, ctx);
		      gimple_seq_add_seq (dlist, tseq);
		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    }
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		  goto do_dtor;
		}
	      else
		{
		  x = omp_reduction_init (c, TREE_TYPE (new_var));
		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
		  if (is_simd
		      && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
						       idx, lane, ivar, lvar))
		    {
		      enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
		      tree ref = build_outer_var_ref (var, ctx);

		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);

		      /* reduction(-:var) sums up the partial results, so it
			 acts identically to reduction(+:var).  */
		      if (code == MINUS_EXPR)
			code = PLUS_EXPR;

		      x = build2 (code, TREE_TYPE (ref), ref, ivar);
		      ref = build_outer_var_ref (var, ctx);
		      gimplify_assign (ref, x, &llist[1]);
		    }
		  else
		    {
		      gimplify_assign (new_var, x, ilist);
		      if (is_simd)
			gimplify_assign (build_outer_var_ref (var, ctx),
					 new_var, dlist);
		    }
		}
	      break;

	    default:
	      gcc_unreachable ();
	    }
	}
    }

  /* If any simd array privatization happened, emit the GOMP_SIMD_LANE
     marker and wrap the collected per-lane init (llist[0]) and
     merge/destroy (llist[1]) sequences in loops over all lanes.  */
  if (lane)
    {
      tree uid = create_tmp_var (ptr_type_node, "simduid");
      /* Don't want uninit warnings on simduid, it is always uninitialized,
	 but we use it not for the value, but for the DECL_UID only.  */
      TREE_NO_WARNING (uid) = 1;
      gimple g
	= gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
      gimple_call_set_lhs (g, lane);
      gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
      gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
      OMP_CLAUSE__SIMDUID__DECL (c) = uid;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
      gimple_omp_for_set_clauses (ctx->stmt, c);
      g = gimple_build_assign_with_ops (INTEGER_CST, lane,
					build_int_cst (unsigned_type_node, 0),
					NULL_TREE);
      gimple_seq_add_stmt (ilist, g);
      for (int i = 0; i < 2; i++)
	if (llist[i])
	  {
	    /* Emit: for (idx = 0; idx < GOMP_SIMD_VF (uid); idx++)
			 llist[i];  */
	    tree vf = create_tmp_var (unsigned_type_node, NULL);
	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
	    gimple_call_set_lhs (g, vf);
	    gimple_seq *seq = i == 0 ? ilist : dlist;
	    gimple_seq_add_stmt (seq, g);
	    tree t = build_int_cst (unsigned_type_node, 0);
	    g = gimple_build_assign_with_ops (INTEGER_CST, idx, t, NULL_TREE);
	    gimple_seq_add_stmt (seq, g);
	    tree body = create_artificial_label (UNKNOWN_LOCATION);
	    tree header = create_artificial_label (UNKNOWN_LOCATION);
	    tree end = create_artificial_label (UNKNOWN_LOCATION);
	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
	    gimple_seq_add_stmt (seq, gimple_build_label (body));
	    gimple_seq_add_seq (seq, llist[i]);
	    t = build_int_cst (unsigned_type_node, 1);
	    g = gimple_build_assign_with_ops (PLUS_EXPR, idx, idx, t);
	    gimple_seq_add_stmt (seq, g);
	    gimple_seq_add_stmt (seq, gimple_build_label (header));
	    g = gimple_build_cond (LT_EXPR, idx, vf, body, end);
	    gimple_seq_add_stmt (seq, g);
	    gimple_seq_add_stmt (seq, gimple_build_label (end));
	  }
    }

  /* The copyin sequence is not to be executed by the main thread, since
     that would result in self-copies.  Perhaps not visible to scalars,
     but it certainly is to C++ operator=.  */
  if (copyin_seq)
    {
      x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
			   0);
      x = build2 (NE_EXPR, boolean_type_node, x,
		  build_int_cst (TREE_TYPE (x), 0));
      x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
      gimplify_and_add (x, ilist);
    }

  /* If any copyin variable is passed by reference, we must ensure the
     master thread doesn't modify it before it is copied over in all
     threads.  Similarly for variables in both firstprivate and
     lastprivate clauses we need to ensure the lastprivate copying
     happens after firstprivate copying in all threads.  And similarly
     for UDRs if initializer expression refers to omp_orig.  */
  if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
    {
      /* Don't add any barrier for #pragma omp simd or
	 #pragma omp distribute.  */
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
	  || gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_FOR)
	gimple_seq_add_stmt (ilist, build_omp_barrier (NULL_TREE));
    }

  /* If max_vf is non-zero, then we can use only a vectorization factor
     up to the max_vf we chose.  So stick it into the safelen clause.  */
  if (max_vf)
    {
      tree c = find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
				OMP_CLAUSE_SAFELEN);
      if (c == NULL_TREE
	  || compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
			       max_vf) == 1)
	{
	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
						       max_vf);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
	  gimple_omp_for_set_clauses (ctx->stmt, c);
	}
    }
}
3648
773c5ba7 3649
/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  The copy-out statements are appended to STMT_LIST.  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  /* Guard the copy-out code with PREDICATE (e.g. "this thread executed
     the last iteration"), branching around it when false.  */
  if (predicate)
    {
      gimple stmt;
      tree label_true, arm1, arm2;

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      arm1 = TREE_OPERAND (predicate, 0);
      arm2 = TREE_OPERAND (predicate, 1);
      gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
      gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
      stmt = gimple_build_cond (TREE_CODE (predicate), arm1, arm2,
				label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  /* For simd loops, fetch the simduid so we can query the last lane.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
    {
      simduid = find_omp_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  new_var = lookup_decl (var, ctx);

	  /* If the var was privatized into an "omp simd array", copy
	     out of the element indexed by GOMP_SIMD_LAST_LANE.  */
	  if (simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  /* Compute the last lane only once per construct.  */
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node, NULL);
		      gimple g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		}
	    }

	  /* Splice in any deferred lastprivate finalization code.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }

	  /* outer_var = private copy.  */
	  x = build_outer_var_ref (var, ctx);
	  if (is_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, stmt_list);
	}
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
}
3786
773c5ba7 3787
1e8e9920 3788/* Generate code to implement the REDUCTION clauses. */
3789
3790static void
75a70cf9 3791lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
1e8e9920 3792{
75a70cf9 3793 gimple_seq sub_seq = NULL;
3794 gimple stmt;
3795 tree x, c;
1e8e9920 3796 int count = 0;
3797
3d483a94 3798 /* SIMD reductions are handled in lower_rec_input_clauses. */
3799 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
f2697631 3800 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
3d483a94 3801 return;
3802
1e8e9920 3803 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
3804 update in that case, otherwise use a lock. */
3805 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
55d6e7cd 3806 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
1e8e9920 3807 {
3808 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3809 {
bc7bff74 3810 /* Never use OMP_ATOMIC for array reductions or UDRs. */
1e8e9920 3811 count = -1;
3812 break;
3813 }
3814 count++;
3815 }
3816
3817 if (count == 0)
3818 return;
3819
3820 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3821 {
3822 tree var, ref, new_var;
3823 enum tree_code code;
389dd41b 3824 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 3825
55d6e7cd 3826 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
1e8e9920 3827 continue;
3828
3829 var = OMP_CLAUSE_DECL (c);
3830 new_var = lookup_decl (var, ctx);
3831 if (is_reference (var))
182cf5a9 3832 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
1e8e9920 3833 ref = build_outer_var_ref (var, ctx);
3834 code = OMP_CLAUSE_REDUCTION_CODE (c);
773c5ba7 3835
3836 /* reduction(-:var) sums up the partial results, so it acts
3837 identically to reduction(+:var). */
1e8e9920 3838 if (code == MINUS_EXPR)
3839 code = PLUS_EXPR;
3840
3841 if (count == 1)
3842 {
389dd41b 3843 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
1e8e9920 3844
3845 addr = save_expr (addr);
3846 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
389dd41b 3847 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
1e8e9920 3848 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
75a70cf9 3849 gimplify_and_add (x, stmt_seqp);
1e8e9920 3850 return;
3851 }
3852
3853 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3854 {
3855 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3856
bc7bff74 3857 if (is_reference (var)
3858 && !useless_type_conversion_p (TREE_TYPE (placeholder),
3859 TREE_TYPE (ref)))
389dd41b 3860 ref = build_fold_addr_expr_loc (clause_loc, ref);
1e8e9920 3861 SET_DECL_VALUE_EXPR (placeholder, ref);
3862 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
e3a19533 3863 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
75a70cf9 3864 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
3865 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
1e8e9920 3866 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
3867 }
3868 else
3869 {
3870 x = build2 (code, TREE_TYPE (ref), ref, new_var);
3871 ref = build_outer_var_ref (var, ctx);
75a70cf9 3872 gimplify_assign (ref, x, &sub_seq);
1e8e9920 3873 }
3874 }
3875
b9a16870 3876 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
3877 0);
75a70cf9 3878 gimple_seq_add_stmt (stmt_seqp, stmt);
1e8e9920 3879
75a70cf9 3880 gimple_seq_add_seq (stmt_seqp, sub_seq);
1e8e9920 3881
b9a16870 3882 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
3883 0);
75a70cf9 3884 gimple_seq_add_stmt (stmt_seqp, stmt);
1e8e9920 3885}
3886
773c5ba7 3887
/* Generate code to implement the COPYPRIVATE clauses.  For each
   COPYPRIVATE clause in CLAUSES, append to *SLIST the statements that
   store the broadcasting thread's value (or its address, when the
   variable is passed by reference) into the sender record, and append
   to *RLIST the statements that copy the value back out on the
   receiving side.  CTX is the lowering context of the construct.  */

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      /* Sender side: publish the value into the sender record field,
	 taking the address first if the field holds a pointer.  */
      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      /* Receiver side: load the field back, undoing the indirection
	 added above, and assign through the language hook so any
	 language-specific copy semantics are honored.  */
      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      if (is_reference (var))
	{
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
3935
773c5ba7 3936
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  Statements that copy
   values into the child's data record are appended to *ILIST; statements
   that copy results back out after the region are appended to *OLIST.
   CTX is the lowering context of the construct.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* First filter: only the clause kinds below need any sender-side
	 code.  PRIVATE participates only when it references the outer
	 variable.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE__LOOPTEMP_:
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      var = lookup_decl_in_outer_ctx (val, ctx);

      /* Globals are directly visible to the child (except for COPYIN,
	 which must still copy), and variable-sized decls are handled
	 elsewhere.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var))
	continue;
      if (is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Second pass over the clause kind: decide the copy direction(s).  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	  do_in = true;
	  do_out = !(by_ref || is_reference (val));
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
4027
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared; instead we walk the
   fields of the data-sharing record type in CTX.  Copy-in statements
   go to *ILIST and copy-out statements to *OLIST.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      nvar = maybe_lookup_decl (ovar, ctx);
      /* Only fields whose child-side decl has a DECL_VALUE_EXPR are
	 actually shared through the record.  */
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      if (use_pointer_for_field (ovar, ctx))
	{
	  /* Shared by address: store &var into the record.  */
	  x = build_sender_ref (ovar, ctx);
	  var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Shared by value: copy in before the region ...  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
	         or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      /* ... and copy out afterwards, since the child may have
		 modified it.  */
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
4079
75a70cf9 4080
4081/* A convenience function to build an empty GIMPLE_COND with just the
4082 condition. */
4083
4084static gimple
4085gimple_build_cond_empty (tree cond)
4086{
4087 enum tree_code pred_code;
4088 tree lhs, rhs;
4089
4090 gimple_cond_get_ops_from_tree (cond, &pred_code, &lhs, &rhs);
4091 return gimple_build_cond (pred_code, lhs, rhs, NULL_TREE, NULL_TREE);
4092}
4093
4094
/* Build the function calls to GOMP_parallel_start etc to actually
   generate the parallel operation.  REGION is the parallel region
   being expanded.  BB is the block where to insert the code.  WS_ARGS
   will be set if this is a call to a combined parallel+workshare
   construct, it contains the list of additional arguments needed by
   the workshare construct.  ENTRY_STMT is the GIMPLE_OMP_PARALLEL
   statement being expanded.  */

static void
expand_parallel_call (struct omp_region *region, basic_block bb,
		      gimple entry_stmt, vec<tree, va_gc> *ws_args)
{
  tree t, t1, t2, val, cond, c, clauses, flags;
  gimple_stmt_iterator gsi;
  gimple stmt;
  enum built_in_function start_ix;
  int start_ix2;
  location_t clause_loc;
  vec<tree, va_gc> *args;

  clauses = gimple_omp_parallel_clauses (entry_stmt);

  /* Determine what flavor of GOMP_parallel we will be
     emitting.  For a combined parallel+workshare, pick the specialized
     entry point; the loop variants are laid out so that the schedule
     kind can be added to BUILT_IN_GOMP_PARALLEL_LOOP_STATIC (RUNTIME
     maps to offset 3).  */
  start_ix = BUILT_IN_GOMP_PARALLEL;
  if (is_combined_parallel (region))
    {
      switch (region->inner->type)
	{
	case GIMPLE_OMP_FOR:
	  gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
	  start_ix2 = ((int)BUILT_IN_GOMP_PARALLEL_LOOP_STATIC
		       + (region->inner->sched_kind
			  == OMP_CLAUSE_SCHEDULE_RUNTIME
			  ? 3 : region->inner->sched_kind));
	  start_ix = (enum built_in_function)start_ix2;
	  break;
	case GIMPLE_OMP_SECTIONS:
	  start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS;
	  break;
	default:
	  gcc_unreachable ();
	}
    }

  /* By default, the value of NUM_THREADS is zero (selected at run time)
     and there is no conditional.  */
  cond = NULL_TREE;
  val = build_int_cst (unsigned_type_node, 0);
  flags = build_int_cst (unsigned_type_node, 0);

  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = OMP_CLAUSE_IF_EXPR (c);

  c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
  if (c)
    {
      val = OMP_CLAUSE_NUM_THREADS_EXPR (c);
      clause_loc = OMP_CLAUSE_LOCATION (c);
    }
  else
    clause_loc = gimple_location (entry_stmt);

  c = find_omp_clause (clauses, OMP_CLAUSE_PROC_BIND);
  if (c)
    flags = build_int_cst (unsigned_type_node, OMP_CLAUSE_PROC_BIND_KIND (c));

  /* Ensure 'val' is of the correct type.  */
  val = fold_convert_loc (clause_loc, unsigned_type_node, val);

  /* If we found the clause 'if (cond)', build either
     (cond != 0) or (cond ? val : 1u).  */
  if (cond)
    {
      gimple_stmt_iterator gsi;

      cond = gimple_boolify (cond);

      if (integer_zerop (val))
	/* No explicit NUM_THREADS: the runtime interprets 0 as "pick a
	   thread count", so (cond != 0) suffices.  */
	val = fold_build2_loc (clause_loc,
			       EQ_EXPR, unsigned_type_node, cond,
			       build_int_cst (TREE_TYPE (cond), 0));
      else
	{
	  /* Explicit NUM_THREADS: build a diamond in the CFG computing
	     cond ? val : 1u, joining the two values with a PHI when in
	     SSA form.  */
	  basic_block cond_bb, then_bb, else_bb;
	  edge e, e_then, e_else;
	  tree tmp_then, tmp_else, tmp_join, tmp_var;

	  tmp_var = create_tmp_var (TREE_TYPE (val), NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      tmp_then = make_ssa_name (tmp_var, NULL);
	      tmp_else = make_ssa_name (tmp_var, NULL);
	      tmp_join = make_ssa_name (tmp_var, NULL);
	    }
	  else
	    {
	      tmp_then = tmp_var;
	      tmp_else = tmp_var;
	      tmp_join = tmp_var;
	    }

	  e = split_block (bb, NULL);
	  cond_bb = e->src;
	  bb = e->dest;
	  remove_edge (e);

	  then_bb = create_empty_bb (cond_bb);
	  else_bb = create_empty_bb (then_bb);
	  set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
	  set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);

	  stmt = gimple_build_cond_empty (cond);
	  gsi = gsi_start_bb (cond_bb);
	  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

	  gsi = gsi_start_bb (then_bb);
	  stmt = gimple_build_assign (tmp_then, val);
	  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

	  gsi = gsi_start_bb (else_bb);
	  stmt = gimple_build_assign
	    	   (tmp_else, build_int_cst (unsigned_type_node, 1));
	  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

	  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
	  make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
	  if (current_loops)
	    {
	      add_bb_to_loop (then_bb, cond_bb->loop_father);
	      add_bb_to_loop (else_bb, cond_bb->loop_father);
	    }
	  e_then = make_edge (then_bb, bb, EDGE_FALLTHRU);
	  e_else = make_edge (else_bb, bb, EDGE_FALLTHRU);

	  if (gimple_in_ssa_p (cfun))
	    {
	      gimple phi = create_phi_node (tmp_join, bb);
	      add_phi_arg (phi, tmp_then, e_then, UNKNOWN_LOCATION);
	      add_phi_arg (phi, tmp_else, e_else, UNKNOWN_LOCATION);
	    }

	  val = tmp_join;
	}

      gsi = gsi_start_bb (bb);
      val = force_gimple_operand_gsi (&gsi, val, true, NULL_TREE,
				      false, GSI_CONTINUE_LINKING);
    }

  /* Assemble the argument list: child function, data pointer, thread
     count, any combined-workshare extra arguments, then flags.  */
  gsi = gsi_last_bb (bb);
  t = gimple_omp_parallel_data_arg (entry_stmt);
  if (t == NULL)
    t1 = null_pointer_node;
  else
    t1 = build_fold_addr_expr (t);
  t2 = build_fold_addr_expr (gimple_omp_parallel_child_fn (entry_stmt));

  vec_alloc (args, 4 + vec_safe_length (ws_args));
  args->quick_push (t2);
  args->quick_push (t1);
  args->quick_push (val);
  if (ws_args)
    args->splice (*ws_args);
  args->quick_push (flags);

  t = build_call_expr_loc_vec (UNKNOWN_LOCATION,
			       builtin_decl_explicit (start_ix), args);

  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
			    false, GSI_CONTINUE_LINKING);
}
4267
773c5ba7 4268
/* Build the function call to GOMP_task to actually
   generate the task operation.  BB is the block where to insert the code.
   ENTRY_STMT is the GIMPLE_OMP_TASK statement being expanded.  */

static void
expand_task_call (basic_block bb, gimple entry_stmt)
{
  tree t, t1, t2, t3, flags, cond, c, c2, clauses, depend;
  gimple_stmt_iterator gsi;
  location_t loc = gimple_location (entry_stmt);

  clauses = gimple_omp_task_clauses (entry_stmt);

  /* The IF clause becomes the runtime's "start immediately" condition;
     absent a clause the task is unconditionally eligible.  */
  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = gimple_boolify (OMP_CLAUSE_IF_EXPR (c));
  else
    cond = boolean_true_node;

  /* Encode the flag bits passed to GOMP_task:
     bit 0 = untied, bit 2 = mergeable, bit 3 = has depend clauses
     (bit 1, added below, = final).  */
  c = find_omp_clause (clauses, OMP_CLAUSE_UNTIED);
  c2 = find_omp_clause (clauses, OMP_CLAUSE_MERGEABLE);
  depend = find_omp_clause (clauses, OMP_CLAUSE_DEPEND);
  flags = build_int_cst (unsigned_type_node,
			 (c ? 1 : 0) + (c2 ? 4 : 0) + (depend ? 8 : 0));

  c = find_omp_clause (clauses, OMP_CLAUSE_FINAL);
  if (c)
    {
      /* FINAL may be a runtime expression, so fold it into FLAGS as
	 (final_expr ? 2 : 0) rather than a compile-time constant.  */
      c = gimple_boolify (OMP_CLAUSE_FINAL_EXPR (c));
      c = fold_build3_loc (loc, COND_EXPR, unsigned_type_node, c,
			   build_int_cst (unsigned_type_node, 2),
			   build_int_cst (unsigned_type_node, 0));
      flags = fold_build2_loc (loc, PLUS_EXPR, unsigned_type_node, flags, c);
    }
  if (depend)
    depend = OMP_CLAUSE_DECL (depend);
  else
    depend = build_int_cst (ptr_type_node, 0);

  /* Arguments: child fn, data pointer, copy fn, argument size and
     alignment, the IF condition, flags, and the depend array.  */
  gsi = gsi_last_bb (bb);
  t = gimple_omp_task_data_arg (entry_stmt);
  if (t == NULL)
    t2 = null_pointer_node;
  else
    t2 = build_fold_addr_expr_loc (loc, t);
  t1 = build_fold_addr_expr_loc (loc, gimple_omp_task_child_fn (entry_stmt));
  t = gimple_omp_task_copy_fn (entry_stmt);
  if (t == NULL)
    t3 = null_pointer_node;
  else
    t3 = build_fold_addr_expr_loc (loc, t);

  t = build_call_expr (builtin_decl_explicit (BUILT_IN_GOMP_TASK),
		       8, t1, t2, t3,
		       gimple_omp_task_arg_size (entry_stmt),
		       gimple_omp_task_arg_align (entry_stmt), cond, flags,
		       depend);

  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
			    false, GSI_CONTINUE_LINKING);
}
4329
4330
/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
   catch handler and return it.  This prevents programs from violating the
   structured block semantics with throws.  Returns BODY unchanged when
   exceptions are disabled.  */

static gimple_seq
maybe_catch_exception (gimple_seq body)
{
  gimple g;
  tree decl;

  if (!flag_exceptions)
    return body;

  /* Let the frontend pick the handler (e.g. std::terminate); fall back
     to a trap when no language hook is provided.  */
  if (lang_hooks.eh_protect_cleanup_actions != NULL)
    decl = lang_hooks.eh_protect_cleanup_actions ();
  else
    decl = builtin_decl_explicit (BUILT_IN_TRAP);

  g = gimple_build_eh_must_not_throw (decl);
  g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
      			GIMPLE_TRY_CATCH);

  return gimple_seq_alloc_with_stmt (g);
}
4355
773c5ba7 4356/* Chain all the DECLs in LIST by their TREE_CHAIN fields. */
1e8e9920 4357
773c5ba7 4358static tree
f1f41a6c 4359vec2chain (vec<tree, va_gc> *v)
1e8e9920 4360{
2ab2ce89 4361 tree chain = NULL_TREE, t;
4362 unsigned ix;
1e8e9920 4363
f1f41a6c 4364 FOR_EACH_VEC_SAFE_ELT_REVERSE (v, ix, t)
773c5ba7 4365 {
1767a056 4366 DECL_CHAIN (t) = chain;
2ab2ce89 4367 chain = t;
773c5ba7 4368 }
1e8e9920 4369
2ab2ce89 4370 return chain;
773c5ba7 4371}
1e8e9920 4372
1e8e9920 4373
/* Remove barriers in REGION->EXIT's block.  Note that this is only
   valid for GIMPLE_OMP_PARALLEL regions.  Since the end of a parallel region
   is an implicit barrier, any workshare inside the GIMPLE_OMP_PARALLEL that
   left a barrier at the end of the GIMPLE_OMP_PARALLEL region can now be
   removed.  */

static void
remove_exit_barrier (struct omp_region *region)
{
  gimple_stmt_iterator gsi;
  basic_block exit_bb;
  edge_iterator ei;
  edge e;
  gimple stmt;
  /* Tri-state: -1 = not yet computed, then 0/1 once the child function
     has been scanned for addressable variables.  */
  int any_addressable_vars = -1;

  exit_bb = region->exit;

  /* If the parallel region doesn't return, we don't have REGION->EXIT
     block at all.  */
  if (! exit_bb)
    return;

  /* The last insn in the block will be the parallel's GIMPLE_OMP_RETURN.  The
     workshare's GIMPLE_OMP_RETURN will be in a preceding block.  The kinds of
     statements that can appear in between are extremely limited -- no
     memory operations at all.  Here, we allow nothing at all, so the
     only thing we allow to precede this GIMPLE_OMP_RETURN is a label.  */
  gsi = gsi_last_bb (exit_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
  gsi_prev (&gsi);
  if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
    return;

  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
	continue;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_OMP_RETURN
	  && !gimple_omp_return_nowait_p (stmt))
	{
	  /* OpenMP 3.0 tasks unfortunately prevent this optimization
	     in many cases.  If there could be tasks queued, the barrier
	     might be needed to let the tasks run before some local
	     variable of the parallel that the task uses as shared
	     runs out of scope.  The task can be spawned either
	     from within current function (this would be easy to check)
	     or from some function it calls and gets passed an address
	     of such a variable.  */
	  if (any_addressable_vars < 0)
	    {
	      gimple parallel_stmt = last_stmt (region->entry);
	      tree child_fun = gimple_omp_parallel_child_fn (parallel_stmt);
	      tree local_decls, block, decl;
	      unsigned ix;

	      /* Scan the child function's locals ...  */
	      any_addressable_vars = 0;
	      FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (child_fun), ix, decl)
		if (TREE_ADDRESSABLE (decl))
		  {
		    any_addressable_vars = 1;
		    break;
		  }
	      /* ... and the BLOCK vars of the enclosing scopes up to
		 (and including) the parallel statement's own block.  */
	      for (block = gimple_block (stmt);
		   !any_addressable_vars
		   && block
		   && TREE_CODE (block) == BLOCK;
		   block = BLOCK_SUPERCONTEXT (block))
		{
		  for (local_decls = BLOCK_VARS (block);
		       local_decls;
		       local_decls = DECL_CHAIN (local_decls))
		    if (TREE_ADDRESSABLE (local_decls))
		      {
			any_addressable_vars = 1;
			break;
		      }
		  if (block == gimple_block (parallel_stmt))
		    break;
		}
	    }
	  if (!any_addressable_vars)
	    gimple_omp_return_set_nowait (stmt);
	}
    }
}
4462
61e47ac8 4463static void
4464remove_exit_barriers (struct omp_region *region)
4465{
75a70cf9 4466 if (region->type == GIMPLE_OMP_PARALLEL)
61e47ac8 4467 remove_exit_barrier (region);
4468
4469 if (region->inner)
4470 {
4471 region = region->inner;
4472 remove_exit_barriers (region);
4473 while (region->next)
4474 {
4475 region = region->next;
4476 remove_exit_barriers (region);
4477 }
4478 }
4479}
773c5ba7 4480
/* Optimize omp_get_thread_num () and omp_get_num_threads ()
   calls.  These can't be declared as const functions, but
   within one parallel body they are constant, so they can be
   transformed there into __builtin_omp_get_{thread_num,num_threads} ()
   which are declared const.  Similarly for task body, except
   that in untied task omp_get_thread_num () can change at any task
   scheduling point.  ENTRY_STMT is the GIMPLE_OMP_PARALLEL or
   GIMPLE_OMP_TASK statement whose outlined body is being scanned.  */

static void
optimize_omp_library_calls (gimple entry_stmt)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  tree thr_num_tree = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  tree thr_num_id = DECL_ASSEMBLER_NAME (thr_num_tree);
  tree num_thr_tree = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
  tree num_thr_id = DECL_ASSEMBLER_NAME (num_thr_tree);
  bool untied_task = (gimple_code (entry_stmt) == GIMPLE_OMP_TASK
		      && find_omp_clause (gimple_omp_task_clauses (entry_stmt),
					  OMP_CLAUSE_UNTIED) != NULL);

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple call = gsi_stmt (gsi);
	tree decl;

	/* Only consider calls to extern public functions with no body
	   visible here -- i.e. calls into the OpenMP runtime.  */
	if (is_gimple_call (call)
	    && (decl = gimple_call_fndecl (call))
	    && DECL_EXTERNAL (decl)
	    && TREE_PUBLIC (decl)
	    && DECL_INITIAL (decl) == NULL)
	  {
	    tree built_in;

	    if (DECL_NAME (decl) == thr_num_id)
	      {
		/* In #pragma omp task untied omp_get_thread_num () can change
		   during the execution of the task region.  */
		if (untied_task)
		  continue;
		built_in = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
	      }
	    else if (DECL_NAME (decl) == num_thr_id)
	      built_in = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
	    else
	      continue;

	    /* Require an exact signature match with the builtin before
	       substituting: same assembler name, no arguments, nothrow
	       when exceptions are on, and a compatible return type.  */
	    if (DECL_ASSEMBLER_NAME (decl) != DECL_ASSEMBLER_NAME (built_in)
		|| gimple_call_num_args (call) != 0)
	      continue;

	    if (flag_exceptions && !TREE_NOTHROW (decl))
	      continue;

	    if (TREE_CODE (TREE_TYPE (decl)) != FUNCTION_TYPE
		|| !types_compatible_p (TREE_TYPE (TREE_TYPE (decl)),
					TREE_TYPE (TREE_TYPE (built_in))))
	      continue;

	    gimple_call_set_fndecl (call, built_in);
	  }
      }
}
4545
8e6b4515 4546/* Callback for expand_omp_build_assign. Return non-NULL if *tp needs to be
4547 regimplified. */
4548
4549static tree
4550expand_omp_regimplify_p (tree *tp, int *walk_subtrees, void *)
4551{
4552 tree t = *tp;
4553
4554 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
4555 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
4556 return t;
4557
4558 if (TREE_CODE (t) == ADDR_EXPR)
4559 recompute_tree_invariant_for_addr_expr (t);
4560
4561 *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
4562 return NULL_TREE;
4563}
4564
/* Prepend TO = FROM assignment before *GSI_P.  FROM is gimplified
   first; the new statement is then regimplified if either operand
   contains something expand_omp_regimplify_p flags (a decl with a
   DECL_VALUE_EXPR).  */

static void
expand_omp_build_assign (gimple_stmt_iterator *gsi_p, tree to, tree from)
{
  bool simple_p = DECL_P (to) && TREE_ADDRESSABLE (to);
  from = force_gimple_operand_gsi (gsi_p, from, simple_p, NULL_TREE,
				   true, GSI_SAME_STMT);
  gimple stmt = gimple_build_assign (to, from);
  gsi_insert_before (gsi_p, stmt, GSI_SAME_STMT);
  if (walk_tree (&from, expand_omp_regimplify_p, NULL, NULL)
      || walk_tree (&to, expand_omp_regimplify_p, NULL, NULL))
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gimple_regimplify_operands (stmt, &gsi);
    }
}
4582
/* Expand the OpenMP parallel or task directive starting at REGION.
   This moves the region's body into its outlined child function,
   fixes up the CFG and SSA form on both sides, and finally emits the
   libgomp call (GOMP_parallel* or GOMP_task) that launches it.  */

static void
expand_omp_taskreg (struct omp_region *region)
{
  basic_block entry_bb, exit_bb, new_bb;
  struct function *child_cfun;
  tree child_fn, block, t;
  gimple_stmt_iterator gsi;
  gimple entry_stmt, stmt;
  edge e;
  vec<tree, va_gc> *ws_args;

  entry_stmt = last_stmt (region->entry);
  child_fn = gimple_omp_taskreg_child_fn (entry_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  entry_bb = region->entry;
  exit_bb = region->exit;

  if (is_combined_parallel (region))
    ws_args = region->ws_args;
  else
    ws_args = NULL;

  if (child_cfun->cfg)
    {
      /* Due to inlining, it may happen that we have already outlined
	 the region, in which case all we need to do is make the
	 sub-graph unreachable and emit the parallel call.  */
      edge entry_succ_e, exit_succ_e;
      gimple_stmt_iterator gsi;

      entry_succ_e = single_succ_edge (entry_bb);

      gsi = gsi_last_bb (entry_bb);
      gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_PARALLEL
		  || gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_TASK);
      gsi_remove (&gsi, true);

      new_bb = entry_bb;
      if (exit_bb)
	{
	  exit_succ_e = single_succ_edge (exit_bb);
	  make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
	}
      remove_edge_and_dominated_blocks (entry_succ_e);
    }
  else
    {
      unsigned srcidx, dstidx, num;

      /* If the parallel region needs data sent from the parent
	 function, then the very first statement (except possible
	 tree profile counter updates) of the parallel body
	 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O.  Since
	 &.OMP_DATA_O is passed as an argument to the child function,
	 we need to replace it with the argument as seen by the child
	 function.

	 In most cases, this will end up being the identity assignment
	 .OMP_DATA_I = .OMP_DATA_I.  However, if the parallel body had
	 a function call that has been inlined, the original PARM_DECL
	 .OMP_DATA_I may have been converted into a different local
	 variable.  In which case, we need to keep the assignment.  */
      if (gimple_omp_taskreg_data_arg (entry_stmt))
	{
	  basic_block entry_succ_bb = single_succ (entry_bb);
	  gimple_stmt_iterator gsi;
	  tree arg, narg;
	  gimple parcopy_stmt = NULL;

	  /* Locate the .OMP_DATA_I = &.OMP_DATA_O copy assignment.  */
	  for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
	    {
	      gimple stmt;

	      gcc_assert (!gsi_end_p (gsi));
	      stmt = gsi_stmt (gsi);
	      if (gimple_code (stmt) != GIMPLE_ASSIGN)
		continue;

	      if (gimple_num_ops (stmt) == 2)
		{
		  tree arg = gimple_assign_rhs1 (stmt);

		  /* We ignore the subcode because we're
		     effectively doing a STRIP_NOPS.  */

		  if (TREE_CODE (arg) == ADDR_EXPR
		      && TREE_OPERAND (arg, 0)
		        == gimple_omp_taskreg_data_arg (entry_stmt))
		    {
		      parcopy_stmt = stmt;
		      break;
		    }
		}
	    }

	  gcc_assert (parcopy_stmt != NULL);
	  arg = DECL_ARGUMENTS (child_fn);

	  if (!gimple_in_ssa_p (cfun))
	    {
	      /* Drop the copy when it is the identity; otherwise
		 rewrite its RHS to the child's parameter.  */
	      if (gimple_assign_lhs (parcopy_stmt) == arg)
		gsi_remove (&gsi, true);
	      else
		{
	          /* ?? Is setting the subcode really necessary ??  */
		  gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (arg));
		  gimple_assign_set_rhs1 (parcopy_stmt, arg);
		}
	    }
	  else
	    {
	      /* If we are in ssa form, we must load the value from the default
		 definition of the argument.  That should not be defined now,
		 since the argument is not used uninitialized.  */
	      gcc_assert (ssa_default_def (cfun, arg) == NULL);
	      narg = make_ssa_name (arg, gimple_build_nop ());
	      set_ssa_default_def (cfun, arg, narg);
	      /* ?? Is setting the subcode really necessary ??  */
	      gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (narg));
	      gimple_assign_set_rhs1 (parcopy_stmt, narg);
	      update_stmt (parcopy_stmt);
	    }
	}

      /* Declare local variables needed in CHILD_CFUN.  */
      block = DECL_INITIAL (child_fn);
      BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
      /* The gimplifier could record temporaries in parallel/task block
	 rather than in containing function's local_decls chain,
	 which would mean cgraph missed finalizing them.  Do it now.  */
      for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == VAR_DECL
	    && TREE_STATIC (t)
	    && !DECL_EXTERNAL (t))
	  varpool_finalize_decl (t);
      DECL_SAVED_TREE (child_fn) = NULL;
      /* We'll create a CFG for child_fn, so no gimple body is needed.  */
      gimple_set_body (child_fn, NULL);
      TREE_USED (block) = 1;

      /* Reset DECL_CONTEXT on function arguments.  */
      for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
	DECL_CONTEXT (t) = child_fn;

      /* Split ENTRY_BB at GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK,
	 so that it can be moved to the child function.  */
      gsi = gsi_last_bb (entry_bb);
      stmt = gsi_stmt (gsi);
      gcc_assert (stmt && (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
			   || gimple_code (stmt) == GIMPLE_OMP_TASK));
      gsi_remove (&gsi, true);
      e = split_block (entry_bb, stmt);
      entry_bb = e->dest;
      single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

      /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR.  */
      if (exit_bb)
	{
	  gsi = gsi_last_bb (exit_bb);
	  gcc_assert (!gsi_end_p (gsi)
		      && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
	  stmt = gimple_build_return (NULL);
	  gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
	  gsi_remove (&gsi, true);
	}

      /* Move the parallel region into CHILD_CFUN.  */

      if (gimple_in_ssa_p (cfun))
	{
	  init_tree_ssa (child_cfun);
	  init_ssa_operands (child_cfun);
	  child_cfun->gimple_df->in_ssa_p = true;
	  block = NULL_TREE;
	}
      else
	block = gimple_block (entry_stmt);

      new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
      if (exit_bb)
	single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
      /* When the OMP expansion process cannot guarantee an up-to-date
         loop tree arrange for the child function to fixup loops.  */
      if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
	child_cfun->x_current_loops->state |= LOOPS_NEED_FIXUP;

      /* Remove non-local VAR_DECLs from child_cfun->local_decls list.  */
      num = vec_safe_length (child_cfun->local_decls);
      for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
	{
	  t = (*child_cfun->local_decls)[srcidx];
	  if (DECL_CONTEXT (t) == cfun->decl)
	    continue;
	  if (srcidx != dstidx)
	    (*child_cfun->local_decls)[dstidx] = t;
	  dstidx++;
	}
      if (dstidx != num)
	vec_safe_truncate (child_cfun->local_decls, dstidx);

      /* Inform the callgraph about the new function.  */
      DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
      cgraph_add_new_function (child_fn, true);

      /* Fix the callgraph edges for child_cfun.  Those for cfun will be
	 fixed in a following pass.  */
      push_cfun (child_cfun);
      if (optimize)
	optimize_omp_library_calls (entry_stmt);
      rebuild_cgraph_edges ();

      /* Some EH regions might become dead, see PR34608.  If
	 pass_cleanup_cfg isn't the first pass to happen with the
	 new child, these dead EH edges might cause problems.
	 Clean them up now.  */
      if (flag_exceptions)
	{
	  basic_block bb;
	  bool changed = false;

	  FOR_EACH_BB (bb)
	    changed |= gimple_purge_dead_eh_edges (bb);
	  if (changed)
	    cleanup_tree_cfg ();
	}
      if (gimple_in_ssa_p (cfun))
	update_ssa (TODO_update_ssa);
      pop_cfun ();
    }

  /* Emit a library call to launch the children threads.  */
  if (gimple_code (entry_stmt) == GIMPLE_OMP_PARALLEL)
    expand_parallel_call (region, new_bb, entry_stmt, ws_args);
  else
    expand_task_call (new_bb, entry_stmt);
  if (gimple_in_ssa_p (cfun))
    update_ssa (TODO_update_ssa_only_virtuals);
}
4824
773c5ba7 4825
3d483a94 4826/* Helper function for expand_omp_{for_*,simd}. If this is the outermost
4827 of the combined collapse > 1 loop constructs, generate code like:
4828 if (__builtin_expect (N32 cond3 N31, 0)) goto ZERO_ITER_BB;
4829 if (cond3 is <)
4830 adj = STEP3 - 1;
4831 else
4832 adj = STEP3 + 1;
4833 count3 = (adj + N32 - N31) / STEP3;
4834 if (__builtin_expect (N22 cond2 N21, 0)) goto ZERO_ITER_BB;
4835 if (cond2 is <)
4836 adj = STEP2 - 1;
4837 else
4838 adj = STEP2 + 1;
4839 count2 = (adj + N22 - N21) / STEP2;
4840 if (__builtin_expect (N12 cond1 N11, 0)) goto ZERO_ITER_BB;
4841 if (cond1 is <)
4842 adj = STEP1 - 1;
4843 else
4844 adj = STEP1 + 1;
4845 count1 = (adj + N12 - N11) / STEP1;
4846 count = count1 * count2 * count3;
4847 Furthermore, if ZERO_ITER_BB is NULL, create a BB which does:
4848 count = 0;
bc7bff74 4849 and set ZERO_ITER_BB to that bb. If this isn't the outermost
4850 of the combined loop constructs, just initialize COUNTS array
4851 from the _looptemp_ clauses. */
3d483a94 4852
4853/* NOTE: It *could* be better to moosh all of the BBs together,
4854 creating one larger BB with all the computation and the unexpected
4855 jump at the end. I.e.
4856
4857 bool zero3, zero2, zero1, zero;
4858
4859 zero3 = N32 c3 N31;
4860 count3 = (N32 - N31) /[cl] STEP3;
4861 zero2 = N22 c2 N21;
4862 count2 = (N22 - N21) /[cl] STEP2;
4863 zero1 = N12 c1 N11;
4864 count1 = (N12 - N11) /[cl] STEP1;
4865 zero = zero3 || zero2 || zero1;
4866 count = count1 * count2 * count3;
4867 if (__builtin_expect(zero, false)) goto zero_iter_bb;
4868
4869 After all, we expect the zero=false, and thus we expect to have to
4870 evaluate all of the comparison expressions, so short-circuiting
4871 oughtn't be a win. Since the condition isn't protecting a
4872 denominator, we're not concerned about divide-by-zero, so we can
4873 fully evaluate count even if a numerator turned out to be wrong.
4874
4875 It seems like putting this all together would create much better
4876 scheduling opportunities, and less pressure on the chip's branch
4877 predictor. */
4878
static void
expand_omp_for_init_counts (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
			    basic_block &entry_bb, tree *counts,
			    basic_block &zero_iter_bb, int &first_zero_iter,
			    basic_block &l2_dom_bb)
{
  tree t, type = TREE_TYPE (fd->loop.v);
  gimple stmt;
  edge e, ne;
  int i;

  /* Collapsed loops need work for expansion into SSA form.  */
  gcc_assert (!gimple_in_ssa_p (cfun));

  /* If this GIMPLE_OMP_FOR was combined into an enclosing construct and
     the total iteration count isn't a compile-time constant, the counts
     were already computed by the outer construct: just pick them up from
     the _looptemp_ clauses instead of recomputing them.  */
  if (gimple_omp_for_combined_into_p (fd->for_stmt)
      && TREE_CODE (fd->loop.n2) != INTEGER_CST)
    {
      /* First two _looptemp_ clauses are for istart/iend, counts[0]
	 isn't supposed to be handled, as the inner loop doesn't
	 use it.  */
      tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
				     OMP_CLAUSE__LOOPTEMP_);
      gcc_assert (innerc);
      for (i = 0; i < fd->collapse; i++)
	{
	  innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
				    OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  if (i)
	    counts[i] = OMP_CLAUSE_DECL (innerc);
	  else
	    counts[0] = NULL_TREE;
	}
      return;
    }

  /* Otherwise compute counts[i] for each of the collapsed loops, and
     accumulate the product into fd->loop.n2 when it is a variable.  */
  for (i = 0; i < fd->collapse; i++)
    {
      tree itype = TREE_TYPE (fd->loops[i].v);

      /* Unless we can prove at compile time that loop I always executes
	 at least one iteration (fold_binary of the loop condition folds
	 to 1), emit a runtime "zero iterations?" test branching to
	 ZERO_ITER_BB, which is created lazily on the first such test and
	 sets the total count fd->loop.n2 to 0.  */
      if (SSA_VAR_P (fd->loop.n2)
	  && ((t = fold_binary (fd->loops[i].cond_code, boolean_type_node,
				fold_convert (itype, fd->loops[i].n1),
				fold_convert (itype, fd->loops[i].n2)))
	      == NULL_TREE || !integer_onep (t)))
	{
	  tree n1, n2;
	  n1 = fold_convert (itype, unshare_expr (fd->loops[i].n1));
	  n1 = force_gimple_operand_gsi (gsi, n1, true, NULL_TREE,
					 true, GSI_SAME_STMT);
	  n2 = fold_convert (itype, unshare_expr (fd->loops[i].n2));
	  n2 = force_gimple_operand_gsi (gsi, n2, true, NULL_TREE,
					 true, GSI_SAME_STMT);
	  stmt = gimple_build_cond (fd->loops[i].cond_code, n1, n2,
				    NULL_TREE, NULL_TREE);
	  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  /* Regimplify the condition operands if they contain anything
	     that isn't valid GIMPLE after the folding above.  */
	  if (walk_tree (gimple_cond_lhs_ptr (stmt),
			 expand_omp_regimplify_p, NULL, NULL)
	      || walk_tree (gimple_cond_rhs_ptr (stmt),
			    expand_omp_regimplify_p, NULL, NULL))
	    {
	      *gsi = gsi_for_stmt (stmt);
	      gimple_regimplify_operands (stmt, gsi);
	    }
	  e = split_block (entry_bb, stmt);
	  if (zero_iter_bb == NULL)
	    {
	      /* Create the shared "count = 0" block on first use and
	         remember which loop level triggered it.  */
	      first_zero_iter = i;
	      zero_iter_bb = create_empty_bb (entry_bb);
	      if (current_loops)
		add_bb_to_loop (zero_iter_bb, entry_bb->loop_father);
	      *gsi = gsi_after_labels (zero_iter_bb);
	      stmt = gimple_build_assign (fd->loop.n2,
					  build_zero_cst (type));
	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	      set_immediate_dominator (CDI_DOMINATORS, zero_iter_bb,
				       entry_bb);
	    }
	  /* The zero-iterations path is expected to be very unlikely.  */
	  ne = make_edge (entry_bb, zero_iter_bb, EDGE_FALSE_VALUE);
	  ne->probability = REG_BR_PROB_BASE / 2000 - 1;
	  e->flags = EDGE_TRUE_VALUE;
	  e->probability = REG_BR_PROB_BASE - ne->probability;
	  if (l2_dom_bb == NULL)
	    l2_dom_bb = entry_bb;
	  entry_bb = e->dest;
	  *gsi = gsi_last_bb (entry_bb);
	}

      /* counts[i] = (adj + N2 - N1) / STEP with adj = STEP -/+ 1,
	 matching the pseudocode in the comment before this function.  */
      if (POINTER_TYPE_P (itype))
	itype = signed_type_for (itype);
      t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
				 ? -1 : 1));
      t = fold_build2 (PLUS_EXPR, itype,
		       fold_convert (itype, fd->loops[i].step), t);
      t = fold_build2 (PLUS_EXPR, itype, t,
		       fold_convert (itype, fd->loops[i].n2));
      t = fold_build2 (MINUS_EXPR, itype, t,
		       fold_convert (itype, fd->loops[i].n1));
      /* ?? We could probably use CEIL_DIV_EXPR instead of
	 TRUNC_DIV_EXPR and adjusting by hand.  Unless we can't
	 generate the same code in the end because generically we
	 don't know that the values involved must be negative for
	 GT??  */
      if (TYPE_UNSIGNED (itype) && fd->loops[i].cond_code == GT_EXPR)
	/* For unsigned descending loops negate both operands so the
	   truncating division rounds the right way.  */
	t = fold_build2 (TRUNC_DIV_EXPR, itype,
			 fold_build1 (NEGATE_EXPR, itype, t),
			 fold_build1 (NEGATE_EXPR, itype,
				      fold_convert (itype,
						    fd->loops[i].step)));
      else
	t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
			 fold_convert (itype, fd->loops[i].step));
      t = fold_convert (type, t);
      if (TREE_CODE (t) == INTEGER_CST)
	counts[i] = t;
      else
	{
	  counts[i] = create_tmp_reg (type, ".count");
	  expand_omp_build_assign (gsi, counts[i], t);
	}
      /* Keep the running product count1 * count2 * ... in fd->loop.n2.  */
      if (SSA_VAR_P (fd->loop.n2))
	{
	  if (i == 0)
	    t = counts[0];
	  else
	    t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
	  expand_omp_build_assign (gsi, fd->loop.n2, t);
	}
    }
}
5009
5010
5011/* Helper function for expand_omp_{for_*,simd}. Generate code like:
5012 T = V;
5013 V3 = N31 + (T % count3) * STEP3;
5014 T = T / count3;
5015 V2 = N21 + (T % count2) * STEP2;
5016 T = T / count2;
5017 V1 = N11 + T * STEP1;
bc7bff74 5018 if this loop doesn't have an inner loop construct combined with it.
5019 If it does have an inner loop construct combined with it and the
5020 iteration count isn't known constant, store values from counts array
5021 into its _looptemp_ temporaries instead. */
3d483a94 5022
static void
expand_omp_for_init_vars (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
			  tree *counts, gimple inner_stmt, tree startvar)
{
  int i;
  /* If an inner loop construct is combined with this one, don't emit the
     de-linearization code here; instead forward COUNTS into the inner
     construct's _looptemp_ temporaries.  */
  if (gimple_omp_for_combined_p (fd->for_stmt))
    {
      /* If fd->loop.n2 is constant, then no propagation of the counts
	 is needed, they are constant.  */
      if (TREE_CODE (fd->loop.n2) == INTEGER_CST)
	return;

      tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
		     ? gimple_omp_parallel_clauses (inner_stmt)
		     : gimple_omp_for_clauses (inner_stmt);
      /* First two _looptemp_ clauses are for istart/iend, counts[0]
	 isn't supposed to be handled, as the inner loop doesn't
	 use it.  */
      tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
      gcc_assert (innerc);
      for (i = 0; i < fd->collapse; i++)
	{
	  innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
				    OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  if (i)
	    {
	      /* Store counts[i] into the inner construct's temporary.  */
	      tree tem = OMP_CLAUSE_DECL (innerc);
	      tree t = fold_convert (TREE_TYPE (tem), counts[i]);
	      t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
					    false, GSI_CONTINUE_LINKING);
	      gimple stmt = gimple_build_assign (tem, t);
	      gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
	    }
	}
      return;
    }

  /* T = V: TEM holds the remaining flattened iteration number as we peel
     off each original loop's index from innermost to outermost.  */
  tree type = TREE_TYPE (fd->loop.v);
  tree tem = create_tmp_reg (type, ".tem");
  gimple stmt = gimple_build_assign (tem, startvar);
  gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);

  for (i = fd->collapse - 1; i >= 0; i--)
    {
      tree vtype = TREE_TYPE (fd->loops[i].v), itype, t;
      itype = vtype;
      /* Do the index arithmetic in a signed integer type even when the
	 iteration variable is a pointer.  */
      if (POINTER_TYPE_P (vtype))
	itype = signed_type_for (vtype);
      /* Vi = Ni1 + (T % counts[i]) * STEPi, except the outermost loop
	 (i == 0) uses T directly instead of T % counts[0].  */
      if (i != 0)
	t = fold_build2 (TRUNC_MOD_EXPR, type, tem, counts[i]);
      else
	t = tem;
      t = fold_convert (itype, t);
      t = fold_build2 (MULT_EXPR, itype, t,
		       fold_convert (itype, fd->loops[i].step));
      if (POINTER_TYPE_P (vtype))
	t = fold_build_pointer_plus (fd->loops[i].n1, t);
      else
	t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
      t = force_gimple_operand_gsi (gsi, t,
				    DECL_P (fd->loops[i].v)
				    && TREE_ADDRESSABLE (fd->loops[i].v),
				    NULL_TREE, false,
				    GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (fd->loops[i].v, t);
      gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
      /* T = T / counts[i] before moving to the next-outer loop.  */
      if (i != 0)
	{
	  t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
	  t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
					false, GSI_CONTINUE_LINKING);
	  stmt = gimple_build_assign (tem, t);
	  gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
	}
    }
}
5100
5101
5102/* Helper function for expand_omp_for_*. Generate code like:
5103 L10:
5104 V3 += STEP3;
5105 if (V3 cond3 N32) goto BODY_BB; else goto L11;
5106 L11:
5107 V3 = N31;
5108 V2 += STEP2;
5109 if (V2 cond2 N22) goto BODY_BB; else goto L12;
5110 L12:
5111 V2 = N21;
5112 V1 += STEP1;
5113 goto BODY_BB; */
5114
static basic_block
extract_omp_for_update_vars (struct omp_for_data *fd, basic_block cont_bb,
			     basic_block body_bb)
{
  basic_block last_bb, bb, collapse_bb = NULL;
  int i;
  gimple_stmt_iterator gsi;
  edge e;
  tree t;
  gimple stmt;

  /* Build one new block per collapsed loop level, chained from CONT_BB,
     innermost level first.  Each block increments its level's iteration
     variable and either jumps back to BODY_BB or falls through to the
     block handling the next-outer level.  Returns the block for the
     innermost level (the one CONT_BB should branch to).  */
  last_bb = cont_bb;
  for (i = fd->collapse - 1; i >= 0; i--)
    {
      tree vtype = TREE_TYPE (fd->loops[i].v);

      bb = create_empty_bb (last_bb);
      if (current_loops)
	add_bb_to_loop (bb, last_bb->loop_father);
      gsi = gsi_start_bb (bb);

      if (i < fd->collapse - 1)
	{
	  /* Falling in from the next-inner level's block: that level's
	     condition failed, so reset its variable to its start value
	     (V(i+1) = N(i+1)1) before stepping this level.  */
	  e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
	  e->probability = REG_BR_PROB_BASE / 8;

	  t = fd->loops[i + 1].n1;
	  t = force_gimple_operand_gsi (&gsi, t,
					DECL_P (fd->loops[i + 1].v)
					&& TREE_ADDRESSABLE (fd->loops[i
								       + 1].v),
					NULL_TREE, false,
					GSI_CONTINUE_LINKING);
	  stmt = gimple_build_assign (fd->loops[i + 1].v, t);
	  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
	}
      else
	collapse_bb = bb;

      set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);

      /* Vi += STEPi.  */
      if (POINTER_TYPE_P (vtype))
	t = fold_build_pointer_plus (fd->loops[i].v, fd->loops[i].step);
      else
	t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v, fd->loops[i].step);
      t = force_gimple_operand_gsi (&gsi, t,
				    DECL_P (fd->loops[i].v)
				    && TREE_ADDRESSABLE (fd->loops[i].v),
				    NULL_TREE, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (fd->loops[i].v, t);
      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

      if (i > 0)
	{
	  /* if (Vi condi Ni2) goto BODY_BB; else fall through to the
	     next-outer level's block.  */
	  t = fd->loops[i].n2;
	  t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
					false, GSI_CONTINUE_LINKING);
	  tree v = fd->loops[i].v;
	  if (DECL_P (v) && TREE_ADDRESSABLE (v))
	    v = force_gimple_operand_gsi (&gsi, v, true, NULL_TREE,
					  false, GSI_CONTINUE_LINKING);
	  t = fold_build2 (fd->loops[i].cond_code, boolean_type_node, v, t);
	  stmt = gimple_build_cond_empty (t);
	  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
	  e = make_edge (bb, body_bb, EDGE_TRUE_VALUE);
	  e->probability = REG_BR_PROB_BASE * 7 / 8;
	}
      else
	/* Outermost level: unconditionally back to the loop body.  */
	make_edge (bb, body_bb, EDGE_FALLTHRU);
      last_bb = bb;
    }

  return collapse_bb;
}
5189
5190
773c5ba7 5191/* A subroutine of expand_omp_for. Generate code for a parallel
1e8e9920 5192 loop with any schedule. Given parameters:
5193
5194 for (V = N1; V cond N2; V += STEP) BODY;
5195
5196 where COND is "<" or ">", we generate pseudocode
5197
5198 more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
773c5ba7 5199 if (more) goto L0; else goto L3;
1e8e9920 5200 L0:
5201 V = istart0;
5202 iend = iend0;
5203 L1:
5204 BODY;
5205 V += STEP;
773c5ba7 5206 if (V cond iend) goto L1; else goto L2;
1e8e9920 5207 L2:
773c5ba7 5208 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
5209 L3:
1e8e9920 5210
773c5ba7 5211 If this is a combined omp parallel loop, instead of the call to
fd6481cf 5212 GOMP_loop_foo_start, we call GOMP_loop_foo_next.
bc7bff74 5213 If this is gimple_omp_for_combined_p loop, then instead of assigning
5214 V and iend in L0 we assign the first two _looptemp_ clause decls of the
5215 inner GIMPLE_OMP_FOR and V += STEP; and
5216 if (V cond iend) goto L1; else goto L2; are removed.
fd6481cf 5217
5218 For collapsed loops, given parameters:
5219 collapse(3)
5220 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
5221 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
5222 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
5223 BODY;
5224
5225 we generate pseudocode
5226
8e6b4515 5227 if (__builtin_expect (N32 cond3 N31, 0)) goto Z0;
fd6481cf 5228 if (cond3 is <)
5229 adj = STEP3 - 1;
5230 else
5231 adj = STEP3 + 1;
5232 count3 = (adj + N32 - N31) / STEP3;
8e6b4515 5233 if (__builtin_expect (N22 cond2 N21, 0)) goto Z0;
fd6481cf 5234 if (cond2 is <)
5235 adj = STEP2 - 1;
5236 else
5237 adj = STEP2 + 1;
5238 count2 = (adj + N22 - N21) / STEP2;
8e6b4515 5239 if (__builtin_expect (N12 cond1 N11, 0)) goto Z0;
fd6481cf 5240 if (cond1 is <)
5241 adj = STEP1 - 1;
5242 else
5243 adj = STEP1 + 1;
5244 count1 = (adj + N12 - N11) / STEP1;
5245 count = count1 * count2 * count3;
8e6b4515 5246 goto Z1;
5247 Z0:
5248 count = 0;
5249 Z1:
fd6481cf 5250 more = GOMP_loop_foo_start (0, count, 1, CHUNK, &istart0, &iend0);
5251 if (more) goto L0; else goto L3;
5252 L0:
5253 V = istart0;
5254 T = V;
5255 V3 = N31 + (T % count3) * STEP3;
5256 T = T / count3;
5257 V2 = N21 + (T % count2) * STEP2;
5258 T = T / count2;
5259 V1 = N11 + T * STEP1;
5260 iend = iend0;
5261 L1:
5262 BODY;
5263 V += 1;
5264 if (V < iend) goto L10; else goto L2;
5265 L10:
5266 V3 += STEP3;
5267 if (V3 cond3 N32) goto L1; else goto L11;
5268 L11:
5269 V3 = N31;
5270 V2 += STEP2;
5271 if (V2 cond2 N22) goto L1; else goto L12;
5272 L12:
5273 V2 = N21;
5274 V1 += STEP1;
5275 goto L1;
5276 L2:
5277 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
5278 L3:
5279
5280 */
1e8e9920 5281
static void
expand_omp_for_generic (struct omp_region *region,
			struct omp_for_data *fd,
			enum built_in_function start_fn,
			enum built_in_function next_fn,
			gimple inner_stmt)
{
  tree type, istart0, iend0, iend;
  tree t, vmain, vback, bias = NULL_TREE;
  basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
  basic_block l2_bb = NULL, l3_bb = NULL;
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool in_combined_parallel = is_combined_parallel (region);
  bool broken_loop = region->cont == NULL;
  edge e, ne;
  tree *counts = NULL;
  int i;

  gcc_assert (!broken_loop || !in_combined_parallel);
  gcc_assert (fd->iter_type == long_integer_type_node
	      || !in_combined_parallel);

  /* ISTART0/IEND0 are the out-parameters of the GOMP_loop_*_start/next
     calls, so they must be addressable.  */
  type = TREE_TYPE (fd->loop.v);
  istart0 = create_tmp_var (fd->iter_type, ".istart0");
  iend0 = create_tmp_var (fd->iter_type, ".iend0");
  TREE_ADDRESSABLE (istart0) = 1;
  TREE_ADDRESSABLE (iend0) = 1;

  /* See if we need to bias by LLONG_MIN.  */
  if (fd->iter_type == long_long_unsigned_type_node
      && TREE_CODE (type) == INTEGER_TYPE
      && !TYPE_UNSIGNED (type))
    {
      tree n1, n2;

      if (fd->loop.cond_code == LT_EXPR)
	{
	  n1 = fd->loop.n1;
	  n2 = fold_build2 (PLUS_EXPR, type, fd->loop.n2, fd->loop.step);
	}
      else
	{
	  n1 = fold_build2 (MINUS_EXPR, type, fd->loop.n2, fd->loop.step);
	  n2 = fd->loop.n1;
	}
      if (TREE_CODE (n1) != INTEGER_CST
	  || TREE_CODE (n2) != INTEGER_CST
	  || ((tree_int_cst_sgn (n1) < 0) ^ (tree_int_cst_sgn (n2) < 0)))
	bias = fold_convert (fd->iter_type, TYPE_MIN_VALUE (type));
    }

  /* Identify the region's blocks and carve out L0 (iteration setup),
     L1 (body), L2 (next-chunk call), L3 (after-loop) as in the
     pseudocode in the comment before this function.  */
  entry_bb = region->entry;
  cont_bb = region->cont;
  collapse_bb = NULL;
  gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
  gcc_assert (broken_loop
	      || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
  l0_bb = split_edge (FALLTHRU_EDGE (entry_bb));
  l1_bb = single_succ (l0_bb);
  if (!broken_loop)
    {
      l2_bb = create_empty_bb (cont_bb);
      gcc_assert (BRANCH_EDGE (cont_bb)->dest == l1_bb);
      gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
    }
  else
    l2_bb = NULL;
  l3_bb = BRANCH_EDGE (entry_bb)->dest;
  exit_bb = region->exit;

  gsi = gsi_last_bb (entry_bb);

  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
  if (fd->collapse > 1)
    {
      /* Compute the combined iteration counts; this may create a
	 zero-iterations block which we then splice in front of the
	 library start call.  */
      int first_zero_iter = -1;
      basic_block zero_iter_bb = NULL, l2_dom_bb = NULL;

      counts = XALLOCAVEC (tree, fd->collapse);
      expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
				  zero_iter_bb, first_zero_iter,
				  l2_dom_bb);

      if (zero_iter_bb)
	{
	  /* Some counts[i] vars might be uninitialized if
	     some loop has zero iterations.  But the body shouldn't
	     be executed in that case, so just avoid uninit warnings.  */
	  for (i = first_zero_iter; i < fd->collapse; i++)
	    if (SSA_VAR_P (counts[i]))
	      TREE_NO_WARNING (counts[i]) = 1;
	  gsi_prev (&gsi);
	  e = split_block (entry_bb, gsi_stmt (gsi));
	  entry_bb = e->dest;
	  make_edge (zero_iter_bb, entry_bb, EDGE_FALLTHRU);
	  gsi = gsi_last_bb (entry_bb);
	  set_immediate_dominator (CDI_DOMINATORS, entry_bb,
				   get_immediate_dominator (CDI_DOMINATORS,
							    zero_iter_bb));
	}
    }
  if (in_combined_parallel)
    {
      /* In a combined parallel loop, emit a call to
	 GOMP_loop_foo_next.  */
      t = build_call_expr (builtin_decl_explicit (next_fn), 2,
			   build_fold_addr_expr (istart0),
			   build_fold_addr_expr (iend0));
    }
  else
    {
      tree t0, t1, t2, t3, t4;
      /* If this is not a combined parallel loop, emit a call to
	 GOMP_loop_foo_start in ENTRY_BB.  */
      t4 = build_fold_addr_expr (iend0);
      t3 = build_fold_addr_expr (istart0);
      t2 = fold_convert (fd->iter_type, fd->loop.step);
      t1 = fd->loop.n2;
      t0 = fd->loop.n1;
      /* For a combined-into loop, the bounds live in the first two
	 _looptemp_ clause decls rather than in fd->loop.  */
      if (gimple_omp_for_combined_into_p (fd->for_stmt))
	{
	  tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
					 OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  t0 = OMP_CLAUSE_DECL (innerc);
	  innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
				    OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  t1 = OMP_CLAUSE_DECL (innerc);
	}
      if (POINTER_TYPE_P (TREE_TYPE (t0))
	  && TYPE_PRECISION (TREE_TYPE (t0))
	     != TYPE_PRECISION (fd->iter_type))
	{
	  /* Avoid casting pointers to integer of a different size.  */
	  tree itype = signed_type_for (type);
	  t1 = fold_convert (fd->iter_type, fold_convert (itype, t1));
	  t0 = fold_convert (fd->iter_type, fold_convert (itype, t0));
	}
      else
	{
	  t1 = fold_convert (fd->iter_type, t1);
	  t0 = fold_convert (fd->iter_type, t0);
	}
      if (bias)
	{
	  t1 = fold_build2 (PLUS_EXPR, fd->iter_type, t1, bias);
	  t0 = fold_build2 (PLUS_EXPR, fd->iter_type, t0, bias);
	}
      if (fd->iter_type == long_integer_type_node)
	{
	  /* GOMP_loop_*_start: chunked variants take an extra CHUNK
	     argument.  */
	  if (fd->chunk_size)
	    {
	      t = fold_convert (fd->iter_type, fd->chunk_size);
	      t = build_call_expr (builtin_decl_explicit (start_fn),
				   6, t0, t1, t2, t, t3, t4);
	    }
	  else
	    t = build_call_expr (builtin_decl_explicit (start_fn),
				 5, t0, t1, t2, t3, t4);
	}
      else
	{
	  tree t5;
	  tree c_bool_type;
	  tree bfn_decl;

	  /* The GOMP_loop_ull_*start functions have additional boolean
	     argument, true for < loops and false for > loops.
	     In Fortran, the C bool type can be different from
	     boolean_type_node.  */
	  bfn_decl = builtin_decl_explicit (start_fn);
	  c_bool_type = TREE_TYPE (TREE_TYPE (bfn_decl));
	  t5 = build_int_cst (c_bool_type,
			      fd->loop.cond_code == LT_EXPR ? 1 : 0);
	  if (fd->chunk_size)
	    {
	      tree bfn_decl = builtin_decl_explicit (start_fn);
	      t = fold_convert (fd->iter_type, fd->chunk_size);
	      t = build_call_expr (bfn_decl, 7, t5, t0, t1, t2, t, t3, t4);
	    }
	  else
	    t = build_call_expr (builtin_decl_explicit (start_fn),
				 6, t5, t0, t1, t2, t3, t4);
	}
    }
  /* Branch on the start/next call's result: true -> L0 (run a chunk),
     false -> L3 (done).  */
  if (TREE_TYPE (t) != boolean_type_node)
    t = fold_build2 (NE_EXPR, boolean_type_node,
		     t, build_int_cst (TREE_TYPE (t), 0));
  t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				true, GSI_SAME_STMT);
  gsi_insert_after (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);

  /* Remove the GIMPLE_OMP_FOR statement.  */
  gsi_remove (&gsi, true);

  /* Iteration setup for sequential loop goes in L0_BB.  */
  tree startvar = fd->loop.v;
  tree endvar = NULL_TREE;

  /* When combined with an inner SIMD loop, assign the chunk bounds to
     the inner construct's first two _looptemp_ decls instead of V/iend.  */
  if (gimple_omp_for_combined_p (fd->for_stmt))
    {
      gcc_assert (gimple_code (inner_stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (inner_stmt)
		     == GF_OMP_FOR_KIND_SIMD);
      tree innerc = find_omp_clause (gimple_omp_for_clauses (inner_stmt),
				     OMP_CLAUSE__LOOPTEMP_);
      gcc_assert (innerc);
      startvar = OMP_CLAUSE_DECL (innerc);
      innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
				OMP_CLAUSE__LOOPTEMP_);
      gcc_assert (innerc);
      endvar = OMP_CLAUSE_DECL (innerc);
    }

  /* V = istart0 (undoing the LLONG_MIN bias if one was applied).  */
  gsi = gsi_start_bb (l0_bb);
  t = istart0;
  if (bias)
    t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
  if (POINTER_TYPE_P (TREE_TYPE (startvar)))
    t = fold_convert (signed_type_for (TREE_TYPE (startvar)), t);
  t = fold_convert (TREE_TYPE (startvar), t);
  t = force_gimple_operand_gsi (&gsi, t,
				DECL_P (startvar)
				&& TREE_ADDRESSABLE (startvar),
				NULL_TREE, false, GSI_CONTINUE_LINKING);
  stmt = gimple_build_assign (startvar, t);
  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

  /* iend = iend0, likewise un-biased.  */
  t = iend0;
  if (bias)
    t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
  if (POINTER_TYPE_P (TREE_TYPE (startvar)))
    t = fold_convert (signed_type_for (TREE_TYPE (startvar)), t);
  t = fold_convert (TREE_TYPE (startvar), t);
  iend = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				   false, GSI_CONTINUE_LINKING);
  if (endvar)
    {
      stmt = gimple_build_assign (endvar, iend);
      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
    }
  if (fd->collapse > 1)
    expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);

  if (!broken_loop)
    {
      /* Code to control the increment and predicate for the sequential
	 loop goes in the CONT_BB.  */
      gsi = gsi_last_bb (cont_bb);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
      vmain = gimple_omp_continue_control_use (stmt);
      vback = gimple_omp_continue_control_def (stmt);

      if (!gimple_omp_for_combined_p (fd->for_stmt))
	{
	  /* V += STEP; if (V cond iend) goto L1; else goto L2;  */
	  if (POINTER_TYPE_P (type))
	    t = fold_build_pointer_plus (vmain, fd->loop.step);
	  else
	    t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
	  t = force_gimple_operand_gsi (&gsi, t,
					DECL_P (vback)
					&& TREE_ADDRESSABLE (vback),
					NULL_TREE, true, GSI_SAME_STMT);
	  stmt = gimple_build_assign (vback, t);
	  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

	  t = build2 (fd->loop.cond_code, boolean_type_node,
		      DECL_P (vback) && TREE_ADDRESSABLE (vback) ? t : vback,
		      iend);
	  stmt = gimple_build_cond_empty (t);
	  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
	}

      /* Remove GIMPLE_OMP_CONTINUE.  */
      gsi_remove (&gsi, true);

      if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
	collapse_bb = extract_omp_for_update_vars (fd, cont_bb, l1_bb);

      /* Emit code to get the next parallel iteration in L2_BB.  */
      gsi = gsi_start_bb (l2_bb);

      t = build_call_expr (builtin_decl_explicit (next_fn), 2,
			   build_fold_addr_expr (istart0),
			   build_fold_addr_expr (iend0));
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    false, GSI_CONTINUE_LINKING);
      if (TREE_TYPE (t) != boolean_type_node)
	t = fold_build2 (NE_EXPR, boolean_type_node,
			 t, build_int_cst (TREE_TYPE (t), 0));
      stmt = gimple_build_cond_empty (t);
      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
    }

  /* Add the loop cleanup function.  */
  gsi = gsi_last_bb (exit_bb);
  if (gimple_omp_return_nowait_p (gsi_stmt (gsi)))
    t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_NOWAIT);
  else if (gimple_omp_return_lhs (gsi_stmt (gsi)))
    t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_CANCEL);
  else
    t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END);
  stmt = gimple_build_call (t, 0);
  if (gimple_omp_return_lhs (gsi_stmt (gsi)))
    gimple_call_set_lhs (stmt, gimple_omp_return_lhs (gsi_stmt (gsi)));
  gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
  gsi_remove (&gsi, true);

  /* Connect the new blocks.  */
  find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
  find_edge (entry_bb, l3_bb)->flags = EDGE_FALSE_VALUE;

  if (!broken_loop)
    {
      gimple_seq phis;

      /* Redirect the CONT_BB -> L3 exit through L2's false edge,
	 moving the corresponding PHI arguments in L3 over.  */
      e = find_edge (cont_bb, l3_bb);
      ne = make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);

      phis = phi_nodes (l3_bb);
      for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
		   PHI_ARG_DEF_FROM_EDGE (phi, e));
	}
      remove_edge (e);

      make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
      if (current_loops)
	add_bb_to_loop (l2_bb, cont_bb->loop_father);
      e = find_edge (cont_bb, l1_bb);
      if (gimple_omp_for_combined_p (fd->for_stmt))
	{
	  /* The back edge was removed along with the V += STEP code.  */
	  remove_edge (e);
	  e = NULL;
	}
      else if (fd->collapse > 1)
	{
	  /* Route the back edge through the update-vars block chain.  */
	  remove_edge (e);
	  e = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
	}
      else
	e->flags = EDGE_TRUE_VALUE;
      if (e)
	{
	  e->probability = REG_BR_PROB_BASE * 7 / 8;
	  find_edge (cont_bb, l2_bb)->probability = REG_BR_PROB_BASE / 8;
	}
      else
	{
	  e = find_edge (cont_bb, l2_bb);
	  e->flags = EDGE_FALLTHRU;
	}
      make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);

      set_immediate_dominator (CDI_DOMINATORS, l2_bb,
			       recompute_dominator (CDI_DOMINATORS, l2_bb));
      set_immediate_dominator (CDI_DOMINATORS, l3_bb,
			       recompute_dominator (CDI_DOMINATORS, l3_bb));
      set_immediate_dominator (CDI_DOMINATORS, l0_bb,
			       recompute_dominator (CDI_DOMINATORS, l0_bb));
      set_immediate_dominator (CDI_DOMINATORS, l1_bb,
			       recompute_dominator (CDI_DOMINATORS, l1_bb));

      /* Register the chunk-dispatch loop (L0..L2) in the loop tree.  */
      struct loop *outer_loop = alloc_loop ();
      outer_loop->header = l0_bb;
      outer_loop->latch = l2_bb;
      add_loop (outer_loop, l0_bb->loop_father);

      if (!gimple_omp_for_combined_p (fd->for_stmt))
	{
	  struct loop *loop = alloc_loop ();
	  loop->header = l1_bb;
	  /* The loop may have multiple latches.  */
	  add_loop (loop, outer_loop);
	}
    }
}
5664
5665
/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with static schedule and no specified chunk size.  Given
   parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	if ((__typeof (V)) -1 > 0 && N2 cond N1) goto L2;
	if (cond is <)
	  adj = STEP - 1;
	else
	  adj = STEP + 1;
	if ((__typeof (V)) -1 > 0 && cond is >)
	  n = -(adj + N2 - N1) / -STEP;
	else
	  n = (adj + N2 - N1) / STEP;
	q = n / nthreads;
	tt = n % nthreads;
	if (threadid < tt) goto L3; else goto L4;
    L3:
	tt = 0;
	q = q + 1;
    L4:
	s0 = q * threadid + tt;
	e0 = s0 + q;
	V = s0 * STEP + N1;
	if (s0 >= e0) goto L2; else goto L0;
    L0:
	e = e0 * STEP + N1;
    L1:
	BODY;
	V += STEP;
	if (V cond e) goto L1;
    L2:
*/

static void
expand_omp_for_static_nochunk (struct omp_region *region,
			       struct omp_for_data *fd,
			       gimple inner_stmt)
{
  tree n, q, s0, e0, e, t, tt, nthreads, threadid;
  tree type, itype, vmain, vback;
  basic_block entry_bb, second_bb, third_bb, exit_bb, seq_start_bb;
  basic_block body_bb, cont_bb, collapse_bb = NULL;
  basic_block fin_bb;
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge ep;
  enum built_in_function get_num_threads = BUILT_IN_OMP_GET_NUM_THREADS;
  enum built_in_function get_thread_num = BUILT_IN_OMP_GET_THREAD_NUM;
  /* No continue block means control never returns from BODY to the loop
     latch (NOTE(review): inferred from how region->cont is used below --
     all latch/back-edge handling is skipped in that case; confirm).  */
  bool broken_loop = region->cont == NULL;
  tree *counts = NULL;
  tree n1, n2, step;

  /* All trip-count arithmetic is done in ITYPE: the iteration variable's
     type, or a signed integer of the same width when V is a pointer.  */
  itype = type = TREE_TYPE (fd->loop.v);
  if (POINTER_TYPE_P (type))
    itype = signed_type_for (type);

  /* Pick apart the pre-built region CFG and assert its expected shape.  */
  entry_bb = region->entry;
  cont_bb = region->cont;
  gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
  fin_bb = BRANCH_EDGE (entry_bb)->dest;
  gcc_assert (broken_loop
	      || (fin_bb == FALLTHRU_EDGE (cont_bb)->dest));
  seq_start_bb = split_edge (FALLTHRU_EDGE (entry_bb));
  body_bb = single_succ (seq_start_bb);
  if (!broken_loop)
    {
      gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
      gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
    }
  exit_bb = region->exit;

  /* Iteration space partitioning goes in ENTRY_BB.  */
  gsi = gsi_last_bb (entry_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);

  /* For #pragma omp distribute, partition among teams rather than
     threads: query team count / team id instead.  */
  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
    {
      get_num_threads = BUILT_IN_OMP_GET_NUM_TEAMS;
      get_thread_num = BUILT_IN_OMP_GET_TEAM_NUM;
    }

  /* T becomes the compile-time verdict of the "N2 cond N1" zero-trip
     test: NULL_TREE (unknown, collapsed loops), one (combined constructs,
     guard already emitted elsewhere), or a folded boolean.  */
  if (fd->collapse > 1)
    {
      int first_zero_iter = -1;
      basic_block l2_dom_bb = NULL;

      counts = XALLOCAVEC (tree, fd->collapse);
      expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
				  fin_bb, first_zero_iter,
				  l2_dom_bb);
      t = NULL_TREE;
    }
  else if (gimple_omp_for_combined_into_p (fd->for_stmt))
    t = integer_one_node;
  else
    t = fold_binary (fd->loop.cond_code, boolean_type_node,
		     fold_convert (type, fd->loop.n1),
		     fold_convert (type, fd->loop.n2));
  /* Emit the runtime "(__typeof (V)) -1 > 0 && N2 cond N1" guard from the
     pseudocode above: for unsigned V, branch straight to FIN_BB when the
     loop runs zero times, unless folding already proved it enters.  */
  if (fd->collapse == 1
      && TYPE_UNSIGNED (type)
      && (t == NULL_TREE || !integer_onep (t)))
    {
      n1 = fold_convert (type, unshare_expr (fd->loop.n1));
      n1 = force_gimple_operand_gsi (&gsi, n1, true, NULL_TREE,
				     true, GSI_SAME_STMT);
      n2 = fold_convert (type, unshare_expr (fd->loop.n2));
      n2 = force_gimple_operand_gsi (&gsi, n2, true, NULL_TREE,
				     true, GSI_SAME_STMT);
      stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
				NULL_TREE, NULL_TREE);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
      if (walk_tree (gimple_cond_lhs_ptr (stmt),
		     expand_omp_regimplify_p, NULL, NULL)
	  || walk_tree (gimple_cond_rhs_ptr (stmt),
			expand_omp_regimplify_p, NULL, NULL))
	{
	  gsi = gsi_for_stmt (stmt);
	  gimple_regimplify_operands (stmt, &gsi);
	}
      /* Split ENTRY_BB after the guard; predict the zero-trip (false)
	 edge as extremely unlikely (~1/2000).  */
      ep = split_block (entry_bb, stmt);
      ep->flags = EDGE_TRUE_VALUE;
      entry_bb = ep->dest;
      ep->probability = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      ep = make_edge (ep->src, fin_bb, EDGE_FALSE_VALUE);
      ep->probability = REG_BR_PROB_BASE / 2000 - 1;
      if (gimple_in_ssa_p (cfun))
	{
	  /* FIN_BB gained a new predecessor; replicate the existing
	     entry->fin phi arguments onto the new edge.  */
	  int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
	  for (gsi = gsi_start_phis (fin_bb);
	       !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple phi = gsi_stmt (gsi);
	      add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
			   ep, UNKNOWN_LOCATION);
	    }
	}
      gsi = gsi_last_bb (entry_bb);
    }

  /* Fetch the thread/team count and this thread's/team's id.  */
  t = build_call_expr (builtin_decl_explicit (get_num_threads), 0);
  t = fold_convert (itype, t);
  nthreads = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				       true, GSI_SAME_STMT);

  t = build_call_expr (builtin_decl_explicit (get_thread_num), 0);
  t = fold_convert (itype, t);
  threadid = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				       true, GSI_SAME_STMT);

  /* When this loop was split out of a combined construct, the real
     bounds live in artificial _looptemp_ clauses, not fd->loop.  */
  n1 = fd->loop.n1;
  n2 = fd->loop.n2;
  step = fd->loop.step;
  if (gimple_omp_for_combined_into_p (fd->for_stmt))
    {
      tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
				     OMP_CLAUSE__LOOPTEMP_);
      gcc_assert (innerc);
      n1 = OMP_CLAUSE_DECL (innerc);
      innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
				OMP_CLAUSE__LOOPTEMP_);
      gcc_assert (innerc);
      n2 = OMP_CLAUSE_DECL (innerc);
    }
  n1 = force_gimple_operand_gsi (&gsi, fold_convert (type, n1),
				 true, NULL_TREE, true, GSI_SAME_STMT);
  n2 = force_gimple_operand_gsi (&gsi, fold_convert (itype, n2),
				 true, NULL_TREE, true, GSI_SAME_STMT);
  step = force_gimple_operand_gsi (&gsi, fold_convert (itype, step),
				   true, NULL_TREE, true, GSI_SAME_STMT);

  /* Compute the iteration count N = (adj + N2 - N1) / STEP as in the
     pseudocode above, negating both operands for unsigned ">" loops.  */
  t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
  t = fold_build2 (PLUS_EXPR, itype, step, t);
  t = fold_build2 (PLUS_EXPR, itype, t, n2);
  t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, n1));
  if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
    t = fold_build2 (TRUNC_DIV_EXPR, itype,
		     fold_build1 (NEGATE_EXPR, itype, t),
		     fold_build1 (NEGATE_EXPR, itype, step));
  else
    t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
  t = fold_convert (itype, t);
  n = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);

  /* q = N / nthreads, tt = N % nthreads: each thread gets q iterations
     and the first tt threads get one extra (pseudocode L3/L4).  */
  q = create_tmp_reg (itype, "q");
  t = fold_build2 (TRUNC_DIV_EXPR, itype, n, nthreads);
  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE, true, GSI_SAME_STMT);
  gsi_insert_before (&gsi, gimple_build_assign (q, t), GSI_SAME_STMT);

  tt = create_tmp_reg (itype, "tt");
  t = fold_build2 (TRUNC_MOD_EXPR, itype, n, nthreads);
  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE, true, GSI_SAME_STMT);
  gsi_insert_before (&gsi, gimple_build_assign (tt, t), GSI_SAME_STMT);

  t = build2 (LT_EXPR, boolean_type_node, threadid, tt);
  stmt = gimple_build_cond_empty (t);
  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

  /* SECOND_BB holds the "tt = 0; q = q + 1" adjustment for the first
     tt threads (pseudocode L3).  */
  second_bb = split_block (entry_bb, stmt)->dest;
  gsi = gsi_last_bb (second_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);

  gsi_insert_before (&gsi, gimple_build_assign (tt, build_int_cst (itype, 0)),
		     GSI_SAME_STMT);
  stmt = gimple_build_assign_with_ops (PLUS_EXPR, q, q,
				       build_int_cst (itype, 1));
  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

  /* THIRD_BB computes this thread's sub-range [s0, e0) (pseudocode L4).  */
  third_bb = split_block (second_bb, stmt)->dest;
  gsi = gsi_last_bb (third_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);

  t = build2 (MULT_EXPR, itype, q, threadid);
  t = build2 (PLUS_EXPR, itype, t, tt);
  s0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);

  t = fold_build2 (PLUS_EXPR, itype, s0, q);
  e0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);

  t = build2 (GE_EXPR, boolean_type_node, s0, e0);
  gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);

  /* Remove the GIMPLE_OMP_FOR statement.  */
  gsi_remove (&gsi, true);

  /* Setup code for sequential iteration goes in SEQ_START_BB.  */
  gsi = gsi_start_bb (seq_start_bb);

  /* For a combined construct, start/end are stored into the inner
     construct's _looptemp_ variables rather than into fd->loop.v.  */
  tree startvar = fd->loop.v;
  tree endvar = NULL_TREE;

  if (gimple_omp_for_combined_p (fd->for_stmt))
    {
      tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
		     ? gimple_omp_parallel_clauses (inner_stmt)
		     : gimple_omp_for_clauses (inner_stmt);
      tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
      gcc_assert (innerc);
      startvar = OMP_CLAUSE_DECL (innerc);
      innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
				OMP_CLAUSE__LOOPTEMP_);
      gcc_assert (innerc);
      endvar = OMP_CLAUSE_DECL (innerc);
    }
  /* startvar = s0 * STEP + N1;  */
  t = fold_convert (itype, s0);
  t = fold_build2 (MULT_EXPR, itype, t, step);
  if (POINTER_TYPE_P (type))
    t = fold_build_pointer_plus (n1, t);
  else
    t = fold_build2 (PLUS_EXPR, type, t, n1);
  t = fold_convert (TREE_TYPE (startvar), t);
  t = force_gimple_operand_gsi (&gsi, t,
				DECL_P (startvar)
				&& TREE_ADDRESSABLE (startvar),
				NULL_TREE, false, GSI_CONTINUE_LINKING);
  stmt = gimple_build_assign (startvar, t);
  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

  /* e = e0 * STEP + N1;  */
  t = fold_convert (itype, e0);
  t = fold_build2 (MULT_EXPR, itype, t, step);
  if (POINTER_TYPE_P (type))
    t = fold_build_pointer_plus (n1, t);
  else
    t = fold_build2 (PLUS_EXPR, type, t, n1);
  t = fold_convert (TREE_TYPE (startvar), t);
  e = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				false, GSI_CONTINUE_LINKING);
  if (endvar)
    {
      stmt = gimple_build_assign (endvar, e);
      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
    }
  if (fd->collapse > 1)
    expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);

  if (!broken_loop)
    {
      /* The code controlling the sequential loop replaces the
	 GIMPLE_OMP_CONTINUE.  */
      gsi = gsi_last_bb (cont_bb);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
      vmain = gimple_omp_continue_control_use (stmt);
      vback = gimple_omp_continue_control_def (stmt);

      if (!gimple_omp_for_combined_p (fd->for_stmt))
	{
	  /* V += STEP; if (V cond e) goto BODY;  */
	  if (POINTER_TYPE_P (type))
	    t = fold_build_pointer_plus (vmain, step);
	  else
	    t = fold_build2 (PLUS_EXPR, type, vmain, step);
	  t = force_gimple_operand_gsi (&gsi, t,
					DECL_P (vback)
					&& TREE_ADDRESSABLE (vback),
					NULL_TREE, true, GSI_SAME_STMT);
	  stmt = gimple_build_assign (vback, t);
	  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

	  /* For addressable VBACK compare the incremented temporary
	     itself, so the condition does not reload the variable.  */
	  t = build2 (fd->loop.cond_code, boolean_type_node,
		      DECL_P (vback) && TREE_ADDRESSABLE (vback)
		      ? t : vback, e);
	  gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
	}

      /* Remove the GIMPLE_OMP_CONTINUE statement.  */
      gsi_remove (&gsi, true);

      if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
	collapse_bb = extract_omp_for_update_vars (fd, cont_bb, body_bb);
    }

  /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing.  */
  gsi = gsi_last_bb (exit_bb);
  if (!gimple_omp_return_nowait_p (gsi_stmt (gsi)))
    {
      t = gimple_omp_return_lhs (gsi_stmt (gsi));
      gsi_insert_after (&gsi, build_omp_barrier (t), GSI_SAME_STMT);
    }
  gsi_remove (&gsi, true);

  /* Connect all the blocks.  */
  ep = make_edge (entry_bb, third_bb, EDGE_FALSE_VALUE);
  ep->probability = REG_BR_PROB_BASE / 4 * 3;
  ep = find_edge (entry_bb, second_bb);
  ep->flags = EDGE_TRUE_VALUE;
  ep->probability = REG_BR_PROB_BASE / 4;
  find_edge (third_bb, seq_start_bb)->flags = EDGE_FALSE_VALUE;
  find_edge (third_bb, fin_bb)->flags = EDGE_TRUE_VALUE;

  if (!broken_loop)
    {
      ep = find_edge (cont_bb, body_bb);
      if (gimple_omp_for_combined_p (fd->for_stmt))
	{
	  /* The inner construct supplies the back edge.  */
	  remove_edge (ep);
	  ep = NULL;
	}
      else if (fd->collapse > 1)
	{
	  /* Collapsed loops iterate through COLLAPSE_BB's update code.  */
	  remove_edge (ep);
	  ep = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
	}
      else
	ep->flags = EDGE_TRUE_VALUE;
      find_edge (cont_bb, fin_bb)->flags
	= ep ? EDGE_FALSE_VALUE : EDGE_FALLTHRU;
    }

  set_immediate_dominator (CDI_DOMINATORS, second_bb, entry_bb);
  set_immediate_dominator (CDI_DOMINATORS, third_bb, entry_bb);
  set_immediate_dominator (CDI_DOMINATORS, seq_start_bb, third_bb);

  set_immediate_dominator (CDI_DOMINATORS, body_bb,
			   recompute_dominator (CDI_DOMINATORS, body_bb));
  set_immediate_dominator (CDI_DOMINATORS, fin_bb,
			   recompute_dominator (CDI_DOMINATORS, fin_bb));

  /* Register the sequential loop with the loop tree, unless the inner
     construct owns it.  */
  if (!broken_loop && !gimple_omp_for_combined_p (fd->for_stmt))
    {
      struct loop *loop = alloc_loop ();
      loop->header = body_bb;
      if (collapse_bb == NULL)
	loop->latch = cont_bb;
      add_loop (loop, body_bb->loop_father);
    }
}
6035
773c5ba7 6036
6037/* A subroutine of expand_omp_for. Generate code for a parallel
6038 loop with static schedule and a specified chunk size. Given
6039 parameters:
1e8e9920 6040
6041 for (V = N1; V cond N2; V += STEP) BODY;
6042
6043 where COND is "<" or ">", we generate pseudocode
6044
8e6b4515 6045 if ((__typeof (V)) -1 > 0 && N2 cond N1) goto L2;
1e8e9920 6046 if (cond is <)
6047 adj = STEP - 1;
6048 else
6049 adj = STEP + 1;
fd6481cf 6050 if ((__typeof (V)) -1 > 0 && cond is >)
6051 n = -(adj + N2 - N1) / -STEP;
6052 else
6053 n = (adj + N2 - N1) / STEP;
1e8e9920 6054 trip = 0;
79acaae1 6055 V = threadid * CHUNK * STEP + N1; -- this extra definition of V is
6056 here so that V is defined
6057 if the loop is not entered
1e8e9920 6058 L0:
6059 s0 = (trip * nthreads + threadid) * CHUNK;
6060 e0 = min(s0 + CHUNK, n);
6061 if (s0 < n) goto L1; else goto L4;
6062 L1:
6063 V = s0 * STEP + N1;
6064 e = e0 * STEP + N1;
6065 L2:
6066 BODY;
6067 V += STEP;
6068 if (V cond e) goto L2; else goto L3;
6069 L3:
6070 trip += 1;
6071 goto L0;
6072 L4:
1e8e9920 6073*/
6074
61e47ac8 6075static void
bc7bff74 6076expand_omp_for_static_chunk (struct omp_region *region,
6077 struct omp_for_data *fd, gimple inner_stmt)
1e8e9920 6078{
75a70cf9 6079 tree n, s0, e0, e, t;
79acaae1 6080 tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
75a70cf9 6081 tree type, itype, v_main, v_back, v_extra;
773c5ba7 6082 basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
bc7bff74 6083 basic_block trip_update_bb = NULL, cont_bb, collapse_bb = NULL, fin_bb;
75a70cf9 6084 gimple_stmt_iterator si;
6085 gimple stmt;
6086 edge se;
bc7bff74 6087 enum built_in_function get_num_threads = BUILT_IN_OMP_GET_NUM_THREADS;
6088 enum built_in_function get_thread_num = BUILT_IN_OMP_GET_THREAD_NUM;
6089 bool broken_loop = region->cont == NULL;
6090 tree *counts = NULL;
6091 tree n1, n2, step;
1e8e9920 6092
fd6481cf 6093 itype = type = TREE_TYPE (fd->loop.v);
6094 if (POINTER_TYPE_P (type))
3cea8318 6095 itype = signed_type_for (type);
1e8e9920 6096
61e47ac8 6097 entry_bb = region->entry;
ac6e3339 6098 se = split_block (entry_bb, last_stmt (entry_bb));
6099 entry_bb = se->src;
6100 iter_part_bb = se->dest;
61e47ac8 6101 cont_bb = region->cont;
ac6e3339 6102 gcc_assert (EDGE_COUNT (iter_part_bb->succs) == 2);
bc7bff74 6103 fin_bb = BRANCH_EDGE (iter_part_bb)->dest;
6104 gcc_assert (broken_loop
6105 || fin_bb == FALLTHRU_EDGE (cont_bb)->dest);
ac6e3339 6106 seq_start_bb = split_edge (FALLTHRU_EDGE (iter_part_bb));
6107 body_bb = single_succ (seq_start_bb);
bc7bff74 6108 if (!broken_loop)
6109 {
6110 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
6111 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
6112 trip_update_bb = split_edge (FALLTHRU_EDGE (cont_bb));
6113 }
61e47ac8 6114 exit_bb = region->exit;
773c5ba7 6115
773c5ba7 6116 /* Trip and adjustment setup goes in ENTRY_BB. */
75a70cf9 6117 si = gsi_last_bb (entry_bb);
6118 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_FOR);
773c5ba7 6119
bc7bff74 6120 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
6121 {
6122 get_num_threads = BUILT_IN_OMP_GET_NUM_TEAMS;
6123 get_thread_num = BUILT_IN_OMP_GET_TEAM_NUM;
6124 }
6125
6126 if (fd->collapse > 1)
6127 {
6128 int first_zero_iter = -1;
6129 basic_block l2_dom_bb = NULL;
6130
6131 counts = XALLOCAVEC (tree, fd->collapse);
6132 expand_omp_for_init_counts (fd, &si, entry_bb, counts,
6133 fin_bb, first_zero_iter,
6134 l2_dom_bb);
6135 t = NULL_TREE;
6136 }
6137 else if (gimple_omp_for_combined_into_p (fd->for_stmt))
6138 t = integer_one_node;
6139 else
6140 t = fold_binary (fd->loop.cond_code, boolean_type_node,
6141 fold_convert (type, fd->loop.n1),
6142 fold_convert (type, fd->loop.n2));
6143 if (fd->collapse == 1
6144 && TYPE_UNSIGNED (type)
8e6b4515 6145 && (t == NULL_TREE || !integer_onep (t)))
6146 {
8e6b4515 6147 n1 = fold_convert (type, unshare_expr (fd->loop.n1));
6148 n1 = force_gimple_operand_gsi (&si, n1, true, NULL_TREE,
6149 true, GSI_SAME_STMT);
6150 n2 = fold_convert (type, unshare_expr (fd->loop.n2));
6151 n2 = force_gimple_operand_gsi (&si, n2, true, NULL_TREE,
6152 true, GSI_SAME_STMT);
6153 stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
6154 NULL_TREE, NULL_TREE);
6155 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
6156 if (walk_tree (gimple_cond_lhs_ptr (stmt),
6157 expand_omp_regimplify_p, NULL, NULL)
6158 || walk_tree (gimple_cond_rhs_ptr (stmt),
6159 expand_omp_regimplify_p, NULL, NULL))
6160 {
6161 si = gsi_for_stmt (stmt);
6162 gimple_regimplify_operands (stmt, &si);
6163 }
6164 se = split_block (entry_bb, stmt);
6165 se->flags = EDGE_TRUE_VALUE;
6166 entry_bb = se->dest;
6167 se->probability = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
6168 se = make_edge (se->src, fin_bb, EDGE_FALSE_VALUE);
6169 se->probability = REG_BR_PROB_BASE / 2000 - 1;
6170 if (gimple_in_ssa_p (cfun))
6171 {
6172 int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
6173 for (si = gsi_start_phis (fin_bb);
6174 !gsi_end_p (si); gsi_next (&si))
6175 {
6176 gimple phi = gsi_stmt (si);
6177 add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
6178 se, UNKNOWN_LOCATION);
6179 }
6180 }
6181 si = gsi_last_bb (entry_bb);
6182 }
6183
bc7bff74 6184 t = build_call_expr (builtin_decl_explicit (get_num_threads), 0);
fd6481cf 6185 t = fold_convert (itype, t);
75a70cf9 6186 nthreads = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6187 true, GSI_SAME_STMT);
48e1416a 6188
bc7bff74 6189 t = build_call_expr (builtin_decl_explicit (get_thread_num), 0);
fd6481cf 6190 t = fold_convert (itype, t);
75a70cf9 6191 threadid = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6192 true, GSI_SAME_STMT);
79acaae1 6193
bc7bff74 6194 n1 = fd->loop.n1;
6195 n2 = fd->loop.n2;
6196 step = fd->loop.step;
6197 if (gimple_omp_for_combined_into_p (fd->for_stmt))
6198 {
6199 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6200 OMP_CLAUSE__LOOPTEMP_);
6201 gcc_assert (innerc);
6202 n1 = OMP_CLAUSE_DECL (innerc);
6203 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6204 OMP_CLAUSE__LOOPTEMP_);
6205 gcc_assert (innerc);
6206 n2 = OMP_CLAUSE_DECL (innerc);
6207 }
6208 n1 = force_gimple_operand_gsi (&si, fold_convert (type, n1),
6209 true, NULL_TREE, true, GSI_SAME_STMT);
6210 n2 = force_gimple_operand_gsi (&si, fold_convert (itype, n2),
6211 true, NULL_TREE, true, GSI_SAME_STMT);
6212 step = force_gimple_operand_gsi (&si, fold_convert (itype, step),
6213 true, NULL_TREE, true, GSI_SAME_STMT);
79acaae1 6214 fd->chunk_size
75a70cf9 6215 = force_gimple_operand_gsi (&si, fold_convert (itype, fd->chunk_size),
6216 true, NULL_TREE, true, GSI_SAME_STMT);
fd6481cf 6217
6218 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
bc7bff74 6219 t = fold_build2 (PLUS_EXPR, itype, step, t);
6220 t = fold_build2 (PLUS_EXPR, itype, t, n2);
6221 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, n1));
fd6481cf 6222 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
6223 t = fold_build2 (TRUNC_DIV_EXPR, itype,
6224 fold_build1 (NEGATE_EXPR, itype, t),
bc7bff74 6225 fold_build1 (NEGATE_EXPR, itype, step));
fd6481cf 6226 else
bc7bff74 6227 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
fd6481cf 6228 t = fold_convert (itype, t);
75a70cf9 6229 n = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6230 true, GSI_SAME_STMT);
79acaae1 6231
083152fb 6232 trip_var = create_tmp_reg (itype, ".trip");
79acaae1 6233 if (gimple_in_ssa_p (cfun))
6234 {
75a70cf9 6235 trip_init = make_ssa_name (trip_var, NULL);
6236 trip_main = make_ssa_name (trip_var, NULL);
6237 trip_back = make_ssa_name (trip_var, NULL);
79acaae1 6238 }
1e8e9920 6239 else
79acaae1 6240 {
6241 trip_init = trip_var;
6242 trip_main = trip_var;
6243 trip_back = trip_var;
6244 }
1e8e9920 6245
75a70cf9 6246 stmt = gimple_build_assign (trip_init, build_int_cst (itype, 0));
6247 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
773c5ba7 6248
fd6481cf 6249 t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
bc7bff74 6250 t = fold_build2 (MULT_EXPR, itype, t, step);
fd6481cf 6251 if (POINTER_TYPE_P (type))
bc7bff74 6252 t = fold_build_pointer_plus (n1, t);
fd6481cf 6253 else
bc7bff74 6254 t = fold_build2 (PLUS_EXPR, type, t, n1);
75a70cf9 6255 v_extra = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6256 true, GSI_SAME_STMT);
79acaae1 6257
75a70cf9 6258 /* Remove the GIMPLE_OMP_FOR. */
6259 gsi_remove (&si, true);
773c5ba7 6260
6261 /* Iteration space partitioning goes in ITER_PART_BB. */
75a70cf9 6262 si = gsi_last_bb (iter_part_bb);
1e8e9920 6263
fd6481cf 6264 t = fold_build2 (MULT_EXPR, itype, trip_main, nthreads);
6265 t = fold_build2 (PLUS_EXPR, itype, t, threadid);
6266 t = fold_build2 (MULT_EXPR, itype, t, fd->chunk_size);
75a70cf9 6267 s0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6268 false, GSI_CONTINUE_LINKING);
1e8e9920 6269
fd6481cf 6270 t = fold_build2 (PLUS_EXPR, itype, s0, fd->chunk_size);
6271 t = fold_build2 (MIN_EXPR, itype, t, n);
75a70cf9 6272 e0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6273 false, GSI_CONTINUE_LINKING);
1e8e9920 6274
6275 t = build2 (LT_EXPR, boolean_type_node, s0, n);
75a70cf9 6276 gsi_insert_after (&si, gimple_build_cond_empty (t), GSI_CONTINUE_LINKING);
773c5ba7 6277
6278 /* Setup code for sequential iteration goes in SEQ_START_BB. */
75a70cf9 6279 si = gsi_start_bb (seq_start_bb);
1e8e9920 6280
bc7bff74 6281 tree startvar = fd->loop.v;
6282 tree endvar = NULL_TREE;
6283
6284 if (gimple_omp_for_combined_p (fd->for_stmt))
6285 {
6286 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
6287 ? gimple_omp_parallel_clauses (inner_stmt)
6288 : gimple_omp_for_clauses (inner_stmt);
6289 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
6290 gcc_assert (innerc);
6291 startvar = OMP_CLAUSE_DECL (innerc);
6292 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6293 OMP_CLAUSE__LOOPTEMP_);
6294 gcc_assert (innerc);
6295 endvar = OMP_CLAUSE_DECL (innerc);
6296 }
6297
fd6481cf 6298 t = fold_convert (itype, s0);
bc7bff74 6299 t = fold_build2 (MULT_EXPR, itype, t, step);
fd6481cf 6300 if (POINTER_TYPE_P (type))
bc7bff74 6301 t = fold_build_pointer_plus (n1, t);
fd6481cf 6302 else
bc7bff74 6303 t = fold_build2 (PLUS_EXPR, type, t, n1);
6304 t = fold_convert (TREE_TYPE (startvar), t);
4abecb72 6305 t = force_gimple_operand_gsi (&si, t,
bc7bff74 6306 DECL_P (startvar)
6307 && TREE_ADDRESSABLE (startvar),
4abecb72 6308 NULL_TREE, false, GSI_CONTINUE_LINKING);
bc7bff74 6309 stmt = gimple_build_assign (startvar, t);
75a70cf9 6310 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
1e8e9920 6311
fd6481cf 6312 t = fold_convert (itype, e0);
bc7bff74 6313 t = fold_build2 (MULT_EXPR, itype, t, step);
fd6481cf 6314 if (POINTER_TYPE_P (type))
bc7bff74 6315 t = fold_build_pointer_plus (n1, t);
fd6481cf 6316 else
bc7bff74 6317 t = fold_build2 (PLUS_EXPR, type, t, n1);
6318 t = fold_convert (TREE_TYPE (startvar), t);
75a70cf9 6319 e = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6320 false, GSI_CONTINUE_LINKING);
bc7bff74 6321 if (endvar)
6322 {
6323 stmt = gimple_build_assign (endvar, e);
6324 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
6325 }
6326 if (fd->collapse > 1)
6327 expand_omp_for_init_vars (fd, &si, counts, inner_stmt, startvar);
6328
6329 if (!broken_loop)
6330 {
6331 /* The code controlling the sequential loop goes in CONT_BB,
6332 replacing the GIMPLE_OMP_CONTINUE. */
6333 si = gsi_last_bb (cont_bb);
6334 stmt = gsi_stmt (si);
6335 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
6336 v_main = gimple_omp_continue_control_use (stmt);
6337 v_back = gimple_omp_continue_control_def (stmt);
1e8e9920 6338
bc7bff74 6339 if (!gimple_omp_for_combined_p (fd->for_stmt))
6340 {
6341 if (POINTER_TYPE_P (type))
6342 t = fold_build_pointer_plus (v_main, step);
6343 else
6344 t = fold_build2 (PLUS_EXPR, type, v_main, step);
6345 if (DECL_P (v_back) && TREE_ADDRESSABLE (v_back))
6346 t = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6347 true, GSI_SAME_STMT);
6348 stmt = gimple_build_assign (v_back, t);
6349 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
79acaae1 6350
bc7bff74 6351 t = build2 (fd->loop.cond_code, boolean_type_node,
6352 DECL_P (v_back) && TREE_ADDRESSABLE (v_back)
6353 ? t : v_back, e);
6354 gsi_insert_before (&si, gimple_build_cond_empty (t), GSI_SAME_STMT);
6355 }
79acaae1 6356
bc7bff74 6357 /* Remove GIMPLE_OMP_CONTINUE. */
6358 gsi_remove (&si, true);
48e1416a 6359
bc7bff74 6360 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
6361 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, body_bb);
773c5ba7 6362
bc7bff74 6363 /* Trip update code goes into TRIP_UPDATE_BB. */
6364 si = gsi_start_bb (trip_update_bb);
1e8e9920 6365
bc7bff74 6366 t = build_int_cst (itype, 1);
6367 t = build2 (PLUS_EXPR, itype, trip_main, t);
6368 stmt = gimple_build_assign (trip_back, t);
6369 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
6370 }
1e8e9920 6371
75a70cf9 6372 /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing. */
6373 si = gsi_last_bb (exit_bb);
6374 if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
bc7bff74 6375 {
6376 t = gimple_omp_return_lhs (gsi_stmt (si));
6377 gsi_insert_after (&si, build_omp_barrier (t), GSI_SAME_STMT);
6378 }
75a70cf9 6379 gsi_remove (&si, true);
1e8e9920 6380
773c5ba7 6381 /* Connect the new blocks. */
ac6e3339 6382 find_edge (iter_part_bb, seq_start_bb)->flags = EDGE_TRUE_VALUE;
6383 find_edge (iter_part_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
79acaae1 6384
bc7bff74 6385 if (!broken_loop)
6386 {
6387 se = find_edge (cont_bb, body_bb);
6388 if (gimple_omp_for_combined_p (fd->for_stmt))
6389 {
6390 remove_edge (se);
6391 se = NULL;
6392 }
6393 else if (fd->collapse > 1)
6394 {
6395 remove_edge (se);
6396 se = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
6397 }
6398 else
6399 se->flags = EDGE_TRUE_VALUE;
6400 find_edge (cont_bb, trip_update_bb)->flags
6401 = se ? EDGE_FALSE_VALUE : EDGE_FALLTHRU;
79acaae1 6402
bc7bff74 6403 redirect_edge_and_branch (single_succ_edge (trip_update_bb), iter_part_bb);
6404 }
79acaae1 6405
6406 if (gimple_in_ssa_p (cfun))
6407 {
75a70cf9 6408 gimple_stmt_iterator psi;
6409 gimple phi;
6410 edge re, ene;
f1f41a6c 6411 edge_var_map_vector *head;
75a70cf9 6412 edge_var_map *vm;
6413 size_t i;
6414
bc7bff74 6415 gcc_assert (fd->collapse == 1 && !broken_loop);
6416
79acaae1 6417 /* When we redirect the edge from trip_update_bb to iter_part_bb, we
6418 remove arguments of the phi nodes in fin_bb. We need to create
6419 appropriate phi nodes in iter_part_bb instead. */
6420 se = single_pred_edge (fin_bb);
6421 re = single_succ_edge (trip_update_bb);
75a70cf9 6422 head = redirect_edge_var_map_vector (re);
79acaae1 6423 ene = single_succ_edge (entry_bb);
6424
75a70cf9 6425 psi = gsi_start_phis (fin_bb);
f1f41a6c 6426 for (i = 0; !gsi_end_p (psi) && head->iterate (i, &vm);
75a70cf9 6427 gsi_next (&psi), ++i)
79acaae1 6428 {
75a70cf9 6429 gimple nphi;
efbcb6de 6430 source_location locus;
75a70cf9 6431
6432 phi = gsi_stmt (psi);
6433 t = gimple_phi_result (phi);
6434 gcc_assert (t == redirect_edge_var_map_result (vm));
79acaae1 6435 nphi = create_phi_node (t, iter_part_bb);
79acaae1 6436
6437 t = PHI_ARG_DEF_FROM_EDGE (phi, se);
efbcb6de 6438 locus = gimple_phi_arg_location_from_edge (phi, se);
6439
fd6481cf 6440 /* A special case -- fd->loop.v is not yet computed in
6441 iter_part_bb, we need to use v_extra instead. */
6442 if (t == fd->loop.v)
79acaae1 6443 t = v_extra;
60d535d2 6444 add_phi_arg (nphi, t, ene, locus);
efbcb6de 6445 locus = redirect_edge_var_map_location (vm);
60d535d2 6446 add_phi_arg (nphi, redirect_edge_var_map_def (vm), re, locus);
75a70cf9 6447 }
f1f41a6c 6448 gcc_assert (!gsi_end_p (psi) && i == head->length ());
75a70cf9 6449 redirect_edge_var_map_clear (re);
6450 while (1)
6451 {
6452 psi = gsi_start_phis (fin_bb);
6453 if (gsi_end_p (psi))
6454 break;
6455 remove_phi_node (&psi, false);
79acaae1 6456 }
79acaae1 6457
6458 /* Make phi node for trip. */
6459 phi = create_phi_node (trip_main, iter_part_bb);
efbcb6de 6460 add_phi_arg (phi, trip_back, single_succ_edge (trip_update_bb),
60d535d2 6461 UNKNOWN_LOCATION);
efbcb6de 6462 add_phi_arg (phi, trip_init, single_succ_edge (entry_bb),
60d535d2 6463 UNKNOWN_LOCATION);
79acaae1 6464 }
6465
bc7bff74 6466 if (!broken_loop)
6467 set_immediate_dominator (CDI_DOMINATORS, trip_update_bb, cont_bb);
79acaae1 6468 set_immediate_dominator (CDI_DOMINATORS, iter_part_bb,
6469 recompute_dominator (CDI_DOMINATORS, iter_part_bb));
6470 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
6471 recompute_dominator (CDI_DOMINATORS, fin_bb));
6472 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb,
6473 recompute_dominator (CDI_DOMINATORS, seq_start_bb));
6474 set_immediate_dominator (CDI_DOMINATORS, body_bb,
6475 recompute_dominator (CDI_DOMINATORS, body_bb));
04c2922b 6476
bc7bff74 6477 if (!broken_loop)
6478 {
6479 struct loop *trip_loop = alloc_loop ();
6480 trip_loop->header = iter_part_bb;
6481 trip_loop->latch = trip_update_bb;
6482 add_loop (trip_loop, iter_part_bb->loop_father);
04c2922b 6483
bc7bff74 6484 if (!gimple_omp_for_combined_p (fd->for_stmt))
6485 {
6486 struct loop *loop = alloc_loop ();
6487 loop->header = body_bb;
6488 loop->latch = cont_bb;
6489 add_loop (loop, trip_loop);
6490 }
6491 }
1e8e9920 6492}
6493
bc7bff74 6494
3d483a94 6495/* A subroutine of expand_omp_for. Generate code for a simd non-worksharing
6496 loop. Given parameters:
6497
6498 for (V = N1; V cond N2; V += STEP) BODY;
6499
6500 where COND is "<" or ">", we generate pseudocode
6501
6502 V = N1;
6503 goto L1;
6504 L0:
6505 BODY;
6506 V += STEP;
6507 L1:
6508 if (V cond N2) goto L0; else goto L2;
6509 L2:
6510
6511 For collapsed loops, given parameters:
6512 collapse(3)
6513 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
6514 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
6515 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
6516 BODY;
6517
6518 we generate pseudocode
6519
6520 if (cond3 is <)
6521 adj = STEP3 - 1;
6522 else
6523 adj = STEP3 + 1;
6524 count3 = (adj + N32 - N31) / STEP3;
6525 if (cond2 is <)
6526 adj = STEP2 - 1;
6527 else
6528 adj = STEP2 + 1;
6529 count2 = (adj + N22 - N21) / STEP2;
6530 if (cond1 is <)
6531 adj = STEP1 - 1;
6532 else
6533 adj = STEP1 + 1;
6534 count1 = (adj + N12 - N11) / STEP1;
6535 count = count1 * count2 * count3;
6536 V = 0;
6537 V1 = N11;
6538 V2 = N21;
6539 V3 = N31;
6540 goto L1;
6541 L0:
6542 BODY;
6543 V += 1;
6544 V3 += STEP3;
6545 V2 += (V3 cond3 N32) ? 0 : STEP2;
6546 V3 = (V3 cond3 N32) ? V3 : N31;
6547 V1 += (V2 cond2 N22) ? 0 : STEP1;
6548 V2 = (V2 cond2 N22) ? V2 : N21;
6549 L1:
6550 if (V < count) goto L0; else goto L2;
6551 L2:
6552
6553 */
6554
static void
expand_omp_simd (struct omp_region *region, struct omp_for_data *fd)
{
  tree type, t;
  basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, l2_bb, l2_dom_bb;
  gimple_stmt_iterator gsi;
  gimple stmt;
  /* A "broken" loop has no continue block: the body never reaches the
     loop latch (e.g. it always branches out).  */
  bool broken_loop = region->cont == NULL;
  edge e, ne;
  tree *counts = NULL;
  int i;
  tree safelen = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
				  OMP_CLAUSE_SAFELEN);
  tree simduid = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
				  OMP_CLAUSE__SIMDUID_);
  tree n1, n2;

  /* Recover the block layout created by lowering: ENTRY branches to the
     body (L0) on the fallthru edge and to the exit path (L2) on the
     branch edge; CONT (if any) loops back to L0.  */
  type = TREE_TYPE (fd->loop.v);
  entry_bb = region->entry;
  cont_bb = region->cont;
  gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
  gcc_assert (broken_loop
	      || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
  l0_bb = FALLTHRU_EDGE (entry_bb)->dest;
  if (!broken_loop)
    {
      gcc_assert (BRANCH_EDGE (cont_bb)->dest == l0_bb);
      gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
      /* Split after GIMPLE_OMP_CONTINUE so the loop test gets its own
	 block (L1).  */
      l1_bb = split_block (cont_bb, last_stmt (cont_bb))->dest;
      l2_bb = BRANCH_EDGE (entry_bb)->dest;
    }
  else
    {
      BRANCH_EDGE (entry_bb)->flags &= ~EDGE_ABNORMAL;
      l1_bb = split_edge (BRANCH_EDGE (entry_bb));
      l2_bb = single_succ (l1_bb);
    }
  exit_bb = region->exit;
  l2_dom_bb = NULL;

  gsi = gsi_last_bb (entry_bb);

  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
  /* Not needed in SSA form right now.  */
  gcc_assert (!gimple_in_ssa_p (cfun));
  if (fd->collapse > 1)
    {
      /* Compute the iteration counts of the collapsed loops (COUNTS[])
	 as shown in the function comment above.  */
      int first_zero_iter = -1;
      basic_block zero_iter_bb = l2_bb;

      counts = XALLOCAVEC (tree, fd->collapse);
      expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
				  zero_iter_bb, first_zero_iter,
				  l2_dom_bb);
    }
  if (l2_dom_bb == NULL)
    l2_dom_bb = l1_bb;

  n1 = fd->loop.n1;
  n2 = fd->loop.n2;
  if (gimple_omp_for_combined_into_p (fd->for_stmt))
    {
      /* When this simd loop is part of a combined construct, the real
	 bounds are communicated through _looptemp_ clauses.  */
      tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
				     OMP_CLAUSE__LOOPTEMP_);
      gcc_assert (innerc);
      n1 = OMP_CLAUSE_DECL (innerc);
      innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
				OMP_CLAUSE__LOOPTEMP_);
      gcc_assert (innerc);
      n2 = OMP_CLAUSE_DECL (innerc);
      expand_omp_build_assign (&gsi, fd->loop.v,
			       fold_convert (type, n1));
      if (fd->collapse > 1)
	{
	  gsi_prev (&gsi);
	  expand_omp_for_init_vars (fd, &gsi, counts, NULL, n1);
	  gsi_next (&gsi);
	}
    }
  else
    {
      /* Standalone simd: initialize V (and every collapsed Vi) from the
	 recorded lower bounds.  */
      expand_omp_build_assign (&gsi, fd->loop.v,
			       fold_convert (type, fd->loop.n1));
      if (fd->collapse > 1)
	for (i = 0; i < fd->collapse; i++)
	  {
	    tree itype = TREE_TYPE (fd->loops[i].v);
	    if (POINTER_TYPE_P (itype))
	      itype = signed_type_for (itype);
	    t = fold_convert (TREE_TYPE (fd->loops[i].v), fd->loops[i].n1);
	    expand_omp_build_assign (&gsi, fd->loops[i].v, t);
	  }
    }

  /* Remove the GIMPLE_OMP_FOR statement.  */
  gsi_remove (&gsi, true);

  if (!broken_loop)
    {
      /* Code to control the increment goes in the CONT_BB.  */
      gsi = gsi_last_bb (cont_bb);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);

      if (POINTER_TYPE_P (type))
	t = fold_build_pointer_plus (fd->loop.v, fd->loop.step);
      else
	t = fold_build2 (PLUS_EXPR, type, fd->loop.v, fd->loop.step);
      expand_omp_build_assign (&gsi, fd->loop.v, t);

      if (fd->collapse > 1)
	{
	  /* Step the innermost collapsed IV unconditionally ...  */
	  i = fd->collapse - 1;
	  if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i].v)))
	    {
	      t = fold_convert (sizetype, fd->loops[i].step);
	      t = fold_build_pointer_plus (fd->loops[i].v, t);
	    }
	  else
	    {
	      t = fold_convert (TREE_TYPE (fd->loops[i].v),
				fd->loops[i].step);
	      t = fold_build2 (PLUS_EXPR, TREE_TYPE (fd->loops[i].v),
			       fd->loops[i].v, t);
	    }
	  expand_omp_build_assign (&gsi, fd->loops[i].v, t);

	  /* ... then ripple carries outward: when loop I wraps, step
	     loop I-1 and reset loop I to its lower bound, exactly as in
	     the pseudocode in the function comment.  */
	  for (i = fd->collapse - 1; i > 0; i--)
	    {
	      tree itype = TREE_TYPE (fd->loops[i].v);
	      tree itype2 = TREE_TYPE (fd->loops[i - 1].v);
	      if (POINTER_TYPE_P (itype2))
		itype2 = signed_type_for (itype2);
	      t = build3 (COND_EXPR, itype2,
			  build2 (fd->loops[i].cond_code, boolean_type_node,
				  fd->loops[i].v,
				  fold_convert (itype, fd->loops[i].n2)),
			  build_int_cst (itype2, 0),
			  fold_convert (itype2, fd->loops[i - 1].step));
	      if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i - 1].v)))
		t = fold_build_pointer_plus (fd->loops[i - 1].v, t);
	      else
		t = fold_build2 (PLUS_EXPR, itype2, fd->loops[i - 1].v, t);
	      expand_omp_build_assign (&gsi, fd->loops[i - 1].v, t);

	      t = build3 (COND_EXPR, itype,
			  build2 (fd->loops[i].cond_code, boolean_type_node,
				  fd->loops[i].v,
				  fold_convert (itype, fd->loops[i].n2)),
			  fd->loops[i].v,
			  fold_convert (itype, fd->loops[i].n1));
	      expand_omp_build_assign (&gsi, fd->loops[i].v, t);
	    }
	}

      /* Remove GIMPLE_OMP_CONTINUE.  */
      gsi_remove (&gsi, true);
    }

  /* Emit the condition in L1_BB.  */
  gsi = gsi_start_bb (l1_bb);

  t = fold_convert (type, n2);
  t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				false, GSI_CONTINUE_LINKING);
  t = build2 (fd->loop.cond_code, boolean_type_node, fd->loop.v, t);
  stmt = gimple_build_cond_empty (t);
  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
  if (walk_tree (gimple_cond_lhs_ptr (stmt), expand_omp_regimplify_p,
		 NULL, NULL)
      || walk_tree (gimple_cond_rhs_ptr (stmt), expand_omp_regimplify_p,
		    NULL, NULL))
    {
      gsi = gsi_for_stmt (stmt);
      gimple_regimplify_operands (stmt, &gsi);
    }

  /* Remove GIMPLE_OMP_RETURN.  */
  gsi = gsi_last_bb (exit_bb);
  gsi_remove (&gsi, true);

  /* Connect the new blocks.  */
  remove_edge (FALLTHRU_EDGE (entry_bb));

  if (!broken_loop)
    {
      remove_edge (BRANCH_EDGE (entry_bb));
      make_edge (entry_bb, l1_bb, EDGE_FALLTHRU);

      e = BRANCH_EDGE (l1_bb);
      ne = FALLTHRU_EDGE (l1_bb);
      e->flags = EDGE_TRUE_VALUE;
    }
  else
    {
      single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

      ne = single_succ_edge (l1_bb);
      e = make_edge (l1_bb, l0_bb, EDGE_TRUE_VALUE);

    }
  ne->flags = EDGE_FALSE_VALUE;
  /* Predict the back edge taken 7 out of 8 times.  */
  e->probability = REG_BR_PROB_BASE * 7 / 8;
  ne->probability = REG_BR_PROB_BASE / 8;

  set_immediate_dominator (CDI_DOMINATORS, l1_bb, entry_bb);
  set_immediate_dominator (CDI_DOMINATORS, l2_bb, l2_dom_bb);
  set_immediate_dominator (CDI_DOMINATORS, l0_bb, l1_bb);

  if (!broken_loop)
    {
      /* Register the loop in the loop tree and annotate it with the
	 simd-specific metadata the vectorizer consumes.  */
      struct loop *loop = alloc_loop ();
      loop->header = l1_bb;
      loop->latch = e->dest;
      add_loop (loop, l1_bb->loop_father);
      if (safelen == NULL_TREE)
	loop->safelen = INT_MAX;
      else
	{
	  /* Clamp safelen to INT_MAX; a safelen of 1 means "no simd
	     benefit", which is represented as 0.  */
	  safelen = OMP_CLAUSE_SAFELEN_EXPR (safelen);
	  if (!tree_fits_uhwi_p (safelen)
	      || tree_to_uhwi (safelen) > INT_MAX)
	    loop->safelen = INT_MAX;
	  else
	    loop->safelen = tree_to_uhwi (safelen);
	  if (loop->safelen == 1)
	    loop->safelen = 0;
	}
      if (simduid)
	{
	  loop->simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
	  cfun->has_simduid_loops = true;
	}
      /* If not -fno-tree-loop-vectorize, hint that we want to vectorize
	 the loop.  */
      if ((flag_tree_loop_vectorize
	   || (!global_options_set.x_flag_tree_loop_vectorize
	       && !global_options_set.x_flag_tree_vectorize))
	  && loop->safelen > 1)
	{
	  loop->force_vect = true;
	  cfun->has_force_vect_loops = true;
	}
    }
}
6800
1e8e9920 6801
/* Expand the OpenMP loop defined by REGION.  Dispatches to the
   appropriate expander based on the loop kind (simd vs. worksharing)
   and schedule, and picks the matching libgomp entry points for the
   generic case.  */

static void
expand_omp_for (struct omp_region *region, gimple inner_stmt)
{
  struct omp_for_data fd;
  struct omp_for_data_loop *loops;

  /* One omp_for_data_loop per collapsed loop nest level.  */
  loops
    = (struct omp_for_data_loop *)
      alloca (gimple_omp_for_collapse (last_stmt (region->entry))
	      * sizeof (struct omp_for_data_loop));
  extract_omp_for_data (last_stmt (region->entry), &fd, loops);
  region->sched_kind = fd.sched_kind;

  /* Lowering marked these edges abnormal to keep the region intact;
     they become normal edges now.  */
  gcc_assert (EDGE_COUNT (region->entry->succs) == 2);
  BRANCH_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
  FALLTHRU_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
  if (region->cont)
    {
      gcc_assert (EDGE_COUNT (region->cont->succs) == 2);
      BRANCH_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
      FALLTHRU_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
    }
  else
    /* If there isn't a continue then this is a degenerate case where
       the introduction of abnormal edges during lowering will prevent
       original loops from being detected.  Fix that up.  */
    loops_state_set (LOOPS_NEED_FIXUP);

  if (gimple_omp_for_kind (fd.for_stmt) & GF_OMP_FOR_KIND_SIMD)
    expand_omp_simd (region, &fd);
  else if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
	   && !fd.have_ordered)
    {
      /* Static schedules without ordered clauses can be expanded
	 inline, without calling into the runtime per chunk.  */
      if (fd.chunk_size == NULL)
	expand_omp_for_static_nochunk (region, &fd, inner_stmt);
      else
	expand_omp_for_static_chunk (region, &fd, inner_stmt);
    }
  else
    {
      /* Everything else goes through the generic GOMP_loop_*_start /
	 GOMP_loop_*_next runtime interface.  */
      int fn_index, start_ix, next_ix;

      gcc_assert (gimple_omp_for_kind (fd.for_stmt)
		  == GF_OMP_FOR_KIND_FOR);
      if (fd.chunk_size == NULL
	  && fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
	fd.chunk_size = integer_zero_node;
      gcc_assert (fd.sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
      /* Index into the builtin group: the schedule kind selects the
	 variant (runtime maps to slot 3), and ordered variants sit
	 four entries further on.  */
      fn_index = (fd.sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
		 ? 3 : fd.sched_kind;
      fn_index += fd.have_ordered * 4;
      start_ix = ((int)BUILT_IN_GOMP_LOOP_STATIC_START) + fn_index;
      next_ix = ((int)BUILT_IN_GOMP_LOOP_STATIC_NEXT) + fn_index;
      if (fd.iter_type == long_long_unsigned_type_node)
	{
	  /* Shift over to the unsigned-long-long (_ull_) builtin
	     group when the iterator does not fit in a long.  */
	  start_ix += ((int)BUILT_IN_GOMP_LOOP_ULL_STATIC_START
		       - (int)BUILT_IN_GOMP_LOOP_STATIC_START);
	  next_ix += ((int)BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT
		      - (int)BUILT_IN_GOMP_LOOP_STATIC_NEXT);
	}
      expand_omp_for_generic (region, &fd, (enum built_in_function) start_ix,
			      (enum built_in_function) next_ix, inner_stmt);
    }

  if (gimple_in_ssa_p (cfun))
    update_ssa (TODO_update_ssa_only_virtuals);
}
6871
1e8e9920 6872
6873/* Expand code for an OpenMP sections directive. In pseudo code, we generate
6874
1e8e9920 6875 v = GOMP_sections_start (n);
6876 L0:
6877 switch (v)
6878 {
6879 case 0:
6880 goto L2;
6881 case 1:
6882 section 1;
6883 goto L1;
6884 case 2:
6885 ...
6886 case n:
6887 ...
1e8e9920 6888 default:
6889 abort ();
6890 }
6891 L1:
6892 v = GOMP_sections_next ();
6893 goto L0;
6894 L2:
6895 reduction;
6896
773c5ba7 6897 If this is a combined parallel sections, replace the call to
79acaae1 6898 GOMP_sections_start with call to GOMP_sections_next. */
1e8e9920 6899
static void
expand_omp_sections (struct omp_region *region)
{
  tree t, u, vin = NULL, vmain, vnext, l2;
  vec<tree> label_vec;
  unsigned len;
  basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
  gimple_stmt_iterator si, switch_si;
  gimple sections_stmt, stmt, cont;
  edge_iterator ei;
  edge e;
  struct omp_region *inner;
  unsigned i, casei;
  /* No continue block means control never returns to fetch another
     section (see the pseudocode above: L1 is unreachable).  */
  bool exit_reachable = region->cont != NULL;

  gcc_assert (region->exit != NULL);
  entry_bb = region->entry;
  l0_bb = single_succ (entry_bb);
  l1_bb = region->cont;
  l2_bb = region->exit;
  /* Find the label (L2) that the '0' switch case should jump to.  */
  if (single_pred_p (l2_bb) && single_pred (l2_bb) == l0_bb)
    l2 = gimple_block_label (l2_bb);
  else
    {
      /* This can happen if there are reductions.  */
      len = EDGE_COUNT (l0_bb->succs);
      gcc_assert (len > 0);
      e = EDGE_SUCC (l0_bb, len - 1);
      si = gsi_last_bb (e->dest);
      l2 = NULL_TREE;
      if (gsi_end_p (si)
	  || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
	l2 = gimple_block_label (e->dest);
      else
	/* Scan the successors for the first one that is not itself a
	   section; that is where the exit path lives.  */
	FOR_EACH_EDGE (e, ei, l0_bb->succs)
	  {
	    si = gsi_last_bb (e->dest);
	    if (gsi_end_p (si)
		|| gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
	      {
		l2 = gimple_block_label (e->dest);
		break;
	      }
	  }
    }
  if (exit_reachable)
    default_bb = create_empty_bb (l1_bb->prev_bb);
  else
    default_bb = create_empty_bb (l0_bb);

  /* We will build a switch() with enough cases for all the
     GIMPLE_OMP_SECTION regions, a '0' case to handle the end of more work
     and a default case to abort if something goes wrong.  */
  len = EDGE_COUNT (l0_bb->succs);

  /* Use vec::quick_push on label_vec throughout, since we know the size
     in advance.  */
  label_vec.create (len);

  /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
     GIMPLE_OMP_SECTIONS statement.  */
  si = gsi_last_bb (entry_bb);
  sections_stmt = gsi_stmt (si);
  gcc_assert (gimple_code (sections_stmt) == GIMPLE_OMP_SECTIONS);
  vin = gimple_omp_sections_control (sections_stmt);
  if (!is_combined_parallel (region))
    {
      /* If we are not inside a combined parallel+sections region,
	 call GOMP_sections_start.  */
      t = build_int_cst (unsigned_type_node, len - 1);
      u = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_START);
      stmt = gimple_build_call (u, 1, t);
    }
  else
    {
      /* Otherwise, call GOMP_sections_next.  */
      u = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_NEXT);
      stmt = gimple_build_call (u, 0);
    }
  gimple_call_set_lhs (stmt, vin);
  gsi_insert_after (&si, stmt, GSI_SAME_STMT);
  gsi_remove (&si, true);

  /* The switch() statement replacing GIMPLE_OMP_SECTIONS_SWITCH goes in
     L0_BB.  */
  switch_si = gsi_last_bb (l0_bb);
  gcc_assert (gimple_code (gsi_stmt (switch_si)) == GIMPLE_OMP_SECTIONS_SWITCH);
  if (exit_reachable)
    {
      cont = last_stmt (l1_bb);
      gcc_assert (gimple_code (cont) == GIMPLE_OMP_CONTINUE);
      vmain = gimple_omp_continue_control_use (cont);
      vnext = gimple_omp_continue_control_def (cont);
    }
  else
    {
      vmain = vin;
      vnext = NULL_TREE;
    }

  /* Case 0: no more work, jump to L2.  */
  t = build_case_label (build_int_cst (unsigned_type_node, 0), NULL, l2);
  label_vec.quick_push (t);
  i = 1;

  /* Convert each GIMPLE_OMP_SECTION into a CASE_LABEL_EXPR.  */
  for (inner = region->inner, casei = 1;
       inner;
       inner = inner->next, i++, casei++)
    {
      basic_block s_entry_bb, s_exit_bb;

      /* Skip optional reduction region.  */
      if (inner->type == GIMPLE_OMP_ATOMIC_LOAD)
	{
	  --i;
	  --casei;
	  continue;
	}

      s_entry_bb = inner->entry;
      s_exit_bb = inner->exit;

      t = gimple_block_label (s_entry_bb);
      u = build_int_cst (unsigned_type_node, casei);
      u = build_case_label (u, NULL, t);
      label_vec.quick_push (u);

      /* Drop the GIMPLE_OMP_SECTION marker opening this section.  */
      si = gsi_last_bb (s_entry_bb);
      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SECTION);
      gcc_assert (i < len || gimple_omp_section_last_p (gsi_stmt (si)));
      gsi_remove (&si, true);
      single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;

      if (s_exit_bb == NULL)
	continue;

      /* And the GIMPLE_OMP_RETURN closing it.  */
      si = gsi_last_bb (s_exit_bb);
      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
      gsi_remove (&si, true);

      single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
    }

  /* Error handling code goes in DEFAULT_BB.  */
  t = gimple_block_label (default_bb);
  u = build_case_label (NULL, NULL, t);
  make_edge (l0_bb, default_bb, 0);
  if (current_loops)
    add_bb_to_loop (default_bb, current_loops->tree_root);

  stmt = gimple_build_switch (vmain, u, label_vec);
  gsi_insert_after (&switch_si, stmt, GSI_SAME_STMT);
  gsi_remove (&switch_si, true);
  label_vec.release ();

  /* An unexpected section number can only mean compiler or runtime
     breakage; trap.  */
  si = gsi_start_bb (default_bb);
  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);

  if (exit_reachable)
    {
      tree bfn_decl;

      /* Code to get the next section goes in L1_BB.  */
      si = gsi_last_bb (l1_bb);
      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CONTINUE);

      bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_NEXT);
      stmt = gimple_build_call (bfn_decl, 0);
      gimple_call_set_lhs (stmt, vnext);
      gsi_insert_after (&si, stmt, GSI_SAME_STMT);
      gsi_remove (&si, true);

      single_succ_edge (l1_bb)->flags = EDGE_FALLTHRU;
    }

  /* Cleanup function replaces GIMPLE_OMP_RETURN in EXIT_BB.  */
  si = gsi_last_bb (l2_bb);
  if (gimple_omp_return_nowait_p (gsi_stmt (si)))
    t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END_NOWAIT);
  else if (gimple_omp_return_lhs (gsi_stmt (si)))
    t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END_CANCEL);
  else
    t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END);
  stmt = gimple_build_call (t, 0);
  if (gimple_omp_return_lhs (gsi_stmt (si)))
    gimple_call_set_lhs (stmt, gimple_omp_return_lhs (gsi_stmt (si)));
  gsi_insert_after (&si, stmt, GSI_SAME_STMT);
  gsi_remove (&si, true);

  set_immediate_dominator (CDI_DOMINATORS, default_bb, l0_bb);
}
1e8e9920 7092
1e8e9920 7093
61e47ac8 7094/* Expand code for an OpenMP single directive. We've already expanded
7095 much of the code, here we simply place the GOMP_barrier call. */
7096
7097static void
7098expand_omp_single (struct omp_region *region)
7099{
7100 basic_block entry_bb, exit_bb;
75a70cf9 7101 gimple_stmt_iterator si;
61e47ac8 7102
7103 entry_bb = region->entry;
7104 exit_bb = region->exit;
7105
75a70cf9 7106 si = gsi_last_bb (entry_bb);
75a70cf9 7107 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE);
7108 gsi_remove (&si, true);
61e47ac8 7109 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7110
75a70cf9 7111 si = gsi_last_bb (exit_bb);
bc7bff74 7112 if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
7113 {
7114 tree t = gimple_omp_return_lhs (gsi_stmt (si));
7115 gsi_insert_after (&si, build_omp_barrier (t), GSI_SAME_STMT);
7116 }
75a70cf9 7117 gsi_remove (&si, true);
61e47ac8 7118 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
7119}
7120
7121
7122/* Generic expansion for OpenMP synchronization directives: master,
7123 ordered and critical. All we need to do here is remove the entry
7124 and exit markers for REGION. */
773c5ba7 7125
7126static void
7127expand_omp_synch (struct omp_region *region)
7128{
7129 basic_block entry_bb, exit_bb;
75a70cf9 7130 gimple_stmt_iterator si;
773c5ba7 7131
61e47ac8 7132 entry_bb = region->entry;
7133 exit_bb = region->exit;
773c5ba7 7134
75a70cf9 7135 si = gsi_last_bb (entry_bb);
7136 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE
7137 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_MASTER
bc7bff74 7138 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_TASKGROUP
75a70cf9 7139 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ORDERED
bc7bff74 7140 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CRITICAL
7141 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_TEAMS);
75a70cf9 7142 gsi_remove (&si, true);
773c5ba7 7143 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7144
03ed154b 7145 if (exit_bb)
7146 {
75a70cf9 7147 si = gsi_last_bb (exit_bb);
7148 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
7149 gsi_remove (&si, true);
03ed154b 7150 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
7151 }
773c5ba7 7152}
1e8e9920 7153
2169f33b 7154/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
7155 operation as a normal volatile load. */
7156
7157static bool
3ec11c49 7158expand_omp_atomic_load (basic_block load_bb, tree addr,
7159 tree loaded_val, int index)
2169f33b 7160{
3ec11c49 7161 enum built_in_function tmpbase;
7162 gimple_stmt_iterator gsi;
7163 basic_block store_bb;
7164 location_t loc;
7165 gimple stmt;
7166 tree decl, call, type, itype;
7167
7168 gsi = gsi_last_bb (load_bb);
7169 stmt = gsi_stmt (gsi);
7170 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD);
7171 loc = gimple_location (stmt);
7172
7173 /* ??? If the target does not implement atomic_load_optab[mode], and mode
7174 is smaller than word size, then expand_atomic_load assumes that the load
7175 is atomic. We could avoid the builtin entirely in this case. */
7176
7177 tmpbase = (enum built_in_function) (BUILT_IN_ATOMIC_LOAD_N + index + 1);
7178 decl = builtin_decl_explicit (tmpbase);
7179 if (decl == NULL_TREE)
7180 return false;
7181
7182 type = TREE_TYPE (loaded_val);
7183 itype = TREE_TYPE (TREE_TYPE (decl));
7184
7185 call = build_call_expr_loc (loc, decl, 2, addr,
bc7bff74 7186 build_int_cst (NULL,
7187 gimple_omp_atomic_seq_cst_p (stmt)
7188 ? MEMMODEL_SEQ_CST
7189 : MEMMODEL_RELAXED));
3ec11c49 7190 if (!useless_type_conversion_p (type, itype))
7191 call = fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, call);
7192 call = build2_loc (loc, MODIFY_EXPR, void_type_node, loaded_val, call);
7193
7194 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7195 gsi_remove (&gsi, true);
7196
7197 store_bb = single_succ (load_bb);
7198 gsi = gsi_last_bb (store_bb);
7199 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
7200 gsi_remove (&gsi, true);
7201
7202 if (gimple_in_ssa_p (cfun))
7203 update_ssa (TODO_update_ssa_no_phi);
7204
7205 return true;
2169f33b 7206}
7207
/* A subroutine of expand_omp_atomic.  Attempt to implement the atomic
   operation as a single __atomic_store_N, or as an __atomic_exchange_N
   when the previous value is also needed.  INDEX is log2 of the access
   size.  Returns false if a suitable builtin is not available.  */

static bool
expand_omp_atomic_store (basic_block load_bb, tree addr,
			 tree loaded_val, tree stored_val, int index)
{
  enum built_in_function tmpbase;
  gimple_stmt_iterator gsi;
  basic_block store_bb = single_succ (load_bb);
  location_t loc;
  gimple stmt;
  tree decl, call, type, itype;
  enum machine_mode imode;
  bool exchange;

  gsi = gsi_last_bb (load_bb);
  stmt = gsi_stmt (gsi);
  gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD);

  /* If the load value is needed, then this isn't a store but an exchange.  */
  exchange = gimple_omp_atomic_need_value_p (stmt);

  gsi = gsi_last_bb (store_bb);
  stmt = gsi_stmt (gsi);
  gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE);
  loc = gimple_location (stmt);

  /* ??? If the target does not implement atomic_store_optab[mode], and mode
     is smaller than word size, then expand_atomic_store assumes that the store
     is atomic.  We could avoid the builtin entirely in this case.  */

  /* +1 skips the size-generic _N entry to reach the sized variant.  */
  tmpbase = (exchange ? BUILT_IN_ATOMIC_EXCHANGE_N : BUILT_IN_ATOMIC_STORE_N);
  tmpbase = (enum built_in_function) ((int) tmpbase + index + 1);
  decl = builtin_decl_explicit (tmpbase);
  if (decl == NULL_TREE)
    return false;

  type = TREE_TYPE (stored_val);

  /* Dig out the type of the function's second argument.  */
  itype = TREE_TYPE (decl);
  itype = TYPE_ARG_TYPES (itype);
  itype = TREE_CHAIN (itype);
  itype = TREE_VALUE (itype);
  imode = TYPE_MODE (itype);

  if (exchange && !can_atomic_exchange_p (imode, true))
    return false;

  if (!useless_type_conversion_p (itype, type))
    stored_val = fold_build1_loc (loc, VIEW_CONVERT_EXPR, itype, stored_val);
  /* seq_cst atomics need the strong memory model; otherwise RELAXED
     provides the required atomicity.  */
  call = build_call_expr_loc (loc, decl, 3, addr, stored_val,
			      build_int_cst (NULL,
					     gimple_omp_atomic_seq_cst_p (stmt)
					     ? MEMMODEL_SEQ_CST
					     : MEMMODEL_RELAXED));
  if (exchange)
    {
      /* The exchange builtin returns the old value; assign it to
	 LOADED_VAL, converting back if the types differ.  */
      if (!useless_type_conversion_p (type, itype))
	call = build1_loc (loc, VIEW_CONVERT_EXPR, type, call);
      call = build2_loc (loc, MODIFY_EXPR, void_type_node, loaded_val, call);
    }

  force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
  gsi_remove (&gsi, true);

  /* Remove the GIMPLE_OMP_ATOMIC_LOAD that we verified above.  */
  gsi = gsi_last_bb (load_bb);
  gsi_remove (&gsi, true);

  if (gimple_in_ssa_p (cfun))
    update_ssa (TODO_update_ssa_no_phi);

  return true;
}
7284
/* A subroutine of expand_omp_atomic.  Attempt to implement the atomic
   operation as a __atomic_fetch_op builtin.  INDEX is log2 of the
   size of the data type, and thus usable to find the index of the builtin
   decl.  Returns false if the expression is not of the proper form.  */

static bool
expand_omp_atomic_fetch_op (basic_block load_bb,
			    tree addr, tree loaded_val,
			    tree stored_val, int index)
{
  enum built_in_function oldbase, newbase, tmpbase;
  tree decl, itype, call;
  tree lhs, rhs;
  basic_block store_bb = single_succ (load_bb);
  gimple_stmt_iterator gsi;
  gimple stmt;
  location_t loc;
  enum tree_code code;
  bool need_old, need_new;
  enum machine_mode imode;
  bool seq_cst;

  /* We expect to find the following sequences:

   load_bb:
       GIMPLE_OMP_ATOMIC_LOAD (tmp, mem)

   store_bb:
       val = tmp OP something; (or: something OP tmp)
       GIMPLE_OMP_STORE (val)

  ???FIXME: Allow a more flexible sequence.
  Perhaps use data flow to pick the statements.

  */

  /* The store block must contain exactly one assignment followed by
     the GIMPLE_OMP_ATOMIC_STORE marker.  */
  gsi = gsi_after_labels (store_bb);
  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  if (!is_gimple_assign (stmt))
    return false;
  gsi_next (&gsi);
  if (gimple_code (gsi_stmt (gsi)) != GIMPLE_OMP_ATOMIC_STORE)
    return false;
  /* NEED_NEW: caller wants the post-op value; NEED_OLD: the pre-op
     value.  At most one of the two may be set.  */
  need_new = gimple_omp_atomic_need_value_p (gsi_stmt (gsi));
  need_old = gimple_omp_atomic_need_value_p (last_stmt (load_bb));
  seq_cst = gimple_omp_atomic_seq_cst_p (last_stmt (load_bb));
  gcc_checking_assert (!need_old || !need_new);

  if (!operand_equal_p (gimple_assign_lhs (stmt), stored_val, 0))
    return false;

  /* Check for one of the supported fetch-op operations.  */
  code = gimple_assign_rhs_code (stmt);
  switch (code)
    {
    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      oldbase = BUILT_IN_ATOMIC_FETCH_ADD_N;
      newbase = BUILT_IN_ATOMIC_ADD_FETCH_N;
      break;
    case MINUS_EXPR:
      oldbase = BUILT_IN_ATOMIC_FETCH_SUB_N;
      newbase = BUILT_IN_ATOMIC_SUB_FETCH_N;
      break;
    case BIT_AND_EXPR:
      oldbase = BUILT_IN_ATOMIC_FETCH_AND_N;
      newbase = BUILT_IN_ATOMIC_AND_FETCH_N;
      break;
    case BIT_IOR_EXPR:
      oldbase = BUILT_IN_ATOMIC_FETCH_OR_N;
      newbase = BUILT_IN_ATOMIC_OR_FETCH_N;
      break;
    case BIT_XOR_EXPR:
      oldbase = BUILT_IN_ATOMIC_FETCH_XOR_N;
      newbase = BUILT_IN_ATOMIC_XOR_FETCH_N;
      break;
    default:
      return false;
    }

  /* Make sure the expression is of the proper form.  */
  if (operand_equal_p (gimple_assign_rhs1 (stmt), loaded_val, 0))
    rhs = gimple_assign_rhs2 (stmt);
  else if (commutative_tree_code (gimple_assign_rhs_code (stmt))
	   && operand_equal_p (gimple_assign_rhs2 (stmt), loaded_val, 0))
    rhs = gimple_assign_rhs1 (stmt);
  else
    return false;

  /* Pick the fetch-op (old value) or op-fetch (new value) builtin
     family; +1 skips the size-generic _N entry.  */
  tmpbase = ((enum built_in_function)
	     ((need_new ? newbase : oldbase) + index + 1));
  decl = builtin_decl_explicit (tmpbase);
  if (decl == NULL_TREE)
    return false;
  itype = TREE_TYPE (TREE_TYPE (decl));
  imode = TYPE_MODE (itype);

  /* We could test all of the various optabs involved, but the fact of the
     matter is that (with the exception of i486 vs i586 and xadd) all targets
     that support any atomic operation optab also implement compare-and-swap.
     Let optabs.c take care of expanding any compare-and-swap loop.  */
  if (!can_compare_and_swap_p (imode, true))
    return false;

  gsi = gsi_last_bb (load_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_LOAD);

  /* OpenMP does not imply any barrier-like semantics on its atomic ops.
     It only requires that the operation happen atomically.  Thus we can
     use the RELAXED memory model.  */
  call = build_call_expr_loc (loc, decl, 3, addr,
			      fold_convert_loc (loc, itype, rhs),
			      build_int_cst (NULL,
					     seq_cst ? MEMMODEL_SEQ_CST
						     : MEMMODEL_RELAXED));

  if (need_old || need_new)
    {
      /* Assign the builtin's result to whichever of the two values
	 the program actually consumes.  */
      lhs = need_old ? loaded_val : stored_val;
      call = fold_convert_loc (loc, TREE_TYPE (lhs), call);
      call = build2_loc (loc, MODIFY_EXPR, void_type_node, lhs, call);
    }
  else
    call = fold_convert_loc (loc, void_type_node, call);
  force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
  gsi_remove (&gsi, true);

  /* Delete the GIMPLE_OMP_ATOMIC_STORE marker and then the assignment
     it followed; both are subsumed by the builtin call.  */
  gsi = gsi_last_bb (store_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
  gsi_remove (&gsi, true);
  gsi = gsi_last_bb (store_bb);
  gsi_remove (&gsi, true);

  if (gimple_in_ssa_p (cfun))
    update_ssa (TODO_update_ssa_no_phi);

  return true;
}
7424
/* A subroutine of expand_omp_atomic.  Implement the atomic operation as:

      oldval = *addr;
      repeat:
	newval = rhs;	 // with oldval replacing *addr in rhs
	oldval = __sync_val_compare_and_swap (addr, oldval, newval);
	if (oldval != newval)
	  goto repeat;

   LOAD_BB contains the GIMPLE_OMP_ATOMIC_LOAD and STORE_BB the matching
   GIMPLE_OMP_ATOMIC_STORE; both statements are consumed and replaced by
   the compare-and-swap loop.  INDEX is log2 of the size of the data type,
   and thus usable to find the index of the builtin decl.  Returns true on
   success, false if no suitable compare-and-swap builtin exists for this
   size (in which case the caller falls back to the mutex expansion).  */

static bool
expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
			    tree addr, tree loaded_val, tree stored_val,
			    int index)
{
  tree loadedi, storedi, initial, new_storedi, old_vali;
  tree type, itype, cmpxchg, iaddr;
  gimple_stmt_iterator si;
  /* The loop header is the (single) block between LOAD_BB and STORE_BB;
     the retry edge added below jumps back to it.  */
  basic_block loop_header = single_succ (load_bb);
  gimple phi, stmt;
  edge e;
  enum built_in_function fncode;

  /* ??? We need a non-pointer interface to __atomic_compare_exchange in
     order to use the RELAXED memory model effectively.  */
  /* INDEX selects the 1/2/4/8/16-byte variant of
     __sync_val_compare_and_swap_N.  */
  fncode = (enum built_in_function)((int)BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_N
				    + index + 1);
  cmpxchg = builtin_decl_explicit (fncode);
  if (cmpxchg == NULL_TREE)
    return false;
  type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  itype = TREE_TYPE (TREE_TYPE (cmpxchg));

  if (!can_compare_and_swap_p (TYPE_MODE (itype), true))
    return false;

  /* Load the initial value, replacing the GIMPLE_OMP_ATOMIC_LOAD.  */
  si = gsi_last_bb (load_bb);
  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);

  /* For floating-point values, we'll need to view-convert them to integers
     so that we can perform the atomic compare and swap.  Simplify the
     following code by always setting up the "i"ntegral variables.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    {
      tree iaddr_val;

      /* IADDR is ADDR reinterpreted as a pointer to the integer type of
	 the same size; LOADEDI holds the integer image of the value.  */
      iaddr = create_tmp_reg (build_pointer_type_for_mode (itype, ptr_mode,
							   true), NULL);
      iaddr_val
	= force_gimple_operand_gsi (&si,
				    fold_convert (TREE_TYPE (iaddr), addr),
				    false, NULL_TREE, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (iaddr, iaddr_val);
      gsi_insert_before (&si, stmt, GSI_SAME_STMT);
      loadedi = create_tmp_var (itype, NULL);
      if (gimple_in_ssa_p (cfun))
	loadedi = make_ssa_name (loadedi, NULL);
    }
  else
    {
      /* Integral/pointer types can be compare-and-swapped directly.  */
      iaddr = addr;
      loadedi = loaded_val;
    }

  /* Emit the initial (non-atomic) load *IADDR before the atomic-load
     statement it replaces.  */
  initial
    = force_gimple_operand_gsi (&si,
				build2 (MEM_REF, TREE_TYPE (TREE_TYPE (iaddr)),
					iaddr,
					build_int_cst (TREE_TYPE (iaddr), 0)),
				true, NULL_TREE, true, GSI_SAME_STMT);

  /* Move the value to the LOADEDI temporary.  */
  if (gimple_in_ssa_p (cfun))
    {
      /* In SSA form the loop-carried value is expressed as a PHI in the
	 loop header; its back-edge argument is filled in further below.  */
      gcc_assert (gimple_seq_empty_p (phi_nodes (loop_header)));
      phi = create_phi_node (loadedi, loop_header);
      SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (load_bb)),
	       initial);
    }
  else
    gsi_insert_before (&si,
		       gimple_build_assign (loadedi, initial),
		       GSI_SAME_STMT);
  if (loadedi != loaded_val)
    {
      /* View-convert the integer image back to the original type so the
	 body of the atomic expression sees LOADED_VAL as usual.  */
      gimple_stmt_iterator gsi2;
      tree x;

      x = build1 (VIEW_CONVERT_EXPR, type, loadedi);
      gsi2 = gsi_start_bb (loop_header);
      if (gimple_in_ssa_p (cfun))
	{
	  gimple stmt;
	  x = force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
					true, GSI_SAME_STMT);
	  stmt = gimple_build_assign (loaded_val, x);
	  gsi_insert_before (&gsi2, stmt, GSI_SAME_STMT);
	}
      else
	{
	  x = build2 (MODIFY_EXPR, TREE_TYPE (loaded_val), loaded_val, x);
	  force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
				    true, GSI_SAME_STMT);
	}
    }
  /* Delete the GIMPLE_OMP_ATOMIC_LOAD that has now been replaced.  */
  gsi_remove (&si, true);

  si = gsi_last_bb (store_bb);
  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);

  /* STOREDI is the to-be-stored value as the integer type, matching
     LOADEDI above.  */
  if (iaddr == addr)
    storedi = stored_val;
  else
    storedi =
      force_gimple_operand_gsi (&si,
				build1 (VIEW_CONVERT_EXPR, itype,
					stored_val), true, NULL_TREE, true,
				GSI_SAME_STMT);

  /* Build the compare&swap statement.  */
  new_storedi = build_call_expr (cmpxchg, 3, iaddr, loadedi, storedi);
  new_storedi = force_gimple_operand_gsi (&si,
					  fold_convert (TREE_TYPE (loadedi),
							new_storedi),
					  true, NULL_TREE,
					  true, GSI_SAME_STMT);

  if (gimple_in_ssa_p (cfun))
    old_vali = loadedi;
  else
    {
      /* Outside SSA, snapshot the expected value before overwriting
	 LOADEDI with the value the CAS actually observed.  */
      old_vali = create_tmp_var (TREE_TYPE (loadedi), NULL);
      stmt = gimple_build_assign (old_vali, loadedi);
      gsi_insert_before (&si, stmt, GSI_SAME_STMT);

      stmt = gimple_build_assign (loadedi, new_storedi);
      gsi_insert_before (&si, stmt, GSI_SAME_STMT);
    }

  /* Note that we always perform the comparison as an integer, even for
     floating point.  This allows the atomic operation to properly
     succeed even with NaNs and -0.0.  */
  stmt = gimple_build_cond_empty
    (build2 (NE_EXPR, boolean_type_node,
	     new_storedi, old_vali));
  gsi_insert_before (&si, stmt, GSI_SAME_STMT);

  /* Update cfg.  */
  /* The old fallthru edge out of STORE_BB becomes the CAS-succeeded
     (false) exit; a new true edge loops back to retry.  */
  e = single_succ_edge (store_bb);
  e->flags &= ~EDGE_FALLTHRU;
  e->flags |= EDGE_FALSE_VALUE;

  e = make_edge (store_bb, loop_header, EDGE_TRUE_VALUE);

  /* Copy the new value to loadedi (we already did that before the condition
     if we are not in SSA).  */
  if (gimple_in_ssa_p (cfun))
    {
      phi = gimple_seq_first_stmt (phi_nodes (loop_header));
      SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), new_storedi);
    }

  /* Remove GIMPLE_OMP_ATOMIC_STORE.  */
  gsi_remove (&si, true);

  /* Register the new retry loop with the loop tree.  */
  struct loop *loop = alloc_loop ();
  loop->header = loop_header;
  loop->latch = store_bb;
  add_loop (loop, loop_header->loop_father);

  if (gimple_in_ssa_p (cfun))
    update_ssa (TODO_update_ssa_no_phi);

  return true;
}
7603
/* A subroutine of expand_omp_atomic.  Implement the atomic operation as:

      GOMP_atomic_start ();
      *addr = rhs;
      GOMP_atomic_end ();

   The result is not globally atomic, but works so long as all parallel
   references are within #pragma omp atomic directives.  According to
   responses received from omp@openmp.org, appears to be within spec.
   Which makes sense, since that's how several other compilers handle
   this situation as well.
   LOADED_VAL and ADDR are the operands of GIMPLE_OMP_ATOMIC_LOAD we're
   expanding.  STORED_VAL is the operand of the matching
   GIMPLE_OMP_ATOMIC_STORE.

   We replace
   GIMPLE_OMP_ATOMIC_LOAD (loaded_val, addr) with
   loaded_val = *addr;

   and replace
   GIMPLE_OMP_ATOMIC_STORE (stored_val) with
   *addr = stored_val;

   This is the fallback of last resort and always succeeds, hence the
   unconditional `return true'.  */

static bool
expand_omp_atomic_mutex (basic_block load_bb, basic_block store_bb,
			 tree addr, tree loaded_val, tree stored_val)
{
  gimple_stmt_iterator si;
  gimple stmt;
  tree t;

  si = gsi_last_bb (load_bb);
  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);

  /* Emit the GOMP_atomic_start () runtime call ahead of the load.  */
  t = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
  t = build_call_expr (t, 0);
  force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);

  /* Replace GIMPLE_OMP_ATOMIC_LOAD with a plain load.  */
  stmt = gimple_build_assign (loaded_val, build_simple_mem_ref (addr));
  gsi_insert_before (&si, stmt, GSI_SAME_STMT);
  gsi_remove (&si, true);

  si = gsi_last_bb (store_bb);
  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);

  /* Replace GIMPLE_OMP_ATOMIC_STORE with a plain store.  ADDR is
     unshared because it is now used in two statements.  */
  stmt = gimple_build_assign (build_simple_mem_ref (unshare_expr (addr)),
			      stored_val);
  gsi_insert_before (&si, stmt, GSI_SAME_STMT);

  /* Emit the GOMP_atomic_end () runtime call after the store.  */
  t = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
  t = build_call_expr (t, 0);
  force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
  gsi_remove (&si, true);

  if (gimple_in_ssa_p (cfun))
    update_ssa (TODO_update_ssa_no_phi);
  return true;
}
7663
/* Expand an GIMPLE_OMP_ATOMIC statement.  We try to expand
   using expand_omp_atomic_fetch_op.  If it failed, we try to
   call expand_omp_atomic_pipeline, and if it fails too, the
   ultimate fallback is wrapping the operation in a mutex
   (expand_omp_atomic_mutex).  REGION is the atomic region built
   by build_omp_regions_1().  */

static void
expand_omp_atomic (struct omp_region *region)
{
  basic_block load_bb = region->entry, store_bb = region->exit;
  gimple load = last_stmt (load_bb), store = last_stmt (store_bb);
  tree loaded_val = gimple_omp_atomic_load_lhs (load);
  tree addr = gimple_omp_atomic_load_rhs (load);
  tree stored_val = gimple_omp_atomic_store_val (store);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  HOST_WIDE_INT index;

  /* Make sure the type is one of the supported sizes.  */
  /* INDEX becomes log2 of the byte size; 0..4 covers power-of-two
     sizes from 1 to 16 bytes.  exact_log2 yields -1 for non-powers
     of two, which falls through to the mutex path.  */
  index = tree_to_uhwi (TYPE_SIZE_UNIT (type));
  index = exact_log2 (index);
  if (index >= 0 && index <= 4)
    {
      unsigned int align = TYPE_ALIGN_UNIT (type);

      /* __sync builtins require strict data alignment.  */
      if (exact_log2 (align) >= index)
	{
	  /* Atomic load.  */
	  /* A read-only atomic is recognized by LOADED_VAL and
	     STORED_VAL being the same tree.  */
	  if (loaded_val == stored_val
	      && (GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
		  || GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT)
	      && GET_MODE_BITSIZE (TYPE_MODE (type)) <= BITS_PER_WORD
	      && expand_omp_atomic_load (load_bb, addr, loaded_val, index))
	    return;

	  /* Atomic store.  */
	  if ((GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
	       || GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT)
	      && GET_MODE_BITSIZE (TYPE_MODE (type)) <= BITS_PER_WORD
	      && store_bb == single_succ (load_bb)
	      && first_stmt (store_bb) == store
	      && expand_omp_atomic_store (load_bb, addr, loaded_val,
					  stored_val, index))
	    return;

	  /* When possible, use specialized atomic update functions.  */
	  if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
	      && store_bb == single_succ (load_bb)
	      && expand_omp_atomic_fetch_op (load_bb, addr,
					     loaded_val, stored_val, index))
	    return;

	  /* If we don't have specialized __sync builtins, try and implement
	     as a compare and swap loop.  */
	  if (expand_omp_atomic_pipeline (load_bb, store_bb, addr,
					  loaded_val, stored_val, index))
	    return;
	}
    }

  /* The ultimate fallback is wrapping the operation in a mutex.  */
  expand_omp_atomic_mutex (load_bb, store_bb, addr, loaded_val, stored_val);
}
7728
1e8e9920 7729
/* Expand the OpenMP target{, data, update} directive starting at REGION.
   For a GF_OMP_TARGET_KIND_REGION the body is outlined into its own
   child function and moved there; in all three kinds a call to the
   appropriate GOMP_target* runtime entry point is emitted in place of
   the directive.  */

static void
expand_omp_target (struct omp_region *region)
{
  basic_block entry_bb, exit_bb, new_bb;
  struct function *child_cfun = NULL;
  tree child_fn = NULL_TREE, block, t;
  gimple_stmt_iterator gsi;
  gimple entry_stmt, stmt;
  edge e;

  entry_stmt = last_stmt (region->entry);
  new_bb = region->entry;
  /* KIND distinguishes #pragma omp target (REGION), target data (DATA)
     and target update (UPDATE); only REGION has a child function.  */
  int kind = gimple_omp_target_kind (entry_stmt);
  if (kind == GF_OMP_TARGET_KIND_REGION)
    {
      child_fn = gimple_omp_target_child_fn (entry_stmt);
      child_cfun = DECL_STRUCT_FUNCTION (child_fn);
    }

  entry_bb = region->entry;
  exit_bb = region->exit;

  if (kind == GF_OMP_TARGET_KIND_REGION)
    {
      unsigned srcidx, dstidx, num;

      /* If the target region needs data sent from the parent
	 function, then the very first statement (except possible
	 tree profile counter updates) of the parallel body
	 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O.  Since
	 &.OMP_DATA_O is passed as an argument to the child function,
	 we need to replace it with the argument as seen by the child
	 function.

	 In most cases, this will end up being the identity assignment
	 .OMP_DATA_I = .OMP_DATA_I.  However, if the parallel body had
	 a function call that has been inlined, the original PARM_DECL
	 .OMP_DATA_I may have been converted into a different local
	 variable.  In which case, we need to keep the assignment.  */
      if (gimple_omp_target_data_arg (entry_stmt))
	{
	  basic_block entry_succ_bb = single_succ (entry_bb);
	  gimple_stmt_iterator gsi;
	  tree arg;
	  gimple tgtcopy_stmt = NULL;
	  tree sender
	    = TREE_VEC_ELT (gimple_omp_target_data_arg (entry_stmt), 0);

	  /* Scan forward for the assignment whose RHS takes the address
	     of SENDER; the loop asserts it must be found.  */
	  for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
	    {
	      gcc_assert (!gsi_end_p (gsi));
	      stmt = gsi_stmt (gsi);
	      if (gimple_code (stmt) != GIMPLE_ASSIGN)
		continue;

	      if (gimple_num_ops (stmt) == 2)
		{
		  tree arg = gimple_assign_rhs1 (stmt);

		  /* We're ignoring the subcode because we're
		     effectively doing a STRIP_NOPS.  */

		  if (TREE_CODE (arg) == ADDR_EXPR
		      && TREE_OPERAND (arg, 0) == sender)
		    {
		      tgtcopy_stmt = stmt;
		      break;
		    }
		}
	    }

	  gcc_assert (tgtcopy_stmt != NULL);
	  arg = DECL_ARGUMENTS (child_fn);

	  gcc_assert (gimple_assign_lhs (tgtcopy_stmt) == arg);
	  gsi_remove (&gsi, true);
	}

      /* Declare local variables needed in CHILD_CFUN.  */
      block = DECL_INITIAL (child_fn);
      BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
      /* The gimplifier could record temporaries in target block
	 rather than in containing function's local_decls chain,
	 which would mean cgraph missed finalizing them.  Do it now.  */
      for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == VAR_DECL
	    && TREE_STATIC (t)
	    && !DECL_EXTERNAL (t))
	  varpool_finalize_decl (t);
      DECL_SAVED_TREE (child_fn) = NULL;
      /* We'll create a CFG for child_fn, so no gimple body is needed.  */
      gimple_set_body (child_fn, NULL);
      TREE_USED (block) = 1;

      /* Reset DECL_CONTEXT on function arguments.  */
      for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
	DECL_CONTEXT (t) = child_fn;

      /* Split ENTRY_BB at GIMPLE_OMP_TARGET,
	 so that it can be moved to the child function.  */
      gsi = gsi_last_bb (entry_bb);
      stmt = gsi_stmt (gsi);
      gcc_assert (stmt && gimple_code (stmt) == GIMPLE_OMP_TARGET
		  && gimple_omp_target_kind (stmt)
		     == GF_OMP_TARGET_KIND_REGION);
      gsi_remove (&gsi, true);
      e = split_block (entry_bb, stmt);
      entry_bb = e->dest;
      single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

      /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR.  */
      if (exit_bb)
	{
	  gsi = gsi_last_bb (exit_bb);
	  gcc_assert (!gsi_end_p (gsi)
		      && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
	  stmt = gimple_build_return (NULL);
	  gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
	  gsi_remove (&gsi, true);
	}

      /* Move the target region into CHILD_CFUN.  */

      block = gimple_block (entry_stmt);

      new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
      if (exit_bb)
	single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
      /* When the OMP expansion process cannot guarantee an up-to-date
	 loop tree arrange for the child function to fixup loops.  */
      if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
	child_cfun->x_current_loops->state |= LOOPS_NEED_FIXUP;

      /* Remove non-local VAR_DECLs from child_cfun->local_decls list.  */
      num = vec_safe_length (child_cfun->local_decls);
      for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
	{
	  t = (*child_cfun->local_decls)[srcidx];
	  if (DECL_CONTEXT (t) == cfun->decl)
	    continue;
	  if (srcidx != dstidx)
	    (*child_cfun->local_decls)[dstidx] = t;
	  dstidx++;
	}
      if (dstidx != num)
	vec_safe_truncate (child_cfun->local_decls, dstidx);

      /* Inform the callgraph about the new function.  */
      DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
      cgraph_add_new_function (child_fn, true);

      /* Fix the callgraph edges for child_cfun.  Those for cfun will be
	 fixed in a following pass.  */
      push_cfun (child_cfun);
      rebuild_cgraph_edges ();

      /* Some EH regions might become dead, see PR34608.  If
	 pass_cleanup_cfg isn't the first pass to happen with the
	 new child, these dead EH edges might cause problems.
	 Clean them up now.  */
      if (flag_exceptions)
	{
	  basic_block bb;
	  bool changed = false;

	  FOR_EACH_BB (bb)
	    changed |= gimple_purge_dead_eh_edges (bb);
	  if (changed)
	    cleanup_tree_cfg ();
	}
      pop_cfun ();
    }

  /* Emit a library call to launch the target region, or do data
     transfers.  */
  tree t1, t2, t3, t4, device, cond, c, clauses;
  enum built_in_function start_ix;
  location_t clause_loc;

  clauses = gimple_omp_target_clauses (entry_stmt);

  if (kind == GF_OMP_TARGET_KIND_REGION)
    start_ix = BUILT_IN_GOMP_TARGET;
  else if (kind == GF_OMP_TARGET_KIND_DATA)
    start_ix = BUILT_IN_GOMP_TARGET_DATA;
  else
    start_ix = BUILT_IN_GOMP_TARGET_UPDATE;

  /* By default, the value of DEVICE is -1 (let runtime library choose)
     and there is no conditional.  */
  cond = NULL_TREE;
  device = build_int_cst (integer_type_node, -1);

  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = OMP_CLAUSE_IF_EXPR (c);

  c = find_omp_clause (clauses, OMP_CLAUSE_DEVICE);
  if (c)
    {
      device = OMP_CLAUSE_DEVICE_ID (c);
      clause_loc = OMP_CLAUSE_LOCATION (c);
    }
  else
    clause_loc = gimple_location (entry_stmt);

  /* Ensure 'device' is of the correct type.  */
  device = fold_convert_loc (clause_loc, integer_type_node, device);

  /* If we found the clause 'if (cond)', build
     (cond ? device : -2).  */
  if (cond)
    {
      cond = gimple_boolify (cond);

      basic_block cond_bb, then_bb, else_bb;
      edge e;
      tree tmp_var;

      /* Materialize the conditional as a diamond: COND_BB branches to
	 THEN_BB (device as requested) or ELSE_BB (device -2), both
	 falling through to NEW_BB.  */
      tmp_var = create_tmp_var (TREE_TYPE (device), NULL);
      if (kind != GF_OMP_TARGET_KIND_REGION)
	{
	  /* For data/update the directive statement itself is still at
	     the end of NEW_BB; split just before it.  */
	  gsi = gsi_last_bb (new_bb);
	  gsi_prev (&gsi);
	  e = split_block (new_bb, gsi_stmt (gsi));
	}
      else
	e = split_block (new_bb, NULL);
      cond_bb = e->src;
      new_bb = e->dest;
      remove_edge (e);

      then_bb = create_empty_bb (cond_bb);
      else_bb = create_empty_bb (then_bb);
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);

      stmt = gimple_build_cond_empty (cond);
      gsi = gsi_last_bb (cond_bb);
      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

      gsi = gsi_start_bb (then_bb);
      stmt = gimple_build_assign (tmp_var, device);
      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

      gsi = gsi_start_bb (else_bb);
      stmt = gimple_build_assign (tmp_var,
				  build_int_cst (integer_type_node, -2));
      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

      make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
      make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
      if (current_loops)
	{
	  add_bb_to_loop (then_bb, cond_bb->loop_father);
	  add_bb_to_loop (else_bb, cond_bb->loop_father);
	}
      make_edge (then_bb, new_bb, EDGE_FALLTHRU);
      make_edge (else_bb, new_bb, EDGE_FALLTHRU);

      device = tmp_var;
    }

  /* Build the runtime call arguments: T1 = number of mapped objects,
     T2/T3/T4 = addresses of the host-address, size and kind arrays
     (all zero when there is no data clause).  */
  gsi = gsi_last_bb (new_bb);
  t = gimple_omp_target_data_arg (entry_stmt);
  if (t == NULL)
    {
      t1 = size_zero_node;
      t2 = build_zero_cst (ptr_type_node);
      t3 = t2;
      t4 = t2;
    }
  else
    {
      t1 = TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (TREE_VEC_ELT (t, 1))));
      t1 = size_binop (PLUS_EXPR, t1, size_int (1));
      t2 = build_fold_addr_expr (TREE_VEC_ELT (t, 0));
      t3 = build_fold_addr_expr (TREE_VEC_ELT (t, 1));
      t4 = build_fold_addr_expr (TREE_VEC_ELT (t, 2));
    }

  gimple g;
  /* FIXME: This will be address of
     extern char __OPENMP_TARGET__[] __attribute__((visibility ("hidden")))
     symbol, as soon as the linker plugin is able to create it for us.  */
  tree openmp_target = build_zero_cst (ptr_type_node);
  if (kind == GF_OMP_TARGET_KIND_REGION)
    {
      tree fnaddr = build_fold_addr_expr (child_fn);
      g = gimple_build_call (builtin_decl_explicit (start_ix), 7,
			     device, fnaddr, openmp_target, t1, t2, t3, t4);
    }
  else
    g = gimple_build_call (builtin_decl_explicit (start_ix), 6,
			   device, openmp_target, t1, t2, t3, t4);
  gimple_set_location (g, gimple_location (entry_stmt));
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  /* For data/update the original GIMPLE_OMP_TARGET statement is still
     present and must be removed now; for a region it was removed when
     the body was outlined.  */
  if (kind != GF_OMP_TARGET_KIND_REGION)
    {
      g = gsi_stmt (gsi);
      gcc_assert (g && gimple_code (g) == GIMPLE_OMP_TARGET);
      gsi_remove (&gsi, true);
    }
  if (kind == GF_OMP_TARGET_KIND_DATA && region->exit)
    {
      gsi = gsi_last_bb (region->exit);
      g = gsi_stmt (gsi);
      gcc_assert (g && gimple_code (g) == GIMPLE_OMP_RETURN);
      gsi_remove (&gsi, true);
    }
}
8043
8044
/* Expand the parallel region tree rooted at REGION.  Expansion
   proceeds in depth-first order.  Innermost regions are expanded
   first.  This way, parallel regions that require a new function to
   be created (e.g., GIMPLE_OMP_PARALLEL) can be expanded without having any
   internal dependencies in their body.  Iterates over the sibling list
   REGION->next; recurses into REGION->inner.  */

static void
expand_omp (struct omp_region *region)
{
  while (region)
    {
      location_t saved_location;
      /* For combined constructs (e.g. parallel for), the statement of
	 the innermost nested region feeds the expansion of the outer
	 loop region.  */
      gimple inner_stmt = NULL;

      /* First, determine whether this is a combined parallel+workshare
	 region.  */
      if (region->type == GIMPLE_OMP_PARALLEL)
	determine_parallel_type (region);

      if (region->type == GIMPLE_OMP_FOR
	  && gimple_omp_for_combined_p (last_stmt (region->entry)))
	inner_stmt = last_stmt (region->inner->entry);

      /* Depth-first: expand children before this region.  */
      if (region->inner)
	expand_omp (region->inner);

      /* Point diagnostics at the directive being expanded, restoring
	 input_location afterwards.  */
      saved_location = input_location;
      if (gimple_has_location (last_stmt (region->entry)))
	input_location = gimple_location (last_stmt (region->entry));

      switch (region->type)
	{
	case GIMPLE_OMP_PARALLEL:
	case GIMPLE_OMP_TASK:
	  expand_omp_taskreg (region);
	  break;

	case GIMPLE_OMP_FOR:
	  expand_omp_for (region, inner_stmt);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  expand_omp_sections (region);
	  break;

	case GIMPLE_OMP_SECTION:
	  /* Individual omp sections are handled together with their
	     parent GIMPLE_OMP_SECTIONS region.  */
	  break;

	case GIMPLE_OMP_SINGLE:
	  expand_omp_single (region);
	  break;

	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_TASKGROUP:
	case GIMPLE_OMP_ORDERED:
	case GIMPLE_OMP_CRITICAL:
	case GIMPLE_OMP_TEAMS:
	  expand_omp_synch (region);
	  break;

	case GIMPLE_OMP_ATOMIC_LOAD:
	  expand_omp_atomic (region);
	  break;

	case GIMPLE_OMP_TARGET:
	  expand_omp_target (region);
	  break;

	default:
	  gcc_unreachable ();
	}

      input_location = saved_location;
      region = region->next;
    }
}
8123
8124
/* Helper for build_omp_regions.  Scan the dominator tree starting at
   block BB.  PARENT is the region that contains BB.  If SINGLE_TREE is
   true, the function ends once a single tree is built (otherwise, whole
   forest of OMP constructs may be built).  New regions are recorded via
   new_omp_region; return/continue/store statements close or annotate
   the current PARENT instead of opening a new region.  */

static void
build_omp_regions_1 (basic_block bb, struct omp_region *parent,
		     bool single_tree)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  basic_block son;

  /* Only the last statement of a block can be an OMP directive.  */
  gsi = gsi_last_bb (bb);
  if (!gsi_end_p (gsi) && is_gimple_omp (gsi_stmt (gsi)))
    {
      struct omp_region *region;
      enum gimple_code code;

      stmt = gsi_stmt (gsi);
      code = gimple_code (stmt);
      if (code == GIMPLE_OMP_RETURN)
	{
	  /* STMT is the return point out of region PARENT.  Mark it
	     as the exit point and make PARENT the immediately
	     enclosing region.  */
	  gcc_assert (parent);
	  region = parent;
	  region->exit = bb;
	  parent = parent->outer;
	}
      else if (code == GIMPLE_OMP_ATOMIC_STORE)
	{
	  /* GIMPLE_OMP_ATOMIC_STORE is analoguous to
	     GIMPLE_OMP_RETURN, but matches with
	     GIMPLE_OMP_ATOMIC_LOAD.  */
	  gcc_assert (parent);
	  gcc_assert (parent->type == GIMPLE_OMP_ATOMIC_LOAD);
	  region = parent;
	  region->exit = bb;
	  parent = parent->outer;
	}

      else if (code == GIMPLE_OMP_CONTINUE)
	{
	  /* Record the continue point of the enclosing region but do
	     not close it.  */
	  gcc_assert (parent);
	  parent->cont = bb;
	}
      else if (code == GIMPLE_OMP_SECTIONS_SWITCH)
	{
	  /* GIMPLE_OMP_SECTIONS_SWITCH is part of
	     GIMPLE_OMP_SECTIONS, and we do nothing for it.  */
	  ;
	}
      else if (code == GIMPLE_OMP_TARGET
	       && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_UPDATE)
	/* #pragma omp target update is a standalone directive: it opens
	   a region but does not become the parent of nested blocks.  */
	new_omp_region (bb, code, parent);
      else
	{
	  /* Otherwise, this directive becomes the parent for a new
	     region.  */
	  region = new_omp_region (bb, code, parent);
	  parent = region;
	}
    }

  /* With SINGLE_TREE, stop descending once the root region closed.  */
  if (single_tree && !parent)
    return;

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    build_omp_regions_1 (son, parent, single_tree);
}
8199
/* Builds the tree of OMP regions rooted at ROOT, storing it to
   root_omp_region.  The global root_omp_region must be empty on entry
   and is guaranteed non-empty on exit.  */

static void
build_omp_regions_root (basic_block root)
{
  gcc_assert (root_omp_region == NULL);
  build_omp_regions_1 (root, NULL, true);
  gcc_assert (root_omp_region != NULL);
}
8210
/* Expands omp construct (and its subconstructs) starting in HEAD.
   Builds a single region tree rooted at HEAD, dumps it when detailed
   dumping is enabled, then expands it and releases the region tree.  */

void
omp_expand_local (basic_block head)
{
  build_omp_regions_root (head);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nOMP region tree\n\n");
      dump_omp_region (dump_file, root_omp_region, 0);
      fprintf (dump_file, "\n");
    }

  remove_exit_barriers (root_omp_region);
  expand_omp (root_omp_region);

  free_omp_regions ();
}
773c5ba7 8229
/* Scan the CFG and build a tree of OMP regions.  Return the root of
   the OMP region tree (in the global root_omp_region, filled in by
   build_omp_regions_1; a whole forest may be built since SINGLE_TREE
   is false here).  */

static void
build_omp_regions (void)
{
  gcc_assert (root_omp_region == NULL);
  /* The region builder walks the dominator tree, so dominators must
     be up to date first.  */
  calculate_dominance_info (CDI_DOMINATORS);
  build_omp_regions_1 (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, false);
}
8240
/* Main entry point for expanding OMP-GIMPLE into runtime calls.
   Pass execute hook: builds the region forest for the whole function,
   expands it, and cleans up.  Always returns 0 (no extra TODOs).  */

static unsigned int
execute_expand_omp (void)
{
  build_omp_regions ();

  /* Nothing to do for functions without OMP directives.  */
  if (!root_omp_region)
    return 0;

  if (dump_file)
    {
      fprintf (dump_file, "\nOMP region tree\n\n");
      dump_omp_region (dump_file, root_omp_region, 0);
      fprintf (dump_file, "\n");
    }

  remove_exit_barriers (root_omp_region);

  expand_omp (root_omp_region);

  /* Expansion rewrites the CFG substantially; tidy it up.  */
  cleanup_tree_cfg ();

  free_omp_regions ();

  return 0;
}
8268
79acaae1 8269/* OMP expansion -- the default pass, run before creation of SSA form. */
8270
773c5ba7 8271static bool
8272gate_expand_omp (void)
8273{
f2697631 8274 return ((flag_openmp != 0 || flag_openmp_simd != 0
8275 || flag_enable_cilkplus != 0) && !seen_error ());
773c5ba7 8276}
8277
namespace {

/* Static metadata for the "ompexp" pass; field order follows the
   pass_data layout (positional aggregate initializer).  */
const pass_data pass_data_expand_omp =
{
  GIMPLE_PASS, /* type */
  "ompexp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper delegating to gate_expand_omp / execute_expand_omp.  */
class pass_expand_omp : public gimple_opt_pass
{
public:
  pass_expand_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_expand_omp, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_expand_omp (); }
  unsigned int execute () { return execute_expand_omp (); }

}; // class pass_expand_omp

} // anon namespace

/* Factory used by the pass manager; caller owns the returned pass.  */
gimple_opt_pass *
make_pass_expand_omp (gcc::context *ctxt)
{
  return new pass_expand_omp (ctxt);
}
773c5ba7 8315\f
8316/* Routines to lower OpenMP directives into OMP-GIMPLE. */
8317
/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  BODY is the statement
   sequence ending in that GIMPLE_OMP_RETURN; the cond/label pair is
   appended to it.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
{
  gimple omp_return = gimple_seq_last_stmt (*body);
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  /* A nowait construct has no implicit barrier, hence nothing to do.  */
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  if (ctx->outer
      && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
      && ctx->outer->cancellable)
    {
      /* LHS receives the barrier's "was cancelled" result; branch to
	 the parallel's cancel label when it is true.  */
      tree lhs = create_tmp_var (boolean_type_node, NULL);
      gimple_omp_return_set_lhs (omp_return, lhs);
      tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
      gimple g = gimple_build_cond (NE_EXPR, lhs, boolean_false_node,
				    ctx->outer->cancel_label, fallthru_label);
      gimple_seq_add_stmt (body, g);
      gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
    }
}
8343
75a70cf9 8344/* Lower the OpenMP sections directive in the current statement in GSI_P.
8345 CTX is the enclosing OMP context for the current statement. */
773c5ba7 8346
8347static void
75a70cf9 8348lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
773c5ba7 8349{
75a70cf9 8350 tree block, control;
8351 gimple_stmt_iterator tgsi;
75a70cf9 8352 gimple stmt, new_stmt, bind, t;
e3a19533 8353 gimple_seq ilist, dlist, olist, new_body;
dac18d1a 8354 struct gimplify_ctx gctx;
773c5ba7 8355
75a70cf9 8356 stmt = gsi_stmt (*gsi_p);
773c5ba7 8357
dac18d1a 8358 push_gimplify_context (&gctx);
773c5ba7 8359
8360 dlist = NULL;
8361 ilist = NULL;
75a70cf9 8362 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
bc7bff74 8363 &ilist, &dlist, ctx, NULL);
773c5ba7 8364
e3a19533 8365 new_body = gimple_omp_body (stmt);
8366 gimple_omp_set_body (stmt, NULL);
8367 tgsi = gsi_start (new_body);
8368 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
773c5ba7 8369 {
8370 omp_context *sctx;
75a70cf9 8371 gimple sec_start;
773c5ba7 8372
75a70cf9 8373 sec_start = gsi_stmt (tgsi);
773c5ba7 8374 sctx = maybe_lookup_ctx (sec_start);
8375 gcc_assert (sctx);
8376
e3a19533 8377 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8378 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8379 GSI_CONTINUE_LINKING);
75a70cf9 8380 gimple_omp_set_body (sec_start, NULL);
773c5ba7 8381
e3a19533 8382 if (gsi_one_before_end_p (tgsi))
773c5ba7 8383 {
75a70cf9 8384 gimple_seq l = NULL;
8385 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
773c5ba7 8386 &l, ctx);
e3a19533 8387 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
75a70cf9 8388 gimple_omp_section_set_last (sec_start);
773c5ba7 8389 }
48e1416a 8390
e3a19533 8391 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8392 GSI_CONTINUE_LINKING);
773c5ba7 8393 }
1e8e9920 8394
8395 block = make_node (BLOCK);
e3a19533 8396 bind = gimple_build_bind (NULL, new_body, block);
1e8e9920 8397
75a70cf9 8398 olist = NULL;
8399 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
773c5ba7 8400
1d22f541 8401 block = make_node (BLOCK);
75a70cf9 8402 new_stmt = gimple_build_bind (NULL, NULL, block);
e3a19533 8403 gsi_replace (gsi_p, new_stmt, true);
773c5ba7 8404
1d22f541 8405 pop_gimplify_context (new_stmt);
75a70cf9 8406 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8407 BLOCK_VARS (block) = gimple_bind_vars (bind);
1d22f541 8408 if (BLOCK_VARS (block))
8409 TREE_USED (block) = 1;
8410
75a70cf9 8411 new_body = NULL;
8412 gimple_seq_add_seq (&new_body, ilist);
8413 gimple_seq_add_stmt (&new_body, stmt);
8414 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8415 gimple_seq_add_stmt (&new_body, bind);
61e47ac8 8416
ac6e3339 8417 control = create_tmp_var (unsigned_type_node, ".section");
75a70cf9 8418 t = gimple_build_omp_continue (control, control);
8419 gimple_omp_sections_set_control (stmt, control);
8420 gimple_seq_add_stmt (&new_body, t);
61e47ac8 8421
75a70cf9 8422 gimple_seq_add_seq (&new_body, olist);
bc7bff74 8423 if (ctx->cancellable)
8424 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
75a70cf9 8425 gimple_seq_add_seq (&new_body, dlist);
773c5ba7 8426
75a70cf9 8427 new_body = maybe_catch_exception (new_body);
aade31a0 8428
75a70cf9 8429 t = gimple_build_omp_return
8430 (!!find_omp_clause (gimple_omp_sections_clauses (stmt),
8431 OMP_CLAUSE_NOWAIT));
8432 gimple_seq_add_stmt (&new_body, t);
bc7bff74 8433 maybe_add_implicit_barrier_cancel (ctx, &new_body);
61e47ac8 8434
75a70cf9 8435 gimple_bind_set_body (new_stmt, new_body);
1e8e9920 8436}
8437
8438
773c5ba7 8439/* A subroutine of lower_omp_single. Expand the simple form of
75a70cf9 8440 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
1e8e9920 8441
8442 if (GOMP_single_start ())
8443 BODY;
8444 [ GOMP_barrier (); ] -> unless 'nowait' is present.
773c5ba7 8445
8446 FIXME. It may be better to delay expanding the logic of this until
8447 pass_expand_omp. The expanded logic may make the job more difficult
8448 to a synchronization analysis pass. */
1e8e9920 8449
8450static void
75a70cf9 8451lower_omp_single_simple (gimple single_stmt, gimple_seq *pre_p)
1e8e9920 8452{
e60a6f7b 8453 location_t loc = gimple_location (single_stmt);
8454 tree tlabel = create_artificial_label (loc);
8455 tree flabel = create_artificial_label (loc);
75a70cf9 8456 gimple call, cond;
8457 tree lhs, decl;
8458
b9a16870 8459 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
75a70cf9 8460 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)), NULL);
8461 call = gimple_build_call (decl, 0);
8462 gimple_call_set_lhs (call, lhs);
8463 gimple_seq_add_stmt (pre_p, call);
8464
8465 cond = gimple_build_cond (EQ_EXPR, lhs,
389dd41b 8466 fold_convert_loc (loc, TREE_TYPE (lhs),
8467 boolean_true_node),
75a70cf9 8468 tlabel, flabel);
8469 gimple_seq_add_stmt (pre_p, cond);
8470 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8471 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8472 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
1e8e9920 8473}
8474
773c5ba7 8475
8476/* A subroutine of lower_omp_single. Expand the simple form of
75a70cf9 8477 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
1e8e9920 8478
8479 #pragma omp single copyprivate (a, b, c)
8480
8481 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8482
8483 {
8484 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8485 {
8486 BODY;
8487 copyout.a = a;
8488 copyout.b = b;
8489 copyout.c = c;
8490 GOMP_single_copy_end (&copyout);
8491 }
8492 else
8493 {
8494 a = copyout_p->a;
8495 b = copyout_p->b;
8496 c = copyout_p->c;
8497 }
8498 GOMP_barrier ();
8499 }
773c5ba7 8500
8501 FIXME. It may be better to delay expanding the logic of this until
8502 pass_expand_omp. The expanded logic may make the job more difficult
8503 to a synchronization analysis pass. */
1e8e9920 8504
8505static void
75a70cf9 8506lower_omp_single_copy (gimple single_stmt, gimple_seq *pre_p, omp_context *ctx)
1e8e9920 8507{
b9a16870 8508 tree ptr_type, t, l0, l1, l2, bfn_decl;
75a70cf9 8509 gimple_seq copyin_seq;
e60a6f7b 8510 location_t loc = gimple_location (single_stmt);
1e8e9920 8511
8512 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8513
8514 ptr_type = build_pointer_type (ctx->record_type);
8515 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8516
e60a6f7b 8517 l0 = create_artificial_label (loc);
8518 l1 = create_artificial_label (loc);
8519 l2 = create_artificial_label (loc);
1e8e9920 8520
b9a16870 8521 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8522 t = build_call_expr_loc (loc, bfn_decl, 0);
389dd41b 8523 t = fold_convert_loc (loc, ptr_type, t);
75a70cf9 8524 gimplify_assign (ctx->receiver_decl, t, pre_p);
1e8e9920 8525
8526 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8527 build_int_cst (ptr_type, 0));
8528 t = build3 (COND_EXPR, void_type_node, t,
8529 build_and_jump (&l0), build_and_jump (&l1));
8530 gimplify_and_add (t, pre_p);
8531
75a70cf9 8532 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
1e8e9920 8533
75a70cf9 8534 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
1e8e9920 8535
8536 copyin_seq = NULL;
75a70cf9 8537 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
1e8e9920 8538 &copyin_seq, ctx);
8539
389dd41b 8540 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
b9a16870 8541 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8542 t = build_call_expr_loc (loc, bfn_decl, 1, t);
1e8e9920 8543 gimplify_and_add (t, pre_p);
8544
8545 t = build_and_jump (&l2);
8546 gimplify_and_add (t, pre_p);
8547
75a70cf9 8548 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
1e8e9920 8549
75a70cf9 8550 gimple_seq_add_seq (pre_p, copyin_seq);
1e8e9920 8551
75a70cf9 8552 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
1e8e9920 8553}
8554
773c5ba7 8555
1e8e9920 8556/* Expand code for an OpenMP single directive. */
8557
8558static void
75a70cf9 8559lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 8560{
75a70cf9 8561 tree block;
8562 gimple t, bind, single_stmt = gsi_stmt (*gsi_p);
bc7bff74 8563 gimple_seq bind_body, bind_body_tail = NULL, dlist;
dac18d1a 8564 struct gimplify_ctx gctx;
1e8e9920 8565
dac18d1a 8566 push_gimplify_context (&gctx);
1e8e9920 8567
e3a19533 8568 block = make_node (BLOCK);
8569 bind = gimple_build_bind (NULL, NULL, block);
8570 gsi_replace (gsi_p, bind, true);
75a70cf9 8571 bind_body = NULL;
e3a19533 8572 dlist = NULL;
75a70cf9 8573 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
bc7bff74 8574 &bind_body, &dlist, ctx, NULL);
e3a19533 8575 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
1e8e9920 8576
75a70cf9 8577 gimple_seq_add_stmt (&bind_body, single_stmt);
1e8e9920 8578
8579 if (ctx->record_type)
75a70cf9 8580 lower_omp_single_copy (single_stmt, &bind_body, ctx);
1e8e9920 8581 else
75a70cf9 8582 lower_omp_single_simple (single_stmt, &bind_body);
8583
8584 gimple_omp_set_body (single_stmt, NULL);
1e8e9920 8585
75a70cf9 8586 gimple_seq_add_seq (&bind_body, dlist);
61e47ac8 8587
75a70cf9 8588 bind_body = maybe_catch_exception (bind_body);
61e47ac8 8589
48e1416a 8590 t = gimple_build_omp_return
75a70cf9 8591 (!!find_omp_clause (gimple_omp_single_clauses (single_stmt),
8592 OMP_CLAUSE_NOWAIT));
bc7bff74 8593 gimple_seq_add_stmt (&bind_body_tail, t);
8594 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
8595 if (ctx->record_type)
8596 {
8597 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8598 tree clobber = build_constructor (ctx->record_type, NULL);
8599 TREE_THIS_VOLATILE (clobber) = 1;
8600 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8601 clobber), GSI_SAME_STMT);
8602 }
8603 gimple_seq_add_seq (&bind_body, bind_body_tail);
e3a19533 8604 gimple_bind_set_body (bind, bind_body);
61e47ac8 8605
1e8e9920 8606 pop_gimplify_context (bind);
773c5ba7 8607
75a70cf9 8608 gimple_bind_append_vars (bind, ctx->block_vars);
8609 BLOCK_VARS (block) = ctx->block_vars;
1d22f541 8610 if (BLOCK_VARS (block))
8611 TREE_USED (block) = 1;
1e8e9920 8612}
8613
773c5ba7 8614
1e8e9920 8615/* Expand code for an OpenMP master directive. */
8616
8617static void
75a70cf9 8618lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 8619{
b9a16870 8620 tree block, lab = NULL, x, bfn_decl;
75a70cf9 8621 gimple stmt = gsi_stmt (*gsi_p), bind;
389dd41b 8622 location_t loc = gimple_location (stmt);
75a70cf9 8623 gimple_seq tseq;
dac18d1a 8624 struct gimplify_ctx gctx;
1e8e9920 8625
dac18d1a 8626 push_gimplify_context (&gctx);
1e8e9920 8627
8628 block = make_node (BLOCK);
e3a19533 8629 bind = gimple_build_bind (NULL, NULL, block);
8630 gsi_replace (gsi_p, bind, true);
8631 gimple_bind_add_stmt (bind, stmt);
61e47ac8 8632
b9a16870 8633 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8634 x = build_call_expr_loc (loc, bfn_decl, 0);
1e8e9920 8635 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8636 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
75a70cf9 8637 tseq = NULL;
8638 gimplify_and_add (x, &tseq);
8639 gimple_bind_add_seq (bind, tseq);
1e8e9920 8640
e3a19533 8641 lower_omp (gimple_omp_body_ptr (stmt), ctx);
75a70cf9 8642 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8643 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8644 gimple_omp_set_body (stmt, NULL);
1e8e9920 8645
75a70cf9 8646 gimple_bind_add_stmt (bind, gimple_build_label (lab));
61e47ac8 8647
75a70cf9 8648 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
61e47ac8 8649
1e8e9920 8650 pop_gimplify_context (bind);
773c5ba7 8651
75a70cf9 8652 gimple_bind_append_vars (bind, ctx->block_vars);
8653 BLOCK_VARS (block) = ctx->block_vars;
1e8e9920 8654}
8655
773c5ba7 8656
bc7bff74 8657/* Expand code for an OpenMP taskgroup directive. */
8658
8659static void
8660lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8661{
8662 gimple stmt = gsi_stmt (*gsi_p), bind, x;
8663 tree block = make_node (BLOCK);
8664
8665 bind = gimple_build_bind (NULL, NULL, block);
8666 gsi_replace (gsi_p, bind, true);
8667 gimple_bind_add_stmt (bind, stmt);
8668
8669 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8670 0);
8671 gimple_bind_add_stmt (bind, x);
8672
8673 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8674 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8675 gimple_omp_set_body (stmt, NULL);
8676
8677 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8678
8679 gimple_bind_append_vars (bind, ctx->block_vars);
8680 BLOCK_VARS (block) = ctx->block_vars;
8681}
8682
8683
1e8e9920 8684/* Expand code for an OpenMP ordered directive. */
8685
8686static void
75a70cf9 8687lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 8688{
75a70cf9 8689 tree block;
8690 gimple stmt = gsi_stmt (*gsi_p), bind, x;
dac18d1a 8691 struct gimplify_ctx gctx;
1e8e9920 8692
dac18d1a 8693 push_gimplify_context (&gctx);
1e8e9920 8694
8695 block = make_node (BLOCK);
e3a19533 8696 bind = gimple_build_bind (NULL, NULL, block);
8697 gsi_replace (gsi_p, bind, true);
8698 gimple_bind_add_stmt (bind, stmt);
61e47ac8 8699
b9a16870 8700 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8701 0);
75a70cf9 8702 gimple_bind_add_stmt (bind, x);
1e8e9920 8703
e3a19533 8704 lower_omp (gimple_omp_body_ptr (stmt), ctx);
75a70cf9 8705 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8706 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8707 gimple_omp_set_body (stmt, NULL);
1e8e9920 8708
b9a16870 8709 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END), 0);
75a70cf9 8710 gimple_bind_add_stmt (bind, x);
61e47ac8 8711
75a70cf9 8712 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
61e47ac8 8713
1e8e9920 8714 pop_gimplify_context (bind);
773c5ba7 8715
75a70cf9 8716 gimple_bind_append_vars (bind, ctx->block_vars);
8717 BLOCK_VARS (block) = gimple_bind_vars (bind);
1e8e9920 8718}
8719
1e8e9920 8720
75a70cf9 8721/* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
1e8e9920 8722 substitution of a couple of function calls. But in the NAMED case,
8723 requires that languages coordinate a symbol name. It is therefore
8724 best put here in common code. */
8725
8726static GTY((param1_is (tree), param2_is (tree)))
8727 splay_tree critical_name_mutexes;
8728
8729static void
75a70cf9 8730lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 8731{
75a70cf9 8732 tree block;
8733 tree name, lock, unlock;
8734 gimple stmt = gsi_stmt (*gsi_p), bind;
389dd41b 8735 location_t loc = gimple_location (stmt);
75a70cf9 8736 gimple_seq tbody;
dac18d1a 8737 struct gimplify_ctx gctx;
1e8e9920 8738
75a70cf9 8739 name = gimple_omp_critical_name (stmt);
1e8e9920 8740 if (name)
8741 {
c2f47e15 8742 tree decl;
1e8e9920 8743 splay_tree_node n;
8744
8745 if (!critical_name_mutexes)
8746 critical_name_mutexes
ba72912a 8747 = splay_tree_new_ggc (splay_tree_compare_pointers,
8748 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_s,
8749 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_node_s);
1e8e9920 8750
8751 n = splay_tree_lookup (critical_name_mutexes, (splay_tree_key) name);
8752 if (n == NULL)
8753 {
8754 char *new_str;
8755
8756 decl = create_tmp_var_raw (ptr_type_node, NULL);
8757
8758 new_str = ACONCAT ((".gomp_critical_user_",
8759 IDENTIFIER_POINTER (name), NULL));
8760 DECL_NAME (decl) = get_identifier (new_str);
8761 TREE_PUBLIC (decl) = 1;
8762 TREE_STATIC (decl) = 1;
8763 DECL_COMMON (decl) = 1;
8764 DECL_ARTIFICIAL (decl) = 1;
8765 DECL_IGNORED_P (decl) = 1;
1d416bd7 8766 varpool_finalize_decl (decl);
1e8e9920 8767
8768 splay_tree_insert (critical_name_mutexes, (splay_tree_key) name,
8769 (splay_tree_value) decl);
8770 }
8771 else
8772 decl = (tree) n->value;
8773
b9a16870 8774 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
389dd41b 8775 lock = build_call_expr_loc (loc, lock, 1, build_fold_addr_expr_loc (loc, decl));
1e8e9920 8776
b9a16870 8777 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
389dd41b 8778 unlock = build_call_expr_loc (loc, unlock, 1,
8779 build_fold_addr_expr_loc (loc, decl));
1e8e9920 8780 }
8781 else
8782 {
b9a16870 8783 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
389dd41b 8784 lock = build_call_expr_loc (loc, lock, 0);
1e8e9920 8785
b9a16870 8786 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
389dd41b 8787 unlock = build_call_expr_loc (loc, unlock, 0);
1e8e9920 8788 }
8789
dac18d1a 8790 push_gimplify_context (&gctx);
1e8e9920 8791
8792 block = make_node (BLOCK);
e3a19533 8793 bind = gimple_build_bind (NULL, NULL, block);
8794 gsi_replace (gsi_p, bind, true);
8795 gimple_bind_add_stmt (bind, stmt);
61e47ac8 8796
75a70cf9 8797 tbody = gimple_bind_body (bind);
8798 gimplify_and_add (lock, &tbody);
8799 gimple_bind_set_body (bind, tbody);
1e8e9920 8800
e3a19533 8801 lower_omp (gimple_omp_body_ptr (stmt), ctx);
75a70cf9 8802 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8803 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8804 gimple_omp_set_body (stmt, NULL);
1e8e9920 8805
75a70cf9 8806 tbody = gimple_bind_body (bind);
8807 gimplify_and_add (unlock, &tbody);
8808 gimple_bind_set_body (bind, tbody);
61e47ac8 8809
75a70cf9 8810 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
1e8e9920 8811
8812 pop_gimplify_context (bind);
75a70cf9 8813 gimple_bind_append_vars (bind, ctx->block_vars);
8814 BLOCK_VARS (block) = gimple_bind_vars (bind);
773c5ba7 8815}
8816
8817
8818/* A subroutine of lower_omp_for. Generate code to emit the predicate
8819 for a lastprivate clause. Given a loop control predicate of (V
8820 cond N2), we gate the clause on (!(V cond N2)). The lowered form
1e4afe3c 8821 is appended to *DLIST, iterator initialization is appended to
8822 *BODY_P. */
773c5ba7 8823
8824static void
75a70cf9 8825lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
8826 gimple_seq *dlist, struct omp_context *ctx)
773c5ba7 8827{
75a70cf9 8828 tree clauses, cond, vinit;
773c5ba7 8829 enum tree_code cond_code;
75a70cf9 8830 gimple_seq stmts;
48e1416a 8831
fd6481cf 8832 cond_code = fd->loop.cond_code;
773c5ba7 8833 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
8834
8835 /* When possible, use a strict equality expression. This can let VRP
8836 type optimizations deduce the value and remove a copy. */
35ec552a 8837 if (tree_fits_shwi_p (fd->loop.step))
773c5ba7 8838 {
8c53c46c 8839 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
773c5ba7 8840 if (step == 1 || step == -1)
8841 cond_code = EQ_EXPR;
8842 }
8843
fd6481cf 8844 cond = build2 (cond_code, boolean_type_node, fd->loop.v, fd->loop.n2);
773c5ba7 8845
75a70cf9 8846 clauses = gimple_omp_for_clauses (fd->for_stmt);
1e4afe3c 8847 stmts = NULL;
8848 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
75a70cf9 8849 if (!gimple_seq_empty_p (stmts))
1e4afe3c 8850 {
75a70cf9 8851 gimple_seq_add_seq (&stmts, *dlist);
fd6481cf 8852 *dlist = stmts;
1e4afe3c 8853
8854 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
fd6481cf 8855 vinit = fd->loop.n1;
1e4afe3c 8856 if (cond_code == EQ_EXPR
35ec552a 8857 && tree_fits_shwi_p (fd->loop.n2)
fd6481cf 8858 && ! integer_zerop (fd->loop.n2))
8859 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
3d483a94 8860 else
8861 vinit = unshare_expr (vinit);
1e4afe3c 8862
8863 /* Initialize the iterator variable, so that threads that don't execute
8864 any iterations don't execute the lastprivate clauses by accident. */
75a70cf9 8865 gimplify_assign (fd->loop.v, vinit, body_p);
1e4afe3c 8866 }
773c5ba7 8867}
8868
8869
8870/* Lower code for an OpenMP loop directive. */
8871
8872static void
75a70cf9 8873lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
773c5ba7 8874{
75a70cf9 8875 tree *rhs_p, block;
bc7bff74 8876 struct omp_for_data fd, *fdp = NULL;
75a70cf9 8877 gimple stmt = gsi_stmt (*gsi_p), new_stmt;
f018d957 8878 gimple_seq omp_for_body, body, dlist;
75a70cf9 8879 size_t i;
dac18d1a 8880 struct gimplify_ctx gctx;
773c5ba7 8881
dac18d1a 8882 push_gimplify_context (&gctx);
773c5ba7 8883
e3a19533 8884 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
773c5ba7 8885
1d22f541 8886 block = make_node (BLOCK);
75a70cf9 8887 new_stmt = gimple_build_bind (NULL, NULL, block);
e3a19533 8888 /* Replace at gsi right away, so that 'stmt' is no member
8889 of a sequence anymore as we're going to add to to a different
8890 one below. */
8891 gsi_replace (gsi_p, new_stmt, true);
1d22f541 8892
773c5ba7 8893 /* Move declaration of temporaries in the loop body before we make
8894 it go away. */
75a70cf9 8895 omp_for_body = gimple_omp_body (stmt);
8896 if (!gimple_seq_empty_p (omp_for_body)
8897 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
8898 {
8899 tree vars = gimple_bind_vars (gimple_seq_first_stmt (omp_for_body));
8900 gimple_bind_append_vars (new_stmt, vars);
8901 }
773c5ba7 8902
bc7bff74 8903 if (gimple_omp_for_combined_into_p (stmt))
8904 {
8905 extract_omp_for_data (stmt, &fd, NULL);
8906 fdp = &fd;
8907
8908 /* We need two temporaries with fd.loop.v type (istart/iend)
8909 and then (fd.collapse - 1) temporaries with the same
8910 type for count2 ... countN-1 vars if not constant. */
8911 size_t count = 2;
8912 tree type = fd.iter_type;
8913 if (fd.collapse > 1
8914 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
8915 count += fd.collapse - 1;
8916 bool parallel_for = gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR;
8917 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
8918 tree clauses = *pc;
8919 if (parallel_for)
8920 outerc
8921 = find_omp_clause (gimple_omp_parallel_clauses (ctx->outer->stmt),
8922 OMP_CLAUSE__LOOPTEMP_);
8923 for (i = 0; i < count; i++)
8924 {
8925 tree temp;
8926 if (parallel_for)
8927 {
8928 gcc_assert (outerc);
8929 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
8930 outerc = find_omp_clause (OMP_CLAUSE_CHAIN (outerc),
8931 OMP_CLAUSE__LOOPTEMP_);
8932 }
8933 else
8934 temp = create_tmp_var (type, NULL);
8935 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
8936 OMP_CLAUSE_DECL (*pc) = temp;
8937 pc = &OMP_CLAUSE_CHAIN (*pc);
8938 }
8939 *pc = clauses;
8940 }
8941
75a70cf9 8942 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
773c5ba7 8943 dlist = NULL;
75a70cf9 8944 body = NULL;
bc7bff74 8945 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
8946 fdp);
75a70cf9 8947 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
773c5ba7 8948
3d483a94 8949 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8950
773c5ba7 8951 /* Lower the header expressions. At this point, we can assume that
8952 the header is of the form:
8953
8954 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
8955
8956 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
8957 using the .omp_data_s mapping, if needed. */
75a70cf9 8958 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
fd6481cf 8959 {
75a70cf9 8960 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
fd6481cf 8961 if (!is_gimple_min_invariant (*rhs_p))
75a70cf9 8962 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
fd6481cf 8963
75a70cf9 8964 rhs_p = gimple_omp_for_final_ptr (stmt, i);
fd6481cf 8965 if (!is_gimple_min_invariant (*rhs_p))
75a70cf9 8966 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
fd6481cf 8967
75a70cf9 8968 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
fd6481cf 8969 if (!is_gimple_min_invariant (*rhs_p))
75a70cf9 8970 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
fd6481cf 8971 }
773c5ba7 8972
8973 /* Once lowered, extract the bounds and clauses. */
fd6481cf 8974 extract_omp_for_data (stmt, &fd, NULL);
773c5ba7 8975
75a70cf9 8976 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
773c5ba7 8977
75a70cf9 8978 gimple_seq_add_stmt (&body, stmt);
8979 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
61e47ac8 8980
75a70cf9 8981 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
8982 fd.loop.v));
61e47ac8 8983
773c5ba7 8984 /* After the loop, add exit clauses. */
75a70cf9 8985 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
bc7bff74 8986
8987 if (ctx->cancellable)
8988 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
8989
75a70cf9 8990 gimple_seq_add_seq (&body, dlist);
773c5ba7 8991
75a70cf9 8992 body = maybe_catch_exception (body);
aade31a0 8993
61e47ac8 8994 /* Region exit marker goes at the end of the loop body. */
75a70cf9 8995 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
bc7bff74 8996 maybe_add_implicit_barrier_cancel (ctx, &body);
1d22f541 8997 pop_gimplify_context (new_stmt);
75a70cf9 8998
8999 gimple_bind_append_vars (new_stmt, ctx->block_vars);
9000 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
1d22f541 9001 if (BLOCK_VARS (block))
9002 TREE_USED (block) = 1;
773c5ba7 9003
75a70cf9 9004 gimple_bind_set_body (new_stmt, body);
9005 gimple_omp_set_body (stmt, NULL);
9006 gimple_omp_for_set_pre_body (stmt, NULL);
1e8e9920 9007}
9008
48e1416a 9009/* Callback for walk_stmts. Check if the current statement only contains
75a70cf9 9010 GIMPLE_OMP_FOR or GIMPLE_OMP_PARALLEL. */
de7ef844 9011
9012static tree
75a70cf9 9013check_combined_parallel (gimple_stmt_iterator *gsi_p,
9014 bool *handled_ops_p,
9015 struct walk_stmt_info *wi)
de7ef844 9016{
4077bf7a 9017 int *info = (int *) wi->info;
75a70cf9 9018 gimple stmt = gsi_stmt (*gsi_p);
de7ef844 9019
75a70cf9 9020 *handled_ops_p = true;
9021 switch (gimple_code (stmt))
de7ef844 9022 {
75a70cf9 9023 WALK_SUBSTMTS;
9024
9025 case GIMPLE_OMP_FOR:
9026 case GIMPLE_OMP_SECTIONS:
de7ef844 9027 *info = *info == 0 ? 1 : -1;
9028 break;
9029 default:
9030 *info = -1;
9031 break;
9032 }
9033 return NULL;
9034}
773c5ba7 9035
fd6481cf 9036struct omp_taskcopy_context
9037{
9038 /* This field must be at the beginning, as we do "inheritance": Some
9039 callback functions for tree-inline.c (e.g., omp_copy_decl)
9040 receive a copy_body_data pointer that is up-casted to an
9041 omp_context pointer. */
9042 copy_body_data cb;
9043 omp_context *ctx;
9044};
9045
9046static tree
9047task_copyfn_copy_decl (tree var, copy_body_data *cb)
9048{
9049 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
9050
9051 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
9052 return create_tmp_var (TREE_TYPE (var), NULL);
9053
9054 return var;
9055}
9056
9057static tree
9058task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
9059{
9060 tree name, new_fields = NULL, type, f;
9061
9062 type = lang_hooks.types.make_type (RECORD_TYPE);
9063 name = DECL_NAME (TYPE_NAME (orig_type));
e60a6f7b 9064 name = build_decl (gimple_location (tcctx->ctx->stmt),
9065 TYPE_DECL, name, type);
fd6481cf 9066 TYPE_NAME (type) = name;
9067
9068 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
9069 {
9070 tree new_f = copy_node (f);
9071 DECL_CONTEXT (new_f) = type;
9072 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
9073 TREE_CHAIN (new_f) = new_fields;
75a70cf9 9074 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9075 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9076 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
9077 &tcctx->cb, NULL);
fd6481cf 9078 new_fields = new_f;
9079 *pointer_map_insert (tcctx->cb.decl_map, f) = new_f;
9080 }
9081 TYPE_FIELDS (type) = nreverse (new_fields);
9082 layout_type (type);
9083 return type;
9084}
9085
9086/* Create task copyfn. */
9087
9088static void
75a70cf9 9089create_task_copyfn (gimple task_stmt, omp_context *ctx)
fd6481cf 9090{
9091 struct function *child_cfun;
9092 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
9093 tree record_type, srecord_type, bind, list;
9094 bool record_needs_remap = false, srecord_needs_remap = false;
9095 splay_tree_node n;
9096 struct omp_taskcopy_context tcctx;
dac18d1a 9097 struct gimplify_ctx gctx;
389dd41b 9098 location_t loc = gimple_location (task_stmt);
fd6481cf 9099
75a70cf9 9100 child_fn = gimple_omp_task_copy_fn (task_stmt);
fd6481cf 9101 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
9102 gcc_assert (child_cfun->cfg == NULL);
fd6481cf 9103 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
9104
9105 /* Reset DECL_CONTEXT on function arguments. */
1767a056 9106 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
fd6481cf 9107 DECL_CONTEXT (t) = child_fn;
9108
9109 /* Populate the function. */
dac18d1a 9110 push_gimplify_context (&gctx);
9078126c 9111 push_cfun (child_cfun);
fd6481cf 9112
9113 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
9114 TREE_SIDE_EFFECTS (bind) = 1;
9115 list = NULL;
9116 DECL_SAVED_TREE (child_fn) = bind;
75a70cf9 9117 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
fd6481cf 9118
9119 /* Remap src and dst argument types if needed. */
9120 record_type = ctx->record_type;
9121 srecord_type = ctx->srecord_type;
1767a056 9122 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
fd6481cf 9123 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9124 {
9125 record_needs_remap = true;
9126 break;
9127 }
1767a056 9128 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
fd6481cf 9129 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9130 {
9131 srecord_needs_remap = true;
9132 break;
9133 }
9134
9135 if (record_needs_remap || srecord_needs_remap)
9136 {
9137 memset (&tcctx, '\0', sizeof (tcctx));
9138 tcctx.cb.src_fn = ctx->cb.src_fn;
9139 tcctx.cb.dst_fn = child_fn;
53f79206 9140 tcctx.cb.src_node = cgraph_get_node (tcctx.cb.src_fn);
9141 gcc_checking_assert (tcctx.cb.src_node);
fd6481cf 9142 tcctx.cb.dst_node = tcctx.cb.src_node;
9143 tcctx.cb.src_cfun = ctx->cb.src_cfun;
9144 tcctx.cb.copy_decl = task_copyfn_copy_decl;
e38def9c 9145 tcctx.cb.eh_lp_nr = 0;
fd6481cf 9146 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
9147 tcctx.cb.decl_map = pointer_map_create ();
9148 tcctx.ctx = ctx;
9149
9150 if (record_needs_remap)
9151 record_type = task_copyfn_remap_type (&tcctx, record_type);
9152 if (srecord_needs_remap)
9153 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
9154 }
9155 else
9156 tcctx.cb.decl_map = NULL;
9157
fd6481cf 9158 arg = DECL_ARGUMENTS (child_fn);
9159 TREE_TYPE (arg) = build_pointer_type (record_type);
1767a056 9160 sarg = DECL_CHAIN (arg);
fd6481cf 9161 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
9162
9163 /* First pass: initialize temporaries used in record_type and srecord_type
9164 sizes and field offsets. */
9165 if (tcctx.cb.decl_map)
75a70cf9 9166 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
fd6481cf 9167 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9168 {
9169 tree *p;
9170
9171 decl = OMP_CLAUSE_DECL (c);
9172 p = (tree *) pointer_map_contains (tcctx.cb.decl_map, decl);
9173 if (p == NULL)
9174 continue;
9175 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9176 sf = (tree) n->value;
9177 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
182cf5a9 9178 src = build_simple_mem_ref_loc (loc, sarg);
445d06b6 9179 src = omp_build_component_ref (src, sf);
75a70cf9 9180 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
fd6481cf 9181 append_to_statement_list (t, &list);
9182 }
9183
9184 /* Second pass: copy shared var pointers and copy construct non-VLA
9185 firstprivate vars. */
75a70cf9 9186 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
fd6481cf 9187 switch (OMP_CLAUSE_CODE (c))
9188 {
9189 case OMP_CLAUSE_SHARED:
9190 decl = OMP_CLAUSE_DECL (c);
9191 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9192 if (n == NULL)
9193 break;
9194 f = (tree) n->value;
9195 if (tcctx.cb.decl_map)
9196 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9197 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9198 sf = (tree) n->value;
9199 if (tcctx.cb.decl_map)
9200 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
182cf5a9 9201 src = build_simple_mem_ref_loc (loc, sarg);
445d06b6 9202 src = omp_build_component_ref (src, sf);
182cf5a9 9203 dst = build_simple_mem_ref_loc (loc, arg);
445d06b6 9204 dst = omp_build_component_ref (dst, f);
75a70cf9 9205 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
fd6481cf 9206 append_to_statement_list (t, &list);
9207 break;
9208 case OMP_CLAUSE_FIRSTPRIVATE:
9209 decl = OMP_CLAUSE_DECL (c);
9210 if (is_variable_sized (decl))
9211 break;
9212 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9213 if (n == NULL)
9214 break;
9215 f = (tree) n->value;
9216 if (tcctx.cb.decl_map)
9217 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9218 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9219 if (n != NULL)
9220 {
9221 sf = (tree) n->value;
9222 if (tcctx.cb.decl_map)
9223 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
182cf5a9 9224 src = build_simple_mem_ref_loc (loc, sarg);
445d06b6 9225 src = omp_build_component_ref (src, sf);
fd6481cf 9226 if (use_pointer_for_field (decl, NULL) || is_reference (decl))
182cf5a9 9227 src = build_simple_mem_ref_loc (loc, src);
fd6481cf 9228 }
9229 else
9230 src = decl;
182cf5a9 9231 dst = build_simple_mem_ref_loc (loc, arg);
445d06b6 9232 dst = omp_build_component_ref (dst, f);
fd6481cf 9233 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9234 append_to_statement_list (t, &list);
9235 break;
9236 case OMP_CLAUSE_PRIVATE:
9237 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
9238 break;
9239 decl = OMP_CLAUSE_DECL (c);
9240 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9241 f = (tree) n->value;
9242 if (tcctx.cb.decl_map)
9243 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9244 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9245 if (n != NULL)
9246 {
9247 sf = (tree) n->value;
9248 if (tcctx.cb.decl_map)
9249 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
182cf5a9 9250 src = build_simple_mem_ref_loc (loc, sarg);
445d06b6 9251 src = omp_build_component_ref (src, sf);
fd6481cf 9252 if (use_pointer_for_field (decl, NULL))
182cf5a9 9253 src = build_simple_mem_ref_loc (loc, src);
fd6481cf 9254 }
9255 else
9256 src = decl;
182cf5a9 9257 dst = build_simple_mem_ref_loc (loc, arg);
445d06b6 9258 dst = omp_build_component_ref (dst, f);
75a70cf9 9259 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
fd6481cf 9260 append_to_statement_list (t, &list);
9261 break;
9262 default:
9263 break;
9264 }
9265
9266 /* Last pass: handle VLA firstprivates. */
9267 if (tcctx.cb.decl_map)
75a70cf9 9268 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
fd6481cf 9269 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9270 {
9271 tree ind, ptr, df;
9272
9273 decl = OMP_CLAUSE_DECL (c);
9274 if (!is_variable_sized (decl))
9275 continue;
9276 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9277 if (n == NULL)
9278 continue;
9279 f = (tree) n->value;
9280 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9281 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
9282 ind = DECL_VALUE_EXPR (decl);
9283 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
9284 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
9285 n = splay_tree_lookup (ctx->sfield_map,
9286 (splay_tree_key) TREE_OPERAND (ind, 0));
9287 sf = (tree) n->value;
9288 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
182cf5a9 9289 src = build_simple_mem_ref_loc (loc, sarg);
445d06b6 9290 src = omp_build_component_ref (src, sf);
182cf5a9 9291 src = build_simple_mem_ref_loc (loc, src);
9292 dst = build_simple_mem_ref_loc (loc, arg);
445d06b6 9293 dst = omp_build_component_ref (dst, f);
fd6481cf 9294 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9295 append_to_statement_list (t, &list);
9296 n = splay_tree_lookup (ctx->field_map,
9297 (splay_tree_key) TREE_OPERAND (ind, 0));
9298 df = (tree) n->value;
9299 df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
182cf5a9 9300 ptr = build_simple_mem_ref_loc (loc, arg);
445d06b6 9301 ptr = omp_build_component_ref (ptr, df);
75a70cf9 9302 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
389dd41b 9303 build_fold_addr_expr_loc (loc, dst));
fd6481cf 9304 append_to_statement_list (t, &list);
9305 }
9306
9307 t = build1 (RETURN_EXPR, void_type_node, NULL);
9308 append_to_statement_list (t, &list);
9309
9310 if (tcctx.cb.decl_map)
9311 pointer_map_destroy (tcctx.cb.decl_map);
9312 pop_gimplify_context (NULL);
9313 BIND_EXPR_BODY (bind) = list;
9314 pop_cfun ();
fd6481cf 9315}
9316
bc7bff74 9317static void
9318lower_depend_clauses (gimple stmt, gimple_seq *iseq, gimple_seq *oseq)
9319{
9320 tree c, clauses;
9321 gimple g;
9322 size_t n_in = 0, n_out = 0, idx = 2, i;
9323
9324 clauses = find_omp_clause (gimple_omp_task_clauses (stmt),
9325 OMP_CLAUSE_DEPEND);
9326 gcc_assert (clauses);
9327 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9328 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
9329 switch (OMP_CLAUSE_DEPEND_KIND (c))
9330 {
9331 case OMP_CLAUSE_DEPEND_IN:
9332 n_in++;
9333 break;
9334 case OMP_CLAUSE_DEPEND_OUT:
9335 case OMP_CLAUSE_DEPEND_INOUT:
9336 n_out++;
9337 break;
9338 default:
9339 gcc_unreachable ();
9340 }
9341 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
9342 tree array = create_tmp_var (type, NULL);
9343 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
9344 NULL_TREE);
9345 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
9346 gimple_seq_add_stmt (iseq, g);
9347 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
9348 NULL_TREE);
9349 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
9350 gimple_seq_add_stmt (iseq, g);
9351 for (i = 0; i < 2; i++)
9352 {
9353 if ((i ? n_in : n_out) == 0)
9354 continue;
9355 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9356 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9357 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
9358 {
9359 tree t = OMP_CLAUSE_DECL (c);
9360 t = fold_convert (ptr_type_node, t);
9361 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
9362 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
9363 NULL_TREE, NULL_TREE);
9364 g = gimple_build_assign (r, t);
9365 gimple_seq_add_stmt (iseq, g);
9366 }
9367 }
9368 tree *p = gimple_omp_task_clauses_ptr (stmt);
9369 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
9370 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9371 OMP_CLAUSE_CHAIN (c) = *p;
9372 *p = c;
9373 tree clobber = build_constructor (type, NULL);
9374 TREE_THIS_VOLATILE (clobber) = 1;
9375 g = gimple_build_assign (array, clobber);
9376 gimple_seq_add_stmt (oseq, g);
9377}
9378
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple stmt = gsi_stmt (*gsi_p);
  gimple par_bind, bind, dep_bind = NULL;
  gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
  struct gimplify_ctx gctx, dep_gctx;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  /* Gimplification wrapped the directive body in a single GIMPLE_BIND.  */
  par_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
  par_body = gimple_bind_body (par_bind);
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      /* Detect a parallel whose body is a single worksharing construct
	 and mark it combined, so expansion can use the combined
	 runtime entry points.  */
      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  /* For a task with depend clauses, lower those first; the final
     statement is then wrapped in DEP_BIND together with the dependence
     array setup (dep_ilist) and teardown (dep_olist).  */
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && find_omp_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context (&dep_gctx);
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (stmt, &dep_ilist, &dep_olist);
    }

  /* Tasks needing firstprivate copy construction get a copy function.  */
  if (ctx->srecord_type)
    create_task_copyfn (stmt, ctx);

  push_gimplify_context (&gctx);

  par_olist = NULL;
  par_ilist = NULL;
  par_rlist = NULL;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
    lower_reduction_clauses (clauses, &par_rlist, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  olist = NULL;
  ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      /* Clobber the sender record after the region so its stack slot
	 can be reused.  */
      tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  /* Cancellation jumps land just before the copy-out/cleanup code.  */
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (stmt, new_body);

  /* Replace the directive with BIND (or DEP_BIND wrapping BIND when
     depend clauses were lowered): ilist; stmt; olist.  */
  bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
9501
/* Lower the OpenMP target directive in the current statement
   in GSI_P.  CTX holds context information for the directive.
   Handles target regions, target data and target update (KIND).  */

static void
lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t, c;
  gimple stmt = gsi_stmt (*gsi_p);
  gimple tgt_bind = NULL, bind;
  gimple_seq tgt_body = NULL, olist, ilist, new_body;
  struct gimplify_ctx gctx;
  location_t loc = gimple_location (stmt);
  int kind = gimple_omp_target_kind (stmt);
  unsigned int map_cnt = 0;

  clauses = gimple_omp_target_clauses (stmt);
  if (kind == GF_OMP_TARGET_KIND_REGION)
    {
      tgt_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
      tgt_body = gimple_bind_body (tgt_bind);
    }
  else if (kind == GF_OMP_TARGET_KIND_DATA)
    tgt_body = gimple_omp_body (stmt);
  child_fn = ctx->cb.dst_fn;

  push_gimplify_context (&gctx);

  /* First pass over the clauses: count the slots needed in the mapping
     arrays and, for target regions, redirect each mapped variable to
     its field in the receiver record via DECL_VALUE_EXPR.  */
  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	tree var, x;

      default:
	break;
      case OMP_CLAUSE_MAP:
      case OMP_CLAUSE_TO:
      case OMP_CLAUSE_FROM:
	var = OMP_CLAUSE_DECL (c);
	if (!DECL_P (var))
	  {
	    /* Non-decl operands (array sections) get their own slot
	       unless they are the zero-bias half of a pointer pair.  */
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
		|| !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
	      map_cnt++;
	    continue;
	  }

	/* For VLAs map the underlying pointer from DECL_VALUE_EXPR
	   rather than the variably-sized decl itself.  */
	if (DECL_SIZE (var)
	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
	  {
	    tree var2 = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
	    var2 = TREE_OPERAND (var2, 0);
	    gcc_assert (DECL_P (var2));
	    var = var2;
	  }

	if (!maybe_lookup_field (var, ctx))
	  continue;

	if (kind == GF_OMP_TARGET_KIND_REGION)
	  {
	    x = build_receiver_ref (var, true, ctx);
	    tree new_var = lookup_decl (var, ctx);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      x = build_simple_mem_ref (x);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	map_cnt++;
      }

  if (kind == GF_OMP_TARGET_KIND_REGION)
    {
      /* target_nesting_level makes lower_omp fold statements inside the
	 region; see the comment in lower_omp.  */
      target_nesting_level++;
      lower_omp (&tgt_body, ctx);
      target_nesting_level--;
    }
  else if (kind == GF_OMP_TARGET_KIND_DATA)
    lower_omp (&tgt_body, ctx);

  if (kind == GF_OMP_TARGET_KIND_REGION)
    {
      /* Declare all the variables created by mapping and the variables
	 declared in the scope of the target body.  */
      record_vars_into (ctx->block_vars, child_fn);
      record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
    }

  olist = NULL;
  ilist = NULL;
  if (ctx->record_type)
    {
      /* Build the three objects handed to the runtime: the address
	 record (.omp_data_arr), the byte sizes (.omp_data_sizes) and
	 the map-kind bytes (.omp_data_kinds).  Sizes and kinds start
	 out TREE_STATIC and are demoted if any size is dynamic.  */
      ctx->sender_decl
	= create_tmp_var (ctx->record_type, ".omp_data_arr");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      t = make_tree_vec (3);
      TREE_VEC_ELT (t, 0) = ctx->sender_decl;
      TREE_VEC_ELT (t, 1)
	= create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
			  ".omp_data_sizes");
      DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
      TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
      TREE_VEC_ELT (t, 2)
	= create_tmp_var (build_array_type_nelts (unsigned_char_type_node,
						  map_cnt),
			  ".omp_data_kinds");
      DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
      TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
      TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
      gimple_omp_target_set_data_arg (stmt, t);

      vec<constructor_elt, va_gc> *vsize;
      vec<constructor_elt, va_gc> *vkind;
      vec_alloc (vsize, map_cnt);
      vec_alloc (vkind, map_cnt);
      unsigned int map_idx = 0;

      /* Second pass: fill the sender record and the size/kind
	 constructors, one entry per slot counted above.  */
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree ovar, nc;

	  default:
	    break;
	  case OMP_CLAUSE_MAP:
	  case OMP_CLAUSE_TO:
	  case OMP_CLAUSE_FROM:
	    nc = c;
	    ovar = OMP_CLAUSE_DECL (c);
	    if (!DECL_P (ovar))
	      {
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		  {
		    /* A zero-bias section is paired with the following
		       pointer clause; use that clause's decl and skip
		       the pair as one entry (see NC handling below).  */
		    gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
					 == get_base_address (ovar));
		    nc = OMP_CLAUSE_CHAIN (c);
		    ovar = OMP_CLAUSE_DECL (nc);
		  }
		else
		  {
		    tree x = build_sender_ref (ovar, ctx);
		    tree v
		      = build_fold_addr_expr_with_type (ovar, ptr_type_node);
		    gimplify_assign (x, v, &ilist);
		    nc = NULL_TREE;
		  }
	      }
	    else
	      {
		if (DECL_SIZE (ovar)
		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
		  {
		    tree ovar2 = DECL_VALUE_EXPR (ovar);
		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
		    ovar2 = TREE_OPERAND (ovar2, 0);
		    gcc_assert (DECL_P (ovar2));
		    ovar = ovar2;
		  }
		if (!maybe_lookup_field (ovar, ctx))
		  continue;
	      }

	    if (nc)
	      {
		tree var = lookup_decl_in_outer_ctx (ovar, ctx);
		tree x = build_sender_ref (ovar, ctx);
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		    && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
		  {
		    gcc_assert (kind == GF_OMP_TARGET_KIND_REGION);
		    tree avar
		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)), NULL);
		    mark_addressable (avar);
		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
		    avar = build_fold_addr_expr (avar);
		    gimplify_assign (x, avar, &ilist);
		  }
		else if (is_gimple_reg (var))
		  {
		    /* A scalar living in a register is passed through
		       an addressable temporary: copy in unless the map
		       kind is alloc/from only, copy back out for
		       from/tofrom on non-readonly types.  */
		    gcc_assert (kind == GF_OMP_TARGET_KIND_REGION);
		    tree avar = create_tmp_var (TREE_TYPE (var), NULL);
		    mark_addressable (avar);
		    if (OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_ALLOC
			&& OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_FROM)
		      gimplify_assign (avar, var, &ilist);
		    avar = build_fold_addr_expr (avar);
		    gimplify_assign (x, avar, &ilist);
		    if ((OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_FROM
			 || OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_TOFROM)
			&& !TYPE_READONLY (TREE_TYPE (var)))
		      {
			x = build_sender_ref (ovar, ctx);
			x = build_simple_mem_ref (x);
			gimplify_assign (var, x, &olist);
		      }
		  }
		else
		  {
		    var = build_fold_addr_expr (var);
		    gimplify_assign (x, var, &ilist);
		  }
	      }
	    tree s = OMP_CLAUSE_SIZE (c);
	    if (s == NULL_TREE)
	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	    s = fold_convert (size_type_node, s);
	    tree purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    /* Any run-time size forces the sizes array off the static
	       initializer path.  */
	    if (TREE_CODE (s) != INTEGER_CST)
	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

	    /* The kind byte encodes the map kind in the low 3 bits and
	       ceil_log2 of the alignment in the remaining bits.  */
	    unsigned char tkind = 0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_MAP:
		tkind = OMP_CLAUSE_MAP_KIND (c);
		break;
	      case OMP_CLAUSE_TO:
		tkind = OMP_CLAUSE_MAP_TO;
		break;
	      case OMP_CLAUSE_FROM:
		tkind = OMP_CLAUSE_MAP_FROM;
		break;
	      default:
		gcc_unreachable ();
	      }
	    unsigned int talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
	      talign = DECL_ALIGN_UNIT (ovar);
	    talign = ceil_log2 (talign);
	    tkind |= talign << 3;
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				    build_int_cst (unsigned_char_type_node,
						   tkind));
	    /* Skip the second clause of a zero-bias pair.  */
	    if (nc && nc != c)
	      c = nc;
	  }

      gcc_assert (map_idx == map_cnt);

      DECL_INITIAL (TREE_VEC_ELT (t, 1))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
      DECL_INITIAL (TREE_VEC_ELT (t, 2))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
      if (!TREE_STATIC (TREE_VEC_ELT (t, 1)))
	{
	  /* Dynamic sizes: emit a DECL_EXPR so the initializer is
	     evaluated at run time in ILIST.  */
	  gimple_seq initlist = NULL;
	  force_gimple_operand (build1 (DECL_EXPR, void_type_node,
					TREE_VEC_ELT (t, 1)),
				&initlist, true, NULL_TREE);
	  gimple_seq_add_seq (&ilist, initlist);
	}

      /* Clobber the sender record after the region so its stack slot
	 can be reused.  */
      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (ctx->record_type && kind == GF_OMP_TARGET_KIND_REGION)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  if (kind == GF_OMP_TARGET_KIND_REGION)
    {
      gimple_seq_add_seq (&new_body, tgt_body);
      new_body = maybe_catch_exception (new_body);
    }
  else if (kind == GF_OMP_TARGET_KIND_DATA)
    new_body = tgt_body;
  /* target update has no body at all.  */
  if (kind != GF_OMP_TARGET_KIND_UPDATE)
    {
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  bind = gimple_build_bind (NULL, NULL,
			    tgt_bind ? gimple_bind_block (tgt_bind)
			    : NULL_TREE);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);
}
9807
/* Expand code for an OpenMP teams directive.  Replaces the directive
   with a bind containing the clause setup, a GOMP_teams runtime call
   and the lowered body.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple teams_stmt = gsi_stmt (*gsi_p);
  struct gimplify_ctx gctx;
  push_gimplify_context (&gctx);

  tree block = make_node (BLOCK);
  gimple bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate num_teams; 0 lets the runtime choose a default.  */
  tree num_teams = find_omp_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise thread_limit; 0 means no limit.  */
  tree thread_limit = find_omp_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  /* Emit the GOMP_teams (num_teams, thread_limit) runtime call right
     after the (now bodiless) teams statement.  */
  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
  gimple call = gimple_build_call (decl, 2, num_teams, thread_limit);
  gimple_set_location (call, loc);
  gimple_seq_add_stmt (&bind_body, call);

  /* Sequence: body, reductions, destructors, then the OMP return.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
9872
9873
a4890dc9 9874/* Callback for lower_omp_1. Return non-NULL if *tp needs to be
75a70cf9 9875 regimplified. If DATA is non-NULL, lower_omp_1 is outside
9876 of OpenMP context, but with task_shared_vars set. */
46515c92 9877
9878static tree
75a70cf9 9879lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
9880 void *data)
46515c92 9881{
a4890dc9 9882 tree t = *tp;
46515c92 9883
a4890dc9 9884 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
75a70cf9 9885 if (TREE_CODE (t) == VAR_DECL && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
9f49e155 9886 return t;
9887
9888 if (task_shared_vars
9889 && DECL_P (t)
9890 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
a4890dc9 9891 return t;
46515c92 9892
a4890dc9 9893 /* If a global variable has been privatized, TREE_CONSTANT on
9894 ADDR_EXPR might be wrong. */
75a70cf9 9895 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
a4890dc9 9896 recompute_tree_invariant_for_addr_expr (t);
46515c92 9897
a4890dc9 9898 *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
9899 return NULL_TREE;
46515c92 9900}
773c5ba7 9901
/* Lower the statement at GSI_P.  CTX is the innermost enclosing OpenMP
   context, or NULL outside any construct (in which case
   task_shared_vars is set and WI is used instead).  Recurses into
   statement bodies and dispatches each OpenMP construct to its
   dedicated lowering routine.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only passed to walk_tree when CTX == NULL, which only happens
     with task_shared_vars set; see the regimplify calls below.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OpenMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      /* Regimplify condition operands that mention privatized vars.  */
      if ((ctx || task_shared_vars)
	  && (walk_tree (gimple_cond_lhs_ptr (stmt), lower_omp_regimplify_p,
			 ctx ? NULL : &wi, NULL)
	      || walk_tree (gimple_cond_rhs_ptr (stmt), lower_omp_regimplify_p,
			    ctx ? NULL : &wi, NULL)))
	gimple_regimplify_operands (stmt, gsi_p);
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (stmt), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (stmt), ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (stmt), ctx);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	gimple_regimplify_operands (stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      /* Rewrite cancellation-related builtins according to whether a
	 cancellable construct is in scope.  */
      tree fndecl;
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    /* Cancellation is a property of the sections construct, not
	       the individual section.  */
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* No cancellation in scope: cancellation points become
		   NOPs, plain barriers are left untouched.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    tree lhs;
	    lhs = create_tmp_var (boolean_type_node, NULL);
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		/* In a cancellable region barriers must use the _cancel
		   variant, which reports observed cancellation.  */
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (stmt, fndecl);
		gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
	      }
	    gimple_call_set_lhs (stmt, lhs);
	    /* After the call, branch to the region's cancel label if it
	       returned true, else fall through.  */
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs, boolean_false_node,
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	gimple_regimplify_operands (stmt, gsi_p);
      break;
    }
}
10070
10071static void
e3a19533 10072lower_omp (gimple_seq *body, omp_context *ctx)
1e8e9920 10073{
1d22f541 10074 location_t saved_location = input_location;
e3a19533 10075 gimple_stmt_iterator gsi;
10076 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
75a70cf9 10077 lower_omp_1 (&gsi, ctx);
bc7bff74 10078 /* Inside target region we haven't called fold_stmt during gimplification,
10079 because it can break code by adding decl references that weren't in the
10080 source. Call fold_stmt now. */
10081 if (target_nesting_level)
10082 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10083 fold_stmt (&gsi);
1d22f541 10084 input_location = saved_location;
1e8e9920 10085}
10086\f
10087/* Main entry point. */
10088
2a1990e9 10089static unsigned int
1e8e9920 10090execute_lower_omp (void)
10091{
75a70cf9 10092 gimple_seq body;
10093
41709826 10094 /* This pass always runs, to provide PROP_gimple_lomp.
10095 But there is nothing to do unless -fopenmp is given. */
f2697631 10096 if (flag_openmp == 0 && flag_openmp_simd == 0 && flag_enable_cilkplus == 0)
41709826 10097 return 0;
10098
1e8e9920 10099 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
10100 delete_omp_context);
10101
75a70cf9 10102 body = gimple_body (current_function_decl);
ab129075 10103 scan_omp (&body, NULL);
fd6481cf 10104 gcc_assert (taskreg_nesting_level == 0);
1e8e9920 10105
10106 if (all_contexts->root)
fd6481cf 10107 {
dac18d1a 10108 struct gimplify_ctx gctx;
10109
fd6481cf 10110 if (task_shared_vars)
dac18d1a 10111 push_gimplify_context (&gctx);
e3a19533 10112 lower_omp (&body, NULL);
fd6481cf 10113 if (task_shared_vars)
10114 pop_gimplify_context (NULL);
10115 }
1e8e9920 10116
773c5ba7 10117 if (all_contexts)
10118 {
10119 splay_tree_delete (all_contexts);
10120 all_contexts = NULL;
10121 }
fd6481cf 10122 BITMAP_FREE (task_shared_vars);
2a1990e9 10123 return 0;
1e8e9920 10124}
10125
namespace {

/* Pass descriptor for the OpenMP lowering pass: runs unconditionally
   (no gate) on any GIMPLE (PROP_gimple_any) and provides
   PROP_gimple_lomp for later passes.  */
const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass object wrapper; all the work is in execute_lower_omp.  */
class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace
10156
/* Factory for the pass manager: create a new instance of the OpenMP
   lowering pass in context CTXT.  Caller (passes.c) owns the result.  */
gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
1e8e9920 10162\f
10163/* The following is a utility to diagnose OpenMP structured block violations.
61e47ac8 10164 It is not part of the "omplower" pass, as that's invoked too late. It
10165 should be invoked by the respective front ends after gimplification. */
1e8e9920 10166
10167static splay_tree all_labels;
10168
10169/* Check for mismatched contexts and generate an error if needed. Return
10170 true if an error is detected. */
10171
10172static bool
75a70cf9 10173diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
10174 gimple branch_ctx, gimple label_ctx)
1e8e9920 10175{
75a70cf9 10176 if (label_ctx == branch_ctx)
1e8e9920 10177 return false;
10178
48e1416a 10179
75a70cf9 10180 /*
10181 Previously we kept track of the label's entire context in diagnose_sb_[12]
10182 so we could traverse it and issue a correct "exit" or "enter" error
10183 message upon a structured block violation.
10184
10185 We built the context by building a list with tree_cons'ing, but there is
10186 no easy counterpart in gimple tuples. It seems like far too much work
10187 for issuing exit/enter error messages. If someone really misses the
10188 distinct error message... patches welcome.
10189 */
48e1416a 10190
75a70cf9 10191#if 0
1e8e9920 10192 /* Try to avoid confusing the user by producing and error message
f0b5f617 10193 with correct "exit" or "enter" verbiage. We prefer "exit"
1e8e9920 10194 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
10195 if (branch_ctx == NULL)
10196 exit_p = false;
10197 else
10198 {
10199 while (label_ctx)
10200 {
10201 if (TREE_VALUE (label_ctx) == branch_ctx)
10202 {
10203 exit_p = false;
10204 break;
10205 }
10206 label_ctx = TREE_CHAIN (label_ctx);
10207 }
10208 }
10209
10210 if (exit_p)
10211 error ("invalid exit from OpenMP structured block");
10212 else
10213 error ("invalid entry to OpenMP structured block");
75a70cf9 10214#endif
1e8e9920 10215
f2697631 10216 bool cilkplus_block = false;
10217 if (flag_enable_cilkplus)
10218 {
10219 if ((branch_ctx
10220 && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
10221 && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
10222 || (gimple_code (label_ctx) == GIMPLE_OMP_FOR
10223 && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
10224 cilkplus_block = true;
10225 }
10226
75a70cf9 10227 /* If it's obvious we have an invalid entry, be specific about the error. */
10228 if (branch_ctx == NULL)
f2697631 10229 {
10230 if (cilkplus_block)
10231 error ("invalid entry to Cilk Plus structured block");
10232 else
10233 error ("invalid entry to OpenMP structured block");
10234 }
75a70cf9 10235 else
f2697631 10236 {
10237 /* Otherwise, be vague and lazy, but efficient. */
10238 if (cilkplus_block)
10239 error ("invalid branch to/from a Cilk Plus structured block");
10240 else
10241 error ("invalid branch to/from an OpenMP structured block");
10242 }
75a70cf9 10243
10244 gsi_replace (gsi_p, gimple_build_nop (), false);
1e8e9920 10245 return true;
10246}
10247
10248/* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
75a70cf9 10249 where each label is found. */
1e8e9920 10250
10251static tree
75a70cf9 10252diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10253 struct walk_stmt_info *wi)
1e8e9920 10254{
75a70cf9 10255 gimple context = (gimple) wi->info;
10256 gimple inner_context;
10257 gimple stmt = gsi_stmt (*gsi_p);
1e8e9920 10258
75a70cf9 10259 *handled_ops_p = true;
10260
10261 switch (gimple_code (stmt))
1e8e9920 10262 {
75a70cf9 10263 WALK_SUBSTMTS;
48e1416a 10264
75a70cf9 10265 case GIMPLE_OMP_PARALLEL:
10266 case GIMPLE_OMP_TASK:
10267 case GIMPLE_OMP_SECTIONS:
10268 case GIMPLE_OMP_SINGLE:
10269 case GIMPLE_OMP_SECTION:
10270 case GIMPLE_OMP_MASTER:
10271 case GIMPLE_OMP_ORDERED:
10272 case GIMPLE_OMP_CRITICAL:
bc7bff74 10273 case GIMPLE_OMP_TARGET:
10274 case GIMPLE_OMP_TEAMS:
10275 case GIMPLE_OMP_TASKGROUP:
75a70cf9 10276 /* The minimal context here is just the current OMP construct. */
10277 inner_context = stmt;
1e8e9920 10278 wi->info = inner_context;
75a70cf9 10279 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
1e8e9920 10280 wi->info = context;
10281 break;
10282
75a70cf9 10283 case GIMPLE_OMP_FOR:
10284 inner_context = stmt;
1e8e9920 10285 wi->info = inner_context;
75a70cf9 10286 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10287 walk them. */
10288 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
10289 diagnose_sb_1, NULL, wi);
10290 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
1e8e9920 10291 wi->info = context;
10292 break;
10293
75a70cf9 10294 case GIMPLE_LABEL:
10295 splay_tree_insert (all_labels, (splay_tree_key) gimple_label_label (stmt),
1e8e9920 10296 (splay_tree_value) context);
10297 break;
10298
10299 default:
10300 break;
10301 }
10302
10303 return NULL_TREE;
10304}
10305
10306/* Pass 2: Check each branch and see if its context differs from that of
10307 the destination label's context. */
10308
10309static tree
75a70cf9 10310diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10311 struct walk_stmt_info *wi)
1e8e9920 10312{
75a70cf9 10313 gimple context = (gimple) wi->info;
1e8e9920 10314 splay_tree_node n;
75a70cf9 10315 gimple stmt = gsi_stmt (*gsi_p);
1e8e9920 10316
75a70cf9 10317 *handled_ops_p = true;
10318
10319 switch (gimple_code (stmt))
1e8e9920 10320 {
75a70cf9 10321 WALK_SUBSTMTS;
10322
10323 case GIMPLE_OMP_PARALLEL:
10324 case GIMPLE_OMP_TASK:
10325 case GIMPLE_OMP_SECTIONS:
10326 case GIMPLE_OMP_SINGLE:
10327 case GIMPLE_OMP_SECTION:
10328 case GIMPLE_OMP_MASTER:
10329 case GIMPLE_OMP_ORDERED:
10330 case GIMPLE_OMP_CRITICAL:
bc7bff74 10331 case GIMPLE_OMP_TARGET:
10332 case GIMPLE_OMP_TEAMS:
10333 case GIMPLE_OMP_TASKGROUP:
75a70cf9 10334 wi->info = stmt;
e3a19533 10335 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
1e8e9920 10336 wi->info = context;
10337 break;
10338
75a70cf9 10339 case GIMPLE_OMP_FOR:
10340 wi->info = stmt;
10341 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10342 walk them. */
e3a19533 10343 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
10344 diagnose_sb_2, NULL, wi);
10345 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
1e8e9920 10346 wi->info = context;
10347 break;
10348
0e1818e7 10349 case GIMPLE_COND:
10350 {
10351 tree lab = gimple_cond_true_label (stmt);
10352 if (lab)
10353 {
10354 n = splay_tree_lookup (all_labels,
10355 (splay_tree_key) lab);
10356 diagnose_sb_0 (gsi_p, context,
10357 n ? (gimple) n->value : NULL);
10358 }
10359 lab = gimple_cond_false_label (stmt);
10360 if (lab)
10361 {
10362 n = splay_tree_lookup (all_labels,
10363 (splay_tree_key) lab);
10364 diagnose_sb_0 (gsi_p, context,
10365 n ? (gimple) n->value : NULL);
10366 }
10367 }
10368 break;
10369
75a70cf9 10370 case GIMPLE_GOTO:
1e8e9920 10371 {
75a70cf9 10372 tree lab = gimple_goto_dest (stmt);
1e8e9920 10373 if (TREE_CODE (lab) != LABEL_DECL)
10374 break;
10375
10376 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
75a70cf9 10377 diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
1e8e9920 10378 }
10379 break;
10380
75a70cf9 10381 case GIMPLE_SWITCH:
1e8e9920 10382 {
75a70cf9 10383 unsigned int i;
10384 for (i = 0; i < gimple_switch_num_labels (stmt); ++i)
1e8e9920 10385 {
75a70cf9 10386 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
1e8e9920 10387 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
75a70cf9 10388 if (n && diagnose_sb_0 (gsi_p, context, (gimple) n->value))
1e8e9920 10389 break;
10390 }
10391 }
10392 break;
10393
75a70cf9 10394 case GIMPLE_RETURN:
10395 diagnose_sb_0 (gsi_p, context, NULL);
1e8e9920 10396 break;
10397
10398 default:
10399 break;
10400 }
10401
10402 return NULL_TREE;
10403}
10404
/* Called from tree-cfg.c::make_edges to create cfg edges for all GIMPLE_OMP
   codes.  BB is a basic block whose last statement is an OMP statement;
   *REGION is the omp_region stack being built and is pushed/popped here
   as constructs are entered and left.  Returns true iff the caller
   should also add a fallthru edge from BB to the next block.  */
bool
make_gimple_omp_edges (basic_block bb, struct omp_region **region)
{
  gimple last = last_stmt (bb);
  enum gimple_code code = gimple_code (last);
  struct omp_region *cur_region = *region;
  bool fallthru = false;

  switch (code)
    {
    /* Directives that open a new region and fall through into their
       body.  */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_SECTION:
      cur_region = new_omp_region (bb, code, cur_region);
      fallthru = true;
      break;

    case GIMPLE_OMP_TARGET:
      cur_region = new_omp_region (bb, code, cur_region);
      fallthru = true;
      /* A "#pragma omp target update" has no body, so its region is
	 closed again immediately.  */
      if (gimple_omp_target_kind (last) == GF_OMP_TARGET_KIND_UPDATE)
	cur_region = cur_region->outer;
      break;

    case GIMPLE_OMP_SECTIONS:
      cur_region = new_omp_region (bb, code, cur_region);
      fallthru = true;
      break;

    case GIMPLE_OMP_SECTIONS_SWITCH:
      /* The edges out of the switch block are wired up when the
	 enclosing sections' GIMPLE_OMP_CONTINUE is reached below.  */
      fallthru = false;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
       fallthru = true;
       break;

    case GIMPLE_OMP_RETURN:
      /* In the case of a GIMPLE_OMP_SECTION, the edge will go
	 somewhere other than the next block.  This will be
	 created later.  */
      /* NOTE(review): assumes cur_region is non-NULL here, i.e. every
	 GIMPLE_OMP_RETURN closes an open region -- holds for gimple
	 produced by the omp lowering, but not checked.  */
      cur_region->exit = bb;
      fallthru = cur_region->type != GIMPLE_OMP_SECTION;
      cur_region = cur_region->outer;
      break;

    case GIMPLE_OMP_CONTINUE:
      cur_region->cont = bb;
      switch (cur_region->type)
	{
	case GIMPLE_OMP_FOR:
	  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
	     succs edges as abnormal to prevent splitting
	     them.  */
	  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
	  /* Make the loopback edge.  */
	  make_edge (bb, single_succ (cur_region->entry),
		     EDGE_ABNORMAL);

	  /* Create an edge from GIMPLE_OMP_FOR to exit, which
	     corresponds to the case that the body of the loop
	     is not executed at all.  */
	  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
	  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
	  fallthru = false;
	  break;

	case GIMPLE_OMP_SECTIONS:
	  /* Wire up the edges into and out of the nested sections.  */
	  {
	    basic_block switch_bb = single_succ (cur_region->entry);

	    struct omp_region *i;
	    for (i = cur_region->inner; i ; i = i->next)
	      {
		gcc_assert (i->type == GIMPLE_OMP_SECTION);
		make_edge (switch_bb, i->entry, 0);
		make_edge (i->exit, bb, EDGE_FALLTHRU);
	      }

	    /* Make the loopback edge to the block with
	       GIMPLE_OMP_SECTIONS_SWITCH.  */
	    make_edge (bb, switch_bb, 0);

	    /* Make the edge from the switch to exit.  */
	    make_edge (switch_bb, bb->next_bb, 0);
	    fallthru = false;
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
      break;

    default:
      gcc_unreachable ();
    }

  if (*region != cur_region)
    *region = cur_region;

  return fallthru;
}
10519
bfec3452 10520static unsigned int
10521diagnose_omp_structured_block_errors (void)
1e8e9920 10522{
1e8e9920 10523 struct walk_stmt_info wi;
bfec3452 10524 gimple_seq body = gimple_body (current_function_decl);
1e8e9920 10525
10526 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
10527
10528 memset (&wi, 0, sizeof (wi));
75a70cf9 10529 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
1e8e9920 10530
10531 memset (&wi, 0, sizeof (wi));
1e8e9920 10532 wi.want_locations = true;
e3a19533 10533 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
10534
10535 gimple_set_body (current_function_decl, body);
1e8e9920 10536
10537 splay_tree_delete (all_labels);
10538 all_labels = NULL;
10539
bfec3452 10540 return 0;
1e8e9920 10541}
10542
bfec3452 10543static bool
10544gate_diagnose_omp_blocks (void)
10545{
f2697631 10546 return flag_openmp || flag_enable_cilkplus;
bfec3452 10547}
10548
namespace {

/* Pass descriptor for the structured-block diagnostics pass.  Unlike
   "omplower" it is gated (gate_diagnose_omp_blocks) and provides no
   properties; the front ends invoke it right after gimplification.  */
const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass-manager wrapper around diagnose_omp_structured_block_errors.  */
class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_diagnose_omp_blocks (); }
  unsigned int execute () {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace
10582
/* Factory for the pass manager: create a new instance of the OpenMP
   structured-block diagnostics pass in context CTXT.  */
gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
10588
1e8e9920 10589#include "gt-omp-low.h"