]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/omp-low.c
config/
[thirdparty/gcc.git] / gcc / omp-low.c
CommitLineData
1e8e9920 1/* Lowering pass for OpenMP directives. Converts OpenMP directives
2 into explicit calls to the runtime library (libgomp) and data
3 marshalling to implement data sharing and copying clauses.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
711789cc 6 Copyright (C) 2005-2013 Free Software Foundation, Inc.
1e8e9920 7
8This file is part of GCC.
9
10GCC is free software; you can redistribute it and/or modify it under
11the terms of the GNU General Public License as published by the Free
8c4c00c1 12Software Foundation; either version 3, or (at your option) any later
1e8e9920 13version.
14
15GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16WARRANTY; without even the implied warranty of MERCHANTABILITY or
17FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18for more details.
19
20You should have received a copy of the GNU General Public License
8c4c00c1 21along with GCC; see the file COPYING3. If not see
22<http://www.gnu.org/licenses/>. */
1e8e9920 23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
27#include "tm.h"
28#include "tree.h"
29#include "rtl.h"
e795d6e1 30#include "gimple.h"
a8783bee 31#include "gimplify.h"
dcf1a1ec 32#include "gimple-iterator.h"
e795d6e1 33#include "gimplify-me.h"
dcf1a1ec 34#include "gimple-walk.h"
75a70cf9 35#include "tree-iterator.h"
1e8e9920 36#include "tree-inline.h"
37#include "langhooks.h"
852f689e 38#include "diagnostic-core.h"
073c1fd5 39#include "gimple-ssa.h"
40#include "cgraph.h"
41#include "tree-cfg.h"
42#include "tree-phinodes.h"
43#include "ssa-iterators.h"
44#include "tree-ssanames.h"
45#include "tree-into-ssa.h"
46#include "tree-dfa.h"
69ee5dbb 47#include "tree-ssa.h"
1e8e9920 48#include "flags.h"
49#include "function.h"
50#include "expr.h"
1e8e9920 51#include "tree-pass.h"
52#include "ggc.h"
53#include "except.h"
e3022db7 54#include "splay-tree.h"
cb7f680b 55#include "optabs.h"
56#include "cfgloop.h"
3d483a94 57#include "target.h"
7740abd8 58#include "omp-low.h"
424a4a92 59#include "gimple-low.h"
60#include "tree-cfgcleanup.h"
e797f49f 61#include "tree-nested.h"
1e8e9920 62
75a70cf9 63
48e1416a 64/* Lowering of OpenMP parallel and workshare constructs proceeds in two
1e8e9920 65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
334ec2d8 68 re-gimplifying things when variables have been replaced with complex
1e8e9920 69 expressions.
70
d134bccc 71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for parallel regions which are then moved to a new
73 function, to be invoked by the thread library. */
1e8e9920 74
7740abd8 75/* Parallel region information. Every parallel and workshare
76 directive is enclosed between two markers, the OMP_* directive
77 and a corresponding OMP_RETURN statement. */
78
79struct omp_region
80{
81 /* The enclosing region. */
82 struct omp_region *outer;
83
84 /* First child region. */
85 struct omp_region *inner;
86
87 /* Next peer region. */
88 struct omp_region *next;
89
90 /* Block containing the omp directive as its last stmt. */
91 basic_block entry;
92
93 /* Block containing the OMP_RETURN as its last stmt. */
94 basic_block exit;
95
96 /* Block containing the OMP_CONTINUE as its last stmt. */
97 basic_block cont;
98
99 /* If this is a combined parallel+workshare region, this is a list
100 of additional arguments needed by the combined parallel+workshare
101 library call. */
102 vec<tree, va_gc> *ws_args;
103
104 /* The code for the omp directive of this region. */
105 enum gimple_code type;
106
107 /* Schedule kind, only used for OMP_FOR type regions. */
108 enum omp_clause_schedule_kind sched_kind;
109
110 /* True if this is a combined parallel+workshare region. */
111 bool is_combined_parallel;
112};
113
1e8e9920 114/* Context structure. Used to store information about each parallel
115 directive in the code. */
116
117typedef struct omp_context
118{
119 /* This field must be at the beginning, as we do "inheritance": Some
120 callback functions for tree-inline.c (e.g., omp_copy_decl)
121 receive a copy_body_data pointer that is up-casted to an
122 omp_context pointer. */
123 copy_body_data cb;
124
125 /* The tree of contexts corresponding to the encountered constructs. */
126 struct omp_context *outer;
75a70cf9 127 gimple stmt;
1e8e9920 128
48e1416a 129 /* Map variables to fields in a structure that allows communication
1e8e9920 130 between sending and receiving threads. */
131 splay_tree field_map;
132 tree record_type;
133 tree sender_decl;
134 tree receiver_decl;
135
fd6481cf 136 /* These are used just by task contexts, if task firstprivate fn is
137 needed. srecord_type is used to communicate from the thread
138 that encountered the task construct to task firstprivate fn,
139 record_type is allocated by GOMP_task, initialized by task firstprivate
140 fn and passed to the task body fn. */
141 splay_tree sfield_map;
142 tree srecord_type;
143
1e8e9920 144 /* A chain of variables to add to the top-level block surrounding the
145 construct. In the case of a parallel, this is in the child function. */
146 tree block_vars;
147
bc7bff74 148 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
149 barriers should jump to during omplower pass. */
150 tree cancel_label;
151
1e8e9920 152 /* What to do with variables with implicitly determined sharing
153 attributes. */
154 enum omp_clause_default_kind default_kind;
155
156 /* Nesting depth of this context. Used to beautify error messages re
157 invalid gotos. The outermost ctx is depth 1, with depth 0 being
158 reserved for the main body of the function. */
159 int depth;
160
1e8e9920 161 /* True if this parallel directive is nested within another. */
162 bool is_nested;
bc7bff74 163
164 /* True if this construct can be cancelled. */
165 bool cancellable;
1e8e9920 166} omp_context;
167
168
fd6481cf 169struct omp_for_data_loop
170{
171 tree v, n1, n2, step;
172 enum tree_code cond_code;
173};
174
773c5ba7 175/* A structure describing the main elements of a parallel loop. */
1e8e9920 176
773c5ba7 177struct omp_for_data
1e8e9920 178{
fd6481cf 179 struct omp_for_data_loop loop;
75a70cf9 180 tree chunk_size;
181 gimple for_stmt;
fd6481cf 182 tree pre, iter_type;
183 int collapse;
1e8e9920 184 bool have_nowait, have_ordered;
185 enum omp_clause_schedule_kind sched_kind;
fd6481cf 186 struct omp_for_data_loop *loops;
1e8e9920 187};
188
773c5ba7 189
1e8e9920 190static splay_tree all_contexts;
fd6481cf 191static int taskreg_nesting_level;
bc7bff74 192static int target_nesting_level;
7740abd8 193static struct omp_region *root_omp_region;
fd6481cf 194static bitmap task_shared_vars;
1e8e9920 195
ab129075 196static void scan_omp (gimple_seq *, omp_context *);
75a70cf9 197static tree scan_omp_1_op (tree *, int *, void *);
198
199#define WALK_SUBSTMTS \
200 case GIMPLE_BIND: \
201 case GIMPLE_TRY: \
202 case GIMPLE_CATCH: \
203 case GIMPLE_EH_FILTER: \
4c0315d0 204 case GIMPLE_TRANSACTION: \
75a70cf9 205 /* The sub-statements for these should be walked. */ \
206 *handled_ops_p = false; \
207 break;
208
209/* Convenience function for calling scan_omp_1_op on tree operands. */
210
211static inline tree
212scan_omp_op (tree *tp, omp_context *ctx)
213{
214 struct walk_stmt_info wi;
215
216 memset (&wi, 0, sizeof (wi));
217 wi.info = ctx;
218 wi.want_locations = true;
219
220 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
221}
222
e3a19533 223static void lower_omp (gimple_seq *, omp_context *);
f49d7bb5 224static tree lookup_decl_in_outer_ctx (tree, omp_context *);
225static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
1e8e9920 226
227/* Find an OpenMP clause of type KIND within CLAUSES. */
228
79acaae1 229tree
590c3166 230find_omp_clause (tree clauses, enum omp_clause_code kind)
1e8e9920 231{
232 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
55d6e7cd 233 if (OMP_CLAUSE_CODE (clauses) == kind)
1e8e9920 234 return clauses;
235
236 return NULL_TREE;
237}
238
239/* Return true if CTX is for an omp parallel. */
240
241static inline bool
242is_parallel_ctx (omp_context *ctx)
243{
75a70cf9 244 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
1e8e9920 245}
246
773c5ba7 247
fd6481cf 248/* Return true if CTX is for an omp task. */
249
250static inline bool
251is_task_ctx (omp_context *ctx)
252{
75a70cf9 253 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
fd6481cf 254}
255
256
257/* Return true if CTX is for an omp parallel or omp task. */
258
259static inline bool
260is_taskreg_ctx (omp_context *ctx)
261{
75a70cf9 262 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL
263 || gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
fd6481cf 264}
265
266
773c5ba7 267/* Return true if REGION is a combined parallel+workshare region. */
1e8e9920 268
269static inline bool
773c5ba7 270is_combined_parallel (struct omp_region *region)
271{
272 return region->is_combined_parallel;
273}
274
275
276/* Extract the header elements of parallel loop FOR_STMT and store
277 them into *FD. */
278
279static void
75a70cf9 280extract_omp_for_data (gimple for_stmt, struct omp_for_data *fd,
fd6481cf 281 struct omp_for_data_loop *loops)
773c5ba7 282{
fd6481cf 283 tree t, var, *collapse_iter, *collapse_count;
284 tree count = NULL_TREE, iter_type = long_integer_type_node;
285 struct omp_for_data_loop *loop;
286 int i;
287 struct omp_for_data_loop dummy_loop;
389dd41b 288 location_t loc = gimple_location (for_stmt);
f2697631 289 bool simd = gimple_omp_for_kind (for_stmt) & GF_OMP_FOR_KIND_SIMD;
bc7bff74 290 bool distribute = gimple_omp_for_kind (for_stmt)
291 == GF_OMP_FOR_KIND_DISTRIBUTE;
773c5ba7 292
293 fd->for_stmt = for_stmt;
294 fd->pre = NULL;
75a70cf9 295 fd->collapse = gimple_omp_for_collapse (for_stmt);
fd6481cf 296 if (fd->collapse > 1)
297 fd->loops = loops;
298 else
299 fd->loops = &fd->loop;
773c5ba7 300
bc7bff74 301 fd->have_nowait = distribute || simd;
302 fd->have_ordered = false;
773c5ba7 303 fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
304 fd->chunk_size = NULL_TREE;
fd6481cf 305 collapse_iter = NULL;
306 collapse_count = NULL;
773c5ba7 307
75a70cf9 308 for (t = gimple_omp_for_clauses (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
55d6e7cd 309 switch (OMP_CLAUSE_CODE (t))
773c5ba7 310 {
311 case OMP_CLAUSE_NOWAIT:
312 fd->have_nowait = true;
313 break;
314 case OMP_CLAUSE_ORDERED:
315 fd->have_ordered = true;
316 break;
317 case OMP_CLAUSE_SCHEDULE:
bc7bff74 318 gcc_assert (!distribute);
773c5ba7 319 fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
320 fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
321 break;
bc7bff74 322 case OMP_CLAUSE_DIST_SCHEDULE:
323 gcc_assert (distribute);
324 fd->chunk_size = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (t);
325 break;
fd6481cf 326 case OMP_CLAUSE_COLLAPSE:
327 if (fd->collapse > 1)
328 {
329 collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
330 collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
331 }
773c5ba7 332 default:
333 break;
334 }
335
fd6481cf 336 /* FIXME: for now map schedule(auto) to schedule(static).
337 There should be analysis to determine whether all iterations
338 are approximately the same amount of work (then schedule(static)
bde357c8 339 is best) or if it varies (then schedule(dynamic,N) is better). */
fd6481cf 340 if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
341 {
342 fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
343 gcc_assert (fd->chunk_size == NULL);
344 }
345 gcc_assert (fd->collapse == 1 || collapse_iter != NULL);
773c5ba7 346 if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
347 gcc_assert (fd->chunk_size == NULL);
348 else if (fd->chunk_size == NULL)
349 {
350 /* We only need to compute a default chunk size for ordered
351 static loops and dynamic loops. */
fd6481cf 352 if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
bc7bff74 353 || fd->have_ordered)
773c5ba7 354 fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
355 ? integer_zero_node : integer_one_node;
356 }
fd6481cf 357
358 for (i = 0; i < fd->collapse; i++)
359 {
360 if (fd->collapse == 1)
361 loop = &fd->loop;
362 else if (loops != NULL)
363 loop = loops + i;
364 else
365 loop = &dummy_loop;
366
75a70cf9 367 loop->v = gimple_omp_for_index (for_stmt, i);
fd6481cf 368 gcc_assert (SSA_VAR_P (loop->v));
369 gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
370 || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
371 var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
75a70cf9 372 loop->n1 = gimple_omp_for_initial (for_stmt, i);
fd6481cf 373
75a70cf9 374 loop->cond_code = gimple_omp_for_cond (for_stmt, i);
375 loop->n2 = gimple_omp_for_final (for_stmt, i);
fd6481cf 376 switch (loop->cond_code)
377 {
378 case LT_EXPR:
379 case GT_EXPR:
380 break;
f2697631 381 case NE_EXPR:
382 gcc_assert (gimple_omp_for_kind (for_stmt)
383 == GF_OMP_FOR_KIND_CILKSIMD);
384 break;
fd6481cf 385 case LE_EXPR:
386 if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
2cc66f2a 387 loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, 1);
fd6481cf 388 else
389dd41b 389 loop->n2 = fold_build2_loc (loc,
390 PLUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
fd6481cf 391 build_int_cst (TREE_TYPE (loop->n2), 1));
392 loop->cond_code = LT_EXPR;
393 break;
394 case GE_EXPR:
395 if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
2cc66f2a 396 loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, -1);
fd6481cf 397 else
389dd41b 398 loop->n2 = fold_build2_loc (loc,
399 MINUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
fd6481cf 400 build_int_cst (TREE_TYPE (loop->n2), 1));
401 loop->cond_code = GT_EXPR;
402 break;
403 default:
404 gcc_unreachable ();
405 }
406
75a70cf9 407 t = gimple_omp_for_incr (for_stmt, i);
fd6481cf 408 gcc_assert (TREE_OPERAND (t, 0) == var);
409 switch (TREE_CODE (t))
410 {
411 case PLUS_EXPR:
fd6481cf 412 loop->step = TREE_OPERAND (t, 1);
413 break;
85d86b55 414 case POINTER_PLUS_EXPR:
415 loop->step = fold_convert (ssizetype, TREE_OPERAND (t, 1));
416 break;
fd6481cf 417 case MINUS_EXPR:
418 loop->step = TREE_OPERAND (t, 1);
389dd41b 419 loop->step = fold_build1_loc (loc,
420 NEGATE_EXPR, TREE_TYPE (loop->step),
fd6481cf 421 loop->step);
422 break;
423 default:
424 gcc_unreachable ();
425 }
426
bc7bff74 427 if (simd
428 || (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
429 && !fd->have_ordered))
3d483a94 430 {
431 if (fd->collapse == 1)
432 iter_type = TREE_TYPE (loop->v);
433 else if (i == 0
434 || TYPE_PRECISION (iter_type)
435 < TYPE_PRECISION (TREE_TYPE (loop->v)))
436 iter_type
437 = build_nonstandard_integer_type
bc7bff74 438 (TYPE_PRECISION (TREE_TYPE (loop->v)), 1);
3d483a94 439 }
440 else if (iter_type != long_long_unsigned_type_node)
fd6481cf 441 {
442 if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
443 iter_type = long_long_unsigned_type_node;
444 else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
445 && TYPE_PRECISION (TREE_TYPE (loop->v))
446 >= TYPE_PRECISION (iter_type))
447 {
448 tree n;
449
450 if (loop->cond_code == LT_EXPR)
389dd41b 451 n = fold_build2_loc (loc,
452 PLUS_EXPR, TREE_TYPE (loop->v),
fd6481cf 453 loop->n2, loop->step);
454 else
455 n = loop->n1;
456 if (TREE_CODE (n) != INTEGER_CST
457 || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
458 iter_type = long_long_unsigned_type_node;
459 }
460 else if (TYPE_PRECISION (TREE_TYPE (loop->v))
461 > TYPE_PRECISION (iter_type))
462 {
463 tree n1, n2;
464
465 if (loop->cond_code == LT_EXPR)
466 {
467 n1 = loop->n1;
389dd41b 468 n2 = fold_build2_loc (loc,
469 PLUS_EXPR, TREE_TYPE (loop->v),
fd6481cf 470 loop->n2, loop->step);
471 }
472 else
473 {
389dd41b 474 n1 = fold_build2_loc (loc,
475 MINUS_EXPR, TREE_TYPE (loop->v),
fd6481cf 476 loop->n2, loop->step);
477 n2 = loop->n1;
478 }
479 if (TREE_CODE (n1) != INTEGER_CST
480 || TREE_CODE (n2) != INTEGER_CST
481 || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
482 || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
483 iter_type = long_long_unsigned_type_node;
484 }
485 }
486
487 if (collapse_count && *collapse_count == NULL)
488 {
8e6b4515 489 t = fold_binary (loop->cond_code, boolean_type_node,
490 fold_convert (TREE_TYPE (loop->v), loop->n1),
491 fold_convert (TREE_TYPE (loop->v), loop->n2));
492 if (t && integer_zerop (t))
493 count = build_zero_cst (long_long_unsigned_type_node);
494 else if ((i == 0 || count != NULL_TREE)
495 && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
496 && TREE_CONSTANT (loop->n1)
497 && TREE_CONSTANT (loop->n2)
498 && TREE_CODE (loop->step) == INTEGER_CST)
fd6481cf 499 {
500 tree itype = TREE_TYPE (loop->v);
501
502 if (POINTER_TYPE_P (itype))
3cea8318 503 itype = signed_type_for (itype);
fd6481cf 504 t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
389dd41b 505 t = fold_build2_loc (loc,
506 PLUS_EXPR, itype,
507 fold_convert_loc (loc, itype, loop->step), t);
508 t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
509 fold_convert_loc (loc, itype, loop->n2));
510 t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
511 fold_convert_loc (loc, itype, loop->n1));
fd6481cf 512 if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
389dd41b 513 t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
514 fold_build1_loc (loc, NEGATE_EXPR, itype, t),
515 fold_build1_loc (loc, NEGATE_EXPR, itype,
516 fold_convert_loc (loc, itype,
517 loop->step)));
fd6481cf 518 else
389dd41b 519 t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
520 fold_convert_loc (loc, itype, loop->step));
521 t = fold_convert_loc (loc, long_long_unsigned_type_node, t);
fd6481cf 522 if (count != NULL_TREE)
389dd41b 523 count = fold_build2_loc (loc,
524 MULT_EXPR, long_long_unsigned_type_node,
fd6481cf 525 count, t);
526 else
527 count = t;
528 if (TREE_CODE (count) != INTEGER_CST)
529 count = NULL_TREE;
530 }
8e6b4515 531 else if (count && !integer_zerop (count))
fd6481cf 532 count = NULL_TREE;
533 }
534 }
535
3d483a94 536 if (count
bc7bff74 537 && !simd
538 && (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
539 || fd->have_ordered))
fd6481cf 540 {
541 if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
542 iter_type = long_long_unsigned_type_node;
543 else
544 iter_type = long_integer_type_node;
545 }
546 else if (collapse_iter && *collapse_iter != NULL)
547 iter_type = TREE_TYPE (*collapse_iter);
548 fd->iter_type = iter_type;
549 if (collapse_iter && *collapse_iter == NULL)
550 *collapse_iter = create_tmp_var (iter_type, ".iter");
551 if (collapse_count && *collapse_count == NULL)
552 {
553 if (count)
389dd41b 554 *collapse_count = fold_convert_loc (loc, iter_type, count);
fd6481cf 555 else
556 *collapse_count = create_tmp_var (iter_type, ".count");
557 }
558
559 if (fd->collapse > 1)
560 {
561 fd->loop.v = *collapse_iter;
562 fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
563 fd->loop.n2 = *collapse_count;
564 fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
565 fd->loop.cond_code = LT_EXPR;
566 }
773c5ba7 567}
568
569
570/* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
571 is the immediate dominator of PAR_ENTRY_BB, return true if there
572 are no data dependencies that would prevent expanding the parallel
573 directive at PAR_ENTRY_BB as a combined parallel+workshare region.
574
575 When expanding a combined parallel+workshare region, the call to
576 the child function may need additional arguments in the case of
75a70cf9 577 GIMPLE_OMP_FOR regions. In some cases, these arguments are
578 computed out of variables passed in from the parent to the child
579 via 'struct .omp_data_s'. For instance:
773c5ba7 580
581 #pragma omp parallel for schedule (guided, i * 4)
582 for (j ...)
583
584 Is lowered into:
585
586 # BLOCK 2 (PAR_ENTRY_BB)
587 .omp_data_o.i = i;
588 #pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)
48e1416a 589
773c5ba7 590 # BLOCK 3 (WS_ENTRY_BB)
591 .omp_data_i = &.omp_data_o;
592 D.1667 = .omp_data_i->i;
593 D.1598 = D.1667 * 4;
594 #pragma omp for schedule (guided, D.1598)
595
596 When we outline the parallel region, the call to the child function
597 'bar.omp_fn.0' will need the value D.1598 in its argument list, but
598 that value is computed *after* the call site. So, in principle we
599 cannot do the transformation.
600
601 To see whether the code in WS_ENTRY_BB blocks the combined
602 parallel+workshare call, we collect all the variables used in the
75a70cf9 603 GIMPLE_OMP_FOR header check whether they appear on the LHS of any
773c5ba7 604 statement in WS_ENTRY_BB. If so, then we cannot emit the combined
605 call.
606
607 FIXME. If we had the SSA form built at this point, we could merely
608 hoist the code in block 3 into block 2 and be done with it. But at
609 this point we don't have dataflow information and though we could
610 hack something up here, it is really not worth the aggravation. */
611
612static bool
f018d957 613workshare_safe_to_combine_p (basic_block ws_entry_bb)
773c5ba7 614{
615 struct omp_for_data fd;
f018d957 616 gimple ws_stmt = last_stmt (ws_entry_bb);
773c5ba7 617
75a70cf9 618 if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
773c5ba7 619 return true;
620
75a70cf9 621 gcc_assert (gimple_code (ws_stmt) == GIMPLE_OMP_FOR);
773c5ba7 622
fd6481cf 623 extract_omp_for_data (ws_stmt, &fd, NULL);
624
625 if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
626 return false;
627 if (fd.iter_type != long_integer_type_node)
628 return false;
773c5ba7 629
630 /* FIXME. We give up too easily here. If any of these arguments
631 are not constants, they will likely involve variables that have
632 been mapped into fields of .omp_data_s for sharing with the child
633 function. With appropriate data flow, it would be possible to
634 see through this. */
fd6481cf 635 if (!is_gimple_min_invariant (fd.loop.n1)
636 || !is_gimple_min_invariant (fd.loop.n2)
637 || !is_gimple_min_invariant (fd.loop.step)
773c5ba7 638 || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
639 return false;
640
641 return true;
642}
643
644
645/* Collect additional arguments needed to emit a combined
646 parallel+workshare call. WS_STMT is the workshare directive being
647 expanded. */
648
f1f41a6c 649static vec<tree, va_gc> *
bc7bff74 650get_ws_args_for (gimple par_stmt, gimple ws_stmt)
773c5ba7 651{
652 tree t;
389dd41b 653 location_t loc = gimple_location (ws_stmt);
f1f41a6c 654 vec<tree, va_gc> *ws_args;
773c5ba7 655
75a70cf9 656 if (gimple_code (ws_stmt) == GIMPLE_OMP_FOR)
773c5ba7 657 {
658 struct omp_for_data fd;
bc7bff74 659 tree n1, n2;
773c5ba7 660
fd6481cf 661 extract_omp_for_data (ws_stmt, &fd, NULL);
bc7bff74 662 n1 = fd.loop.n1;
663 n2 = fd.loop.n2;
664
665 if (gimple_omp_for_combined_into_p (ws_stmt))
666 {
667 tree innerc
668 = find_omp_clause (gimple_omp_parallel_clauses (par_stmt),
669 OMP_CLAUSE__LOOPTEMP_);
670 gcc_assert (innerc);
671 n1 = OMP_CLAUSE_DECL (innerc);
672 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
673 OMP_CLAUSE__LOOPTEMP_);
674 gcc_assert (innerc);
675 n2 = OMP_CLAUSE_DECL (innerc);
676 }
773c5ba7 677
f1f41a6c 678 vec_alloc (ws_args, 3 + (fd.chunk_size != 0));
773c5ba7 679
bc7bff74 680 t = fold_convert_loc (loc, long_integer_type_node, n1);
f1f41a6c 681 ws_args->quick_push (t);
773c5ba7 682
bc7bff74 683 t = fold_convert_loc (loc, long_integer_type_node, n2);
f1f41a6c 684 ws_args->quick_push (t);
773c5ba7 685
414c3a2c 686 t = fold_convert_loc (loc, long_integer_type_node, fd.loop.step);
f1f41a6c 687 ws_args->quick_push (t);
414c3a2c 688
689 if (fd.chunk_size)
690 {
691 t = fold_convert_loc (loc, long_integer_type_node, fd.chunk_size);
f1f41a6c 692 ws_args->quick_push (t);
414c3a2c 693 }
773c5ba7 694
695 return ws_args;
696 }
75a70cf9 697 else if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
773c5ba7 698 {
ac6e3339 699 /* Number of sections is equal to the number of edges from the
75a70cf9 700 GIMPLE_OMP_SECTIONS_SWITCH statement, except for the one to
701 the exit of the sections region. */
702 basic_block bb = single_succ (gimple_bb (ws_stmt));
ac6e3339 703 t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
f1f41a6c 704 vec_alloc (ws_args, 1);
705 ws_args->quick_push (t);
414c3a2c 706 return ws_args;
773c5ba7 707 }
708
709 gcc_unreachable ();
710}
711
712
713/* Discover whether REGION is a combined parallel+workshare region. */
714
715static void
716determine_parallel_type (struct omp_region *region)
1e8e9920 717{
773c5ba7 718 basic_block par_entry_bb, par_exit_bb;
719 basic_block ws_entry_bb, ws_exit_bb;
720
03ed154b 721 if (region == NULL || region->inner == NULL
ac6e3339 722 || region->exit == NULL || region->inner->exit == NULL
723 || region->inner->cont == NULL)
773c5ba7 724 return;
725
726 /* We only support parallel+for and parallel+sections. */
75a70cf9 727 if (region->type != GIMPLE_OMP_PARALLEL
728 || (region->inner->type != GIMPLE_OMP_FOR
729 && region->inner->type != GIMPLE_OMP_SECTIONS))
773c5ba7 730 return;
731
732 /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
733 WS_EXIT_BB -> PAR_EXIT_BB. */
61e47ac8 734 par_entry_bb = region->entry;
735 par_exit_bb = region->exit;
736 ws_entry_bb = region->inner->entry;
737 ws_exit_bb = region->inner->exit;
773c5ba7 738
739 if (single_succ (par_entry_bb) == ws_entry_bb
740 && single_succ (ws_exit_bb) == par_exit_bb
f018d957 741 && workshare_safe_to_combine_p (ws_entry_bb)
75a70cf9 742 && (gimple_omp_parallel_combined_p (last_stmt (par_entry_bb))
de7ef844 743 || (last_and_only_stmt (ws_entry_bb)
744 && last_and_only_stmt (par_exit_bb))))
773c5ba7 745 {
bc7bff74 746 gimple par_stmt = last_stmt (par_entry_bb);
75a70cf9 747 gimple ws_stmt = last_stmt (ws_entry_bb);
61e47ac8 748
75a70cf9 749 if (region->inner->type == GIMPLE_OMP_FOR)
773c5ba7 750 {
751 /* If this is a combined parallel loop, we need to determine
752 whether or not to use the combined library calls. There
753 are two cases where we do not apply the transformation:
754 static loops and any kind of ordered loop. In the first
755 case, we already open code the loop so there is no need
756 to do anything else. In the latter case, the combined
757 parallel loop call would still need extra synchronization
758 to implement ordered semantics, so there would not be any
759 gain in using the combined call. */
75a70cf9 760 tree clauses = gimple_omp_for_clauses (ws_stmt);
773c5ba7 761 tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
762 if (c == NULL
763 || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
764 || find_omp_clause (clauses, OMP_CLAUSE_ORDERED))
765 {
766 region->is_combined_parallel = false;
767 region->inner->is_combined_parallel = false;
768 return;
769 }
770 }
771
772 region->is_combined_parallel = true;
773 region->inner->is_combined_parallel = true;
bc7bff74 774 region->ws_args = get_ws_args_for (par_stmt, ws_stmt);
773c5ba7 775 }
1e8e9920 776}
777
773c5ba7 778
1e8e9920 779/* Return true if EXPR is variable sized. */
780
781static inline bool
1f1872fd 782is_variable_sized (const_tree expr)
1e8e9920 783{
784 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
785}
786
787/* Return true if DECL is a reference type. */
788
789static inline bool
790is_reference (tree decl)
791{
792 return lang_hooks.decls.omp_privatize_by_reference (decl);
793}
794
795/* Lookup variables in the decl or field splay trees. The "maybe" form
796 allows for the variable form to not have been entered, otherwise we
797 assert that the variable must have been entered. */
798
799static inline tree
800lookup_decl (tree var, omp_context *ctx)
801{
e3022db7 802 tree *n;
803 n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
804 return *n;
1e8e9920 805}
806
807static inline tree
e8a588af 808maybe_lookup_decl (const_tree var, omp_context *ctx)
1e8e9920 809{
e3022db7 810 tree *n;
811 n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
812 return n ? *n : NULL_TREE;
1e8e9920 813}
814
815static inline tree
816lookup_field (tree var, omp_context *ctx)
817{
818 splay_tree_node n;
819 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
820 return (tree) n->value;
821}
822
fd6481cf 823static inline tree
824lookup_sfield (tree var, omp_context *ctx)
825{
826 splay_tree_node n;
827 n = splay_tree_lookup (ctx->sfield_map
828 ? ctx->sfield_map : ctx->field_map,
829 (splay_tree_key) var);
830 return (tree) n->value;
831}
832
1e8e9920 833static inline tree
834maybe_lookup_field (tree var, omp_context *ctx)
835{
836 splay_tree_node n;
837 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
838 return n ? (tree) n->value : NULL_TREE;
839}
840
e8a588af 841/* Return true if DECL should be copied by pointer. SHARED_CTX is
842 the parallel context if DECL is to be shared. */
1e8e9920 843
844static bool
fd6481cf 845use_pointer_for_field (tree decl, omp_context *shared_ctx)
1e8e9920 846{
847 if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
848 return true;
849
554f2707 850 /* We can only use copy-in/copy-out semantics for shared variables
1e8e9920 851 when we know the value is not accessible from an outer scope. */
e8a588af 852 if (shared_ctx)
1e8e9920 853 {
854 /* ??? Trivially accessible from anywhere. But why would we even
855 be passing an address in this case? Should we simply assert
856 this to be false, or should we have a cleanup pass that removes
857 these from the list of mappings? */
858 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
859 return true;
860
861 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
862 without analyzing the expression whether or not its location
863 is accessible to anyone else. In the case of nested parallel
864 regions it certainly may be. */
df2c34fc 865 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
1e8e9920 866 return true;
867
868 /* Do not use copy-in/copy-out for variables that have their
869 address taken. */
870 if (TREE_ADDRESSABLE (decl))
871 return true;
e8a588af 872
b8214689 873 /* lower_send_shared_vars only uses copy-in, but not copy-out
874 for these. */
875 if (TREE_READONLY (decl)
876 || ((TREE_CODE (decl) == RESULT_DECL
877 || TREE_CODE (decl) == PARM_DECL)
878 && DECL_BY_REFERENCE (decl)))
879 return false;
880
e8a588af 881 /* Disallow copy-in/out in nested parallel if
882 decl is shared in outer parallel, otherwise
883 each thread could store the shared variable
884 in its own copy-in location, making the
885 variable no longer really shared. */
b8214689 886 if (shared_ctx->is_nested)
e8a588af 887 {
888 omp_context *up;
889
890 for (up = shared_ctx->outer; up; up = up->outer)
0cb159ec 891 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
e8a588af 892 break;
893
0cb159ec 894 if (up)
e8a588af 895 {
896 tree c;
897
75a70cf9 898 for (c = gimple_omp_taskreg_clauses (up->stmt);
e8a588af 899 c; c = OMP_CLAUSE_CHAIN (c))
900 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
901 && OMP_CLAUSE_DECL (c) == decl)
902 break;
903
904 if (c)
784ad964 905 goto maybe_mark_addressable_and_ret;
e8a588af 906 }
907 }
fd6481cf 908
b8214689 909 /* For tasks avoid using copy-in/out. As tasks can be
fd6481cf 910 deferred or executed in different thread, when GOMP_task
911 returns, the task hasn't necessarily terminated. */
b8214689 912 if (is_task_ctx (shared_ctx))
fd6481cf 913 {
784ad964 914 tree outer;
915 maybe_mark_addressable_and_ret:
916 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
fd6481cf 917 if (is_gimple_reg (outer))
918 {
919 /* Taking address of OUTER in lower_send_shared_vars
920 might need regimplification of everything that uses the
921 variable. */
922 if (!task_shared_vars)
923 task_shared_vars = BITMAP_ALLOC (NULL);
924 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
925 TREE_ADDRESSABLE (outer) = 1;
926 }
927 return true;
928 }
1e8e9920 929 }
930
931 return false;
932}
933
79acaae1 934/* Construct a new automatic decl similar to VAR. */
935
936static tree
937omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
938{
939 tree copy = copy_var_decl (var, name, type);
940
941 DECL_CONTEXT (copy) = current_function_decl;
1767a056 942 DECL_CHAIN (copy) = ctx->block_vars;
1e8e9920 943 ctx->block_vars = copy;
944
945 return copy;
946}
947
948static tree
949omp_copy_decl_1 (tree var, omp_context *ctx)
950{
951 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
952}
953
445d06b6 954/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
955 as appropriate. */
956static tree
957omp_build_component_ref (tree obj, tree field)
958{
959 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
960 if (TREE_THIS_VOLATILE (field))
961 TREE_THIS_VOLATILE (ret) |= 1;
962 if (TREE_READONLY (field))
963 TREE_READONLY (ret) |= 1;
964 return ret;
965}
966
1e8e9920 967/* Build tree nodes to access the field for VAR on the receiver side. */
968
969static tree
970build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
971{
972 tree x, field = lookup_field (var, ctx);
973
974 /* If the receiver record type was remapped in the child function,
975 remap the field into the new record type. */
976 x = maybe_lookup_field (field, ctx);
977 if (x != NULL)
978 field = x;
979
182cf5a9 980 x = build_simple_mem_ref (ctx->receiver_decl);
445d06b6 981 x = omp_build_component_ref (x, field);
1e8e9920 982 if (by_ref)
182cf5a9 983 x = build_simple_mem_ref (x);
1e8e9920 984
985 return x;
986}
987
988/* Build tree nodes to access VAR in the scope outer to CTX. In the case
989 of a parallel, this is a component reference; for workshare constructs
990 this is some variable. */
991
992static tree
993build_outer_var_ref (tree var, omp_context *ctx)
994{
995 tree x;
996
f49d7bb5 997 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
1e8e9920 998 x = var;
999 else if (is_variable_sized (var))
1000 {
1001 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
1002 x = build_outer_var_ref (x, ctx);
182cf5a9 1003 x = build_simple_mem_ref (x);
1e8e9920 1004 }
fd6481cf 1005 else if (is_taskreg_ctx (ctx))
1e8e9920 1006 {
e8a588af 1007 bool by_ref = use_pointer_for_field (var, NULL);
1e8e9920 1008 x = build_receiver_ref (var, by_ref, ctx);
1009 }
3d483a94 1010 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
f2697631 1011 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
3d483a94 1012 {
1013 /* #pragma omp simd isn't a worksharing construct, and can reference even
1014 private vars in its linear etc. clauses. */
1015 x = NULL_TREE;
1016 if (ctx->outer && is_taskreg_ctx (ctx))
1017 x = lookup_decl (var, ctx->outer);
1018 else if (ctx->outer)
84cb1020 1019 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
3d483a94 1020 if (x == NULL_TREE)
1021 x = var;
1022 }
1e8e9920 1023 else if (ctx->outer)
1024 x = lookup_decl (var, ctx->outer);
9438af57 1025 else if (is_reference (var))
1026 /* This can happen with orphaned constructs. If var is reference, it is
1027 possible it is shared and as such valid. */
1028 x = var;
1e8e9920 1029 else
1030 gcc_unreachable ();
1031
1032 if (is_reference (var))
182cf5a9 1033 x = build_simple_mem_ref (x);
1e8e9920 1034
1035 return x;
1036}
1037
1038/* Build tree nodes to access the field for VAR on the sender side. */
1039
1040static tree
1041build_sender_ref (tree var, omp_context *ctx)
1042{
fd6481cf 1043 tree field = lookup_sfield (var, ctx);
445d06b6 1044 return omp_build_component_ref (ctx->sender_decl, field);
1e8e9920 1045}
1046
1047/* Add a new field for VAR inside the structure CTX->SENDER_DECL. */
1048
1049static void
fd6481cf 1050install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
1e8e9920 1051{
fd6481cf 1052 tree field, type, sfield = NULL_TREE;
1e8e9920 1053
fd6481cf 1054 gcc_assert ((mask & 1) == 0
1055 || !splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
1056 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
1057 || !splay_tree_lookup (ctx->sfield_map, (splay_tree_key) var));
1e8e9920 1058
1059 type = TREE_TYPE (var);
bc7bff74 1060 if (mask & 4)
1061 {
1062 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
1063 type = build_pointer_type (build_pointer_type (type));
1064 }
1065 else if (by_ref)
1e8e9920 1066 type = build_pointer_type (type);
fd6481cf 1067 else if ((mask & 3) == 1 && is_reference (var))
1068 type = TREE_TYPE (type);
1e8e9920 1069
e60a6f7b 1070 field = build_decl (DECL_SOURCE_LOCATION (var),
1071 FIELD_DECL, DECL_NAME (var), type);
1e8e9920 1072
1073 /* Remember what variable this field was created for. This does have a
1074 side effect of making dwarf2out ignore this member, so for helpful
1075 debugging we clear it later in delete_omp_context. */
1076 DECL_ABSTRACT_ORIGIN (field) = var;
fd6481cf 1077 if (type == TREE_TYPE (var))
1078 {
1079 DECL_ALIGN (field) = DECL_ALIGN (var);
1080 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
1081 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
1082 }
1083 else
1084 DECL_ALIGN (field) = TYPE_ALIGN (type);
1e8e9920 1085
fd6481cf 1086 if ((mask & 3) == 3)
1087 {
1088 insert_field_into_struct (ctx->record_type, field);
1089 if (ctx->srecord_type)
1090 {
e60a6f7b 1091 sfield = build_decl (DECL_SOURCE_LOCATION (var),
1092 FIELD_DECL, DECL_NAME (var), type);
fd6481cf 1093 DECL_ABSTRACT_ORIGIN (sfield) = var;
1094 DECL_ALIGN (sfield) = DECL_ALIGN (field);
1095 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
1096 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
1097 insert_field_into_struct (ctx->srecord_type, sfield);
1098 }
1099 }
1100 else
1101 {
1102 if (ctx->srecord_type == NULL_TREE)
1103 {
1104 tree t;
1105
1106 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
1107 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1108 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
1109 {
e60a6f7b 1110 sfield = build_decl (DECL_SOURCE_LOCATION (var),
1111 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
fd6481cf 1112 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
1113 insert_field_into_struct (ctx->srecord_type, sfield);
1114 splay_tree_insert (ctx->sfield_map,
1115 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
1116 (splay_tree_value) sfield);
1117 }
1118 }
1119 sfield = field;
1120 insert_field_into_struct ((mask & 1) ? ctx->record_type
1121 : ctx->srecord_type, field);
1122 }
1e8e9920 1123
fd6481cf 1124 if (mask & 1)
1125 splay_tree_insert (ctx->field_map, (splay_tree_key) var,
1126 (splay_tree_value) field);
1127 if ((mask & 2) && ctx->sfield_map)
1128 splay_tree_insert (ctx->sfield_map, (splay_tree_key) var,
1129 (splay_tree_value) sfield);
1e8e9920 1130}
1131
1132static tree
1133install_var_local (tree var, omp_context *ctx)
1134{
1135 tree new_var = omp_copy_decl_1 (var, ctx);
1136 insert_decl_map (&ctx->cb, var, new_var);
1137 return new_var;
1138}
1139
1140/* Adjust the replacement for DECL in CTX for the new context. This means
1141 copying the DECL_VALUE_EXPR, and fixing up the type. */
1142
1143static void
1144fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
1145{
1146 tree new_decl, size;
1147
1148 new_decl = lookup_decl (decl, ctx);
1149
1150 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
1151
1152 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
1153 && DECL_HAS_VALUE_EXPR_P (decl))
1154 {
1155 tree ve = DECL_VALUE_EXPR (decl);
75a70cf9 1156 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
1e8e9920 1157 SET_DECL_VALUE_EXPR (new_decl, ve);
1158 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1159 }
1160
1161 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
1162 {
1163 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
1164 if (size == error_mark_node)
1165 size = TYPE_SIZE (TREE_TYPE (new_decl));
1166 DECL_SIZE (new_decl) = size;
1167
1168 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
1169 if (size == error_mark_node)
1170 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
1171 DECL_SIZE_UNIT (new_decl) = size;
1172 }
1173}
1174
1175/* The callback for remap_decl. Search all containing contexts for a
1176 mapping of the variable; this avoids having to duplicate the splay
1177 tree ahead of time. We know a mapping doesn't already exist in the
1178 given context. Create new mappings to implement default semantics. */
1179
1180static tree
1181omp_copy_decl (tree var, copy_body_data *cb)
1182{
1183 omp_context *ctx = (omp_context *) cb;
1184 tree new_var;
1185
1e8e9920 1186 if (TREE_CODE (var) == LABEL_DECL)
1187 {
e60a6f7b 1188 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
773c5ba7 1189 DECL_CONTEXT (new_var) = current_function_decl;
1e8e9920 1190 insert_decl_map (&ctx->cb, var, new_var);
1191 return new_var;
1192 }
1193
fd6481cf 1194 while (!is_taskreg_ctx (ctx))
1e8e9920 1195 {
1196 ctx = ctx->outer;
1197 if (ctx == NULL)
1198 return var;
1199 new_var = maybe_lookup_decl (var, ctx);
1200 if (new_var)
1201 return new_var;
1202 }
1203
f49d7bb5 1204 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
1205 return var;
1206
1e8e9920 1207 return error_mark_node;
1208}
1209
773c5ba7 1210
1211/* Return the parallel region associated with STMT. */
1212
773c5ba7 1213/* Debugging dumps for parallel regions. */
1214void dump_omp_region (FILE *, struct omp_region *, int);
1215void debug_omp_region (struct omp_region *);
1216void debug_all_omp_regions (void);
1217
1218/* Dump the parallel region tree rooted at REGION. */
1219
1220void
1221dump_omp_region (FILE *file, struct omp_region *region, int indent)
1222{
61e47ac8 1223 fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
75a70cf9 1224 gimple_code_name[region->type]);
773c5ba7 1225
1226 if (region->inner)
1227 dump_omp_region (file, region->inner, indent + 4);
1228
61e47ac8 1229 if (region->cont)
1230 {
75a70cf9 1231 fprintf (file, "%*sbb %d: GIMPLE_OMP_CONTINUE\n", indent, "",
61e47ac8 1232 region->cont->index);
1233 }
48e1416a 1234
773c5ba7 1235 if (region->exit)
75a70cf9 1236 fprintf (file, "%*sbb %d: GIMPLE_OMP_RETURN\n", indent, "",
61e47ac8 1237 region->exit->index);
773c5ba7 1238 else
61e47ac8 1239 fprintf (file, "%*s[no exit marker]\n", indent, "");
773c5ba7 1240
1241 if (region->next)
61e47ac8 1242 dump_omp_region (file, region->next, indent);
773c5ba7 1243}
1244
4b987fac 1245DEBUG_FUNCTION void
773c5ba7 1246debug_omp_region (struct omp_region *region)
1247{
1248 dump_omp_region (stderr, region, 0);
1249}
1250
4b987fac 1251DEBUG_FUNCTION void
773c5ba7 1252debug_all_omp_regions (void)
1253{
1254 dump_omp_region (stderr, root_omp_region, 0);
1255}
1256
1257
1258/* Create a new parallel region starting at STMT inside region PARENT. */
1259
7740abd8 1260static struct omp_region *
75a70cf9 1261new_omp_region (basic_block bb, enum gimple_code type,
1262 struct omp_region *parent)
773c5ba7 1263{
4077bf7a 1264 struct omp_region *region = XCNEW (struct omp_region);
773c5ba7 1265
1266 region->outer = parent;
61e47ac8 1267 region->entry = bb;
1268 region->type = type;
773c5ba7 1269
1270 if (parent)
1271 {
1272 /* This is a nested region. Add it to the list of inner
1273 regions in PARENT. */
1274 region->next = parent->inner;
1275 parent->inner = region;
1276 }
61e47ac8 1277 else
773c5ba7 1278 {
1279 /* This is a toplevel region. Add it to the list of toplevel
1280 regions in ROOT_OMP_REGION. */
1281 region->next = root_omp_region;
1282 root_omp_region = region;
1283 }
61e47ac8 1284
1285 return region;
1286}
1287
1288/* Release the memory associated with the region tree rooted at REGION. */
1289
1290static void
1291free_omp_region_1 (struct omp_region *region)
1292{
1293 struct omp_region *i, *n;
1294
1295 for (i = region->inner; i ; i = n)
773c5ba7 1296 {
61e47ac8 1297 n = i->next;
1298 free_omp_region_1 (i);
773c5ba7 1299 }
1300
61e47ac8 1301 free (region);
1302}
773c5ba7 1303
61e47ac8 1304/* Release the memory for the entire omp region tree. */
1305
1306void
1307free_omp_regions (void)
1308{
1309 struct omp_region *r, *n;
1310 for (r = root_omp_region; r ; r = n)
1311 {
1312 n = r->next;
1313 free_omp_region_1 (r);
1314 }
1315 root_omp_region = NULL;
773c5ba7 1316}
1317
1318
1e8e9920 1319/* Create a new context, with OUTER_CTX being the surrounding context. */
1320
1321static omp_context *
75a70cf9 1322new_omp_context (gimple stmt, omp_context *outer_ctx)
1e8e9920 1323{
1324 omp_context *ctx = XCNEW (omp_context);
1325
1326 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
1327 (splay_tree_value) ctx);
1328 ctx->stmt = stmt;
1329
1330 if (outer_ctx)
1331 {
1332 ctx->outer = outer_ctx;
1333 ctx->cb = outer_ctx->cb;
1334 ctx->cb.block = NULL;
1335 ctx->depth = outer_ctx->depth + 1;
1336 }
1337 else
1338 {
1339 ctx->cb.src_fn = current_function_decl;
1340 ctx->cb.dst_fn = current_function_decl;
53f79206 1341 ctx->cb.src_node = cgraph_get_node (current_function_decl);
1342 gcc_checking_assert (ctx->cb.src_node);
1e8e9920 1343 ctx->cb.dst_node = ctx->cb.src_node;
1344 ctx->cb.src_cfun = cfun;
1345 ctx->cb.copy_decl = omp_copy_decl;
e38def9c 1346 ctx->cb.eh_lp_nr = 0;
1e8e9920 1347 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
1348 ctx->depth = 1;
1349 }
1350
e3022db7 1351 ctx->cb.decl_map = pointer_map_create ();
1e8e9920 1352
1353 return ctx;
1354}
1355
75a70cf9 1356static gimple_seq maybe_catch_exception (gimple_seq);
f6430caa 1357
1358/* Finalize task copyfn. */
1359
1360static void
75a70cf9 1361finalize_task_copyfn (gimple task_stmt)
f6430caa 1362{
1363 struct function *child_cfun;
9078126c 1364 tree child_fn;
e3a19533 1365 gimple_seq seq = NULL, new_seq;
75a70cf9 1366 gimple bind;
f6430caa 1367
75a70cf9 1368 child_fn = gimple_omp_task_copy_fn (task_stmt);
f6430caa 1369 if (child_fn == NULL_TREE)
1370 return;
1371
1372 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
82b40354 1373 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
f6430caa 1374
f6430caa 1375 push_cfun (child_cfun);
7e3aae05 1376 bind = gimplify_body (child_fn, false);
75a70cf9 1377 gimple_seq_add_stmt (&seq, bind);
1378 new_seq = maybe_catch_exception (seq);
1379 if (new_seq != seq)
1380 {
1381 bind = gimple_build_bind (NULL, new_seq, NULL);
e3a19533 1382 seq = NULL;
75a70cf9 1383 gimple_seq_add_stmt (&seq, bind);
1384 }
1385 gimple_set_body (child_fn, seq);
f6430caa 1386 pop_cfun ();
f6430caa 1387
82b40354 1388 /* Inform the callgraph about the new function. */
f6430caa 1389 cgraph_add_new_function (child_fn, false);
1390}
1391
1e8e9920 1392/* Destroy a omp_context data structures. Called through the splay tree
1393 value delete callback. */
1394
1395static void
1396delete_omp_context (splay_tree_value value)
1397{
1398 omp_context *ctx = (omp_context *) value;
1399
e3022db7 1400 pointer_map_destroy (ctx->cb.decl_map);
1e8e9920 1401
1402 if (ctx->field_map)
1403 splay_tree_delete (ctx->field_map);
fd6481cf 1404 if (ctx->sfield_map)
1405 splay_tree_delete (ctx->sfield_map);
1e8e9920 1406
1407 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1408 it produces corrupt debug information. */
1409 if (ctx->record_type)
1410 {
1411 tree t;
1767a056 1412 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1e8e9920 1413 DECL_ABSTRACT_ORIGIN (t) = NULL;
1414 }
fd6481cf 1415 if (ctx->srecord_type)
1416 {
1417 tree t;
1767a056 1418 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
fd6481cf 1419 DECL_ABSTRACT_ORIGIN (t) = NULL;
1420 }
1e8e9920 1421
f6430caa 1422 if (is_task_ctx (ctx))
1423 finalize_task_copyfn (ctx->stmt);
1424
1e8e9920 1425 XDELETE (ctx);
1426}
1427
1428/* Fix up RECEIVER_DECL with a type that has been remapped to the child
1429 context. */
1430
1431static void
1432fixup_child_record_type (omp_context *ctx)
1433{
1434 tree f, type = ctx->record_type;
1435
1436 /* ??? It isn't sufficient to just call remap_type here, because
1437 variably_modified_type_p doesn't work the way we expect for
1438 record types. Testing each field for whether it needs remapping
1439 and creating a new record by hand works, however. */
1767a056 1440 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1e8e9920 1441 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1442 break;
1443 if (f)
1444 {
1445 tree name, new_fields = NULL;
1446
1447 type = lang_hooks.types.make_type (RECORD_TYPE);
1448 name = DECL_NAME (TYPE_NAME (ctx->record_type));
e60a6f7b 1449 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1450 TYPE_DECL, name, type);
1e8e9920 1451 TYPE_NAME (type) = name;
1452
1767a056 1453 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1e8e9920 1454 {
1455 tree new_f = copy_node (f);
1456 DECL_CONTEXT (new_f) = type;
1457 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1767a056 1458 DECL_CHAIN (new_f) = new_fields;
75a70cf9 1459 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1460 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1461 &ctx->cb, NULL);
1462 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1463 &ctx->cb, NULL);
1e8e9920 1464 new_fields = new_f;
1465
1466 /* Arrange to be able to look up the receiver field
1467 given the sender field. */
1468 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1469 (splay_tree_value) new_f);
1470 }
1471 TYPE_FIELDS (type) = nreverse (new_fields);
1472 layout_type (type);
1473 }
1474
1475 TREE_TYPE (ctx->receiver_decl) = build_pointer_type (type);
1476}
1477
1478/* Instantiate decls as necessary in CTX to satisfy the data sharing
1479 specified by CLAUSES. */
1480
1481static void
1482scan_sharing_clauses (tree clauses, omp_context *ctx)
1483{
1484 tree c, decl;
1485 bool scan_array_reductions = false;
1486
1487 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1488 {
1489 bool by_ref;
1490
55d6e7cd 1491 switch (OMP_CLAUSE_CODE (c))
1e8e9920 1492 {
1493 case OMP_CLAUSE_PRIVATE:
1494 decl = OMP_CLAUSE_DECL (c);
fd6481cf 1495 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1496 goto do_private;
1497 else if (!is_variable_sized (decl))
1e8e9920 1498 install_var_local (decl, ctx);
1499 break;
1500
1501 case OMP_CLAUSE_SHARED:
bc7bff74 1502 /* Ignore shared directives in teams construct. */
1503 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1504 break;
fd6481cf 1505 gcc_assert (is_taskreg_ctx (ctx));
1e8e9920 1506 decl = OMP_CLAUSE_DECL (c);
e7327393 1507 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1508 || !is_variable_sized (decl));
f49d7bb5 1509 /* Global variables don't need to be copied,
1510 the receiver side will use them directly. */
1511 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1512 break;
fd6481cf 1513 by_ref = use_pointer_for_field (decl, ctx);
1e8e9920 1514 if (! TREE_READONLY (decl)
1515 || TREE_ADDRESSABLE (decl)
1516 || by_ref
1517 || is_reference (decl))
1518 {
fd6481cf 1519 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1520 install_var_local (decl, ctx);
1521 break;
1522 }
1523 /* We don't need to copy const scalar vars back. */
55d6e7cd 1524 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1e8e9920 1525 goto do_private;
1526
1527 case OMP_CLAUSE_LASTPRIVATE:
1528 /* Let the corresponding firstprivate clause create
1529 the variable. */
1530 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1531 break;
1532 /* FALLTHRU */
1533
1534 case OMP_CLAUSE_FIRSTPRIVATE:
1535 case OMP_CLAUSE_REDUCTION:
3d483a94 1536 case OMP_CLAUSE_LINEAR:
1e8e9920 1537 decl = OMP_CLAUSE_DECL (c);
1538 do_private:
1539 if (is_variable_sized (decl))
1e8e9920 1540 {
fd6481cf 1541 if (is_task_ctx (ctx))
1542 install_var_field (decl, false, 1, ctx);
1543 break;
1544 }
1545 else if (is_taskreg_ctx (ctx))
1546 {
1547 bool global
1548 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
e8a588af 1549 by_ref = use_pointer_for_field (decl, NULL);
fd6481cf 1550
1551 if (is_task_ctx (ctx)
1552 && (global || by_ref || is_reference (decl)))
1553 {
1554 install_var_field (decl, false, 1, ctx);
1555 if (!global)
1556 install_var_field (decl, by_ref, 2, ctx);
1557 }
1558 else if (!global)
1559 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1560 }
1561 install_var_local (decl, ctx);
1562 break;
1563
bc7bff74 1564 case OMP_CLAUSE__LOOPTEMP_:
1565 gcc_assert (is_parallel_ctx (ctx));
1566 decl = OMP_CLAUSE_DECL (c);
1567 install_var_field (decl, false, 3, ctx);
1568 install_var_local (decl, ctx);
1569 break;
1570
1e8e9920 1571 case OMP_CLAUSE_COPYPRIVATE:
1e8e9920 1572 case OMP_CLAUSE_COPYIN:
1573 decl = OMP_CLAUSE_DECL (c);
e8a588af 1574 by_ref = use_pointer_for_field (decl, NULL);
fd6481cf 1575 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1576 break;
1577
1578 case OMP_CLAUSE_DEFAULT:
1579 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
1580 break;
1581
2169f33b 1582 case OMP_CLAUSE_FINAL:
1e8e9920 1583 case OMP_CLAUSE_IF:
1584 case OMP_CLAUSE_NUM_THREADS:
bc7bff74 1585 case OMP_CLAUSE_NUM_TEAMS:
1586 case OMP_CLAUSE_THREAD_LIMIT:
1587 case OMP_CLAUSE_DEVICE:
1e8e9920 1588 case OMP_CLAUSE_SCHEDULE:
bc7bff74 1589 case OMP_CLAUSE_DIST_SCHEDULE:
1590 case OMP_CLAUSE_DEPEND:
1e8e9920 1591 if (ctx->outer)
75a70cf9 1592 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1e8e9920 1593 break;
1594
bc7bff74 1595 case OMP_CLAUSE_TO:
1596 case OMP_CLAUSE_FROM:
1597 case OMP_CLAUSE_MAP:
1598 if (ctx->outer)
1599 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1600 decl = OMP_CLAUSE_DECL (c);
1601 /* Global variables with "omp declare target" attribute
1602 don't need to be copied, the receiver side will use them
1603 directly. */
1604 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1605 && DECL_P (decl)
1606 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1607 && lookup_attribute ("omp declare target",
1608 DECL_ATTRIBUTES (decl)))
1609 break;
1610 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1611 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER)
1612 {
1613 /* Ignore OMP_CLAUSE_MAP_POINTER kind for arrays in
1614 #pragma omp target data, there is nothing to map for
1615 those. */
1616 if (gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_DATA
1617 && !POINTER_TYPE_P (TREE_TYPE (decl)))
1618 break;
1619 }
1620 if (DECL_P (decl))
1621 {
1622 if (DECL_SIZE (decl)
1623 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1624 {
1625 tree decl2 = DECL_VALUE_EXPR (decl);
1626 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1627 decl2 = TREE_OPERAND (decl2, 0);
1628 gcc_assert (DECL_P (decl2));
1629 install_var_field (decl2, true, 3, ctx);
1630 install_var_local (decl2, ctx);
1631 install_var_local (decl, ctx);
1632 }
1633 else
1634 {
1635 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1636 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
1637 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1638 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1639 install_var_field (decl, true, 7, ctx);
1640 else
1641 install_var_field (decl, true, 3, ctx);
1642 if (gimple_omp_target_kind (ctx->stmt)
1643 == GF_OMP_TARGET_KIND_REGION)
1644 install_var_local (decl, ctx);
1645 }
1646 }
1647 else
1648 {
1649 tree base = get_base_address (decl);
1650 tree nc = OMP_CLAUSE_CHAIN (c);
1651 if (DECL_P (base)
1652 && nc != NULL_TREE
1653 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1654 && OMP_CLAUSE_DECL (nc) == base
1655 && OMP_CLAUSE_MAP_KIND (nc) == OMP_CLAUSE_MAP_POINTER
1656 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1657 {
1658 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1659 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1660 }
1661 else
1662 {
1663 gcc_assert (!splay_tree_lookup (ctx->field_map,
1664 (splay_tree_key) decl));
1665 tree field
1666 = build_decl (OMP_CLAUSE_LOCATION (c),
1667 FIELD_DECL, NULL_TREE, ptr_type_node);
1668 DECL_ALIGN (field) = TYPE_ALIGN (ptr_type_node);
1669 insert_field_into_struct (ctx->record_type, field);
1670 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1671 (splay_tree_value) field);
1672 }
1673 }
1674 break;
1675
1e8e9920 1676 case OMP_CLAUSE_NOWAIT:
1677 case OMP_CLAUSE_ORDERED:
fd6481cf 1678 case OMP_CLAUSE_COLLAPSE:
1679 case OMP_CLAUSE_UNTIED:
2169f33b 1680 case OMP_CLAUSE_MERGEABLE:
bc7bff74 1681 case OMP_CLAUSE_PROC_BIND:
3d483a94 1682 case OMP_CLAUSE_SAFELEN:
1e8e9920 1683 break;
1684
bc7bff74 1685 case OMP_CLAUSE_ALIGNED:
1686 decl = OMP_CLAUSE_DECL (c);
1687 if (is_global_var (decl)
1688 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1689 install_var_local (decl, ctx);
1690 break;
1691
1e8e9920 1692 default:
1693 gcc_unreachable ();
1694 }
1695 }
1696
1697 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1698 {
55d6e7cd 1699 switch (OMP_CLAUSE_CODE (c))
1e8e9920 1700 {
1701 case OMP_CLAUSE_LASTPRIVATE:
1702 /* Let the corresponding firstprivate clause create
1703 the variable. */
75a70cf9 1704 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
fd6481cf 1705 scan_array_reductions = true;
1e8e9920 1706 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1707 break;
1708 /* FALLTHRU */
1709
1710 case OMP_CLAUSE_PRIVATE:
1711 case OMP_CLAUSE_FIRSTPRIVATE:
1712 case OMP_CLAUSE_REDUCTION:
3d483a94 1713 case OMP_CLAUSE_LINEAR:
1e8e9920 1714 decl = OMP_CLAUSE_DECL (c);
1715 if (is_variable_sized (decl))
1716 install_var_local (decl, ctx);
1717 fixup_remapped_decl (decl, ctx,
55d6e7cd 1718 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1e8e9920 1719 && OMP_CLAUSE_PRIVATE_DEBUG (c));
55d6e7cd 1720 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1e8e9920 1721 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1722 scan_array_reductions = true;
1723 break;
1724
1725 case OMP_CLAUSE_SHARED:
bc7bff74 1726 /* Ignore shared directives in teams construct. */
1727 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1728 break;
1e8e9920 1729 decl = OMP_CLAUSE_DECL (c);
f49d7bb5 1730 if (! is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1731 fixup_remapped_decl (decl, ctx, false);
1e8e9920 1732 break;
1733
bc7bff74 1734 case OMP_CLAUSE_MAP:
1735 if (gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_DATA)
1736 break;
1737 decl = OMP_CLAUSE_DECL (c);
1738 if (DECL_P (decl)
1739 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1740 && lookup_attribute ("omp declare target",
1741 DECL_ATTRIBUTES (decl)))
1742 break;
1743 if (DECL_P (decl))
1744 {
1745 if (OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
1746 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1747 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1748 {
1749 tree new_decl = lookup_decl (decl, ctx);
1750 TREE_TYPE (new_decl)
1751 = remap_type (TREE_TYPE (decl), &ctx->cb);
1752 }
1753 else if (DECL_SIZE (decl)
1754 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1755 {
1756 tree decl2 = DECL_VALUE_EXPR (decl);
1757 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1758 decl2 = TREE_OPERAND (decl2, 0);
1759 gcc_assert (DECL_P (decl2));
1760 fixup_remapped_decl (decl2, ctx, false);
1761 fixup_remapped_decl (decl, ctx, true);
1762 }
1763 else
1764 fixup_remapped_decl (decl, ctx, false);
1765 }
1766 break;
1767
1e8e9920 1768 case OMP_CLAUSE_COPYPRIVATE:
1769 case OMP_CLAUSE_COPYIN:
1770 case OMP_CLAUSE_DEFAULT:
1771 case OMP_CLAUSE_IF:
1772 case OMP_CLAUSE_NUM_THREADS:
bc7bff74 1773 case OMP_CLAUSE_NUM_TEAMS:
1774 case OMP_CLAUSE_THREAD_LIMIT:
1775 case OMP_CLAUSE_DEVICE:
1e8e9920 1776 case OMP_CLAUSE_SCHEDULE:
bc7bff74 1777 case OMP_CLAUSE_DIST_SCHEDULE:
1e8e9920 1778 case OMP_CLAUSE_NOWAIT:
1779 case OMP_CLAUSE_ORDERED:
fd6481cf 1780 case OMP_CLAUSE_COLLAPSE:
1781 case OMP_CLAUSE_UNTIED:
2169f33b 1782 case OMP_CLAUSE_FINAL:
1783 case OMP_CLAUSE_MERGEABLE:
bc7bff74 1784 case OMP_CLAUSE_PROC_BIND:
3d483a94 1785 case OMP_CLAUSE_SAFELEN:
bc7bff74 1786 case OMP_CLAUSE_ALIGNED:
1787 case OMP_CLAUSE_DEPEND:
1788 case OMP_CLAUSE__LOOPTEMP_:
1789 case OMP_CLAUSE_TO:
1790 case OMP_CLAUSE_FROM:
1e8e9920 1791 break;
1792
1793 default:
1794 gcc_unreachable ();
1795 }
1796 }
1797
1798 if (scan_array_reductions)
1799 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
55d6e7cd 1800 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1e8e9920 1801 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1802 {
ab129075 1803 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1804 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1e8e9920 1805 }
fd6481cf 1806 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
75a70cf9 1807 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
ab129075 1808 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1e8e9920 1809}
1810
1811/* Create a new name for omp child function. Returns an identifier. */
1812
1813static GTY(()) unsigned int tmp_ompfn_id_num;
1814
1815static tree
fd6481cf 1816create_omp_child_function_name (bool task_copy)
1e8e9920 1817{
a70a5e2c 1818 return (clone_function_name (current_function_decl,
1819 task_copy ? "_omp_cpyfn" : "_omp_fn"));
1e8e9920 1820}
1821
1822/* Build a decl for the omp child function. It'll not contain a body
1823 yet, just the bare decl. */
1824
1825static void
fd6481cf 1826create_omp_child_function (omp_context *ctx, bool task_copy)
1e8e9920 1827{
1828 tree decl, type, name, t;
1829
fd6481cf 1830 name = create_omp_child_function_name (task_copy);
1831 if (task_copy)
1832 type = build_function_type_list (void_type_node, ptr_type_node,
1833 ptr_type_node, NULL_TREE);
1834 else
1835 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1e8e9920 1836
e60a6f7b 1837 decl = build_decl (gimple_location (ctx->stmt),
1838 FUNCTION_DECL, name, type);
1e8e9920 1839
fd6481cf 1840 if (!task_copy)
1841 ctx->cb.dst_fn = decl;
1842 else
75a70cf9 1843 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1e8e9920 1844
1845 TREE_STATIC (decl) = 1;
1846 TREE_USED (decl) = 1;
1847 DECL_ARTIFICIAL (decl) = 1;
84bfaaeb 1848 DECL_NAMELESS (decl) = 1;
1e8e9920 1849 DECL_IGNORED_P (decl) = 0;
1850 TREE_PUBLIC (decl) = 0;
1851 DECL_UNINLINABLE (decl) = 1;
1852 DECL_EXTERNAL (decl) = 0;
1853 DECL_CONTEXT (decl) = NULL_TREE;
773c5ba7 1854 DECL_INITIAL (decl) = make_node (BLOCK);
bc7bff74 1855 bool target_p = false;
1856 if (lookup_attribute ("omp declare target",
1857 DECL_ATTRIBUTES (current_function_decl)))
1858 target_p = true;
1859 else
1860 {
1861 omp_context *octx;
1862 for (octx = ctx; octx; octx = octx->outer)
1863 if (gimple_code (octx->stmt) == GIMPLE_OMP_TARGET
1864 && gimple_omp_target_kind (octx->stmt)
1865 == GF_OMP_TARGET_KIND_REGION)
1866 {
1867 target_p = true;
1868 break;
1869 }
1870 }
1871 if (target_p)
1872 DECL_ATTRIBUTES (decl)
1873 = tree_cons (get_identifier ("omp declare target"),
1874 NULL_TREE, DECL_ATTRIBUTES (decl));
1e8e9920 1875
e60a6f7b 1876 t = build_decl (DECL_SOURCE_LOCATION (decl),
1877 RESULT_DECL, NULL_TREE, void_type_node);
1e8e9920 1878 DECL_ARTIFICIAL (t) = 1;
1879 DECL_IGNORED_P (t) = 1;
8e5b4ed6 1880 DECL_CONTEXT (t) = decl;
1e8e9920 1881 DECL_RESULT (decl) = t;
1882
e60a6f7b 1883 t = build_decl (DECL_SOURCE_LOCATION (decl),
1884 PARM_DECL, get_identifier (".omp_data_i"), ptr_type_node);
1e8e9920 1885 DECL_ARTIFICIAL (t) = 1;
84bfaaeb 1886 DECL_NAMELESS (t) = 1;
1e8e9920 1887 DECL_ARG_TYPE (t) = ptr_type_node;
773c5ba7 1888 DECL_CONTEXT (t) = current_function_decl;
1e8e9920 1889 TREE_USED (t) = 1;
1890 DECL_ARGUMENTS (decl) = t;
fd6481cf 1891 if (!task_copy)
1892 ctx->receiver_decl = t;
1893 else
1894 {
e60a6f7b 1895 t = build_decl (DECL_SOURCE_LOCATION (decl),
1896 PARM_DECL, get_identifier (".omp_data_o"),
fd6481cf 1897 ptr_type_node);
1898 DECL_ARTIFICIAL (t) = 1;
84bfaaeb 1899 DECL_NAMELESS (t) = 1;
fd6481cf 1900 DECL_ARG_TYPE (t) = ptr_type_node;
1901 DECL_CONTEXT (t) = current_function_decl;
1902 TREE_USED (t) = 1;
86f2ad37 1903 TREE_ADDRESSABLE (t) = 1;
1767a056 1904 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
fd6481cf 1905 DECL_ARGUMENTS (decl) = t;
1906 }
1e8e9920 1907
48e1416a 1908 /* Allocate memory for the function structure. The call to
773c5ba7 1909 allocate_struct_function clobbers CFUN, so we need to restore
1e8e9920 1910 it afterward. */
87d4aa85 1911 push_struct_function (decl);
75a70cf9 1912 cfun->function_end_locus = gimple_location (ctx->stmt);
87d4aa85 1913 pop_cfun ();
1e8e9920 1914}
1915
bc7bff74 1916/* Callback for walk_gimple_seq. Check if combined parallel
1917 contains gimple_omp_for_combined_into_p OMP_FOR. */
1918
1919static tree
1920find_combined_for (gimple_stmt_iterator *gsi_p,
1921 bool *handled_ops_p,
1922 struct walk_stmt_info *wi)
1923{
1924 gimple stmt = gsi_stmt (*gsi_p);
1925
1926 *handled_ops_p = true;
1927 switch (gimple_code (stmt))
1928 {
1929 WALK_SUBSTMTS;
1930
1931 case GIMPLE_OMP_FOR:
1932 if (gimple_omp_for_combined_into_p (stmt)
1933 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
1934 {
1935 wi->info = stmt;
1936 return integer_zero_node;
1937 }
1938 break;
1939 default:
1940 break;
1941 }
1942 return NULL;
1943}
1944
1e8e9920 1945/* Scan an OpenMP parallel directive. */
1946
1947static void
75a70cf9 1948scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1e8e9920 1949{
1950 omp_context *ctx;
1951 tree name;
75a70cf9 1952 gimple stmt = gsi_stmt (*gsi);
1e8e9920 1953
1954 /* Ignore parallel directives with empty bodies, unless there
1955 are copyin clauses. */
1956 if (optimize > 0
75a70cf9 1957 && empty_body_p (gimple_omp_body (stmt))
1958 && find_omp_clause (gimple_omp_parallel_clauses (stmt),
1959 OMP_CLAUSE_COPYIN) == NULL)
1e8e9920 1960 {
75a70cf9 1961 gsi_replace (gsi, gimple_build_nop (), false);
1e8e9920 1962 return;
1963 }
1964
bc7bff74 1965 if (gimple_omp_parallel_combined_p (stmt))
1966 {
1967 gimple for_stmt;
1968 struct walk_stmt_info wi;
1969
1970 memset (&wi, 0, sizeof (wi));
1971 wi.val_only = true;
1972 walk_gimple_seq (gimple_omp_body (stmt),
1973 find_combined_for, NULL, &wi);
1974 for_stmt = (gimple) wi.info;
1975 if (for_stmt)
1976 {
1977 struct omp_for_data fd;
1978 extract_omp_for_data (for_stmt, &fd, NULL);
1979 /* We need two temporaries with fd.loop.v type (istart/iend)
1980 and then (fd.collapse - 1) temporaries with the same
1981 type for count2 ... countN-1 vars if not constant. */
1982 size_t count = 2, i;
1983 tree type = fd.iter_type;
1984 if (fd.collapse > 1
1985 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1986 count += fd.collapse - 1;
1987 for (i = 0; i < count; i++)
1988 {
1989 tree temp = create_tmp_var (type, NULL);
1990 tree c = build_omp_clause (UNKNOWN_LOCATION,
1991 OMP_CLAUSE__LOOPTEMP_);
1992 OMP_CLAUSE_DECL (c) = temp;
1993 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1994 gimple_omp_parallel_set_clauses (stmt, c);
1995 }
1996 }
1997 }
1998
75a70cf9 1999 ctx = new_omp_context (stmt, outer_ctx);
fd6481cf 2000 if (taskreg_nesting_level > 1)
773c5ba7 2001 ctx->is_nested = true;
1e8e9920 2002 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1e8e9920 2003 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2004 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1e8e9920 2005 name = create_tmp_var_name (".omp_data_s");
e60a6f7b 2006 name = build_decl (gimple_location (stmt),
2007 TYPE_DECL, name, ctx->record_type);
84bfaaeb 2008 DECL_ARTIFICIAL (name) = 1;
2009 DECL_NAMELESS (name) = 1;
1e8e9920 2010 TYPE_NAME (ctx->record_type) = name;
fd6481cf 2011 create_omp_child_function (ctx, false);
75a70cf9 2012 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1e8e9920 2013
75a70cf9 2014 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
ab129075 2015 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 2016
2017 if (TYPE_FIELDS (ctx->record_type) == NULL)
2018 ctx->record_type = ctx->receiver_decl = NULL;
2019 else
2020 {
2021 layout_type (ctx->record_type);
2022 fixup_child_record_type (ctx);
2023 }
2024}
2025
fd6481cf 2026/* Scan an OpenMP task directive. */
2027
2028static void
75a70cf9 2029scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
fd6481cf 2030{
2031 omp_context *ctx;
75a70cf9 2032 tree name, t;
2033 gimple stmt = gsi_stmt (*gsi);
389dd41b 2034 location_t loc = gimple_location (stmt);
fd6481cf 2035
2036 /* Ignore task directives with empty bodies. */
2037 if (optimize > 0
75a70cf9 2038 && empty_body_p (gimple_omp_body (stmt)))
fd6481cf 2039 {
75a70cf9 2040 gsi_replace (gsi, gimple_build_nop (), false);
fd6481cf 2041 return;
2042 }
2043
75a70cf9 2044 ctx = new_omp_context (stmt, outer_ctx);
fd6481cf 2045 if (taskreg_nesting_level > 1)
2046 ctx->is_nested = true;
2047 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2048 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2049 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2050 name = create_tmp_var_name (".omp_data_s");
e60a6f7b 2051 name = build_decl (gimple_location (stmt),
2052 TYPE_DECL, name, ctx->record_type);
84bfaaeb 2053 DECL_ARTIFICIAL (name) = 1;
2054 DECL_NAMELESS (name) = 1;
fd6481cf 2055 TYPE_NAME (ctx->record_type) = name;
2056 create_omp_child_function (ctx, false);
75a70cf9 2057 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
fd6481cf 2058
75a70cf9 2059 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
fd6481cf 2060
2061 if (ctx->srecord_type)
2062 {
2063 name = create_tmp_var_name (".omp_data_a");
e60a6f7b 2064 name = build_decl (gimple_location (stmt),
2065 TYPE_DECL, name, ctx->srecord_type);
84bfaaeb 2066 DECL_ARTIFICIAL (name) = 1;
2067 DECL_NAMELESS (name) = 1;
fd6481cf 2068 TYPE_NAME (ctx->srecord_type) = name;
2069 create_omp_child_function (ctx, true);
2070 }
2071
ab129075 2072 scan_omp (gimple_omp_body_ptr (stmt), ctx);
fd6481cf 2073
2074 if (TYPE_FIELDS (ctx->record_type) == NULL)
2075 {
2076 ctx->record_type = ctx->receiver_decl = NULL;
75a70cf9 2077 t = build_int_cst (long_integer_type_node, 0);
2078 gimple_omp_task_set_arg_size (stmt, t);
2079 t = build_int_cst (long_integer_type_node, 1);
2080 gimple_omp_task_set_arg_align (stmt, t);
fd6481cf 2081 }
2082 else
2083 {
2084 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2085 /* Move VLA fields to the end. */
2086 p = &TYPE_FIELDS (ctx->record_type);
2087 while (*p)
2088 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2089 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2090 {
2091 *q = *p;
2092 *p = TREE_CHAIN (*p);
2093 TREE_CHAIN (*q) = NULL_TREE;
2094 q = &TREE_CHAIN (*q);
2095 }
2096 else
1767a056 2097 p = &DECL_CHAIN (*p);
fd6481cf 2098 *p = vla_fields;
2099 layout_type (ctx->record_type);
2100 fixup_child_record_type (ctx);
2101 if (ctx->srecord_type)
2102 layout_type (ctx->srecord_type);
389dd41b 2103 t = fold_convert_loc (loc, long_integer_type_node,
fd6481cf 2104 TYPE_SIZE_UNIT (ctx->record_type));
75a70cf9 2105 gimple_omp_task_set_arg_size (stmt, t);
2106 t = build_int_cst (long_integer_type_node,
fd6481cf 2107 TYPE_ALIGN_UNIT (ctx->record_type));
75a70cf9 2108 gimple_omp_task_set_arg_align (stmt, t);
fd6481cf 2109 }
2110}
2111
1e8e9920 2112
773c5ba7 2113/* Scan an OpenMP loop directive. */
1e8e9920 2114
2115static void
75a70cf9 2116scan_omp_for (gimple stmt, omp_context *outer_ctx)
1e8e9920 2117{
773c5ba7 2118 omp_context *ctx;
75a70cf9 2119 size_t i;
1e8e9920 2120
773c5ba7 2121 ctx = new_omp_context (stmt, outer_ctx);
1e8e9920 2122
75a70cf9 2123 scan_sharing_clauses (gimple_omp_for_clauses (stmt), ctx);
1e8e9920 2124
ab129075 2125 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
75a70cf9 2126 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
fd6481cf 2127 {
75a70cf9 2128 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2129 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2130 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2131 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
fd6481cf 2132 }
ab129075 2133 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 2134}
2135
2136/* Scan an OpenMP sections directive. */
2137
2138static void
75a70cf9 2139scan_omp_sections (gimple stmt, omp_context *outer_ctx)
1e8e9920 2140{
1e8e9920 2141 omp_context *ctx;
2142
2143 ctx = new_omp_context (stmt, outer_ctx);
75a70cf9 2144 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
ab129075 2145 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 2146}
2147
2148/* Scan an OpenMP single directive. */
2149
2150static void
75a70cf9 2151scan_omp_single (gimple stmt, omp_context *outer_ctx)
1e8e9920 2152{
1e8e9920 2153 omp_context *ctx;
2154 tree name;
2155
2156 ctx = new_omp_context (stmt, outer_ctx);
2157 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2158 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2159 name = create_tmp_var_name (".omp_copy_s");
e60a6f7b 2160 name = build_decl (gimple_location (stmt),
2161 TYPE_DECL, name, ctx->record_type);
1e8e9920 2162 TYPE_NAME (ctx->record_type) = name;
2163
75a70cf9 2164 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
ab129075 2165 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 2166
2167 if (TYPE_FIELDS (ctx->record_type) == NULL)
2168 ctx->record_type = NULL;
2169 else
2170 layout_type (ctx->record_type);
2171}
2172
bc7bff74 2173/* Scan an OpenMP target{, data, update} directive. */
2174
2175static void
2176scan_omp_target (gimple stmt, omp_context *outer_ctx)
2177{
2178 omp_context *ctx;
2179 tree name;
2180 int kind = gimple_omp_target_kind (stmt);
2181
2182 ctx = new_omp_context (stmt, outer_ctx);
2183 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2184 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2185 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2186 name = create_tmp_var_name (".omp_data_t");
2187 name = build_decl (gimple_location (stmt),
2188 TYPE_DECL, name, ctx->record_type);
2189 DECL_ARTIFICIAL (name) = 1;
2190 DECL_NAMELESS (name) = 1;
2191 TYPE_NAME (ctx->record_type) = name;
2192 if (kind == GF_OMP_TARGET_KIND_REGION)
2193 {
2194 create_omp_child_function (ctx, false);
2195 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2196 }
2197
2198 scan_sharing_clauses (gimple_omp_target_clauses (stmt), ctx);
2199 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2200
2201 if (TYPE_FIELDS (ctx->record_type) == NULL)
2202 ctx->record_type = ctx->receiver_decl = NULL;
2203 else
2204 {
2205 TYPE_FIELDS (ctx->record_type)
2206 = nreverse (TYPE_FIELDS (ctx->record_type));
2207#ifdef ENABLE_CHECKING
2208 tree field;
2209 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2210 for (field = TYPE_FIELDS (ctx->record_type);
2211 field;
2212 field = DECL_CHAIN (field))
2213 gcc_assert (DECL_ALIGN (field) == align);
2214#endif
2215 layout_type (ctx->record_type);
2216 if (kind == GF_OMP_TARGET_KIND_REGION)
2217 fixup_child_record_type (ctx);
2218 }
2219}
2220
2221/* Scan an OpenMP teams directive. */
2222
2223static void
2224scan_omp_teams (gimple stmt, omp_context *outer_ctx)
2225{
2226 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2227 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2228 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2229}
1e8e9920 2230
c1d127dd 2231/* Check OpenMP nesting restrictions. */
ab129075 2232static bool
2233check_omp_nesting_restrictions (gimple stmt, omp_context *ctx)
c1d127dd 2234{
3d483a94 2235 if (ctx != NULL)
2236 {
2237 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
f2697631 2238 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
3d483a94 2239 {
2240 error_at (gimple_location (stmt),
2241 "OpenMP constructs may not be nested inside simd region");
2242 return false;
2243 }
bc7bff74 2244 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2245 {
2246 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2247 || (gimple_omp_for_kind (stmt)
2248 != GF_OMP_FOR_KIND_DISTRIBUTE))
2249 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2250 {
2251 error_at (gimple_location (stmt),
2252 "only distribute or parallel constructs are allowed to "
2253 "be closely nested inside teams construct");
2254 return false;
2255 }
2256 }
3d483a94 2257 }
75a70cf9 2258 switch (gimple_code (stmt))
c1d127dd 2259 {
75a70cf9 2260 case GIMPLE_OMP_FOR:
f2697631 2261 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_KIND_SIMD)
3d483a94 2262 return true;
bc7bff74 2263 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2264 {
2265 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2266 {
2267 error_at (gimple_location (stmt),
2268 "distribute construct must be closely nested inside "
2269 "teams construct");
2270 return false;
2271 }
2272 return true;
2273 }
2274 /* FALLTHRU */
2275 case GIMPLE_CALL:
2276 if (is_gimple_call (stmt)
2277 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2278 == BUILT_IN_GOMP_CANCEL
2279 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2280 == BUILT_IN_GOMP_CANCELLATION_POINT))
2281 {
2282 const char *bad = NULL;
2283 const char *kind = NULL;
2284 if (ctx == NULL)
2285 {
2286 error_at (gimple_location (stmt), "orphaned %qs construct",
2287 DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2288 == BUILT_IN_GOMP_CANCEL
2289 ? "#pragma omp cancel"
2290 : "#pragma omp cancellation point");
2291 return false;
2292 }
35ec552a 2293 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
fcb97e84 2294 ? tree_to_shwi (gimple_call_arg (stmt, 0))
bc7bff74 2295 : 0)
2296 {
2297 case 1:
2298 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2299 bad = "#pragma omp parallel";
2300 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2301 == BUILT_IN_GOMP_CANCEL
2302 && !integer_zerop (gimple_call_arg (stmt, 1)))
2303 ctx->cancellable = true;
2304 kind = "parallel";
2305 break;
2306 case 2:
2307 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2308 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2309 bad = "#pragma omp for";
2310 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2311 == BUILT_IN_GOMP_CANCEL
2312 && !integer_zerop (gimple_call_arg (stmt, 1)))
2313 {
2314 ctx->cancellable = true;
2315 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2316 OMP_CLAUSE_NOWAIT))
2317 warning_at (gimple_location (stmt), 0,
2318 "%<#pragma omp cancel for%> inside "
2319 "%<nowait%> for construct");
2320 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2321 OMP_CLAUSE_ORDERED))
2322 warning_at (gimple_location (stmt), 0,
2323 "%<#pragma omp cancel for%> inside "
2324 "%<ordered%> for construct");
2325 }
2326 kind = "for";
2327 break;
2328 case 4:
2329 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2330 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2331 bad = "#pragma omp sections";
2332 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2333 == BUILT_IN_GOMP_CANCEL
2334 && !integer_zerop (gimple_call_arg (stmt, 1)))
2335 {
2336 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2337 {
2338 ctx->cancellable = true;
2339 if (find_omp_clause (gimple_omp_sections_clauses
2340 (ctx->stmt),
2341 OMP_CLAUSE_NOWAIT))
2342 warning_at (gimple_location (stmt), 0,
2343 "%<#pragma omp cancel sections%> inside "
2344 "%<nowait%> sections construct");
2345 }
2346 else
2347 {
2348 gcc_assert (ctx->outer
2349 && gimple_code (ctx->outer->stmt)
2350 == GIMPLE_OMP_SECTIONS);
2351 ctx->outer->cancellable = true;
2352 if (find_omp_clause (gimple_omp_sections_clauses
2353 (ctx->outer->stmt),
2354 OMP_CLAUSE_NOWAIT))
2355 warning_at (gimple_location (stmt), 0,
2356 "%<#pragma omp cancel sections%> inside "
2357 "%<nowait%> sections construct");
2358 }
2359 }
2360 kind = "sections";
2361 break;
2362 case 8:
2363 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2364 bad = "#pragma omp task";
2365 else
2366 ctx->cancellable = true;
2367 kind = "taskgroup";
2368 break;
2369 default:
2370 error_at (gimple_location (stmt), "invalid arguments");
2371 return false;
2372 }
2373 if (bad)
2374 {
2375 error_at (gimple_location (stmt),
2376 "%<%s %s%> construct not closely nested inside of %qs",
2377 DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2378 == BUILT_IN_GOMP_CANCEL
2379 ? "#pragma omp cancel"
2380 : "#pragma omp cancellation point", kind, bad);
2381 return false;
2382 }
2383 }
3d483a94 2384 /* FALLTHRU */
75a70cf9 2385 case GIMPLE_OMP_SECTIONS:
2386 case GIMPLE_OMP_SINGLE:
c1d127dd 2387 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 2388 switch (gimple_code (ctx->stmt))
c1d127dd 2389 {
75a70cf9 2390 case GIMPLE_OMP_FOR:
2391 case GIMPLE_OMP_SECTIONS:
2392 case GIMPLE_OMP_SINGLE:
2393 case GIMPLE_OMP_ORDERED:
2394 case GIMPLE_OMP_MASTER:
2395 case GIMPLE_OMP_TASK:
bc7bff74 2396 case GIMPLE_OMP_CRITICAL:
75a70cf9 2397 if (is_gimple_call (stmt))
fd6481cf 2398 {
bc7bff74 2399 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2400 != BUILT_IN_GOMP_BARRIER)
2401 return true;
ab129075 2402 error_at (gimple_location (stmt),
2403 "barrier region may not be closely nested inside "
2404 "of work-sharing, critical, ordered, master or "
2405 "explicit task region");
2406 return false;
fd6481cf 2407 }
ab129075 2408 error_at (gimple_location (stmt),
2409 "work-sharing region may not be closely nested inside "
2410 "of work-sharing, critical, ordered, master or explicit "
2411 "task region");
2412 return false;
75a70cf9 2413 case GIMPLE_OMP_PARALLEL:
ab129075 2414 return true;
c1d127dd 2415 default:
2416 break;
2417 }
2418 break;
75a70cf9 2419 case GIMPLE_OMP_MASTER:
c1d127dd 2420 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 2421 switch (gimple_code (ctx->stmt))
c1d127dd 2422 {
75a70cf9 2423 case GIMPLE_OMP_FOR:
2424 case GIMPLE_OMP_SECTIONS:
2425 case GIMPLE_OMP_SINGLE:
2426 case GIMPLE_OMP_TASK:
ab129075 2427 error_at (gimple_location (stmt),
2428 "master region may not be closely nested inside "
2429 "of work-sharing or explicit task region");
2430 return false;
75a70cf9 2431 case GIMPLE_OMP_PARALLEL:
ab129075 2432 return true;
c1d127dd 2433 default:
2434 break;
2435 }
2436 break;
75a70cf9 2437 case GIMPLE_OMP_ORDERED:
c1d127dd 2438 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 2439 switch (gimple_code (ctx->stmt))
c1d127dd 2440 {
75a70cf9 2441 case GIMPLE_OMP_CRITICAL:
2442 case GIMPLE_OMP_TASK:
ab129075 2443 error_at (gimple_location (stmt),
2444 "ordered region may not be closely nested inside "
2445 "of critical or explicit task region");
2446 return false;
75a70cf9 2447 case GIMPLE_OMP_FOR:
2448 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
c1d127dd 2449 OMP_CLAUSE_ORDERED) == NULL)
ab129075 2450 {
2451 error_at (gimple_location (stmt),
2452 "ordered region must be closely nested inside "
c1d127dd 2453 "a loop region with an ordered clause");
ab129075 2454 return false;
2455 }
2456 return true;
75a70cf9 2457 case GIMPLE_OMP_PARALLEL:
bc7bff74 2458 error_at (gimple_location (stmt),
2459 "ordered region must be closely nested inside "
2460 "a loop region with an ordered clause");
2461 return false;
c1d127dd 2462 default:
2463 break;
2464 }
2465 break;
75a70cf9 2466 case GIMPLE_OMP_CRITICAL:
c1d127dd 2467 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 2468 if (gimple_code (ctx->stmt) == GIMPLE_OMP_CRITICAL
2469 && (gimple_omp_critical_name (stmt)
2470 == gimple_omp_critical_name (ctx->stmt)))
c1d127dd 2471 {
ab129075 2472 error_at (gimple_location (stmt),
2473 "critical region may not be nested inside a critical "
2474 "region with the same name");
2475 return false;
c1d127dd 2476 }
2477 break;
bc7bff74 2478 case GIMPLE_OMP_TEAMS:
2479 if (ctx == NULL
2480 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2481 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2482 {
2483 error_at (gimple_location (stmt),
2484 "teams construct not closely nested inside of target "
2485 "region");
2486 return false;
2487 }
2488 break;
c1d127dd 2489 default:
2490 break;
2491 }
ab129075 2492 return true;
c1d127dd 2493}
2494
2495
75a70cf9 2496/* Helper function scan_omp.
2497
2498 Callback for walk_tree or operators in walk_gimple_stmt used to
2499 scan for OpenMP directives in TP. */
1e8e9920 2500
2501static tree
75a70cf9 2502scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
1e8e9920 2503{
4077bf7a 2504 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2505 omp_context *ctx = (omp_context *) wi->info;
1e8e9920 2506 tree t = *tp;
2507
75a70cf9 2508 switch (TREE_CODE (t))
2509 {
2510 case VAR_DECL:
2511 case PARM_DECL:
2512 case LABEL_DECL:
2513 case RESULT_DECL:
2514 if (ctx)
2515 *tp = remap_decl (t, &ctx->cb);
2516 break;
2517
2518 default:
2519 if (ctx && TYPE_P (t))
2520 *tp = remap_type (t, &ctx->cb);
2521 else if (!DECL_P (t))
7cf869dd 2522 {
2523 *walk_subtrees = 1;
2524 if (ctx)
182cf5a9 2525 {
2526 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
2527 if (tem != TREE_TYPE (t))
2528 {
2529 if (TREE_CODE (t) == INTEGER_CST)
2530 *tp = build_int_cst_wide (tem,
2531 TREE_INT_CST_LOW (t),
2532 TREE_INT_CST_HIGH (t));
2533 else
2534 TREE_TYPE (t) = tem;
2535 }
2536 }
7cf869dd 2537 }
75a70cf9 2538 break;
2539 }
2540
2541 return NULL_TREE;
2542}
2543
f2697631 2544/* Return true if FNDECL is a setjmp or a longjmp. */
2545
2546static bool
2547setjmp_or_longjmp_p (const_tree fndecl)
2548{
2549 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2550 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
2551 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
2552 return true;
2553
2554 tree declname = DECL_NAME (fndecl);
2555 if (!declname)
2556 return false;
2557 const char *name = IDENTIFIER_POINTER (declname);
2558 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
2559}
2560
75a70cf9 2561
2562/* Helper function for scan_omp.
2563
2564 Callback for walk_gimple_stmt used to scan for OpenMP directives in
2565 the current statement in GSI. */
2566
2567static tree
2568scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2569 struct walk_stmt_info *wi)
2570{
2571 gimple stmt = gsi_stmt (*gsi);
2572 omp_context *ctx = (omp_context *) wi->info;
2573
2574 if (gimple_has_location (stmt))
2575 input_location = gimple_location (stmt);
1e8e9920 2576
c1d127dd 2577 /* Check the OpenMP nesting restrictions. */
bc7bff74 2578 bool remove = false;
2579 if (is_gimple_omp (stmt))
2580 remove = !check_omp_nesting_restrictions (stmt, ctx);
2581 else if (is_gimple_call (stmt))
2582 {
2583 tree fndecl = gimple_call_fndecl (stmt);
f2697631 2584 if (fndecl)
2585 {
2586 if (setjmp_or_longjmp_p (fndecl)
2587 && ctx
2588 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2589 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
2590 {
2591 remove = true;
2592 error_at (gimple_location (stmt),
2593 "setjmp/longjmp inside simd construct");
2594 }
2595 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2596 switch (DECL_FUNCTION_CODE (fndecl))
2597 {
2598 case BUILT_IN_GOMP_BARRIER:
2599 case BUILT_IN_GOMP_CANCEL:
2600 case BUILT_IN_GOMP_CANCELLATION_POINT:
2601 case BUILT_IN_GOMP_TASKYIELD:
2602 case BUILT_IN_GOMP_TASKWAIT:
2603 case BUILT_IN_GOMP_TASKGROUP_START:
2604 case BUILT_IN_GOMP_TASKGROUP_END:
2605 remove = !check_omp_nesting_restrictions (stmt, ctx);
2606 break;
2607 default:
2608 break;
2609 }
2610 }
bc7bff74 2611 }
2612 if (remove)
2613 {
2614 stmt = gimple_build_nop ();
2615 gsi_replace (gsi, stmt, false);
fd6481cf 2616 }
c1d127dd 2617
75a70cf9 2618 *handled_ops_p = true;
2619
2620 switch (gimple_code (stmt))
1e8e9920 2621 {
75a70cf9 2622 case GIMPLE_OMP_PARALLEL:
fd6481cf 2623 taskreg_nesting_level++;
75a70cf9 2624 scan_omp_parallel (gsi, ctx);
fd6481cf 2625 taskreg_nesting_level--;
2626 break;
2627
75a70cf9 2628 case GIMPLE_OMP_TASK:
fd6481cf 2629 taskreg_nesting_level++;
75a70cf9 2630 scan_omp_task (gsi, ctx);
fd6481cf 2631 taskreg_nesting_level--;
1e8e9920 2632 break;
2633
75a70cf9 2634 case GIMPLE_OMP_FOR:
2635 scan_omp_for (stmt, ctx);
1e8e9920 2636 break;
2637
75a70cf9 2638 case GIMPLE_OMP_SECTIONS:
2639 scan_omp_sections (stmt, ctx);
1e8e9920 2640 break;
2641
75a70cf9 2642 case GIMPLE_OMP_SINGLE:
2643 scan_omp_single (stmt, ctx);
1e8e9920 2644 break;
2645
75a70cf9 2646 case GIMPLE_OMP_SECTION:
2647 case GIMPLE_OMP_MASTER:
bc7bff74 2648 case GIMPLE_OMP_TASKGROUP:
75a70cf9 2649 case GIMPLE_OMP_ORDERED:
2650 case GIMPLE_OMP_CRITICAL:
2651 ctx = new_omp_context (stmt, ctx);
ab129075 2652 scan_omp (gimple_omp_body_ptr (stmt), ctx);
1e8e9920 2653 break;
2654
bc7bff74 2655 case GIMPLE_OMP_TARGET:
2656 scan_omp_target (stmt, ctx);
2657 break;
2658
2659 case GIMPLE_OMP_TEAMS:
2660 scan_omp_teams (stmt, ctx);
2661 break;
2662
75a70cf9 2663 case GIMPLE_BIND:
1e8e9920 2664 {
2665 tree var;
1e8e9920 2666
75a70cf9 2667 *handled_ops_p = false;
2668 if (ctx)
1767a056 2669 for (var = gimple_bind_vars (stmt); var ; var = DECL_CHAIN (var))
75a70cf9 2670 insert_decl_map (&ctx->cb, var, var);
1e8e9920 2671 }
2672 break;
1e8e9920 2673 default:
75a70cf9 2674 *handled_ops_p = false;
1e8e9920 2675 break;
2676 }
2677
2678 return NULL_TREE;
2679}
2680
2681
75a70cf9 2682/* Scan all the statements starting at the current statement. CTX
2683 contains context information about the OpenMP directives and
2684 clauses found during the scan. */
1e8e9920 2685
2686static void
ab129075 2687scan_omp (gimple_seq *body_p, omp_context *ctx)
1e8e9920 2688{
2689 location_t saved_location;
2690 struct walk_stmt_info wi;
2691
2692 memset (&wi, 0, sizeof (wi));
1e8e9920 2693 wi.info = ctx;
1e8e9920 2694 wi.want_locations = true;
2695
2696 saved_location = input_location;
ab129075 2697 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
1e8e9920 2698 input_location = saved_location;
2699}
2700\f
2701/* Re-gimplification and code generation routines. */
2702
2703/* Build a call to GOMP_barrier. */
2704
bc7bff74 2705static gimple
2706build_omp_barrier (tree lhs)
2707{
2708 tree fndecl = builtin_decl_explicit (lhs ? BUILT_IN_GOMP_BARRIER_CANCEL
2709 : BUILT_IN_GOMP_BARRIER);
2710 gimple g = gimple_build_call (fndecl, 0);
2711 if (lhs)
2712 gimple_call_set_lhs (g, lhs);
2713 return g;
1e8e9920 2714}
2715
2716/* If a context was created for STMT when it was scanned, return it. */
2717
2718static omp_context *
75a70cf9 2719maybe_lookup_ctx (gimple stmt)
1e8e9920 2720{
2721 splay_tree_node n;
2722 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
2723 return n ? (omp_context *) n->value : NULL;
2724}
2725
773c5ba7 2726
2727/* Find the mapping for DECL in CTX or the immediately enclosing
2728 context that has a mapping for DECL.
2729
2730 If CTX is a nested parallel directive, we may have to use the decl
2731 mappings created in CTX's parent context. Suppose that we have the
2732 following parallel nesting (variable UIDs showed for clarity):
2733
2734 iD.1562 = 0;
2735 #omp parallel shared(iD.1562) -> outer parallel
2736 iD.1562 = iD.1562 + 1;
2737
2738 #omp parallel shared (iD.1562) -> inner parallel
2739 iD.1562 = iD.1562 - 1;
2740
2741 Each parallel structure will create a distinct .omp_data_s structure
2742 for copying iD.1562 in/out of the directive:
2743
2744 outer parallel .omp_data_s.1.i -> iD.1562
2745 inner parallel .omp_data_s.2.i -> iD.1562
2746
2747 A shared variable mapping will produce a copy-out operation before
2748 the parallel directive and a copy-in operation after it. So, in
2749 this case we would have:
2750
2751 iD.1562 = 0;
2752 .omp_data_o.1.i = iD.1562;
2753 #omp parallel shared(iD.1562) -> outer parallel
2754 .omp_data_i.1 = &.omp_data_o.1
2755 .omp_data_i.1->i = .omp_data_i.1->i + 1;
2756
2757 .omp_data_o.2.i = iD.1562; -> **
2758 #omp parallel shared(iD.1562) -> inner parallel
2759 .omp_data_i.2 = &.omp_data_o.2
2760 .omp_data_i.2->i = .omp_data_i.2->i - 1;
2761
2762
2763 ** This is a problem. The symbol iD.1562 cannot be referenced
2764 inside the body of the outer parallel region. But since we are
2765 emitting this copy operation while expanding the inner parallel
2766 directive, we need to access the CTX structure of the outer
2767 parallel directive to get the correct mapping:
2768
2769 .omp_data_o.2.i = .omp_data_i.1->i
2770
2771 Since there may be other workshare or parallel directives enclosing
2772 the parallel directive, it may be necessary to walk up the context
2773 parent chain. This is not a problem in general because nested
2774 parallelism happens only rarely. */
2775
2776static tree
2777lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2778{
2779 tree t;
2780 omp_context *up;
2781
773c5ba7 2782 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2783 t = maybe_lookup_decl (decl, up);
2784
87b31375 2785 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
773c5ba7 2786
c37594c7 2787 return t ? t : decl;
773c5ba7 2788}
2789
2790
f49d7bb5 2791/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
2792 in outer contexts. */
2793
2794static tree
2795maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2796{
2797 tree t = NULL;
2798 omp_context *up;
2799
87b31375 2800 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2801 t = maybe_lookup_decl (decl, up);
f49d7bb5 2802
2803 return t ? t : decl;
2804}
2805
2806
1e8e9920 2807/* Construct the initialization value for reduction CLAUSE. */
2808
2809tree
2810omp_reduction_init (tree clause, tree type)
2811{
389dd41b 2812 location_t loc = OMP_CLAUSE_LOCATION (clause);
1e8e9920 2813 switch (OMP_CLAUSE_REDUCTION_CODE (clause))
2814 {
2815 case PLUS_EXPR:
2816 case MINUS_EXPR:
2817 case BIT_IOR_EXPR:
2818 case BIT_XOR_EXPR:
2819 case TRUTH_OR_EXPR:
2820 case TRUTH_ORIF_EXPR:
2821 case TRUTH_XOR_EXPR:
2822 case NE_EXPR:
385f3f36 2823 return build_zero_cst (type);
1e8e9920 2824
2825 case MULT_EXPR:
2826 case TRUTH_AND_EXPR:
2827 case TRUTH_ANDIF_EXPR:
2828 case EQ_EXPR:
389dd41b 2829 return fold_convert_loc (loc, type, integer_one_node);
1e8e9920 2830
2831 case BIT_AND_EXPR:
389dd41b 2832 return fold_convert_loc (loc, type, integer_minus_one_node);
1e8e9920 2833
2834 case MAX_EXPR:
2835 if (SCALAR_FLOAT_TYPE_P (type))
2836 {
2837 REAL_VALUE_TYPE max, min;
2838 if (HONOR_INFINITIES (TYPE_MODE (type)))
2839 {
2840 real_inf (&max);
2841 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
2842 }
2843 else
2844 real_maxval (&min, 1, TYPE_MODE (type));
2845 return build_real (type, min);
2846 }
2847 else
2848 {
2849 gcc_assert (INTEGRAL_TYPE_P (type));
2850 return TYPE_MIN_VALUE (type);
2851 }
2852
2853 case MIN_EXPR:
2854 if (SCALAR_FLOAT_TYPE_P (type))
2855 {
2856 REAL_VALUE_TYPE max;
2857 if (HONOR_INFINITIES (TYPE_MODE (type)))
2858 real_inf (&max);
2859 else
2860 real_maxval (&max, 0, TYPE_MODE (type));
2861 return build_real (type, max);
2862 }
2863 else
2864 {
2865 gcc_assert (INTEGRAL_TYPE_P (type));
2866 return TYPE_MAX_VALUE (type);
2867 }
2868
2869 default:
2870 gcc_unreachable ();
2871 }
2872}
2873
bc7bff74 2874/* Return alignment to be assumed for var in CLAUSE, which should be
2875 OMP_CLAUSE_ALIGNED. */
2876
2877static tree
2878omp_clause_aligned_alignment (tree clause)
2879{
2880 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2881 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
2882
2883 /* Otherwise return implementation defined alignment. */
2884 unsigned int al = 1;
2885 enum machine_mode mode, vmode;
2886 int vs = targetm.vectorize.autovectorize_vector_sizes ();
2887 if (vs)
2888 vs = 1 << floor_log2 (vs);
2889 static enum mode_class classes[]
2890 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
2891 for (int i = 0; i < 4; i += 2)
2892 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
2893 mode != VOIDmode;
2894 mode = GET_MODE_WIDER_MODE (mode))
2895 {
2896 vmode = targetm.vectorize.preferred_simd_mode (mode);
2897 if (GET_MODE_CLASS (vmode) != classes[i + 1])
2898 continue;
2899 while (vs
2900 && GET_MODE_SIZE (vmode) < vs
2901 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
2902 vmode = GET_MODE_2XWIDER_MODE (vmode);
2903
2904 tree type = lang_hooks.types.type_for_mode (mode, 1);
2905 if (type == NULL_TREE || TYPE_MODE (type) != mode)
2906 continue;
2907 type = build_vector_type (type, GET_MODE_SIZE (vmode)
2908 / GET_MODE_SIZE (mode));
2909 if (TYPE_MODE (type) != vmode)
2910 continue;
2911 if (TYPE_ALIGN_UNIT (type) > al)
2912 al = TYPE_ALIGN_UNIT (type);
2913 }
2914 return build_int_cst (integer_type_node, al);
2915}
2916
3d483a94 2917/* Return maximum possible vectorization factor for the target. */
2918
2919static int
2920omp_max_vf (void)
2921{
2922 if (!optimize
2923 || optimize_debug
043115ec 2924 || (!flag_tree_loop_vectorize
2925 && (global_options_set.x_flag_tree_loop_vectorize
2926 || global_options_set.x_flag_tree_vectorize)))
3d483a94 2927 return 1;
2928
2929 int vs = targetm.vectorize.autovectorize_vector_sizes ();
2930 if (vs)
2931 {
2932 vs = 1 << floor_log2 (vs);
2933 return vs;
2934 }
2935 enum machine_mode vqimode = targetm.vectorize.preferred_simd_mode (QImode);
2936 if (GET_MODE_CLASS (vqimode) == MODE_VECTOR_INT)
2937 return GET_MODE_NUNITS (vqimode);
2938 return 1;
2939}
2940
2941/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
2942 privatization. */
2943
2944static bool
2945lower_rec_simd_input_clauses (tree new_var, omp_context *ctx, int &max_vf,
2946 tree &idx, tree &lane, tree &ivar, tree &lvar)
2947{
2948 if (max_vf == 0)
2949 {
2950 max_vf = omp_max_vf ();
2951 if (max_vf > 1)
2952 {
2953 tree c = find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2954 OMP_CLAUSE_SAFELEN);
2955 if (c
2956 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c), max_vf) == -1)
fcb97e84 2957 max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3d483a94 2958 }
2959 if (max_vf > 1)
2960 {
2961 idx = create_tmp_var (unsigned_type_node, NULL);
2962 lane = create_tmp_var (unsigned_type_node, NULL);
2963 }
2964 }
2965 if (max_vf == 1)
2966 return false;
2967
2968 tree atype = build_array_type_nelts (TREE_TYPE (new_var), max_vf);
2969 tree avar = create_tmp_var_raw (atype, NULL);
2970 if (TREE_ADDRESSABLE (new_var))
2971 TREE_ADDRESSABLE (avar) = 1;
2972 DECL_ATTRIBUTES (avar)
2973 = tree_cons (get_identifier ("omp simd array"), NULL,
2974 DECL_ATTRIBUTES (avar));
2975 gimple_add_tmp_var (avar);
2976 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, idx,
2977 NULL_TREE, NULL_TREE);
2978 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, lane,
2979 NULL_TREE, NULL_TREE);
bc7bff74 2980 if (DECL_P (new_var))
2981 {
2982 SET_DECL_VALUE_EXPR (new_var, lvar);
2983 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
2984 }
3d483a94 2985 return true;
2986}
2987
1e8e9920 2988/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
2989 from the receiver (aka child) side and initializers for REFERENCE_TYPE
2990 private variables. Initialization statements go in ILIST, while calls
2991 to destructors go in DLIST. */
2992
2993static void
75a70cf9 2994lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
bc7bff74 2995 omp_context *ctx, struct omp_for_data *fd)
1e8e9920 2996{
c2f47e15 2997 tree c, dtor, copyin_seq, x, ptr;
1e8e9920 2998 bool copyin_by_ref = false;
f49d7bb5 2999 bool lastprivate_firstprivate = false;
bc7bff74 3000 bool reduction_omp_orig_ref = false;
1e8e9920 3001 int pass;
3d483a94 3002 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
f2697631 3003 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD);
3d483a94 3004 int max_vf = 0;
3005 tree lane = NULL_TREE, idx = NULL_TREE;
3006 tree ivar = NULL_TREE, lvar = NULL_TREE;
3007 gimple_seq llist[2] = { NULL, NULL };
1e8e9920 3008
1e8e9920 3009 copyin_seq = NULL;
3010
3d483a94 3011 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3012 with data sharing clauses referencing variable sized vars. That
3013 is unnecessarily hard to support and very unlikely to result in
3014 vectorized code anyway. */
3015 if (is_simd)
3016 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3017 switch (OMP_CLAUSE_CODE (c))
3018 {
3019 case OMP_CLAUSE_REDUCTION:
3d483a94 3020 case OMP_CLAUSE_PRIVATE:
3021 case OMP_CLAUSE_FIRSTPRIVATE:
3022 case OMP_CLAUSE_LASTPRIVATE:
3023 case OMP_CLAUSE_LINEAR:
3024 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3025 max_vf = 1;
3026 break;
3027 default:
3028 continue;
3029 }
3030
1e8e9920 3031 /* Do all the fixed sized types in the first pass, and the variable sized
3032 types in the second pass. This makes sure that the scalar arguments to
48e1416a 3033 the variable sized types are processed before we use them in the
1e8e9920 3034 variable sized operations. */
3035 for (pass = 0; pass < 2; ++pass)
3036 {
3037 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3038 {
55d6e7cd 3039 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
1e8e9920 3040 tree var, new_var;
3041 bool by_ref;
389dd41b 3042 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 3043
3044 switch (c_kind)
3045 {
3046 case OMP_CLAUSE_PRIVATE:
3047 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3048 continue;
3049 break;
3050 case OMP_CLAUSE_SHARED:
bc7bff74 3051 /* Ignore shared directives in teams construct. */
3052 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3053 continue;
f49d7bb5 3054 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3055 {
3056 gcc_assert (is_global_var (OMP_CLAUSE_DECL (c)));
3057 continue;
3058 }
1e8e9920 3059 case OMP_CLAUSE_FIRSTPRIVATE:
1e8e9920 3060 case OMP_CLAUSE_COPYIN:
bc7bff74 3061 case OMP_CLAUSE_LINEAR:
3062 break;
1e8e9920 3063 case OMP_CLAUSE_REDUCTION:
bc7bff74 3064 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3065 reduction_omp_orig_ref = true;
1e8e9920 3066 break;
bc7bff74 3067 case OMP_CLAUSE__LOOPTEMP_:
3068 /* Handle _looptemp_ clauses only on parallel. */
3069 if (fd)
3070 continue;
3d483a94 3071 break;
df2c34fc 3072 case OMP_CLAUSE_LASTPRIVATE:
f49d7bb5 3073 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3074 {
3075 lastprivate_firstprivate = true;
3076 if (pass != 0)
3077 continue;
3078 }
df2c34fc 3079 break;
bc7bff74 3080 case OMP_CLAUSE_ALIGNED:
3081 if (pass == 0)
3082 continue;
3083 var = OMP_CLAUSE_DECL (c);
3084 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3085 && !is_global_var (var))
3086 {
3087 new_var = maybe_lookup_decl (var, ctx);
3088 if (new_var == NULL_TREE)
3089 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3090 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3091 x = build_call_expr_loc (clause_loc, x, 2, new_var,
3092 omp_clause_aligned_alignment (c));
3093 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3094 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3095 gimplify_and_add (x, ilist);
3096 }
3097 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3098 && is_global_var (var))
3099 {
3100 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3101 new_var = lookup_decl (var, ctx);
3102 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3103 t = build_fold_addr_expr_loc (clause_loc, t);
3104 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3105 t = build_call_expr_loc (clause_loc, t2, 2, t,
3106 omp_clause_aligned_alignment (c));
3107 t = fold_convert_loc (clause_loc, ptype, t);
3108 x = create_tmp_var (ptype, NULL);
3109 t = build2 (MODIFY_EXPR, ptype, x, t);
3110 gimplify_and_add (t, ilist);
3111 t = build_simple_mem_ref_loc (clause_loc, x);
3112 SET_DECL_VALUE_EXPR (new_var, t);
3113 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3114 }
3115 continue;
1e8e9920 3116 default:
3117 continue;
3118 }
3119
3120 new_var = var = OMP_CLAUSE_DECL (c);
3121 if (c_kind != OMP_CLAUSE_COPYIN)
3122 new_var = lookup_decl (var, ctx);
3123
3124 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3125 {
3126 if (pass != 0)
3127 continue;
3128 }
1e8e9920 3129 else if (is_variable_sized (var))
3130 {
773c5ba7 3131 /* For variable sized types, we need to allocate the
3132 actual storage here. Call alloca and store the
3133 result in the pointer decl that we created elsewhere. */
1e8e9920 3134 if (pass == 0)
3135 continue;
3136
fd6481cf 3137 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3138 {
75a70cf9 3139 gimple stmt;
b9a16870 3140 tree tmp, atmp;
75a70cf9 3141
fd6481cf 3142 ptr = DECL_VALUE_EXPR (new_var);
3143 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3144 ptr = TREE_OPERAND (ptr, 0);
3145 gcc_assert (DECL_P (ptr));
3146 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
75a70cf9 3147
3148 /* void *tmp = __builtin_alloca */
b9a16870 3149 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA);
3150 stmt = gimple_build_call (atmp, 1, x);
75a70cf9 3151 tmp = create_tmp_var_raw (ptr_type_node, NULL);
3152 gimple_add_tmp_var (tmp);
3153 gimple_call_set_lhs (stmt, tmp);
3154
3155 gimple_seq_add_stmt (ilist, stmt);
3156
389dd41b 3157 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
75a70cf9 3158 gimplify_assign (ptr, x, ilist);
fd6481cf 3159 }
1e8e9920 3160 }
1e8e9920 3161 else if (is_reference (var))
3162 {
773c5ba7 3163 /* For references that are being privatized for Fortran,
3164 allocate new backing storage for the new pointer
3165 variable. This allows us to avoid changing all the
3166 code that expects a pointer to something that expects
bc7bff74 3167 a direct variable. */
1e8e9920 3168 if (pass == 0)
3169 continue;
3170
3171 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
fd6481cf 3172 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
3173 {
3174 x = build_receiver_ref (var, false, ctx);
389dd41b 3175 x = build_fold_addr_expr_loc (clause_loc, x);
fd6481cf 3176 }
3177 else if (TREE_CONSTANT (x))
1e8e9920 3178 {
3179 const char *name = NULL;
3180 if (DECL_NAME (var))
3181 name = IDENTIFIER_POINTER (DECL_NAME (new_var));
3182
df2c34fc 3183 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
3184 name);
3185 gimple_add_tmp_var (x);
86f2ad37 3186 TREE_ADDRESSABLE (x) = 1;
389dd41b 3187 x = build_fold_addr_expr_loc (clause_loc, x);
1e8e9920 3188 }
3189 else
3190 {
b9a16870 3191 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA);
3192 x = build_call_expr_loc (clause_loc, atmp, 1, x);
1e8e9920 3193 }
3194
389dd41b 3195 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
75a70cf9 3196 gimplify_assign (new_var, x, ilist);
1e8e9920 3197
182cf5a9 3198 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
1e8e9920 3199 }
3200 else if (c_kind == OMP_CLAUSE_REDUCTION
3201 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3202 {
3203 if (pass == 0)
3204 continue;
3205 }
3206 else if (pass != 0)
3207 continue;
3208
55d6e7cd 3209 switch (OMP_CLAUSE_CODE (c))
1e8e9920 3210 {
3211 case OMP_CLAUSE_SHARED:
bc7bff74 3212 /* Ignore shared directives in teams construct. */
3213 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3214 continue;
f49d7bb5 3215 /* Shared global vars are just accessed directly. */
3216 if (is_global_var (new_var))
3217 break;
1e8e9920 3218 /* Set up the DECL_VALUE_EXPR for shared variables now. This
3219 needs to be delayed until after fixup_child_record_type so
3220 that we get the correct type during the dereference. */
e8a588af 3221 by_ref = use_pointer_for_field (var, ctx);
1e8e9920 3222 x = build_receiver_ref (var, by_ref, ctx);
3223 SET_DECL_VALUE_EXPR (new_var, x);
3224 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3225
3226 /* ??? If VAR is not passed by reference, and the variable
3227 hasn't been initialized yet, then we'll get a warning for
3228 the store into the omp_data_s structure. Ideally, we'd be
48e1416a 3229 able to notice this and not store anything at all, but
1e8e9920 3230 we're generating code too early. Suppress the warning. */
3231 if (!by_ref)
3232 TREE_NO_WARNING (var) = 1;
3233 break;
3234
3235 case OMP_CLAUSE_LASTPRIVATE:
3236 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3237 break;
3238 /* FALLTHRU */
3239
3240 case OMP_CLAUSE_PRIVATE:
fd6481cf 3241 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
3242 x = build_outer_var_ref (var, ctx);
3243 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
3244 {
3245 if (is_task_ctx (ctx))
3246 x = build_receiver_ref (var, false, ctx);
3247 else
3248 x = build_outer_var_ref (var, ctx);
3249 }
3250 else
3251 x = NULL;
3d483a94 3252 do_private:
bc7bff74 3253 tree nx;
3254 nx = lang_hooks.decls.omp_clause_default_ctor (c, new_var, x);
3d483a94 3255 if (is_simd)
3256 {
3257 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
bc7bff74 3258 if ((TREE_ADDRESSABLE (new_var) || nx || y
3d483a94 3259 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
3260 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3261 idx, lane, ivar, lvar))
3262 {
bc7bff74 3263 if (nx)
3d483a94 3264 x = lang_hooks.decls.omp_clause_default_ctor
3265 (c, unshare_expr (ivar), x);
bc7bff74 3266 if (nx && x)
3d483a94 3267 gimplify_and_add (x, &llist[0]);
3268 if (y)
3269 {
3270 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
3271 if (y)
3272 {
3273 gimple_seq tseq = NULL;
3274
3275 dtor = y;
3276 gimplify_stmt (&dtor, &tseq);
3277 gimple_seq_add_seq (&llist[1], tseq);
3278 }
3279 }
3280 break;
3281 }
3282 }
bc7bff74 3283 if (nx)
3284 gimplify_and_add (nx, ilist);
1e8e9920 3285 /* FALLTHRU */
3286
3287 do_dtor:
3288 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
3289 if (x)
3290 {
75a70cf9 3291 gimple_seq tseq = NULL;
3292
1e8e9920 3293 dtor = x;
75a70cf9 3294 gimplify_stmt (&dtor, &tseq);
e3a19533 3295 gimple_seq_add_seq (dlist, tseq);
1e8e9920 3296 }
3297 break;
3298
3d483a94 3299 case OMP_CLAUSE_LINEAR:
3300 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
3301 goto do_firstprivate;
3302 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3303 x = NULL;
3304 else
3305 x = build_outer_var_ref (var, ctx);
3306 goto do_private;
3307
1e8e9920 3308 case OMP_CLAUSE_FIRSTPRIVATE:
fd6481cf 3309 if (is_task_ctx (ctx))
3310 {
3311 if (is_reference (var) || is_variable_sized (var))
3312 goto do_dtor;
3313 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
3314 ctx))
3315 || use_pointer_for_field (var, NULL))
3316 {
3317 x = build_receiver_ref (var, false, ctx);
3318 SET_DECL_VALUE_EXPR (new_var, x);
3319 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3320 goto do_dtor;
3321 }
3322 }
3d483a94 3323 do_firstprivate:
1e8e9920 3324 x = build_outer_var_ref (var, ctx);
3d483a94 3325 if (is_simd)
3326 {
bc7bff74 3327 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3328 && gimple_omp_for_combined_into_p (ctx->stmt))
3329 {
3330 tree stept = POINTER_TYPE_P (TREE_TYPE (x))
3331 ? sizetype : TREE_TYPE (x);
3332 tree t = fold_convert (stept,
3333 OMP_CLAUSE_LINEAR_STEP (c));
3334 tree c = find_omp_clause (clauses,
3335 OMP_CLAUSE__LOOPTEMP_);
3336 gcc_assert (c);
3337 tree l = OMP_CLAUSE_DECL (c);
3338 if (fd->collapse == 1)
3339 {
3340 tree n1 = fd->loop.n1;
3341 tree step = fd->loop.step;
3342 tree itype = TREE_TYPE (l);
3343 if (POINTER_TYPE_P (itype))
3344 itype = signed_type_for (itype);
3345 l = fold_build2 (MINUS_EXPR, itype, l, n1);
3346 if (TYPE_UNSIGNED (itype)
3347 && fd->loop.cond_code == GT_EXPR)
3348 l = fold_build2 (TRUNC_DIV_EXPR, itype,
3349 fold_build1 (NEGATE_EXPR,
3350 itype, l),
3351 fold_build1 (NEGATE_EXPR,
3352 itype, step));
3353 else
3354 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
3355 }
3356 t = fold_build2 (MULT_EXPR, stept,
3357 fold_convert (stept, l), t);
3358 if (POINTER_TYPE_P (TREE_TYPE (x)))
3359 x = fold_build2 (POINTER_PLUS_EXPR,
3360 TREE_TYPE (x), x, t);
3361 else
3362 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
3363 }
3364
3d483a94 3365 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
3366 || TREE_ADDRESSABLE (new_var))
3367 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3368 idx, lane, ivar, lvar))
3369 {
3370 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
3371 {
3372 tree iv = create_tmp_var (TREE_TYPE (new_var), NULL);
3373 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
3374 gimplify_and_add (x, ilist);
3375 gimple_stmt_iterator gsi
3376 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
3377 gimple g
3378 = gimple_build_assign (unshare_expr (lvar), iv);
3379 gsi_insert_before_without_update (&gsi, g,
3380 GSI_SAME_STMT);
3381 tree stept = POINTER_TYPE_P (TREE_TYPE (x))
3382 ? sizetype : TREE_TYPE (x);
3383 tree t = fold_convert (stept,
3384 OMP_CLAUSE_LINEAR_STEP (c));
3385 enum tree_code code = PLUS_EXPR;
3386 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
3387 code = POINTER_PLUS_EXPR;
3388 g = gimple_build_assign_with_ops (code, iv, iv, t);
3389 gsi_insert_before_without_update (&gsi, g,
3390 GSI_SAME_STMT);
3391 break;
3392 }
3393 x = lang_hooks.decls.omp_clause_copy_ctor
3394 (c, unshare_expr (ivar), x);
3395 gimplify_and_add (x, &llist[0]);
3396 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
3397 if (x)
3398 {
3399 gimple_seq tseq = NULL;
3400
3401 dtor = x;
3402 gimplify_stmt (&dtor, &tseq);
3403 gimple_seq_add_seq (&llist[1], tseq);
3404 }
3405 break;
3406 }
3407 }
1e8e9920 3408 x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
3409 gimplify_and_add (x, ilist);
3410 goto do_dtor;
1e8e9920 3411
bc7bff74 3412 case OMP_CLAUSE__LOOPTEMP_:
3413 gcc_assert (is_parallel_ctx (ctx));
3414 x = build_outer_var_ref (var, ctx);
3415 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3416 gimplify_and_add (x, ilist);
3417 break;
3418
1e8e9920 3419 case OMP_CLAUSE_COPYIN:
e8a588af 3420 by_ref = use_pointer_for_field (var, NULL);
1e8e9920 3421 x = build_receiver_ref (var, by_ref, ctx);
3422 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
3423 append_to_statement_list (x, &copyin_seq);
3424 copyin_by_ref |= by_ref;
3425 break;
3426
3427 case OMP_CLAUSE_REDUCTION:
3428 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3429 {
fd6481cf 3430 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
bc7bff74 3431 gimple tseq;
fd6481cf 3432 x = build_outer_var_ref (var, ctx);
3433
bc7bff74 3434 if (is_reference (var)
3435 && !useless_type_conversion_p (TREE_TYPE (placeholder),
3436 TREE_TYPE (x)))
389dd41b 3437 x = build_fold_addr_expr_loc (clause_loc, x);
fd6481cf 3438 SET_DECL_VALUE_EXPR (placeholder, x);
3439 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
bc7bff74 3440 tree new_vard = new_var;
3441 if (is_reference (var))
3442 {
3443 gcc_assert (TREE_CODE (new_var) == MEM_REF);
3444 new_vard = TREE_OPERAND (new_var, 0);
3445 gcc_assert (DECL_P (new_vard));
3446 }
3d483a94 3447 if (is_simd
3448 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3449 idx, lane, ivar, lvar))
3450 {
bc7bff74 3451 if (new_vard == new_var)
3452 {
3453 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
3454 SET_DECL_VALUE_EXPR (new_var, ivar);
3455 }
3456 else
3457 {
3458 SET_DECL_VALUE_EXPR (new_vard,
3459 build_fold_addr_expr (ivar));
3460 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
3461 }
3462 x = lang_hooks.decls.omp_clause_default_ctor
3463 (c, unshare_expr (ivar),
3464 build_outer_var_ref (var, ctx));
3465 if (x)
3466 gimplify_and_add (x, &llist[0]);
3467 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3468 {
3469 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3470 lower_omp (&tseq, ctx);
3471 gimple_seq_add_seq (&llist[0], tseq);
3472 }
3473 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3474 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3475 lower_omp (&tseq, ctx);
3476 gimple_seq_add_seq (&llist[1], tseq);
3477 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3478 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3479 if (new_vard == new_var)
3480 SET_DECL_VALUE_EXPR (new_var, lvar);
3481 else
3482 SET_DECL_VALUE_EXPR (new_vard,
3483 build_fold_addr_expr (lvar));
3484 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
3485 if (x)
3486 {
3487 tseq = NULL;
3488 dtor = x;
3489 gimplify_stmt (&dtor, &tseq);
3490 gimple_seq_add_seq (&llist[1], tseq);
3491 }
3492 break;
3493 }
3494 x = lang_hooks.decls.omp_clause_default_ctor
3495 (c, new_var, unshare_expr (x));
3496 if (x)
3497 gimplify_and_add (x, ilist);
3498 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3499 {
3500 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3501 lower_omp (&tseq, ctx);
3502 gimple_seq_add_seq (ilist, tseq);
3503 }
3504 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3505 if (is_simd)
3506 {
3507 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3508 lower_omp (&tseq, ctx);
3509 gimple_seq_add_seq (dlist, tseq);
3510 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3511 }
3512 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3513 goto do_dtor;
3514 }
3515 else
3516 {
3517 x = omp_reduction_init (c, TREE_TYPE (new_var));
3518 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
3519 if (is_simd
3520 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3521 idx, lane, ivar, lvar))
3522 {
3523 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3524 tree ref = build_outer_var_ref (var, ctx);
3525
3526 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
3527
3528 /* reduction(-:var) sums up the partial results, so it
3529 acts identically to reduction(+:var). */
3530 if (code == MINUS_EXPR)
3531 code = PLUS_EXPR;
3532
3533 x = build2 (code, TREE_TYPE (ref), ref, ivar);
3d483a94 3534 ref = build_outer_var_ref (var, ctx);
3535 gimplify_assign (ref, x, &llist[1]);
3536 }
3537 else
3538 {
3539 gimplify_assign (new_var, x, ilist);
3540 if (is_simd)
3541 gimplify_assign (build_outer_var_ref (var, ctx),
3542 new_var, dlist);
3543 }
1e8e9920 3544 }
3545 break;
3546
3547 default:
3548 gcc_unreachable ();
3549 }
3550 }
3551 }
3552
3d483a94 3553 if (lane)
3554 {
3555 tree uid = create_tmp_var (ptr_type_node, "simduid");
8e1a382d 3556 /* Don't want uninit warnings on simduid, it is always uninitialized,
3557 but we use it not for the value, but for the DECL_UID only. */
3558 TREE_NO_WARNING (uid) = 1;
3d483a94 3559 gimple g
3560 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
3561 gimple_call_set_lhs (g, lane);
3562 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
3563 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
3564 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
3565 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
3566 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
3567 gimple_omp_for_set_clauses (ctx->stmt, c);
3568 g = gimple_build_assign_with_ops (INTEGER_CST, lane,
3569 build_int_cst (unsigned_type_node, 0),
3570 NULL_TREE);
3571 gimple_seq_add_stmt (ilist, g);
3572 for (int i = 0; i < 2; i++)
3573 if (llist[i])
3574 {
3575 tree vf = create_tmp_var (unsigned_type_node, NULL);
3576 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
3577 gimple_call_set_lhs (g, vf);
3578 gimple_seq *seq = i == 0 ? ilist : dlist;
3579 gimple_seq_add_stmt (seq, g);
3580 tree t = build_int_cst (unsigned_type_node, 0);
3581 g = gimple_build_assign_with_ops (INTEGER_CST, idx, t, NULL_TREE);
3582 gimple_seq_add_stmt (seq, g);
3583 tree body = create_artificial_label (UNKNOWN_LOCATION);
3584 tree header = create_artificial_label (UNKNOWN_LOCATION);
3585 tree end = create_artificial_label (UNKNOWN_LOCATION);
3586 gimple_seq_add_stmt (seq, gimple_build_goto (header));
3587 gimple_seq_add_stmt (seq, gimple_build_label (body));
3588 gimple_seq_add_seq (seq, llist[i]);
3589 t = build_int_cst (unsigned_type_node, 1);
3590 g = gimple_build_assign_with_ops (PLUS_EXPR, idx, idx, t);
3591 gimple_seq_add_stmt (seq, g);
3592 gimple_seq_add_stmt (seq, gimple_build_label (header));
3593 g = gimple_build_cond (LT_EXPR, idx, vf, body, end);
3594 gimple_seq_add_stmt (seq, g);
3595 gimple_seq_add_stmt (seq, gimple_build_label (end));
3596 }
3597 }
3598
1e8e9920 3599 /* The copyin sequence is not to be executed by the main thread, since
3600 that would result in self-copies. Perhaps not visible to scalars,
3601 but it certainly is to C++ operator=. */
3602 if (copyin_seq)
3603 {
b9a16870 3604 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
3605 0);
1e8e9920 3606 x = build2 (NE_EXPR, boolean_type_node, x,
3607 build_int_cst (TREE_TYPE (x), 0));
3608 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
3609 gimplify_and_add (x, ilist);
3610 }
3611
3612 /* If any copyin variable is passed by reference, we must ensure the
3613 master thread doesn't modify it before it is copied over in all
f49d7bb5 3614 threads. Similarly for variables in both firstprivate and
3615 lastprivate clauses we need to ensure the lastprivate copying
bc7bff74 3616 happens after firstprivate copying in all threads. And similarly
3617 for UDRs if initializer expression refers to omp_orig. */
3618 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
3d483a94 3619 {
3620 /* Don't add any barrier for #pragma omp simd or
3621 #pragma omp distribute. */
3622 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
f2697631 3623 || gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_FOR)
bc7bff74 3624 gimple_seq_add_stmt (ilist, build_omp_barrier (NULL_TREE));
3d483a94 3625 }
3626
3627 /* If max_vf is non-zero, then we can use only a vectorization factor
3628 up to the max_vf we chose. So stick it into the safelen clause. */
3629 if (max_vf)
3630 {
3631 tree c = find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
3632 OMP_CLAUSE_SAFELEN);
3633 if (c == NULL_TREE
3634 || compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3635 max_vf) == 1)
3636 {
3637 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
3638 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
3639 max_vf);
3640 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
3641 gimple_omp_for_set_clauses (ctx->stmt, c);
3642 }
3643 }
1e8e9920 3644}
3645
773c5ba7 3646
1e8e9920 3647/* Generate code to implement the LASTPRIVATE clauses. This is used for
3648 both parallel and workshare constructs. PREDICATE may be NULL if it's
3649 always true. */
3650
3651static void
75a70cf9 3652lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
bc7bff74 3653 omp_context *ctx)
1e8e9920 3654{
3d483a94 3655 tree x, c, label = NULL, orig_clauses = clauses;
fd6481cf 3656 bool par_clauses = false;
3d483a94 3657 tree simduid = NULL, lastlane = NULL;
1e8e9920 3658
3d483a94 3659 /* Early exit if there are no lastprivate or linear clauses. */
3660 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
3661 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
3662 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
3663 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
3664 break;
1e8e9920 3665 if (clauses == NULL)
3666 {
3667 /* If this was a workshare clause, see if it had been combined
3668 with its parallel. In that case, look for the clauses on the
3669 parallel statement itself. */
3670 if (is_parallel_ctx (ctx))
3671 return;
3672
3673 ctx = ctx->outer;
3674 if (ctx == NULL || !is_parallel_ctx (ctx))
3675 return;
3676
75a70cf9 3677 clauses = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
1e8e9920 3678 OMP_CLAUSE_LASTPRIVATE);
3679 if (clauses == NULL)
3680 return;
fd6481cf 3681 par_clauses = true;
1e8e9920 3682 }
3683
75a70cf9 3684 if (predicate)
3685 {
3686 gimple stmt;
3687 tree label_true, arm1, arm2;
3688
e60a6f7b 3689 label = create_artificial_label (UNKNOWN_LOCATION);
3690 label_true = create_artificial_label (UNKNOWN_LOCATION);
75a70cf9 3691 arm1 = TREE_OPERAND (predicate, 0);
3692 arm2 = TREE_OPERAND (predicate, 1);
3693 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
3694 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
3695 stmt = gimple_build_cond (TREE_CODE (predicate), arm1, arm2,
3696 label_true, label);
3697 gimple_seq_add_stmt (stmt_list, stmt);
3698 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
3699 }
1e8e9920 3700
3d483a94 3701 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
f2697631 3702 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
3d483a94 3703 {
3704 simduid = find_omp_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
3705 if (simduid)
3706 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
3707 }
3708
fd6481cf 3709 for (c = clauses; c ;)
1e8e9920 3710 {
3711 tree var, new_var;
389dd41b 3712 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 3713
3d483a94 3714 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3715 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3716 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
fd6481cf 3717 {
3718 var = OMP_CLAUSE_DECL (c);
3719 new_var = lookup_decl (var, ctx);
1e8e9920 3720
3d483a94 3721 if (simduid && DECL_HAS_VALUE_EXPR_P (new_var))
3722 {
3723 tree val = DECL_VALUE_EXPR (new_var);
3724 if (TREE_CODE (val) == ARRAY_REF
3725 && VAR_P (TREE_OPERAND (val, 0))
3726 && lookup_attribute ("omp simd array",
3727 DECL_ATTRIBUTES (TREE_OPERAND (val,
3728 0))))
3729 {
3730 if (lastlane == NULL)
3731 {
3732 lastlane = create_tmp_var (unsigned_type_node, NULL);
3733 gimple g
3734 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
3735 2, simduid,
3736 TREE_OPERAND (val, 1));
3737 gimple_call_set_lhs (g, lastlane);
3738 gimple_seq_add_stmt (stmt_list, g);
3739 }
3740 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
3741 TREE_OPERAND (val, 0), lastlane,
3742 NULL_TREE, NULL_TREE);
3743 }
3744 }
3745
3746 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3747 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
75a70cf9 3748 {
e3a19533 3749 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
75a70cf9 3750 gimple_seq_add_seq (stmt_list,
3751 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
3d483a94 3752 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
75a70cf9 3753 }
1e8e9920 3754
fd6481cf 3755 x = build_outer_var_ref (var, ctx);
3756 if (is_reference (var))
182cf5a9 3757 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
fd6481cf 3758 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
75a70cf9 3759 gimplify_and_add (x, stmt_list);
fd6481cf 3760 }
3761 c = OMP_CLAUSE_CHAIN (c);
3762 if (c == NULL && !par_clauses)
3763 {
3764 /* If this was a workshare clause, see if it had been combined
3765 with its parallel. In that case, continue looking for the
3766 clauses also on the parallel statement itself. */
3767 if (is_parallel_ctx (ctx))
3768 break;
3769
3770 ctx = ctx->outer;
3771 if (ctx == NULL || !is_parallel_ctx (ctx))
3772 break;
3773
75a70cf9 3774 c = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
fd6481cf 3775 OMP_CLAUSE_LASTPRIVATE);
3776 par_clauses = true;
3777 }
1e8e9920 3778 }
3779
75a70cf9 3780 if (label)
3781 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
1e8e9920 3782}
3783
773c5ba7 3784
1e8e9920 3785/* Generate code to implement the REDUCTION clauses. */
3786
3787static void
75a70cf9 3788lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
1e8e9920 3789{
75a70cf9 3790 gimple_seq sub_seq = NULL;
3791 gimple stmt;
3792 tree x, c;
1e8e9920 3793 int count = 0;
3794
3d483a94 3795 /* SIMD reductions are handled in lower_rec_input_clauses. */
3796 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
f2697631 3797 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
3d483a94 3798 return;
3799
1e8e9920 3800 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
3801 update in that case, otherwise use a lock. */
3802 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
55d6e7cd 3803 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
1e8e9920 3804 {
3805 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3806 {
bc7bff74 3807 /* Never use OMP_ATOMIC for array reductions or UDRs. */
1e8e9920 3808 count = -1;
3809 break;
3810 }
3811 count++;
3812 }
3813
3814 if (count == 0)
3815 return;
3816
3817 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3818 {
3819 tree var, ref, new_var;
3820 enum tree_code code;
389dd41b 3821 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 3822
55d6e7cd 3823 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
1e8e9920 3824 continue;
3825
3826 var = OMP_CLAUSE_DECL (c);
3827 new_var = lookup_decl (var, ctx);
3828 if (is_reference (var))
182cf5a9 3829 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
1e8e9920 3830 ref = build_outer_var_ref (var, ctx);
3831 code = OMP_CLAUSE_REDUCTION_CODE (c);
773c5ba7 3832
3833 /* reduction(-:var) sums up the partial results, so it acts
3834 identically to reduction(+:var). */
1e8e9920 3835 if (code == MINUS_EXPR)
3836 code = PLUS_EXPR;
3837
3838 if (count == 1)
3839 {
389dd41b 3840 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
1e8e9920 3841
3842 addr = save_expr (addr);
3843 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
389dd41b 3844 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
1e8e9920 3845 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
75a70cf9 3846 gimplify_and_add (x, stmt_seqp);
1e8e9920 3847 return;
3848 }
3849
3850 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3851 {
3852 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3853
bc7bff74 3854 if (is_reference (var)
3855 && !useless_type_conversion_p (TREE_TYPE (placeholder),
3856 TREE_TYPE (ref)))
389dd41b 3857 ref = build_fold_addr_expr_loc (clause_loc, ref);
1e8e9920 3858 SET_DECL_VALUE_EXPR (placeholder, ref);
3859 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
e3a19533 3860 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
75a70cf9 3861 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
3862 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
1e8e9920 3863 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
3864 }
3865 else
3866 {
3867 x = build2 (code, TREE_TYPE (ref), ref, new_var);
3868 ref = build_outer_var_ref (var, ctx);
75a70cf9 3869 gimplify_assign (ref, x, &sub_seq);
1e8e9920 3870 }
3871 }
3872
b9a16870 3873 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
3874 0);
75a70cf9 3875 gimple_seq_add_stmt (stmt_seqp, stmt);
1e8e9920 3876
75a70cf9 3877 gimple_seq_add_seq (stmt_seqp, sub_seq);
1e8e9920 3878
b9a16870 3879 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
3880 0);
75a70cf9 3881 gimple_seq_add_stmt (stmt_seqp, stmt);
1e8e9920 3882}
3883
773c5ba7 3884
1e8e9920 3885/* Generate code to implement the COPYPRIVATE clauses. */
3886
3887static void
75a70cf9 3888lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
1e8e9920 3889 omp_context *ctx)
3890{
3891 tree c;
3892
3893 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3894 {
cb561506 3895 tree var, new_var, ref, x;
1e8e9920 3896 bool by_ref;
389dd41b 3897 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 3898
55d6e7cd 3899 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
1e8e9920 3900 continue;
3901
3902 var = OMP_CLAUSE_DECL (c);
e8a588af 3903 by_ref = use_pointer_for_field (var, NULL);
1e8e9920 3904
3905 ref = build_sender_ref (var, ctx);
cb561506 3906 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
3907 if (by_ref)
3908 {
3909 x = build_fold_addr_expr_loc (clause_loc, new_var);
3910 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
3911 }
75a70cf9 3912 gimplify_assign (ref, x, slist);
1e8e9920 3913
cb561506 3914 ref = build_receiver_ref (var, false, ctx);
3915 if (by_ref)
3916 {
3917 ref = fold_convert_loc (clause_loc,
3918 build_pointer_type (TREE_TYPE (new_var)),
3919 ref);
3920 ref = build_fold_indirect_ref_loc (clause_loc, ref);
3921 }
1e8e9920 3922 if (is_reference (var))
3923 {
cb561506 3924 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
182cf5a9 3925 ref = build_simple_mem_ref_loc (clause_loc, ref);
3926 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
1e8e9920 3927 }
cb561506 3928 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
1e8e9920 3929 gimplify_and_add (x, rlist);
3930 }
3931}
3932
773c5ba7 3933
1e8e9920 3934/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
3935 and REDUCTION from the sender (aka parent) side. */
3936
3937static void
75a70cf9 3938lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
3939 omp_context *ctx)
1e8e9920 3940{
3941 tree c;
3942
3943 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3944 {
773c5ba7 3945 tree val, ref, x, var;
1e8e9920 3946 bool by_ref, do_in = false, do_out = false;
389dd41b 3947 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 3948
55d6e7cd 3949 switch (OMP_CLAUSE_CODE (c))
1e8e9920 3950 {
fd6481cf 3951 case OMP_CLAUSE_PRIVATE:
3952 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
3953 break;
3954 continue;
1e8e9920 3955 case OMP_CLAUSE_FIRSTPRIVATE:
3956 case OMP_CLAUSE_COPYIN:
3957 case OMP_CLAUSE_LASTPRIVATE:
3958 case OMP_CLAUSE_REDUCTION:
bc7bff74 3959 case OMP_CLAUSE__LOOPTEMP_:
1e8e9920 3960 break;
3961 default:
3962 continue;
3963 }
3964
87b31375 3965 val = OMP_CLAUSE_DECL (c);
3966 var = lookup_decl_in_outer_ctx (val, ctx);
773c5ba7 3967
f49d7bb5 3968 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
3969 && is_global_var (var))
3970 continue;
1e8e9920 3971 if (is_variable_sized (val))
3972 continue;
e8a588af 3973 by_ref = use_pointer_for_field (val, NULL);
1e8e9920 3974
55d6e7cd 3975 switch (OMP_CLAUSE_CODE (c))
1e8e9920 3976 {
fd6481cf 3977 case OMP_CLAUSE_PRIVATE:
1e8e9920 3978 case OMP_CLAUSE_FIRSTPRIVATE:
3979 case OMP_CLAUSE_COPYIN:
bc7bff74 3980 case OMP_CLAUSE__LOOPTEMP_:
1e8e9920 3981 do_in = true;
3982 break;
3983
3984 case OMP_CLAUSE_LASTPRIVATE:
3985 if (by_ref || is_reference (val))
3986 {
3987 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3988 continue;
3989 do_in = true;
3990 }
3991 else
fd6481cf 3992 {
3993 do_out = true;
3994 if (lang_hooks.decls.omp_private_outer_ref (val))
3995 do_in = true;
3996 }
1e8e9920 3997 break;
3998
3999 case OMP_CLAUSE_REDUCTION:
4000 do_in = true;
4001 do_out = !(by_ref || is_reference (val));
4002 break;
4003
4004 default:
4005 gcc_unreachable ();
4006 }
4007
4008 if (do_in)
4009 {
4010 ref = build_sender_ref (val, ctx);
389dd41b 4011 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
75a70cf9 4012 gimplify_assign (ref, x, ilist);
fd6481cf 4013 if (is_task_ctx (ctx))
4014 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
1e8e9920 4015 }
773c5ba7 4016
1e8e9920 4017 if (do_out)
4018 {
4019 ref = build_sender_ref (val, ctx);
75a70cf9 4020 gimplify_assign (var, ref, olist);
1e8e9920 4021 }
4022 }
4023}
4024
75a70cf9 4025/* Generate code to implement SHARED from the sender (aka parent)
4026 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
4027 list things that got automatically shared. */
1e8e9920 4028
4029static void
75a70cf9 4030lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
1e8e9920 4031{
fd6481cf 4032 tree var, ovar, nvar, f, x, record_type;
1e8e9920 4033
4034 if (ctx->record_type == NULL)
4035 return;
773c5ba7 4036
fd6481cf 4037 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
1767a056 4038 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
1e8e9920 4039 {
4040 ovar = DECL_ABSTRACT_ORIGIN (f);
4041 nvar = maybe_lookup_decl (ovar, ctx);
4042 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
4043 continue;
4044
773c5ba7 4045 /* If CTX is a nested parallel directive. Find the immediately
4046 enclosing parallel or workshare construct that contains a
4047 mapping for OVAR. */
87b31375 4048 var = lookup_decl_in_outer_ctx (ovar, ctx);
773c5ba7 4049
e8a588af 4050 if (use_pointer_for_field (ovar, ctx))
1e8e9920 4051 {
4052 x = build_sender_ref (ovar, ctx);
773c5ba7 4053 var = build_fold_addr_expr (var);
75a70cf9 4054 gimplify_assign (x, var, ilist);
1e8e9920 4055 }
4056 else
4057 {
4058 x = build_sender_ref (ovar, ctx);
75a70cf9 4059 gimplify_assign (x, var, ilist);
1e8e9920 4060
d2263ebb 4061 if (!TREE_READONLY (var)
4062 /* We don't need to receive a new reference to a result
4063 or parm decl. In fact we may not store to it as we will
4064 invalidate any pending RSO and generate wrong gimple
4065 during inlining. */
4066 && !((TREE_CODE (var) == RESULT_DECL
4067 || TREE_CODE (var) == PARM_DECL)
4068 && DECL_BY_REFERENCE (var)))
fd6481cf 4069 {
4070 x = build_sender_ref (ovar, ctx);
75a70cf9 4071 gimplify_assign (var, x, olist);
fd6481cf 4072 }
1e8e9920 4073 }
4074 }
4075}
4076
75a70cf9 4077
4078/* A convenience function to build an empty GIMPLE_COND with just the
4079 condition. */
4080
4081static gimple
4082gimple_build_cond_empty (tree cond)
4083{
4084 enum tree_code pred_code;
4085 tree lhs, rhs;
4086
4087 gimple_cond_get_ops_from_tree (cond, &pred_code, &lhs, &rhs);
4088 return gimple_build_cond (pred_code, lhs, rhs, NULL_TREE, NULL_TREE);
4089}
4090
4091
48e1416a 4092/* Build the function calls to GOMP_parallel_start etc to actually
773c5ba7 4093 generate the parallel operation. REGION is the parallel region
4094 being expanded. BB is the block where to insert the code. WS_ARGS
4095 will be set if this is a call to a combined parallel+workshare
4096 construct, it contains the list of additional arguments needed by
4097 the workshare construct. */
1e8e9920 4098
4099static void
61e47ac8 4100expand_parallel_call (struct omp_region *region, basic_block bb,
f1f41a6c 4101 gimple entry_stmt, vec<tree, va_gc> *ws_args)
1e8e9920 4102{
bc7bff74 4103 tree t, t1, t2, val, cond, c, clauses, flags;
75a70cf9 4104 gimple_stmt_iterator gsi;
4105 gimple stmt;
b9a16870 4106 enum built_in_function start_ix;
4107 int start_ix2;
389dd41b 4108 location_t clause_loc;
f1f41a6c 4109 vec<tree, va_gc> *args;
773c5ba7 4110
75a70cf9 4111 clauses = gimple_omp_parallel_clauses (entry_stmt);
773c5ba7 4112
bc7bff74 4113 /* Determine what flavor of GOMP_parallel we will be
773c5ba7 4114 emitting. */
bc7bff74 4115 start_ix = BUILT_IN_GOMP_PARALLEL;
773c5ba7 4116 if (is_combined_parallel (region))
4117 {
61e47ac8 4118 switch (region->inner->type)
773c5ba7 4119 {
75a70cf9 4120 case GIMPLE_OMP_FOR:
fd6481cf 4121 gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
bc7bff74 4122 start_ix2 = ((int)BUILT_IN_GOMP_PARALLEL_LOOP_STATIC
b9a16870 4123 + (region->inner->sched_kind
4124 == OMP_CLAUSE_SCHEDULE_RUNTIME
4125 ? 3 : region->inner->sched_kind));
4126 start_ix = (enum built_in_function)start_ix2;
61e47ac8 4127 break;
75a70cf9 4128 case GIMPLE_OMP_SECTIONS:
bc7bff74 4129 start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS;
61e47ac8 4130 break;
4131 default:
4132 gcc_unreachable ();
773c5ba7 4133 }
773c5ba7 4134 }
1e8e9920 4135
4136 /* By default, the value of NUM_THREADS is zero (selected at run time)
4137 and there is no conditional. */
4138 cond = NULL_TREE;
4139 val = build_int_cst (unsigned_type_node, 0);
bc7bff74 4140 flags = build_int_cst (unsigned_type_node, 0);
1e8e9920 4141
4142 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
4143 if (c)
4144 cond = OMP_CLAUSE_IF_EXPR (c);
4145
4146 c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
4147 if (c)
389dd41b 4148 {
4149 val = OMP_CLAUSE_NUM_THREADS_EXPR (c);
4150 clause_loc = OMP_CLAUSE_LOCATION (c);
4151 }
4152 else
4153 clause_loc = gimple_location (entry_stmt);
1e8e9920 4154
bc7bff74 4155 c = find_omp_clause (clauses, OMP_CLAUSE_PROC_BIND);
4156 if (c)
4157 flags = build_int_cst (unsigned_type_node, OMP_CLAUSE_PROC_BIND_KIND (c));
4158
1e8e9920 4159 /* Ensure 'val' is of the correct type. */
389dd41b 4160 val = fold_convert_loc (clause_loc, unsigned_type_node, val);
1e8e9920 4161
4162 /* If we found the clause 'if (cond)', build either
4163 (cond != 0) or (cond ? val : 1u). */
4164 if (cond)
4165 {
75a70cf9 4166 gimple_stmt_iterator gsi;
773c5ba7 4167
4168 cond = gimple_boolify (cond);
4169
1e8e9920 4170 if (integer_zerop (val))
389dd41b 4171 val = fold_build2_loc (clause_loc,
4172 EQ_EXPR, unsigned_type_node, cond,
79acaae1 4173 build_int_cst (TREE_TYPE (cond), 0));
1e8e9920 4174 else
773c5ba7 4175 {
4176 basic_block cond_bb, then_bb, else_bb;
79acaae1 4177 edge e, e_then, e_else;
75a70cf9 4178 tree tmp_then, tmp_else, tmp_join, tmp_var;
79acaae1 4179
4180 tmp_var = create_tmp_var (TREE_TYPE (val), NULL);
4181 if (gimple_in_ssa_p (cfun))
4182 {
75a70cf9 4183 tmp_then = make_ssa_name (tmp_var, NULL);
4184 tmp_else = make_ssa_name (tmp_var, NULL);
4185 tmp_join = make_ssa_name (tmp_var, NULL);
79acaae1 4186 }
4187 else
4188 {
4189 tmp_then = tmp_var;
4190 tmp_else = tmp_var;
4191 tmp_join = tmp_var;
4192 }
773c5ba7 4193
773c5ba7 4194 e = split_block (bb, NULL);
4195 cond_bb = e->src;
4196 bb = e->dest;
4197 remove_edge (e);
4198
4199 then_bb = create_empty_bb (cond_bb);
4200 else_bb = create_empty_bb (then_bb);
79acaae1 4201 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
4202 set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);
773c5ba7 4203
75a70cf9 4204 stmt = gimple_build_cond_empty (cond);
4205 gsi = gsi_start_bb (cond_bb);
4206 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
773c5ba7 4207
75a70cf9 4208 gsi = gsi_start_bb (then_bb);
4209 stmt = gimple_build_assign (tmp_then, val);
4210 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
773c5ba7 4211
75a70cf9 4212 gsi = gsi_start_bb (else_bb);
4213 stmt = gimple_build_assign
4214 (tmp_else, build_int_cst (unsigned_type_node, 1));
4215 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
773c5ba7 4216
4217 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
4218 make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
f6568ea4 4219 if (current_loops)
4220 {
4221 add_bb_to_loop (then_bb, cond_bb->loop_father);
4222 add_bb_to_loop (else_bb, cond_bb->loop_father);
4223 }
79acaae1 4224 e_then = make_edge (then_bb, bb, EDGE_FALLTHRU);
4225 e_else = make_edge (else_bb, bb, EDGE_FALLTHRU);
773c5ba7 4226
79acaae1 4227 if (gimple_in_ssa_p (cfun))
4228 {
75a70cf9 4229 gimple phi = create_phi_node (tmp_join, bb);
60d535d2 4230 add_phi_arg (phi, tmp_then, e_then, UNKNOWN_LOCATION);
4231 add_phi_arg (phi, tmp_else, e_else, UNKNOWN_LOCATION);
79acaae1 4232 }
4233
4234 val = tmp_join;
773c5ba7 4235 }
4236
75a70cf9 4237 gsi = gsi_start_bb (bb);
4238 val = force_gimple_operand_gsi (&gsi, val, true, NULL_TREE,
4239 false, GSI_CONTINUE_LINKING);
1e8e9920 4240 }
4241
75a70cf9 4242 gsi = gsi_last_bb (bb);
4243 t = gimple_omp_parallel_data_arg (entry_stmt);
1e8e9920 4244 if (t == NULL)
c2f47e15 4245 t1 = null_pointer_node;
1e8e9920 4246 else
c2f47e15 4247 t1 = build_fold_addr_expr (t);
75a70cf9 4248 t2 = build_fold_addr_expr (gimple_omp_parallel_child_fn (entry_stmt));
773c5ba7 4249
bc7bff74 4250 vec_alloc (args, 4 + vec_safe_length (ws_args));
f1f41a6c 4251 args->quick_push (t2);
4252 args->quick_push (t1);
4253 args->quick_push (val);
4254 if (ws_args)
4255 args->splice (*ws_args);
bc7bff74 4256 args->quick_push (flags);
414c3a2c 4257
4258 t = build_call_expr_loc_vec (UNKNOWN_LOCATION,
b9a16870 4259 builtin_decl_explicit (start_ix), args);
773c5ba7 4260
75a70cf9 4261 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4262 false, GSI_CONTINUE_LINKING);
1e8e9920 4263}
4264
773c5ba7 4265
fd6481cf 4266/* Build the function call to GOMP_task to actually
4267 generate the task operation. BB is the block where to insert the code. */
4268
4269static void
75a70cf9 4270expand_task_call (basic_block bb, gimple entry_stmt)
fd6481cf 4271{
bc7bff74 4272 tree t, t1, t2, t3, flags, cond, c, c2, clauses, depend;
75a70cf9 4273 gimple_stmt_iterator gsi;
389dd41b 4274 location_t loc = gimple_location (entry_stmt);
fd6481cf 4275
75a70cf9 4276 clauses = gimple_omp_task_clauses (entry_stmt);
fd6481cf 4277
fd6481cf 4278 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
4279 if (c)
4280 cond = gimple_boolify (OMP_CLAUSE_IF_EXPR (c));
4281 else
4282 cond = boolean_true_node;
4283
4284 c = find_omp_clause (clauses, OMP_CLAUSE_UNTIED);
2169f33b 4285 c2 = find_omp_clause (clauses, OMP_CLAUSE_MERGEABLE);
bc7bff74 4286 depend = find_omp_clause (clauses, OMP_CLAUSE_DEPEND);
2169f33b 4287 flags = build_int_cst (unsigned_type_node,
bc7bff74 4288 (c ? 1 : 0) + (c2 ? 4 : 0) + (depend ? 8 : 0));
2169f33b 4289
4290 c = find_omp_clause (clauses, OMP_CLAUSE_FINAL);
4291 if (c)
4292 {
4293 c = gimple_boolify (OMP_CLAUSE_FINAL_EXPR (c));
4294 c = fold_build3_loc (loc, COND_EXPR, unsigned_type_node, c,
4295 build_int_cst (unsigned_type_node, 2),
4296 build_int_cst (unsigned_type_node, 0));
4297 flags = fold_build2_loc (loc, PLUS_EXPR, unsigned_type_node, flags, c);
4298 }
bc7bff74 4299 if (depend)
4300 depend = OMP_CLAUSE_DECL (depend);
4301 else
4302 depend = build_int_cst (ptr_type_node, 0);
fd6481cf 4303
75a70cf9 4304 gsi = gsi_last_bb (bb);
4305 t = gimple_omp_task_data_arg (entry_stmt);
fd6481cf 4306 if (t == NULL)
4307 t2 = null_pointer_node;
4308 else
389dd41b 4309 t2 = build_fold_addr_expr_loc (loc, t);
4310 t1 = build_fold_addr_expr_loc (loc, gimple_omp_task_child_fn (entry_stmt));
75a70cf9 4311 t = gimple_omp_task_copy_fn (entry_stmt);
fd6481cf 4312 if (t == NULL)
4313 t3 = null_pointer_node;
4314 else
389dd41b 4315 t3 = build_fold_addr_expr_loc (loc, t);
fd6481cf 4316
b9a16870 4317 t = build_call_expr (builtin_decl_explicit (BUILT_IN_GOMP_TASK),
bc7bff74 4318 8, t1, t2, t3,
75a70cf9 4319 gimple_omp_task_arg_size (entry_stmt),
bc7bff74 4320 gimple_omp_task_arg_align (entry_stmt), cond, flags,
4321 depend);
fd6481cf 4322
75a70cf9 4323 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4324 false, GSI_CONTINUE_LINKING);
fd6481cf 4325}
4326
4327
75a70cf9 4328/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
4329 catch handler and return it. This prevents programs from violating the
4330 structured block semantics with throws. */
1e8e9920 4331
75a70cf9 4332static gimple_seq
4333maybe_catch_exception (gimple_seq body)
1e8e9920 4334{
e38def9c 4335 gimple g;
4336 tree decl;
1e8e9920 4337
4338 if (!flag_exceptions)
75a70cf9 4339 return body;
1e8e9920 4340
596981c8 4341 if (lang_hooks.eh_protect_cleanup_actions != NULL)
4342 decl = lang_hooks.eh_protect_cleanup_actions ();
1e8e9920 4343 else
b9a16870 4344 decl = builtin_decl_explicit (BUILT_IN_TRAP);
75a70cf9 4345
e38def9c 4346 g = gimple_build_eh_must_not_throw (decl);
4347 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
75a70cf9 4348 GIMPLE_TRY_CATCH);
1e8e9920 4349
e38def9c 4350 return gimple_seq_alloc_with_stmt (g);
1e8e9920 4351}
4352
773c5ba7 4353/* Chain all the DECLs in LIST by their TREE_CHAIN fields. */
1e8e9920 4354
773c5ba7 4355static tree
f1f41a6c 4356vec2chain (vec<tree, va_gc> *v)
1e8e9920 4357{
2ab2ce89 4358 tree chain = NULL_TREE, t;
4359 unsigned ix;
1e8e9920 4360
f1f41a6c 4361 FOR_EACH_VEC_SAFE_ELT_REVERSE (v, ix, t)
773c5ba7 4362 {
1767a056 4363 DECL_CHAIN (t) = chain;
2ab2ce89 4364 chain = t;
773c5ba7 4365 }
1e8e9920 4366
2ab2ce89 4367 return chain;
773c5ba7 4368}
1e8e9920 4369
1e8e9920 4370
773c5ba7 4371/* Remove barriers in REGION->EXIT's block. Note that this is only
75a70cf9 4372 valid for GIMPLE_OMP_PARALLEL regions. Since the end of a parallel region
4373 is an implicit barrier, any workshare inside the GIMPLE_OMP_PARALLEL that
4374 left a barrier at the end of the GIMPLE_OMP_PARALLEL region can now be
773c5ba7 4375 removed. */
1e8e9920 4376
773c5ba7 4377static void
4378remove_exit_barrier (struct omp_region *region)
4379{
75a70cf9 4380 gimple_stmt_iterator gsi;
773c5ba7 4381 basic_block exit_bb;
61e47ac8 4382 edge_iterator ei;
4383 edge e;
75a70cf9 4384 gimple stmt;
4a04f4b4 4385 int any_addressable_vars = -1;
1e8e9920 4386
61e47ac8 4387 exit_bb = region->exit;
1e8e9920 4388
5056ba1a 4389 /* If the parallel region doesn't return, we don't have REGION->EXIT
4390 block at all. */
4391 if (! exit_bb)
4392 return;
4393
75a70cf9 4394 /* The last insn in the block will be the parallel's GIMPLE_OMP_RETURN. The
4395 workshare's GIMPLE_OMP_RETURN will be in a preceding block. The kinds of
61e47ac8 4396 statements that can appear in between are extremely limited -- no
4397 memory operations at all. Here, we allow nothing at all, so the
75a70cf9 4398 only thing we allow to precede this GIMPLE_OMP_RETURN is a label. */
4399 gsi = gsi_last_bb (exit_bb);
4400 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
4401 gsi_prev (&gsi);
4402 if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
773c5ba7 4403 return;
1e8e9920 4404
61e47ac8 4405 FOR_EACH_EDGE (e, ei, exit_bb->preds)
4406 {
75a70cf9 4407 gsi = gsi_last_bb (e->src);
4408 if (gsi_end_p (gsi))
61e47ac8 4409 continue;
75a70cf9 4410 stmt = gsi_stmt (gsi);
4a04f4b4 4411 if (gimple_code (stmt) == GIMPLE_OMP_RETURN
4412 && !gimple_omp_return_nowait_p (stmt))
4413 {
4414 /* OpenMP 3.0 tasks unfortunately prevent this optimization
4415 in many cases. If there could be tasks queued, the barrier
4416 might be needed to let the tasks run before some local
4417 variable of the parallel that the task uses as shared
4418 runs out of scope. The task can be spawned either
4419 from within current function (this would be easy to check)
4420 or from some function it calls and gets passed an address
4421 of such a variable. */
4422 if (any_addressable_vars < 0)
4423 {
4424 gimple parallel_stmt = last_stmt (region->entry);
4425 tree child_fun = gimple_omp_parallel_child_fn (parallel_stmt);
2ab2ce89 4426 tree local_decls, block, decl;
4427 unsigned ix;
4a04f4b4 4428
4429 any_addressable_vars = 0;
2ab2ce89 4430 FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (child_fun), ix, decl)
4431 if (TREE_ADDRESSABLE (decl))
4a04f4b4 4432 {
4433 any_addressable_vars = 1;
4434 break;
4435 }
4436 for (block = gimple_block (stmt);
4437 !any_addressable_vars
4438 && block
4439 && TREE_CODE (block) == BLOCK;
4440 block = BLOCK_SUPERCONTEXT (block))
4441 {
4442 for (local_decls = BLOCK_VARS (block);
4443 local_decls;
1767a056 4444 local_decls = DECL_CHAIN (local_decls))
4a04f4b4 4445 if (TREE_ADDRESSABLE (local_decls))
4446 {
4447 any_addressable_vars = 1;
4448 break;
4449 }
4450 if (block == gimple_block (parallel_stmt))
4451 break;
4452 }
4453 }
4454 if (!any_addressable_vars)
4455 gimple_omp_return_set_nowait (stmt);
4456 }
61e47ac8 4457 }
1e8e9920 4458}
4459
61e47ac8 4460static void
4461remove_exit_barriers (struct omp_region *region)
4462{
75a70cf9 4463 if (region->type == GIMPLE_OMP_PARALLEL)
61e47ac8 4464 remove_exit_barrier (region);
4465
4466 if (region->inner)
4467 {
4468 region = region->inner;
4469 remove_exit_barriers (region);
4470 while (region->next)
4471 {
4472 region = region->next;
4473 remove_exit_barriers (region);
4474 }
4475 }
4476}
773c5ba7 4477
658b4427 4478/* Optimize omp_get_thread_num () and omp_get_num_threads ()
4479 calls. These can't be declared as const functions, but
4480 within one parallel body they are constant, so they can be
4481 transformed there into __builtin_omp_get_{thread_num,num_threads} ()
fd6481cf 4482 which are declared const. Similarly for task body, except
4483 that in untied task omp_get_thread_num () can change at any task
4484 scheduling point. */
658b4427 4485
4486static void
75a70cf9 4487optimize_omp_library_calls (gimple entry_stmt)
658b4427 4488{
4489 basic_block bb;
75a70cf9 4490 gimple_stmt_iterator gsi;
b9a16870 4491 tree thr_num_tree = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4492 tree thr_num_id = DECL_ASSEMBLER_NAME (thr_num_tree);
4493 tree num_thr_tree = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
4494 tree num_thr_id = DECL_ASSEMBLER_NAME (num_thr_tree);
75a70cf9 4495 bool untied_task = (gimple_code (entry_stmt) == GIMPLE_OMP_TASK
4496 && find_omp_clause (gimple_omp_task_clauses (entry_stmt),
fd6481cf 4497 OMP_CLAUSE_UNTIED) != NULL);
658b4427 4498
4499 FOR_EACH_BB (bb)
75a70cf9 4500 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
658b4427 4501 {
75a70cf9 4502 gimple call = gsi_stmt (gsi);
658b4427 4503 tree decl;
4504
75a70cf9 4505 if (is_gimple_call (call)
4506 && (decl = gimple_call_fndecl (call))
658b4427 4507 && DECL_EXTERNAL (decl)
4508 && TREE_PUBLIC (decl)
4509 && DECL_INITIAL (decl) == NULL)
4510 {
4511 tree built_in;
4512
4513 if (DECL_NAME (decl) == thr_num_id)
fd6481cf 4514 {
4515 /* In #pragma omp task untied omp_get_thread_num () can change
4516 during the execution of the task region. */
4517 if (untied_task)
4518 continue;
b9a16870 4519 built_in = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
fd6481cf 4520 }
658b4427 4521 else if (DECL_NAME (decl) == num_thr_id)
b9a16870 4522 built_in = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
658b4427 4523 else
4524 continue;
4525
4526 if (DECL_ASSEMBLER_NAME (decl) != DECL_ASSEMBLER_NAME (built_in)
75a70cf9 4527 || gimple_call_num_args (call) != 0)
658b4427 4528 continue;
4529
4530 if (flag_exceptions && !TREE_NOTHROW (decl))
4531 continue;
4532
4533 if (TREE_CODE (TREE_TYPE (decl)) != FUNCTION_TYPE
1ea6a73c 4534 || !types_compatible_p (TREE_TYPE (TREE_TYPE (decl)),
4535 TREE_TYPE (TREE_TYPE (built_in))))
658b4427 4536 continue;
4537
0acacf9e 4538 gimple_call_set_fndecl (call, built_in);
658b4427 4539 }
4540 }
4541}
4542
8e6b4515 4543/* Callback for expand_omp_build_assign. Return non-NULL if *tp needs to be
4544 regimplified. */
4545
4546static tree
4547expand_omp_regimplify_p (tree *tp, int *walk_subtrees, void *)
4548{
4549 tree t = *tp;
4550
4551 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
4552 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
4553 return t;
4554
4555 if (TREE_CODE (t) == ADDR_EXPR)
4556 recompute_tree_invariant_for_addr_expr (t);
4557
4558 *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
4559 return NULL_TREE;
4560}
4561
3d483a94 4562/* Prepend TO = FROM assignment before *GSI_P. */
4563
4564static void
4565expand_omp_build_assign (gimple_stmt_iterator *gsi_p, tree to, tree from)
4566{
4567 bool simple_p = DECL_P (to) && TREE_ADDRESSABLE (to);
4568 from = force_gimple_operand_gsi (gsi_p, from, simple_p, NULL_TREE,
4569 true, GSI_SAME_STMT);
4570 gimple stmt = gimple_build_assign (to, from);
4571 gsi_insert_before (gsi_p, stmt, GSI_SAME_STMT);
4572 if (walk_tree (&from, expand_omp_regimplify_p, NULL, NULL)
4573 || walk_tree (&to, expand_omp_regimplify_p, NULL, NULL))
4574 {
4575 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4576 gimple_regimplify_operands (stmt, &gsi);
4577 }
4578}
4579
fd6481cf 4580/* Expand the OpenMP parallel or task directive starting at REGION. */
1e8e9920 4581
4582static void
fd6481cf 4583expand_omp_taskreg (struct omp_region *region)
1e8e9920 4584{
773c5ba7 4585 basic_block entry_bb, exit_bb, new_bb;
87d4aa85 4586 struct function *child_cfun;
414c3a2c 4587 tree child_fn, block, t;
75a70cf9 4588 gimple_stmt_iterator gsi;
4589 gimple entry_stmt, stmt;
773c5ba7 4590 edge e;
f1f41a6c 4591 vec<tree, va_gc> *ws_args;
773c5ba7 4592
61e47ac8 4593 entry_stmt = last_stmt (region->entry);
75a70cf9 4594 child_fn = gimple_omp_taskreg_child_fn (entry_stmt);
773c5ba7 4595 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
773c5ba7 4596
61e47ac8 4597 entry_bb = region->entry;
4598 exit_bb = region->exit;
773c5ba7 4599
773c5ba7 4600 if (is_combined_parallel (region))
61e47ac8 4601 ws_args = region->ws_args;
773c5ba7 4602 else
414c3a2c 4603 ws_args = NULL;
1e8e9920 4604
61e47ac8 4605 if (child_cfun->cfg)
1e8e9920 4606 {
773c5ba7 4607 /* Due to inlining, it may happen that we have already outlined
4608 the region, in which case all we need to do is make the
4609 sub-graph unreachable and emit the parallel call. */
4610 edge entry_succ_e, exit_succ_e;
75a70cf9 4611 gimple_stmt_iterator gsi;
773c5ba7 4612
4613 entry_succ_e = single_succ_edge (entry_bb);
773c5ba7 4614
75a70cf9 4615 gsi = gsi_last_bb (entry_bb);
4616 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_PARALLEL
4617 || gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_TASK);
4618 gsi_remove (&gsi, true);
773c5ba7 4619
4620 new_bb = entry_bb;
03ed154b 4621 if (exit_bb)
4622 {
4623 exit_succ_e = single_succ_edge (exit_bb);
4624 make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
4625 }
79acaae1 4626 remove_edge_and_dominated_blocks (entry_succ_e);
1e8e9920 4627 }
773c5ba7 4628 else
4629 {
501bdd19 4630 unsigned srcidx, dstidx, num;
2ab2ce89 4631
773c5ba7 4632 /* If the parallel region needs data sent from the parent
3480139d 4633 function, then the very first statement (except possible
4634 tree profile counter updates) of the parallel body
773c5ba7 4635 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O. Since
4636 &.OMP_DATA_O is passed as an argument to the child function,
4637 we need to replace it with the argument as seen by the child
4638 function.
4639
4640 In most cases, this will end up being the identity assignment
4641 .OMP_DATA_I = .OMP_DATA_I. However, if the parallel body had
4642 a function call that has been inlined, the original PARM_DECL
4643 .OMP_DATA_I may have been converted into a different local
4644 variable. In which case, we need to keep the assignment. */
75a70cf9 4645 if (gimple_omp_taskreg_data_arg (entry_stmt))
773c5ba7 4646 {
4647 basic_block entry_succ_bb = single_succ (entry_bb);
75a70cf9 4648 gimple_stmt_iterator gsi;
4649 tree arg, narg;
4650 gimple parcopy_stmt = NULL;
1e8e9920 4651
75a70cf9 4652 for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
3480139d 4653 {
75a70cf9 4654 gimple stmt;
3480139d 4655
75a70cf9 4656 gcc_assert (!gsi_end_p (gsi));
4657 stmt = gsi_stmt (gsi);
4658 if (gimple_code (stmt) != GIMPLE_ASSIGN)
cc6b725b 4659 continue;
4660
75a70cf9 4661 if (gimple_num_ops (stmt) == 2)
3480139d 4662 {
75a70cf9 4663 tree arg = gimple_assign_rhs1 (stmt);
4664
4665 /* We're ignore the subcode because we're
4666 effectively doing a STRIP_NOPS. */
4667
4668 if (TREE_CODE (arg) == ADDR_EXPR
4669 && TREE_OPERAND (arg, 0)
4670 == gimple_omp_taskreg_data_arg (entry_stmt))
4671 {
4672 parcopy_stmt = stmt;
4673 break;
4674 }
3480139d 4675 }
4676 }
79acaae1 4677
75a70cf9 4678 gcc_assert (parcopy_stmt != NULL);
79acaae1 4679 arg = DECL_ARGUMENTS (child_fn);
4680
4681 if (!gimple_in_ssa_p (cfun))
4682 {
75a70cf9 4683 if (gimple_assign_lhs (parcopy_stmt) == arg)
4684 gsi_remove (&gsi, true);
79acaae1 4685 else
75a70cf9 4686 {
4687 /* ?? Is setting the subcode really necessary ?? */
4688 gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (arg));
4689 gimple_assign_set_rhs1 (parcopy_stmt, arg);
4690 }
79acaae1 4691 }
4692 else
4693 {
4694 /* If we are in ssa form, we must load the value from the default
4695 definition of the argument. That should not be defined now,
4696 since the argument is not used uninitialized. */
c6dfe037 4697 gcc_assert (ssa_default_def (cfun, arg) == NULL);
75a70cf9 4698 narg = make_ssa_name (arg, gimple_build_nop ());
c6dfe037 4699 set_ssa_default_def (cfun, arg, narg);
75a70cf9 4700 /* ?? Is setting the subcode really necessary ?? */
4701 gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (narg));
4702 gimple_assign_set_rhs1 (parcopy_stmt, narg);
79acaae1 4703 update_stmt (parcopy_stmt);
4704 }
773c5ba7 4705 }
4706
4707 /* Declare local variables needed in CHILD_CFUN. */
4708 block = DECL_INITIAL (child_fn);
2ab2ce89 4709 BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
e1a7ccb9 4710 /* The gimplifier could record temporaries in parallel/task block
4711 rather than in containing function's local_decls chain,
4712 which would mean cgraph missed finalizing them. Do it now. */
1767a056 4713 for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
e1a7ccb9 4714 if (TREE_CODE (t) == VAR_DECL
4715 && TREE_STATIC (t)
4716 && !DECL_EXTERNAL (t))
4717 varpool_finalize_decl (t);
75a70cf9 4718 DECL_SAVED_TREE (child_fn) = NULL;
e3a19533 4719 /* We'll create a CFG for child_fn, so no gimple body is needed. */
4720 gimple_set_body (child_fn, NULL);
1d22f541 4721 TREE_USED (block) = 1;
773c5ba7 4722
79acaae1 4723 /* Reset DECL_CONTEXT on function arguments. */
1767a056 4724 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
773c5ba7 4725 DECL_CONTEXT (t) = child_fn;
4726
75a70cf9 4727 /* Split ENTRY_BB at GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK,
4728 so that it can be moved to the child function. */
4729 gsi = gsi_last_bb (entry_bb);
4730 stmt = gsi_stmt (gsi);
4731 gcc_assert (stmt && (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
4732 || gimple_code (stmt) == GIMPLE_OMP_TASK));
4733 gsi_remove (&gsi, true);
4734 e = split_block (entry_bb, stmt);
773c5ba7 4735 entry_bb = e->dest;
4736 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
4737
75a70cf9 4738 /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR. */
5056ba1a 4739 if (exit_bb)
4740 {
75a70cf9 4741 gsi = gsi_last_bb (exit_bb);
4742 gcc_assert (!gsi_end_p (gsi)
4743 && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
4744 stmt = gimple_build_return (NULL);
4745 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
4746 gsi_remove (&gsi, true);
5056ba1a 4747 }
79acaae1 4748
4749 /* Move the parallel region into CHILD_CFUN. */
48e1416a 4750
79acaae1 4751 if (gimple_in_ssa_p (cfun))
4752 {
bcaa2770 4753 init_tree_ssa (child_cfun);
5084b2e4 4754 init_ssa_operands (child_cfun);
4755 child_cfun->gimple_df->in_ssa_p = true;
1d22f541 4756 block = NULL_TREE;
79acaae1 4757 }
1d22f541 4758 else
75a70cf9 4759 block = gimple_block (entry_stmt);
1d22f541 4760
4761 new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
79acaae1 4762 if (exit_bb)
4763 single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
04c2922b 4764 /* When the OMP expansion process cannot guarantee an up-to-date
4765 loop tree arrange for the child function to fixup loops. */
4766 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
4767 child_cfun->x_current_loops->state |= LOOPS_NEED_FIXUP;
79acaae1 4768
1d22f541 4769 /* Remove non-local VAR_DECLs from child_cfun->local_decls list. */
f1f41a6c 4770 num = vec_safe_length (child_cfun->local_decls);
501bdd19 4771 for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
4772 {
f1f41a6c 4773 t = (*child_cfun->local_decls)[srcidx];
501bdd19 4774 if (DECL_CONTEXT (t) == cfun->decl)
4775 continue;
4776 if (srcidx != dstidx)
f1f41a6c 4777 (*child_cfun->local_decls)[dstidx] = t;
501bdd19 4778 dstidx++;
4779 }
4780 if (dstidx != num)
f1f41a6c 4781 vec_safe_truncate (child_cfun->local_decls, dstidx);
1d22f541 4782
79acaae1 4783 /* Inform the callgraph about the new function. */
82b40354 4784 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
79acaae1 4785 cgraph_add_new_function (child_fn, true);
4786
4787 /* Fix the callgraph edges for child_cfun. Those for cfun will be
4788 fixed in a following pass. */
4789 push_cfun (child_cfun);
658b4427 4790 if (optimize)
fd6481cf 4791 optimize_omp_library_calls (entry_stmt);
79acaae1 4792 rebuild_cgraph_edges ();
fbe86b1b 4793
4794 /* Some EH regions might become dead, see PR34608. If
4795 pass_cleanup_cfg isn't the first pass to happen with the
4796 new child, these dead EH edges might cause problems.
4797 Clean them up now. */
4798 if (flag_exceptions)
4799 {
4800 basic_block bb;
fbe86b1b 4801 bool changed = false;
4802
fbe86b1b 4803 FOR_EACH_BB (bb)
75a70cf9 4804 changed |= gimple_purge_dead_eh_edges (bb);
fbe86b1b 4805 if (changed)
4806 cleanup_tree_cfg ();
fbe86b1b 4807 }
dd277d48 4808 if (gimple_in_ssa_p (cfun))
4809 update_ssa (TODO_update_ssa);
79acaae1 4810 pop_cfun ();
773c5ba7 4811 }
48e1416a 4812
773c5ba7 4813 /* Emit a library call to launch the children threads. */
75a70cf9 4814 if (gimple_code (entry_stmt) == GIMPLE_OMP_PARALLEL)
fd6481cf 4815 expand_parallel_call (region, new_bb, entry_stmt, ws_args);
4816 else
4817 expand_task_call (new_bb, entry_stmt);
083152fb 4818 if (gimple_in_ssa_p (cfun))
4819 update_ssa (TODO_update_ssa_only_virtuals);
1e8e9920 4820}
4821
773c5ba7 4822
3d483a94 4823/* Helper function for expand_omp_{for_*,simd}. If this is the outermost
4824 of the combined collapse > 1 loop constructs, generate code like:
4825 if (__builtin_expect (N32 cond3 N31, 0)) goto ZERO_ITER_BB;
4826 if (cond3 is <)
4827 adj = STEP3 - 1;
4828 else
4829 adj = STEP3 + 1;
4830 count3 = (adj + N32 - N31) / STEP3;
4831 if (__builtin_expect (N22 cond2 N21, 0)) goto ZERO_ITER_BB;
4832 if (cond2 is <)
4833 adj = STEP2 - 1;
4834 else
4835 adj = STEP2 + 1;
4836 count2 = (adj + N22 - N21) / STEP2;
4837 if (__builtin_expect (N12 cond1 N11, 0)) goto ZERO_ITER_BB;
4838 if (cond1 is <)
4839 adj = STEP1 - 1;
4840 else
4841 adj = STEP1 + 1;
4842 count1 = (adj + N12 - N11) / STEP1;
4843 count = count1 * count2 * count3;
4844 Furthermore, if ZERO_ITER_BB is NULL, create a BB which does:
4845 count = 0;
bc7bff74 4846 and set ZERO_ITER_BB to that bb. If this isn't the outermost
4847 of the combined loop constructs, just initialize COUNTS array
4848 from the _looptemp_ clauses. */
3d483a94 4849
4850/* NOTE: It *could* be better to moosh all of the BBs together,
4851 creating one larger BB with all the computation and the unexpected
4852 jump at the end. I.e.
4853
4854 bool zero3, zero2, zero1, zero;
4855
4856 zero3 = N32 c3 N31;
4857 count3 = (N32 - N31) /[cl] STEP3;
4858 zero2 = N22 c2 N21;
4859 count2 = (N22 - N21) /[cl] STEP2;
4860 zero1 = N12 c1 N11;
4861 count1 = (N12 - N11) /[cl] STEP1;
4862 zero = zero3 || zero2 || zero1;
4863 count = count1 * count2 * count3;
4864 if (__builtin_expect(zero, false)) goto zero_iter_bb;
4865
4866 After all, we expect the zero=false, and thus we expect to have to
4867 evaluate all of the comparison expressions, so short-circuiting
4868 oughtn't be a win. Since the condition isn't protecting a
4869 denominator, we're not concerned about divide-by-zero, so we can
4870 fully evaluate count even if a numerator turned out to be wrong.
4871
4872 It seems like putting this all together would create much better
4873 scheduling opportunities, and less pressure on the chip's branch
4874 predictor. */
4875
4876static void
4877expand_omp_for_init_counts (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
4878 basic_block &entry_bb, tree *counts,
4879 basic_block &zero_iter_bb, int &first_zero_iter,
4880 basic_block &l2_dom_bb)
4881{
4882 tree t, type = TREE_TYPE (fd->loop.v);
4883 gimple stmt;
4884 edge e, ne;
4885 int i;
4886
4887 /* Collapsed loops need work for expansion into SSA form. */
4888 gcc_assert (!gimple_in_ssa_p (cfun));
4889
bc7bff74 4890 if (gimple_omp_for_combined_into_p (fd->for_stmt)
4891 && TREE_CODE (fd->loop.n2) != INTEGER_CST)
4892 {
4893 /* First two _looptemp_ clauses are for istart/iend, counts[0]
4894 isn't supposed to be handled, as the inner loop doesn't
4895 use it. */
4896 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
4897 OMP_CLAUSE__LOOPTEMP_);
4898 gcc_assert (innerc);
4899 for (i = 0; i < fd->collapse; i++)
4900 {
4901 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
4902 OMP_CLAUSE__LOOPTEMP_);
4903 gcc_assert (innerc);
4904 if (i)
4905 counts[i] = OMP_CLAUSE_DECL (innerc);
4906 else
4907 counts[0] = NULL_TREE;
4908 }
4909 return;
4910 }
4911
3d483a94 4912 for (i = 0; i < fd->collapse; i++)
4913 {
4914 tree itype = TREE_TYPE (fd->loops[i].v);
4915
4916 if (SSA_VAR_P (fd->loop.n2)
4917 && ((t = fold_binary (fd->loops[i].cond_code, boolean_type_node,
4918 fold_convert (itype, fd->loops[i].n1),
4919 fold_convert (itype, fd->loops[i].n2)))
4920 == NULL_TREE || !integer_onep (t)))
4921 {
4922 tree n1, n2;
4923 n1 = fold_convert (itype, unshare_expr (fd->loops[i].n1));
4924 n1 = force_gimple_operand_gsi (gsi, n1, true, NULL_TREE,
4925 true, GSI_SAME_STMT);
4926 n2 = fold_convert (itype, unshare_expr (fd->loops[i].n2));
4927 n2 = force_gimple_operand_gsi (gsi, n2, true, NULL_TREE,
4928 true, GSI_SAME_STMT);
4929 stmt = gimple_build_cond (fd->loops[i].cond_code, n1, n2,
4930 NULL_TREE, NULL_TREE);
4931 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
4932 if (walk_tree (gimple_cond_lhs_ptr (stmt),
4933 expand_omp_regimplify_p, NULL, NULL)
4934 || walk_tree (gimple_cond_rhs_ptr (stmt),
4935 expand_omp_regimplify_p, NULL, NULL))
4936 {
4937 *gsi = gsi_for_stmt (stmt);
4938 gimple_regimplify_operands (stmt, gsi);
4939 }
4940 e = split_block (entry_bb, stmt);
4941 if (zero_iter_bb == NULL)
4942 {
4943 first_zero_iter = i;
4944 zero_iter_bb = create_empty_bb (entry_bb);
4945 if (current_loops)
4946 add_bb_to_loop (zero_iter_bb, entry_bb->loop_father);
4947 *gsi = gsi_after_labels (zero_iter_bb);
4948 stmt = gimple_build_assign (fd->loop.n2,
4949 build_zero_cst (type));
4950 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
4951 set_immediate_dominator (CDI_DOMINATORS, zero_iter_bb,
4952 entry_bb);
4953 }
4954 ne = make_edge (entry_bb, zero_iter_bb, EDGE_FALSE_VALUE);
4955 ne->probability = REG_BR_PROB_BASE / 2000 - 1;
4956 e->flags = EDGE_TRUE_VALUE;
4957 e->probability = REG_BR_PROB_BASE - ne->probability;
4958 if (l2_dom_bb == NULL)
4959 l2_dom_bb = entry_bb;
4960 entry_bb = e->dest;
4961 *gsi = gsi_last_bb (entry_bb);
4962 }
4963
4964 if (POINTER_TYPE_P (itype))
4965 itype = signed_type_for (itype);
4966 t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
4967 ? -1 : 1));
4968 t = fold_build2 (PLUS_EXPR, itype,
4969 fold_convert (itype, fd->loops[i].step), t);
4970 t = fold_build2 (PLUS_EXPR, itype, t,
4971 fold_convert (itype, fd->loops[i].n2));
4972 t = fold_build2 (MINUS_EXPR, itype, t,
4973 fold_convert (itype, fd->loops[i].n1));
4974 /* ?? We could probably use CEIL_DIV_EXPR instead of
4975 TRUNC_DIV_EXPR and adjusting by hand. Unless we can't
4976 generate the same code in the end because generically we
4977 don't know that the values involved must be negative for
4978 GT?? */
4979 if (TYPE_UNSIGNED (itype) && fd->loops[i].cond_code == GT_EXPR)
4980 t = fold_build2 (TRUNC_DIV_EXPR, itype,
4981 fold_build1 (NEGATE_EXPR, itype, t),
4982 fold_build1 (NEGATE_EXPR, itype,
4983 fold_convert (itype,
4984 fd->loops[i].step)));
4985 else
4986 t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
4987 fold_convert (itype, fd->loops[i].step));
4988 t = fold_convert (type, t);
4989 if (TREE_CODE (t) == INTEGER_CST)
4990 counts[i] = t;
4991 else
4992 {
4993 counts[i] = create_tmp_reg (type, ".count");
4994 expand_omp_build_assign (gsi, counts[i], t);
4995 }
4996 if (SSA_VAR_P (fd->loop.n2))
4997 {
4998 if (i == 0)
4999 t = counts[0];
5000 else
5001 t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
5002 expand_omp_build_assign (gsi, fd->loop.n2, t);
5003 }
5004 }
5005}
5006
5007
5008/* Helper function for expand_omp_{for_*,simd}. Generate code like:
5009 T = V;
5010 V3 = N31 + (T % count3) * STEP3;
5011 T = T / count3;
5012 V2 = N21 + (T % count2) * STEP2;
5013 T = T / count2;
5014 V1 = N11 + T * STEP1;
bc7bff74 5015 if this loop doesn't have an inner loop construct combined with it.
5016 If it does have an inner loop construct combined with it and the
5017 iteration count isn't known constant, store values from counts array
5018 into its _looptemp_ temporaries instead. */
3d483a94 5019
5020static void
5021expand_omp_for_init_vars (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
bc7bff74 5022 tree *counts, gimple inner_stmt, tree startvar)
3d483a94 5023{
5024 int i;
bc7bff74 5025 if (gimple_omp_for_combined_p (fd->for_stmt))
5026 {
5027 /* If fd->loop.n2 is constant, then no propagation of the counts
5028 is needed, they are constant. */
5029 if (TREE_CODE (fd->loop.n2) == INTEGER_CST)
5030 return;
5031
5032 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
5033 ? gimple_omp_parallel_clauses (inner_stmt)
5034 : gimple_omp_for_clauses (inner_stmt);
5035 /* First two _looptemp_ clauses are for istart/iend, counts[0]
5036 isn't supposed to be handled, as the inner loop doesn't
5037 use it. */
5038 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
5039 gcc_assert (innerc);
5040 for (i = 0; i < fd->collapse; i++)
5041 {
5042 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5043 OMP_CLAUSE__LOOPTEMP_);
5044 gcc_assert (innerc);
5045 if (i)
5046 {
5047 tree tem = OMP_CLAUSE_DECL (innerc);
5048 tree t = fold_convert (TREE_TYPE (tem), counts[i]);
5049 t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
5050 false, GSI_CONTINUE_LINKING);
5051 gimple stmt = gimple_build_assign (tem, t);
5052 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5053 }
5054 }
5055 return;
5056 }
5057
3d483a94 5058 tree type = TREE_TYPE (fd->loop.v);
5059 tree tem = create_tmp_reg (type, ".tem");
5060 gimple stmt = gimple_build_assign (tem, startvar);
5061 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5062
5063 for (i = fd->collapse - 1; i >= 0; i--)
5064 {
5065 tree vtype = TREE_TYPE (fd->loops[i].v), itype, t;
5066 itype = vtype;
5067 if (POINTER_TYPE_P (vtype))
5068 itype = signed_type_for (vtype);
5069 if (i != 0)
5070 t = fold_build2 (TRUNC_MOD_EXPR, type, tem, counts[i]);
5071 else
5072 t = tem;
5073 t = fold_convert (itype, t);
5074 t = fold_build2 (MULT_EXPR, itype, t,
5075 fold_convert (itype, fd->loops[i].step));
5076 if (POINTER_TYPE_P (vtype))
5077 t = fold_build_pointer_plus (fd->loops[i].n1, t);
5078 else
5079 t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
5080 t = force_gimple_operand_gsi (gsi, t,
5081 DECL_P (fd->loops[i].v)
5082 && TREE_ADDRESSABLE (fd->loops[i].v),
5083 NULL_TREE, false,
5084 GSI_CONTINUE_LINKING);
5085 stmt = gimple_build_assign (fd->loops[i].v, t);
5086 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5087 if (i != 0)
5088 {
5089 t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
5090 t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
5091 false, GSI_CONTINUE_LINKING);
5092 stmt = gimple_build_assign (tem, t);
5093 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5094 }
5095 }
5096}
5097
5098
5099/* Helper function for expand_omp_for_*. Generate code like:
5100 L10:
5101 V3 += STEP3;
5102 if (V3 cond3 N32) goto BODY_BB; else goto L11;
5103 L11:
5104 V3 = N31;
5105 V2 += STEP2;
5106 if (V2 cond2 N22) goto BODY_BB; else goto L12;
5107 L12:
5108 V2 = N21;
5109 V1 += STEP1;
5110 goto BODY_BB; */
5111
5112static basic_block
5113extract_omp_for_update_vars (struct omp_for_data *fd, basic_block cont_bb,
5114 basic_block body_bb)
5115{
5116 basic_block last_bb, bb, collapse_bb = NULL;
5117 int i;
5118 gimple_stmt_iterator gsi;
5119 edge e;
5120 tree t;
5121 gimple stmt;
5122
5123 last_bb = cont_bb;
5124 for (i = fd->collapse - 1; i >= 0; i--)
5125 {
5126 tree vtype = TREE_TYPE (fd->loops[i].v);
5127
5128 bb = create_empty_bb (last_bb);
5129 if (current_loops)
5130 add_bb_to_loop (bb, last_bb->loop_father);
5131 gsi = gsi_start_bb (bb);
5132
5133 if (i < fd->collapse - 1)
5134 {
5135 e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
5136 e->probability = REG_BR_PROB_BASE / 8;
5137
5138 t = fd->loops[i + 1].n1;
5139 t = force_gimple_operand_gsi (&gsi, t,
5140 DECL_P (fd->loops[i + 1].v)
5141 && TREE_ADDRESSABLE (fd->loops[i
5142 + 1].v),
5143 NULL_TREE, false,
5144 GSI_CONTINUE_LINKING);
5145 stmt = gimple_build_assign (fd->loops[i + 1].v, t);
5146 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5147 }
5148 else
5149 collapse_bb = bb;
5150
5151 set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);
5152
5153 if (POINTER_TYPE_P (vtype))
5154 t = fold_build_pointer_plus (fd->loops[i].v, fd->loops[i].step);
5155 else
5156 t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v, fd->loops[i].step);
5157 t = force_gimple_operand_gsi (&gsi, t,
5158 DECL_P (fd->loops[i].v)
5159 && TREE_ADDRESSABLE (fd->loops[i].v),
5160 NULL_TREE, false, GSI_CONTINUE_LINKING);
5161 stmt = gimple_build_assign (fd->loops[i].v, t);
5162 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5163
5164 if (i > 0)
5165 {
5166 t = fd->loops[i].n2;
5167 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5168 false, GSI_CONTINUE_LINKING);
5169 tree v = fd->loops[i].v;
5170 if (DECL_P (v) && TREE_ADDRESSABLE (v))
5171 v = force_gimple_operand_gsi (&gsi, v, true, NULL_TREE,
5172 false, GSI_CONTINUE_LINKING);
5173 t = fold_build2 (fd->loops[i].cond_code, boolean_type_node, v, t);
5174 stmt = gimple_build_cond_empty (t);
5175 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5176 e = make_edge (bb, body_bb, EDGE_TRUE_VALUE);
5177 e->probability = REG_BR_PROB_BASE * 7 / 8;
5178 }
5179 else
5180 make_edge (bb, body_bb, EDGE_FALLTHRU);
5181 last_bb = bb;
5182 }
5183
5184 return collapse_bb;
5185}
5186
5187
773c5ba7 5188/* A subroutine of expand_omp_for. Generate code for a parallel
1e8e9920 5189 loop with any schedule. Given parameters:
5190
5191 for (V = N1; V cond N2; V += STEP) BODY;
5192
5193 where COND is "<" or ">", we generate pseudocode
5194
5195 more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
773c5ba7 5196 if (more) goto L0; else goto L3;
1e8e9920 5197 L0:
5198 V = istart0;
5199 iend = iend0;
5200 L1:
5201 BODY;
5202 V += STEP;
773c5ba7 5203 if (V cond iend) goto L1; else goto L2;
1e8e9920 5204 L2:
773c5ba7 5205 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
5206 L3:
1e8e9920 5207
773c5ba7 5208 If this is a combined omp parallel loop, instead of the call to
fd6481cf 5209 GOMP_loop_foo_start, we call GOMP_loop_foo_next.
bc7bff74 5210 If this is gimple_omp_for_combined_p loop, then instead of assigning
5211 V and iend in L0 we assign the first two _looptemp_ clause decls of the
5212 inner GIMPLE_OMP_FOR and V += STEP; and
5213 if (V cond iend) goto L1; else goto L2; are removed.
fd6481cf 5214
5215 For collapsed loops, given parameters:
5216 collapse(3)
5217 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
5218 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
5219 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
5220 BODY;
5221
5222 we generate pseudocode
5223
8e6b4515 5224 if (__builtin_expect (N32 cond3 N31, 0)) goto Z0;
fd6481cf 5225 if (cond3 is <)
5226 adj = STEP3 - 1;
5227 else
5228 adj = STEP3 + 1;
5229 count3 = (adj + N32 - N31) / STEP3;
8e6b4515 5230 if (__builtin_expect (N22 cond2 N21, 0)) goto Z0;
fd6481cf 5231 if (cond2 is <)
5232 adj = STEP2 - 1;
5233 else
5234 adj = STEP2 + 1;
5235 count2 = (adj + N22 - N21) / STEP2;
8e6b4515 5236 if (__builtin_expect (N12 cond1 N11, 0)) goto Z0;
fd6481cf 5237 if (cond1 is <)
5238 adj = STEP1 - 1;
5239 else
5240 adj = STEP1 + 1;
5241 count1 = (adj + N12 - N11) / STEP1;
5242 count = count1 * count2 * count3;
8e6b4515 5243 goto Z1;
5244 Z0:
5245 count = 0;
5246 Z1:
fd6481cf 5247 more = GOMP_loop_foo_start (0, count, 1, CHUNK, &istart0, &iend0);
5248 if (more) goto L0; else goto L3;
5249 L0:
5250 V = istart0;
5251 T = V;
5252 V3 = N31 + (T % count3) * STEP3;
5253 T = T / count3;
5254 V2 = N21 + (T % count2) * STEP2;
5255 T = T / count2;
5256 V1 = N11 + T * STEP1;
5257 iend = iend0;
5258 L1:
5259 BODY;
5260 V += 1;
5261 if (V < iend) goto L10; else goto L2;
5262 L10:
5263 V3 += STEP3;
5264 if (V3 cond3 N32) goto L1; else goto L11;
5265 L11:
5266 V3 = N31;
5267 V2 += STEP2;
5268 if (V2 cond2 N22) goto L1; else goto L12;
5269 L12:
5270 V2 = N21;
5271 V1 += STEP1;
5272 goto L1;
5273 L2:
5274 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
5275 L3:
5276
5277 */
1e8e9920 5278
61e47ac8 5279static void
773c5ba7 5280expand_omp_for_generic (struct omp_region *region,
5281 struct omp_for_data *fd,
1e8e9920 5282 enum built_in_function start_fn,
bc7bff74 5283 enum built_in_function next_fn,
5284 gimple inner_stmt)
1e8e9920 5285{
75a70cf9 5286 tree type, istart0, iend0, iend;
fd6481cf 5287 tree t, vmain, vback, bias = NULL_TREE;
5288 basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
03ed154b 5289 basic_block l2_bb = NULL, l3_bb = NULL;
75a70cf9 5290 gimple_stmt_iterator gsi;
5291 gimple stmt;
773c5ba7 5292 bool in_combined_parallel = is_combined_parallel (region);
ac6e3339 5293 bool broken_loop = region->cont == NULL;
79acaae1 5294 edge e, ne;
fd6481cf 5295 tree *counts = NULL;
5296 int i;
ac6e3339 5297
5298 gcc_assert (!broken_loop || !in_combined_parallel);
fd6481cf 5299 gcc_assert (fd->iter_type == long_integer_type_node
5300 || !in_combined_parallel);
1e8e9920 5301
fd6481cf 5302 type = TREE_TYPE (fd->loop.v);
5303 istart0 = create_tmp_var (fd->iter_type, ".istart0");
5304 iend0 = create_tmp_var (fd->iter_type, ".iend0");
6d63fc03 5305 TREE_ADDRESSABLE (istart0) = 1;
5306 TREE_ADDRESSABLE (iend0) = 1;
1e8e9920 5307
fd6481cf 5308 /* See if we need to bias by LLONG_MIN. */
5309 if (fd->iter_type == long_long_unsigned_type_node
5310 && TREE_CODE (type) == INTEGER_TYPE
5311 && !TYPE_UNSIGNED (type))
5312 {
5313 tree n1, n2;
5314
5315 if (fd->loop.cond_code == LT_EXPR)
5316 {
5317 n1 = fd->loop.n1;
5318 n2 = fold_build2 (PLUS_EXPR, type, fd->loop.n2, fd->loop.step);
5319 }
5320 else
5321 {
5322 n1 = fold_build2 (MINUS_EXPR, type, fd->loop.n2, fd->loop.step);
5323 n2 = fd->loop.n1;
5324 }
5325 if (TREE_CODE (n1) != INTEGER_CST
5326 || TREE_CODE (n2) != INTEGER_CST
5327 || ((tree_int_cst_sgn (n1) < 0) ^ (tree_int_cst_sgn (n2) < 0)))
5328 bias = fold_convert (fd->iter_type, TYPE_MIN_VALUE (type));
5329 }
5330
61e47ac8 5331 entry_bb = region->entry;
03ed154b 5332 cont_bb = region->cont;
fd6481cf 5333 collapse_bb = NULL;
ac6e3339 5334 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
5335 gcc_assert (broken_loop
5336 || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
5337 l0_bb = split_edge (FALLTHRU_EDGE (entry_bb));
5338 l1_bb = single_succ (l0_bb);
5339 if (!broken_loop)
03ed154b 5340 {
5341 l2_bb = create_empty_bb (cont_bb);
ac6e3339 5342 gcc_assert (BRANCH_EDGE (cont_bb)->dest == l1_bb);
5343 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
03ed154b 5344 }
ac6e3339 5345 else
5346 l2_bb = NULL;
5347 l3_bb = BRANCH_EDGE (entry_bb)->dest;
5348 exit_bb = region->exit;
773c5ba7 5349
75a70cf9 5350 gsi = gsi_last_bb (entry_bb);
fd6481cf 5351
75a70cf9 5352 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
fd6481cf 5353 if (fd->collapse > 1)
5354 {
8e6b4515 5355 int first_zero_iter = -1;
3d483a94 5356 basic_block zero_iter_bb = NULL, l2_dom_bb = NULL;
8e6b4515 5357
3d483a94 5358 counts = XALLOCAVEC (tree, fd->collapse);
5359 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
5360 zero_iter_bb, first_zero_iter,
5361 l2_dom_bb);
fd6481cf 5362
8e6b4515 5363 if (zero_iter_bb)
5364 {
5365 /* Some counts[i] vars might be uninitialized if
5366 some loop has zero iterations. But the body shouldn't
5367 be executed in that case, so just avoid uninit warnings. */
5368 for (i = first_zero_iter; i < fd->collapse; i++)
5369 if (SSA_VAR_P (counts[i]))
5370 TREE_NO_WARNING (counts[i]) = 1;
5371 gsi_prev (&gsi);
5372 e = split_block (entry_bb, gsi_stmt (gsi));
5373 entry_bb = e->dest;
5374 make_edge (zero_iter_bb, entry_bb, EDGE_FALLTHRU);
5375 gsi = gsi_last_bb (entry_bb);
5376 set_immediate_dominator (CDI_DOMINATORS, entry_bb,
5377 get_immediate_dominator (CDI_DOMINATORS,
5378 zero_iter_bb));
5379 }
fd6481cf 5380 }
79acaae1 5381 if (in_combined_parallel)
5382 {
5383 /* In a combined parallel loop, emit a call to
5384 GOMP_loop_foo_next. */
b9a16870 5385 t = build_call_expr (builtin_decl_explicit (next_fn), 2,
79acaae1 5386 build_fold_addr_expr (istart0),
5387 build_fold_addr_expr (iend0));
5388 }
5389 else
1e8e9920 5390 {
c2f47e15 5391 tree t0, t1, t2, t3, t4;
773c5ba7 5392 /* If this is not a combined parallel loop, emit a call to
5393 GOMP_loop_foo_start in ENTRY_BB. */
c2f47e15 5394 t4 = build_fold_addr_expr (iend0);
5395 t3 = build_fold_addr_expr (istart0);
fd6481cf 5396 t2 = fold_convert (fd->iter_type, fd->loop.step);
3d483a94 5397 t1 = fd->loop.n2;
5398 t0 = fd->loop.n1;
bc7bff74 5399 if (gimple_omp_for_combined_into_p (fd->for_stmt))
5400 {
5401 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
5402 OMP_CLAUSE__LOOPTEMP_);
5403 gcc_assert (innerc);
5404 t0 = OMP_CLAUSE_DECL (innerc);
5405 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5406 OMP_CLAUSE__LOOPTEMP_);
5407 gcc_assert (innerc);
5408 t1 = OMP_CLAUSE_DECL (innerc);
5409 }
3d483a94 5410 if (POINTER_TYPE_P (TREE_TYPE (t0))
5411 && TYPE_PRECISION (TREE_TYPE (t0))
5412 != TYPE_PRECISION (fd->iter_type))
c799f233 5413 {
5414 /* Avoid casting pointers to integer of a different size. */
3cea8318 5415 tree itype = signed_type_for (type);
3d483a94 5416 t1 = fold_convert (fd->iter_type, fold_convert (itype, t1));
5417 t0 = fold_convert (fd->iter_type, fold_convert (itype, t0));
c799f233 5418 }
5419 else
5420 {
3d483a94 5421 t1 = fold_convert (fd->iter_type, t1);
5422 t0 = fold_convert (fd->iter_type, t0);
c799f233 5423 }
fd6481cf 5424 if (bias)
1e8e9920 5425 {
fd6481cf 5426 t1 = fold_build2 (PLUS_EXPR, fd->iter_type, t1, bias);
5427 t0 = fold_build2 (PLUS_EXPR, fd->iter_type, t0, bias);
5428 }
5429 if (fd->iter_type == long_integer_type_node)
5430 {
5431 if (fd->chunk_size)
5432 {
5433 t = fold_convert (fd->iter_type, fd->chunk_size);
b9a16870 5434 t = build_call_expr (builtin_decl_explicit (start_fn),
5435 6, t0, t1, t2, t, t3, t4);
fd6481cf 5436 }
5437 else
b9a16870 5438 t = build_call_expr (builtin_decl_explicit (start_fn),
5439 5, t0, t1, t2, t3, t4);
1e8e9920 5440 }
c2f47e15 5441 else
fd6481cf 5442 {
5443 tree t5;
5444 tree c_bool_type;
b9a16870 5445 tree bfn_decl;
fd6481cf 5446
5447 /* The GOMP_loop_ull_*start functions have additional boolean
5448 argument, true for < loops and false for > loops.
5449 In Fortran, the C bool type can be different from
5450 boolean_type_node. */
b9a16870 5451 bfn_decl = builtin_decl_explicit (start_fn);
5452 c_bool_type = TREE_TYPE (TREE_TYPE (bfn_decl));
fd6481cf 5453 t5 = build_int_cst (c_bool_type,
5454 fd->loop.cond_code == LT_EXPR ? 1 : 0);
5455 if (fd->chunk_size)
5456 {
b9a16870 5457 tree bfn_decl = builtin_decl_explicit (start_fn);
fd6481cf 5458 t = fold_convert (fd->iter_type, fd->chunk_size);
b9a16870 5459 t = build_call_expr (bfn_decl, 7, t5, t0, t1, t2, t, t3, t4);
fd6481cf 5460 }
5461 else
b9a16870 5462 t = build_call_expr (builtin_decl_explicit (start_fn),
5463 6, t5, t0, t1, t2, t3, t4);
fd6481cf 5464 }
1e8e9920 5465 }
fd6481cf 5466 if (TREE_TYPE (t) != boolean_type_node)
5467 t = fold_build2 (NE_EXPR, boolean_type_node,
5468 t, build_int_cst (TREE_TYPE (t), 0));
75a70cf9 5469 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5470 true, GSI_SAME_STMT);
5471 gsi_insert_after (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
79acaae1 5472
75a70cf9 5473 /* Remove the GIMPLE_OMP_FOR statement. */
5474 gsi_remove (&gsi, true);
1e8e9920 5475
773c5ba7 5476 /* Iteration setup for sequential loop goes in L0_BB. */
3d483a94 5477 tree startvar = fd->loop.v;
5478 tree endvar = NULL_TREE;
5479
bc7bff74 5480 if (gimple_omp_for_combined_p (fd->for_stmt))
5481 {
5482 gcc_assert (gimple_code (inner_stmt) == GIMPLE_OMP_FOR
5483 && gimple_omp_for_kind (inner_stmt)
5484 == GF_OMP_FOR_KIND_SIMD);
5485 tree innerc = find_omp_clause (gimple_omp_for_clauses (inner_stmt),
5486 OMP_CLAUSE__LOOPTEMP_);
5487 gcc_assert (innerc);
5488 startvar = OMP_CLAUSE_DECL (innerc);
5489 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5490 OMP_CLAUSE__LOOPTEMP_);
5491 gcc_assert (innerc);
5492 endvar = OMP_CLAUSE_DECL (innerc);
5493 }
5494
75a70cf9 5495 gsi = gsi_start_bb (l0_bb);
1efcacec 5496 t = istart0;
fd6481cf 5497 if (bias)
1efcacec 5498 t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
3d483a94 5499 if (POINTER_TYPE_P (TREE_TYPE (startvar)))
5500 t = fold_convert (signed_type_for (TREE_TYPE (startvar)), t);
5501 t = fold_convert (TREE_TYPE (startvar), t);
4abecb72 5502 t = force_gimple_operand_gsi (&gsi, t,
3d483a94 5503 DECL_P (startvar)
5504 && TREE_ADDRESSABLE (startvar),
4abecb72 5505 NULL_TREE, false, GSI_CONTINUE_LINKING);
3d483a94 5506 stmt = gimple_build_assign (startvar, t);
75a70cf9 5507 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
1e8e9920 5508
1efcacec 5509 t = iend0;
fd6481cf 5510 if (bias)
1efcacec 5511 t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
3d483a94 5512 if (POINTER_TYPE_P (TREE_TYPE (startvar)))
5513 t = fold_convert (signed_type_for (TREE_TYPE (startvar)), t);
5514 t = fold_convert (TREE_TYPE (startvar), t);
75a70cf9 5515 iend = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5516 false, GSI_CONTINUE_LINKING);
3d483a94 5517 if (endvar)
fd6481cf 5518 {
3d483a94 5519 stmt = gimple_build_assign (endvar, iend);
75a70cf9 5520 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
fd6481cf 5521 }
3d483a94 5522 if (fd->collapse > 1)
bc7bff74 5523 expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);
773c5ba7 5524
ac6e3339 5525 if (!broken_loop)
03ed154b 5526 {
ac6e3339 5527 /* Code to control the increment and predicate for the sequential
5528 loop goes in the CONT_BB. */
75a70cf9 5529 gsi = gsi_last_bb (cont_bb);
5530 stmt = gsi_stmt (gsi);
5531 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
5532 vmain = gimple_omp_continue_control_use (stmt);
5533 vback = gimple_omp_continue_control_def (stmt);
79acaae1 5534
bc7bff74 5535 if (!gimple_omp_for_combined_p (fd->for_stmt))
3d483a94 5536 {
5537 if (POINTER_TYPE_P (type))
5538 t = fold_build_pointer_plus (vmain, fd->loop.step);
5539 else
5540 t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
5541 t = force_gimple_operand_gsi (&gsi, t,
5542 DECL_P (vback)
5543 && TREE_ADDRESSABLE (vback),
5544 NULL_TREE, true, GSI_SAME_STMT);
5545 stmt = gimple_build_assign (vback, t);
5546 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5547
5548 t = build2 (fd->loop.cond_code, boolean_type_node,
5549 DECL_P (vback) && TREE_ADDRESSABLE (vback) ? t : vback,
5550 iend);
5551 stmt = gimple_build_cond_empty (t);
5552 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5553 }
773c5ba7 5554
75a70cf9 5555 /* Remove GIMPLE_OMP_CONTINUE. */
5556 gsi_remove (&gsi, true);
773c5ba7 5557
bc7bff74 5558 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
3d483a94 5559 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, l1_bb);
fd6481cf 5560
ac6e3339 5561 /* Emit code to get the next parallel iteration in L2_BB. */
75a70cf9 5562 gsi = gsi_start_bb (l2_bb);
773c5ba7 5563
b9a16870 5564 t = build_call_expr (builtin_decl_explicit (next_fn), 2,
ac6e3339 5565 build_fold_addr_expr (istart0),
5566 build_fold_addr_expr (iend0));
75a70cf9 5567 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5568 false, GSI_CONTINUE_LINKING);
fd6481cf 5569 if (TREE_TYPE (t) != boolean_type_node)
5570 t = fold_build2 (NE_EXPR, boolean_type_node,
5571 t, build_int_cst (TREE_TYPE (t), 0));
75a70cf9 5572 stmt = gimple_build_cond_empty (t);
5573 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
ac6e3339 5574 }
1e8e9920 5575
61e47ac8 5576 /* Add the loop cleanup function. */
75a70cf9 5577 gsi = gsi_last_bb (exit_bb);
5578 if (gimple_omp_return_nowait_p (gsi_stmt (gsi)))
b9a16870 5579 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_NOWAIT);
bc7bff74 5580 else if (gimple_omp_return_lhs (gsi_stmt (gsi)))
5581 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_CANCEL);
61e47ac8 5582 else
b9a16870 5583 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END);
75a70cf9 5584 stmt = gimple_build_call (t, 0);
bc7bff74 5585 if (gimple_omp_return_lhs (gsi_stmt (gsi)))
5586 gimple_call_set_lhs (stmt, gimple_omp_return_lhs (gsi_stmt (gsi)));
75a70cf9 5587 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
5588 gsi_remove (&gsi, true);
773c5ba7 5589
5590 /* Connect the new blocks. */
79acaae1 5591 find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
5592 find_edge (entry_bb, l3_bb)->flags = EDGE_FALSE_VALUE;
1e8e9920 5593
ac6e3339 5594 if (!broken_loop)
5595 {
75a70cf9 5596 gimple_seq phis;
5597
79acaae1 5598 e = find_edge (cont_bb, l3_bb);
5599 ne = make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);
5600
75a70cf9 5601 phis = phi_nodes (l3_bb);
5602 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5603 {
5604 gimple phi = gsi_stmt (gsi);
5605 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
5606 PHI_ARG_DEF_FROM_EDGE (phi, e));
5607 }
79acaae1 5608 remove_edge (e);
5609
ac6e3339 5610 make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
f6568ea4 5611 if (current_loops)
5612 add_bb_to_loop (l2_bb, cont_bb->loop_father);
3d483a94 5613 e = find_edge (cont_bb, l1_bb);
bc7bff74 5614 if (gimple_omp_for_combined_p (fd->for_stmt))
5615 {
5616 remove_edge (e);
5617 e = NULL;
5618 }
3d483a94 5619 else if (fd->collapse > 1)
fd6481cf 5620 {
fd6481cf 5621 remove_edge (e);
5622 e = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
5623 }
5624 else
3d483a94 5625 e->flags = EDGE_TRUE_VALUE;
5626 if (e)
fd6481cf 5627 {
3d483a94 5628 e->probability = REG_BR_PROB_BASE * 7 / 8;
5629 find_edge (cont_bb, l2_bb)->probability = REG_BR_PROB_BASE / 8;
5630 }
5631 else
5632 {
5633 e = find_edge (cont_bb, l2_bb);
5634 e->flags = EDGE_FALLTHRU;
fd6481cf 5635 }
ac6e3339 5636 make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);
79acaae1 5637
5638 set_immediate_dominator (CDI_DOMINATORS, l2_bb,
5639 recompute_dominator (CDI_DOMINATORS, l2_bb));
5640 set_immediate_dominator (CDI_DOMINATORS, l3_bb,
5641 recompute_dominator (CDI_DOMINATORS, l3_bb));
5642 set_immediate_dominator (CDI_DOMINATORS, l0_bb,
5643 recompute_dominator (CDI_DOMINATORS, l0_bb));
5644 set_immediate_dominator (CDI_DOMINATORS, l1_bb,
5645 recompute_dominator (CDI_DOMINATORS, l1_bb));
04c2922b 5646
5647 struct loop *outer_loop = alloc_loop ();
5648 outer_loop->header = l0_bb;
5649 outer_loop->latch = l2_bb;
5650 add_loop (outer_loop, l0_bb->loop_father);
5651
bc7bff74 5652 if (!gimple_omp_for_combined_p (fd->for_stmt))
3d483a94 5653 {
5654 struct loop *loop = alloc_loop ();
5655 loop->header = l1_bb;
5656 /* The loop may have multiple latches. */
5657 add_loop (loop, outer_loop);
5658 }
ac6e3339 5659 }
1e8e9920 5660}
5661
5662
773c5ba7 5663/* A subroutine of expand_omp_for. Generate code for a parallel
5664 loop with static schedule and no specified chunk size. Given
5665 parameters:
1e8e9920 5666
5667 for (V = N1; V cond N2; V += STEP) BODY;
5668
5669 where COND is "<" or ">", we generate pseudocode
5670
8e6b4515 5671 if ((__typeof (V)) -1 > 0 && N2 cond N1) goto L2;
1e8e9920 5672 if (cond is <)
5673 adj = STEP - 1;
5674 else
5675 adj = STEP + 1;
fd6481cf 5676 if ((__typeof (V)) -1 > 0 && cond is >)
5677 n = -(adj + N2 - N1) / -STEP;
5678 else
5679 n = (adj + N2 - N1) / STEP;
1e8e9920 5680 q = n / nthreads;
31712e83 5681 tt = n % nthreads;
5682 if (threadid < tt) goto L3; else goto L4;
5683 L3:
5684 tt = 0;
5685 q = q + 1;
5686 L4:
5687 s0 = q * threadid + tt;
5688 e0 = s0 + q;
79acaae1 5689 V = s0 * STEP + N1;
1e8e9920 5690 if (s0 >= e0) goto L2; else goto L0;
5691 L0:
1e8e9920 5692 e = e0 * STEP + N1;
5693 L1:
5694 BODY;
5695 V += STEP;
5696 if (V cond e) goto L1;
1e8e9920 5697 L2:
5698*/
5699
61e47ac8 5700static void
773c5ba7 5701expand_omp_for_static_nochunk (struct omp_region *region,
bc7bff74 5702 struct omp_for_data *fd,
5703 gimple inner_stmt)
1e8e9920 5704{
31712e83 5705 tree n, q, s0, e0, e, t, tt, nthreads, threadid;
fd6481cf 5706 tree type, itype, vmain, vback;
31712e83 5707 basic_block entry_bb, second_bb, third_bb, exit_bb, seq_start_bb;
bc7bff74 5708 basic_block body_bb, cont_bb, collapse_bb = NULL;
61e47ac8 5709 basic_block fin_bb;
75a70cf9 5710 gimple_stmt_iterator gsi;
5711 gimple stmt;
31712e83 5712 edge ep;
bc7bff74 5713 enum built_in_function get_num_threads = BUILT_IN_OMP_GET_NUM_THREADS;
5714 enum built_in_function get_thread_num = BUILT_IN_OMP_GET_THREAD_NUM;
5715 bool broken_loop = region->cont == NULL;
5716 tree *counts = NULL;
5717 tree n1, n2, step;
1e8e9920 5718
fd6481cf 5719 itype = type = TREE_TYPE (fd->loop.v);
5720 if (POINTER_TYPE_P (type))
3cea8318 5721 itype = signed_type_for (type);
1e8e9920 5722
61e47ac8 5723 entry_bb = region->entry;
61e47ac8 5724 cont_bb = region->cont;
ac6e3339 5725 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
bc7bff74 5726 fin_bb = BRANCH_EDGE (entry_bb)->dest;
5727 gcc_assert (broken_loop
5728 || (fin_bb == FALLTHRU_EDGE (cont_bb)->dest));
ac6e3339 5729 seq_start_bb = split_edge (FALLTHRU_EDGE (entry_bb));
5730 body_bb = single_succ (seq_start_bb);
bc7bff74 5731 if (!broken_loop)
5732 {
5733 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
5734 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
5735 }
61e47ac8 5736 exit_bb = region->exit;
5737
773c5ba7 5738 /* Iteration space partitioning goes in ENTRY_BB. */
75a70cf9 5739 gsi = gsi_last_bb (entry_bb);
5740 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
61e47ac8 5741
bc7bff74 5742 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
5743 {
5744 get_num_threads = BUILT_IN_OMP_GET_NUM_TEAMS;
5745 get_thread_num = BUILT_IN_OMP_GET_TEAM_NUM;
5746 }
5747
5748 if (fd->collapse > 1)
5749 {
5750 int first_zero_iter = -1;
5751 basic_block l2_dom_bb = NULL;
5752
5753 counts = XALLOCAVEC (tree, fd->collapse);
5754 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
5755 fin_bb, first_zero_iter,
5756 l2_dom_bb);
5757 t = NULL_TREE;
5758 }
5759 else if (gimple_omp_for_combined_into_p (fd->for_stmt))
5760 t = integer_one_node;
5761 else
5762 t = fold_binary (fd->loop.cond_code, boolean_type_node,
5763 fold_convert (type, fd->loop.n1),
5764 fold_convert (type, fd->loop.n2));
5765 if (fd->collapse == 1
5766 && TYPE_UNSIGNED (type)
8e6b4515 5767 && (t == NULL_TREE || !integer_onep (t)))
5768 {
8e6b4515 5769 n1 = fold_convert (type, unshare_expr (fd->loop.n1));
5770 n1 = force_gimple_operand_gsi (&gsi, n1, true, NULL_TREE,
5771 true, GSI_SAME_STMT);
5772 n2 = fold_convert (type, unshare_expr (fd->loop.n2));
5773 n2 = force_gimple_operand_gsi (&gsi, n2, true, NULL_TREE,
5774 true, GSI_SAME_STMT);
5775 stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
5776 NULL_TREE, NULL_TREE);
5777 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5778 if (walk_tree (gimple_cond_lhs_ptr (stmt),
5779 expand_omp_regimplify_p, NULL, NULL)
5780 || walk_tree (gimple_cond_rhs_ptr (stmt),
5781 expand_omp_regimplify_p, NULL, NULL))
5782 {
5783 gsi = gsi_for_stmt (stmt);
5784 gimple_regimplify_operands (stmt, &gsi);
5785 }
5786 ep = split_block (entry_bb, stmt);
5787 ep->flags = EDGE_TRUE_VALUE;
5788 entry_bb = ep->dest;
5789 ep->probability = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
5790 ep = make_edge (ep->src, fin_bb, EDGE_FALSE_VALUE);
5791 ep->probability = REG_BR_PROB_BASE / 2000 - 1;
5792 if (gimple_in_ssa_p (cfun))
5793 {
5794 int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
5795 for (gsi = gsi_start_phis (fin_bb);
5796 !gsi_end_p (gsi); gsi_next (&gsi))
5797 {
5798 gimple phi = gsi_stmt (gsi);
5799 add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
5800 ep, UNKNOWN_LOCATION);
5801 }
5802 }
5803 gsi = gsi_last_bb (entry_bb);
5804 }
5805
bc7bff74 5806 t = build_call_expr (builtin_decl_explicit (get_num_threads), 0);
fd6481cf 5807 t = fold_convert (itype, t);
75a70cf9 5808 nthreads = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5809 true, GSI_SAME_STMT);
48e1416a 5810
bc7bff74 5811 t = build_call_expr (builtin_decl_explicit (get_thread_num), 0);
fd6481cf 5812 t = fold_convert (itype, t);
75a70cf9 5813 threadid = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5814 true, GSI_SAME_STMT);
1e8e9920 5815
bc7bff74 5816 n1 = fd->loop.n1;
5817 n2 = fd->loop.n2;
5818 step = fd->loop.step;
5819 if (gimple_omp_for_combined_into_p (fd->for_stmt))
5820 {
5821 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
5822 OMP_CLAUSE__LOOPTEMP_);
5823 gcc_assert (innerc);
5824 n1 = OMP_CLAUSE_DECL (innerc);
5825 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5826 OMP_CLAUSE__LOOPTEMP_);
5827 gcc_assert (innerc);
5828 n2 = OMP_CLAUSE_DECL (innerc);
5829 }
5830 n1 = force_gimple_operand_gsi (&gsi, fold_convert (type, n1),
5831 true, NULL_TREE, true, GSI_SAME_STMT);
5832 n2 = force_gimple_operand_gsi (&gsi, fold_convert (itype, n2),
5833 true, NULL_TREE, true, GSI_SAME_STMT);
5834 step = force_gimple_operand_gsi (&gsi, fold_convert (itype, step),
5835 true, NULL_TREE, true, GSI_SAME_STMT);
fd6481cf 5836
5837 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
bc7bff74 5838 t = fold_build2 (PLUS_EXPR, itype, step, t);
5839 t = fold_build2 (PLUS_EXPR, itype, t, n2);
5840 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, n1));
fd6481cf 5841 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
5842 t = fold_build2 (TRUNC_DIV_EXPR, itype,
5843 fold_build1 (NEGATE_EXPR, itype, t),
bc7bff74 5844 fold_build1 (NEGATE_EXPR, itype, step));
fd6481cf 5845 else
bc7bff74 5846 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
fd6481cf 5847 t = fold_convert (itype, t);
75a70cf9 5848 n = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
1e8e9920 5849
072f7ab1 5850 q = create_tmp_reg (itype, "q");
fd6481cf 5851 t = fold_build2 (TRUNC_DIV_EXPR, itype, n, nthreads);
31712e83 5852 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE, true, GSI_SAME_STMT);
5853 gsi_insert_before (&gsi, gimple_build_assign (q, t), GSI_SAME_STMT);
5854
072f7ab1 5855 tt = create_tmp_reg (itype, "tt");
31712e83 5856 t = fold_build2 (TRUNC_MOD_EXPR, itype, n, nthreads);
5857 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE, true, GSI_SAME_STMT);
5858 gsi_insert_before (&gsi, gimple_build_assign (tt, t), GSI_SAME_STMT);
1e8e9920 5859
31712e83 5860 t = build2 (LT_EXPR, boolean_type_node, threadid, tt);
5861 stmt = gimple_build_cond_empty (t);
5862 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5863
5864 second_bb = split_block (entry_bb, stmt)->dest;
5865 gsi = gsi_last_bb (second_bb);
5866 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
5867
5868 gsi_insert_before (&gsi, gimple_build_assign (tt, build_int_cst (itype, 0)),
5869 GSI_SAME_STMT);
5870 stmt = gimple_build_assign_with_ops (PLUS_EXPR, q, q,
5871 build_int_cst (itype, 1));
5872 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5873
5874 third_bb = split_block (second_bb, stmt)->dest;
5875 gsi = gsi_last_bb (third_bb);
5876 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
1e8e9920 5877
fd6481cf 5878 t = build2 (MULT_EXPR, itype, q, threadid);
31712e83 5879 t = build2 (PLUS_EXPR, itype, t, tt);
75a70cf9 5880 s0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
1e8e9920 5881
fd6481cf 5882 t = fold_build2 (PLUS_EXPR, itype, s0, q);
75a70cf9 5883 e0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
79acaae1 5884
1e8e9920 5885 t = build2 (GE_EXPR, boolean_type_node, s0, e0);
75a70cf9 5886 gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
773c5ba7 5887
75a70cf9 5888 /* Remove the GIMPLE_OMP_FOR statement. */
5889 gsi_remove (&gsi, true);
773c5ba7 5890
5891 /* Setup code for sequential iteration goes in SEQ_START_BB. */
75a70cf9 5892 gsi = gsi_start_bb (seq_start_bb);
1e8e9920 5893
bc7bff74 5894 tree startvar = fd->loop.v;
5895 tree endvar = NULL_TREE;
5896
5897 if (gimple_omp_for_combined_p (fd->for_stmt))
5898 {
5899 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
5900 ? gimple_omp_parallel_clauses (inner_stmt)
5901 : gimple_omp_for_clauses (inner_stmt);
5902 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
5903 gcc_assert (innerc);
5904 startvar = OMP_CLAUSE_DECL (innerc);
5905 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5906 OMP_CLAUSE__LOOPTEMP_);
5907 gcc_assert (innerc);
5908 endvar = OMP_CLAUSE_DECL (innerc);
5909 }
fd6481cf 5910 t = fold_convert (itype, s0);
bc7bff74 5911 t = fold_build2 (MULT_EXPR, itype, t, step);
fd6481cf 5912 if (POINTER_TYPE_P (type))
bc7bff74 5913 t = fold_build_pointer_plus (n1, t);
fd6481cf 5914 else
bc7bff74 5915 t = fold_build2 (PLUS_EXPR, type, t, n1);
5916 t = fold_convert (TREE_TYPE (startvar), t);
4abecb72 5917 t = force_gimple_operand_gsi (&gsi, t,
bc7bff74 5918 DECL_P (startvar)
5919 && TREE_ADDRESSABLE (startvar),
4abecb72 5920 NULL_TREE, false, GSI_CONTINUE_LINKING);
bc7bff74 5921 stmt = gimple_build_assign (startvar, t);
75a70cf9 5922 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
48e1416a 5923
fd6481cf 5924 t = fold_convert (itype, e0);
bc7bff74 5925 t = fold_build2 (MULT_EXPR, itype, t, step);
fd6481cf 5926 if (POINTER_TYPE_P (type))
bc7bff74 5927 t = fold_build_pointer_plus (n1, t);
fd6481cf 5928 else
bc7bff74 5929 t = fold_build2 (PLUS_EXPR, type, t, n1);
5930 t = fold_convert (TREE_TYPE (startvar), t);
75a70cf9 5931 e = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5932 false, GSI_CONTINUE_LINKING);
bc7bff74 5933 if (endvar)
5934 {
5935 stmt = gimple_build_assign (endvar, e);
5936 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5937 }
5938 if (fd->collapse > 1)
5939 expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);
1e8e9920 5940
bc7bff74 5941 if (!broken_loop)
5942 {
5943 /* The code controlling the sequential loop replaces the
5944 GIMPLE_OMP_CONTINUE. */
5945 gsi = gsi_last_bb (cont_bb);
5946 stmt = gsi_stmt (gsi);
5947 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
5948 vmain = gimple_omp_continue_control_use (stmt);
5949 vback = gimple_omp_continue_control_def (stmt);
79acaae1 5950
bc7bff74 5951 if (!gimple_omp_for_combined_p (fd->for_stmt))
5952 {
5953 if (POINTER_TYPE_P (type))
5954 t = fold_build_pointer_plus (vmain, step);
5955 else
5956 t = fold_build2 (PLUS_EXPR, type, vmain, step);
5957 t = force_gimple_operand_gsi (&gsi, t,
5958 DECL_P (vback)
5959 && TREE_ADDRESSABLE (vback),
5960 NULL_TREE, true, GSI_SAME_STMT);
5961 stmt = gimple_build_assign (vback, t);
5962 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
79acaae1 5963
bc7bff74 5964 t = build2 (fd->loop.cond_code, boolean_type_node,
5965 DECL_P (vback) && TREE_ADDRESSABLE (vback)
5966 ? t : vback, e);
5967 gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
5968 }
1e8e9920 5969
bc7bff74 5970 /* Remove the GIMPLE_OMP_CONTINUE statement. */
5971 gsi_remove (&gsi, true);
5972
5973 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
5974 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, body_bb);
5975 }
773c5ba7 5976
75a70cf9 5977 /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing. */
5978 gsi = gsi_last_bb (exit_bb);
5979 if (!gimple_omp_return_nowait_p (gsi_stmt (gsi)))
bc7bff74 5980 {
5981 t = gimple_omp_return_lhs (gsi_stmt (gsi));
5982 gsi_insert_after (&gsi, build_omp_barrier (t), GSI_SAME_STMT);
5983 }
75a70cf9 5984 gsi_remove (&gsi, true);
773c5ba7 5985
5986 /* Connect all the blocks. */
31712e83 5987 ep = make_edge (entry_bb, third_bb, EDGE_FALSE_VALUE);
5988 ep->probability = REG_BR_PROB_BASE / 4 * 3;
5989 ep = find_edge (entry_bb, second_bb);
5990 ep->flags = EDGE_TRUE_VALUE;
5991 ep->probability = REG_BR_PROB_BASE / 4;
5992 find_edge (third_bb, seq_start_bb)->flags = EDGE_FALSE_VALUE;
5993 find_edge (third_bb, fin_bb)->flags = EDGE_TRUE_VALUE;
79acaae1 5994
bc7bff74 5995 if (!broken_loop)
5996 {
5997 ep = find_edge (cont_bb, body_bb);
5998 if (gimple_omp_for_combined_p (fd->for_stmt))
5999 {
6000 remove_edge (ep);
6001 ep = NULL;
6002 }
6003 else if (fd->collapse > 1)
6004 {
6005 remove_edge (ep);
6006 ep = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
6007 }
6008 else
6009 ep->flags = EDGE_TRUE_VALUE;
6010 find_edge (cont_bb, fin_bb)->flags
6011 = ep ? EDGE_FALSE_VALUE : EDGE_FALLTHRU;
6012 }
48e1416a 6013
31712e83 6014 set_immediate_dominator (CDI_DOMINATORS, second_bb, entry_bb);
6015 set_immediate_dominator (CDI_DOMINATORS, third_bb, entry_bb);
6016 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb, third_bb);
bc7bff74 6017
79acaae1 6018 set_immediate_dominator (CDI_DOMINATORS, body_bb,
6019 recompute_dominator (CDI_DOMINATORS, body_bb));
6020 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
6021 recompute_dominator (CDI_DOMINATORS, fin_bb));
04c2922b 6022
bc7bff74 6023 if (!broken_loop && !gimple_omp_for_combined_p (fd->for_stmt))
6024 {
6025 struct loop *loop = alloc_loop ();
6026 loop->header = body_bb;
6027 if (collapse_bb == NULL)
6028 loop->latch = cont_bb;
6029 add_loop (loop, body_bb->loop_father);
6030 }
1e8e9920 6031}
6032
773c5ba7 6033
6034/* A subroutine of expand_omp_for. Generate code for a parallel
6035 loop with static schedule and a specified chunk size. Given
6036 parameters:
1e8e9920 6037
6038 for (V = N1; V cond N2; V += STEP) BODY;
6039
6040 where COND is "<" or ">", we generate pseudocode
6041
8e6b4515 6042 if ((__typeof (V)) -1 > 0 && N2 cond N1) goto L2;
1e8e9920 6043 if (cond is <)
6044 adj = STEP - 1;
6045 else
6046 adj = STEP + 1;
fd6481cf 6047 if ((__typeof (V)) -1 > 0 && cond is >)
6048 n = -(adj + N2 - N1) / -STEP;
6049 else
6050 n = (adj + N2 - N1) / STEP;
1e8e9920 6051 trip = 0;
79acaae1 6052 V = threadid * CHUNK * STEP + N1; -- this extra definition of V is
6053 here so that V is defined
6054 if the loop is not entered
1e8e9920 6055 L0:
6056 s0 = (trip * nthreads + threadid) * CHUNK;
6057 e0 = min(s0 + CHUNK, n);
6058 if (s0 < n) goto L1; else goto L4;
6059 L1:
6060 V = s0 * STEP + N1;
6061 e = e0 * STEP + N1;
6062 L2:
6063 BODY;
6064 V += STEP;
6065 if (V cond e) goto L2; else goto L3;
6066 L3:
6067 trip += 1;
6068 goto L0;
6069 L4:
1e8e9920 6070*/
6071
61e47ac8 6072static void
bc7bff74 6073expand_omp_for_static_chunk (struct omp_region *region,
6074 struct omp_for_data *fd, gimple inner_stmt)
1e8e9920 6075{
75a70cf9 6076 tree n, s0, e0, e, t;
79acaae1 6077 tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
75a70cf9 6078 tree type, itype, v_main, v_back, v_extra;
773c5ba7 6079 basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
bc7bff74 6080 basic_block trip_update_bb = NULL, cont_bb, collapse_bb = NULL, fin_bb;
75a70cf9 6081 gimple_stmt_iterator si;
6082 gimple stmt;
6083 edge se;
bc7bff74 6084 enum built_in_function get_num_threads = BUILT_IN_OMP_GET_NUM_THREADS;
6085 enum built_in_function get_thread_num = BUILT_IN_OMP_GET_THREAD_NUM;
6086 bool broken_loop = region->cont == NULL;
6087 tree *counts = NULL;
6088 tree n1, n2, step;
1e8e9920 6089
fd6481cf 6090 itype = type = TREE_TYPE (fd->loop.v);
6091 if (POINTER_TYPE_P (type))
3cea8318 6092 itype = signed_type_for (type);
1e8e9920 6093
61e47ac8 6094 entry_bb = region->entry;
ac6e3339 6095 se = split_block (entry_bb, last_stmt (entry_bb));
6096 entry_bb = se->src;
6097 iter_part_bb = se->dest;
61e47ac8 6098 cont_bb = region->cont;
ac6e3339 6099 gcc_assert (EDGE_COUNT (iter_part_bb->succs) == 2);
bc7bff74 6100 fin_bb = BRANCH_EDGE (iter_part_bb)->dest;
6101 gcc_assert (broken_loop
6102 || fin_bb == FALLTHRU_EDGE (cont_bb)->dest);
ac6e3339 6103 seq_start_bb = split_edge (FALLTHRU_EDGE (iter_part_bb));
6104 body_bb = single_succ (seq_start_bb);
bc7bff74 6105 if (!broken_loop)
6106 {
6107 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
6108 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
6109 trip_update_bb = split_edge (FALLTHRU_EDGE (cont_bb));
6110 }
61e47ac8 6111 exit_bb = region->exit;
773c5ba7 6112
773c5ba7 6113 /* Trip and adjustment setup goes in ENTRY_BB. */
75a70cf9 6114 si = gsi_last_bb (entry_bb);
6115 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_FOR);
773c5ba7 6116
bc7bff74 6117 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
6118 {
6119 get_num_threads = BUILT_IN_OMP_GET_NUM_TEAMS;
6120 get_thread_num = BUILT_IN_OMP_GET_TEAM_NUM;
6121 }
6122
6123 if (fd->collapse > 1)
6124 {
6125 int first_zero_iter = -1;
6126 basic_block l2_dom_bb = NULL;
6127
6128 counts = XALLOCAVEC (tree, fd->collapse);
6129 expand_omp_for_init_counts (fd, &si, entry_bb, counts,
6130 fin_bb, first_zero_iter,
6131 l2_dom_bb);
6132 t = NULL_TREE;
6133 }
6134 else if (gimple_omp_for_combined_into_p (fd->for_stmt))
6135 t = integer_one_node;
6136 else
6137 t = fold_binary (fd->loop.cond_code, boolean_type_node,
6138 fold_convert (type, fd->loop.n1),
6139 fold_convert (type, fd->loop.n2));
6140 if (fd->collapse == 1
6141 && TYPE_UNSIGNED (type)
8e6b4515 6142 && (t == NULL_TREE || !integer_onep (t)))
6143 {
8e6b4515 6144 n1 = fold_convert (type, unshare_expr (fd->loop.n1));
6145 n1 = force_gimple_operand_gsi (&si, n1, true, NULL_TREE,
6146 true, GSI_SAME_STMT);
6147 n2 = fold_convert (type, unshare_expr (fd->loop.n2));
6148 n2 = force_gimple_operand_gsi (&si, n2, true, NULL_TREE,
6149 true, GSI_SAME_STMT);
6150 stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
6151 NULL_TREE, NULL_TREE);
6152 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
6153 if (walk_tree (gimple_cond_lhs_ptr (stmt),
6154 expand_omp_regimplify_p, NULL, NULL)
6155 || walk_tree (gimple_cond_rhs_ptr (stmt),
6156 expand_omp_regimplify_p, NULL, NULL))
6157 {
6158 si = gsi_for_stmt (stmt);
6159 gimple_regimplify_operands (stmt, &si);
6160 }
6161 se = split_block (entry_bb, stmt);
6162 se->flags = EDGE_TRUE_VALUE;
6163 entry_bb = se->dest;
6164 se->probability = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
6165 se = make_edge (se->src, fin_bb, EDGE_FALSE_VALUE);
6166 se->probability = REG_BR_PROB_BASE / 2000 - 1;
6167 if (gimple_in_ssa_p (cfun))
6168 {
6169 int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
6170 for (si = gsi_start_phis (fin_bb);
6171 !gsi_end_p (si); gsi_next (&si))
6172 {
6173 gimple phi = gsi_stmt (si);
6174 add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
6175 se, UNKNOWN_LOCATION);
6176 }
6177 }
6178 si = gsi_last_bb (entry_bb);
6179 }
6180
bc7bff74 6181 t = build_call_expr (builtin_decl_explicit (get_num_threads), 0);
fd6481cf 6182 t = fold_convert (itype, t);
75a70cf9 6183 nthreads = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6184 true, GSI_SAME_STMT);
48e1416a 6185
bc7bff74 6186 t = build_call_expr (builtin_decl_explicit (get_thread_num), 0);
fd6481cf 6187 t = fold_convert (itype, t);
75a70cf9 6188 threadid = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6189 true, GSI_SAME_STMT);
79acaae1 6190
bc7bff74 6191 n1 = fd->loop.n1;
6192 n2 = fd->loop.n2;
6193 step = fd->loop.step;
6194 if (gimple_omp_for_combined_into_p (fd->for_stmt))
6195 {
6196 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6197 OMP_CLAUSE__LOOPTEMP_);
6198 gcc_assert (innerc);
6199 n1 = OMP_CLAUSE_DECL (innerc);
6200 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6201 OMP_CLAUSE__LOOPTEMP_);
6202 gcc_assert (innerc);
6203 n2 = OMP_CLAUSE_DECL (innerc);
6204 }
6205 n1 = force_gimple_operand_gsi (&si, fold_convert (type, n1),
6206 true, NULL_TREE, true, GSI_SAME_STMT);
6207 n2 = force_gimple_operand_gsi (&si, fold_convert (itype, n2),
6208 true, NULL_TREE, true, GSI_SAME_STMT);
6209 step = force_gimple_operand_gsi (&si, fold_convert (itype, step),
6210 true, NULL_TREE, true, GSI_SAME_STMT);
79acaae1 6211 fd->chunk_size
75a70cf9 6212 = force_gimple_operand_gsi (&si, fold_convert (itype, fd->chunk_size),
6213 true, NULL_TREE, true, GSI_SAME_STMT);
fd6481cf 6214
6215 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
bc7bff74 6216 t = fold_build2 (PLUS_EXPR, itype, step, t);
6217 t = fold_build2 (PLUS_EXPR, itype, t, n2);
6218 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, n1));
fd6481cf 6219 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
6220 t = fold_build2 (TRUNC_DIV_EXPR, itype,
6221 fold_build1 (NEGATE_EXPR, itype, t),
bc7bff74 6222 fold_build1 (NEGATE_EXPR, itype, step));
fd6481cf 6223 else
bc7bff74 6224 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
fd6481cf 6225 t = fold_convert (itype, t);
75a70cf9 6226 n = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6227 true, GSI_SAME_STMT);
79acaae1 6228
083152fb 6229 trip_var = create_tmp_reg (itype, ".trip");
79acaae1 6230 if (gimple_in_ssa_p (cfun))
6231 {
75a70cf9 6232 trip_init = make_ssa_name (trip_var, NULL);
6233 trip_main = make_ssa_name (trip_var, NULL);
6234 trip_back = make_ssa_name (trip_var, NULL);
79acaae1 6235 }
1e8e9920 6236 else
79acaae1 6237 {
6238 trip_init = trip_var;
6239 trip_main = trip_var;
6240 trip_back = trip_var;
6241 }
1e8e9920 6242
75a70cf9 6243 stmt = gimple_build_assign (trip_init, build_int_cst (itype, 0));
6244 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
773c5ba7 6245
fd6481cf 6246 t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
bc7bff74 6247 t = fold_build2 (MULT_EXPR, itype, t, step);
fd6481cf 6248 if (POINTER_TYPE_P (type))
bc7bff74 6249 t = fold_build_pointer_plus (n1, t);
fd6481cf 6250 else
bc7bff74 6251 t = fold_build2 (PLUS_EXPR, type, t, n1);
75a70cf9 6252 v_extra = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6253 true, GSI_SAME_STMT);
79acaae1 6254
75a70cf9 6255 /* Remove the GIMPLE_OMP_FOR. */
6256 gsi_remove (&si, true);
773c5ba7 6257
6258 /* Iteration space partitioning goes in ITER_PART_BB. */
75a70cf9 6259 si = gsi_last_bb (iter_part_bb);
1e8e9920 6260
fd6481cf 6261 t = fold_build2 (MULT_EXPR, itype, trip_main, nthreads);
6262 t = fold_build2 (PLUS_EXPR, itype, t, threadid);
6263 t = fold_build2 (MULT_EXPR, itype, t, fd->chunk_size);
75a70cf9 6264 s0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6265 false, GSI_CONTINUE_LINKING);
1e8e9920 6266
fd6481cf 6267 t = fold_build2 (PLUS_EXPR, itype, s0, fd->chunk_size);
6268 t = fold_build2 (MIN_EXPR, itype, t, n);
75a70cf9 6269 e0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6270 false, GSI_CONTINUE_LINKING);
1e8e9920 6271
6272 t = build2 (LT_EXPR, boolean_type_node, s0, n);
75a70cf9 6273 gsi_insert_after (&si, gimple_build_cond_empty (t), GSI_CONTINUE_LINKING);
773c5ba7 6274
6275 /* Setup code for sequential iteration goes in SEQ_START_BB. */
75a70cf9 6276 si = gsi_start_bb (seq_start_bb);
1e8e9920 6277
bc7bff74 6278 tree startvar = fd->loop.v;
6279 tree endvar = NULL_TREE;
6280
6281 if (gimple_omp_for_combined_p (fd->for_stmt))
6282 {
6283 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
6284 ? gimple_omp_parallel_clauses (inner_stmt)
6285 : gimple_omp_for_clauses (inner_stmt);
6286 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
6287 gcc_assert (innerc);
6288 startvar = OMP_CLAUSE_DECL (innerc);
6289 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6290 OMP_CLAUSE__LOOPTEMP_);
6291 gcc_assert (innerc);
6292 endvar = OMP_CLAUSE_DECL (innerc);
6293 }
6294
fd6481cf 6295 t = fold_convert (itype, s0);
bc7bff74 6296 t = fold_build2 (MULT_EXPR, itype, t, step);
fd6481cf 6297 if (POINTER_TYPE_P (type))
bc7bff74 6298 t = fold_build_pointer_plus (n1, t);
fd6481cf 6299 else
bc7bff74 6300 t = fold_build2 (PLUS_EXPR, type, t, n1);
6301 t = fold_convert (TREE_TYPE (startvar), t);
4abecb72 6302 t = force_gimple_operand_gsi (&si, t,
bc7bff74 6303 DECL_P (startvar)
6304 && TREE_ADDRESSABLE (startvar),
4abecb72 6305 NULL_TREE, false, GSI_CONTINUE_LINKING);
bc7bff74 6306 stmt = gimple_build_assign (startvar, t);
75a70cf9 6307 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
1e8e9920 6308
fd6481cf 6309 t = fold_convert (itype, e0);
bc7bff74 6310 t = fold_build2 (MULT_EXPR, itype, t, step);
fd6481cf 6311 if (POINTER_TYPE_P (type))
bc7bff74 6312 t = fold_build_pointer_plus (n1, t);
fd6481cf 6313 else
bc7bff74 6314 t = fold_build2 (PLUS_EXPR, type, t, n1);
6315 t = fold_convert (TREE_TYPE (startvar), t);
75a70cf9 6316 e = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6317 false, GSI_CONTINUE_LINKING);
bc7bff74 6318 if (endvar)
6319 {
6320 stmt = gimple_build_assign (endvar, e);
6321 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
6322 }
6323 if (fd->collapse > 1)
6324 expand_omp_for_init_vars (fd, &si, counts, inner_stmt, startvar);
6325
6326 if (!broken_loop)
6327 {
6328 /* The code controlling the sequential loop goes in CONT_BB,
6329 replacing the GIMPLE_OMP_CONTINUE. */
6330 si = gsi_last_bb (cont_bb);
6331 stmt = gsi_stmt (si);
6332 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
6333 v_main = gimple_omp_continue_control_use (stmt);
6334 v_back = gimple_omp_continue_control_def (stmt);
1e8e9920 6335
bc7bff74 6336 if (!gimple_omp_for_combined_p (fd->for_stmt))
6337 {
6338 if (POINTER_TYPE_P (type))
6339 t = fold_build_pointer_plus (v_main, step);
6340 else
6341 t = fold_build2 (PLUS_EXPR, type, v_main, step);
6342 if (DECL_P (v_back) && TREE_ADDRESSABLE (v_back))
6343 t = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6344 true, GSI_SAME_STMT);
6345 stmt = gimple_build_assign (v_back, t);
6346 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
79acaae1 6347
bc7bff74 6348 t = build2 (fd->loop.cond_code, boolean_type_node,
6349 DECL_P (v_back) && TREE_ADDRESSABLE (v_back)
6350 ? t : v_back, e);
6351 gsi_insert_before (&si, gimple_build_cond_empty (t), GSI_SAME_STMT);
6352 }
79acaae1 6353
bc7bff74 6354 /* Remove GIMPLE_OMP_CONTINUE. */
6355 gsi_remove (&si, true);
48e1416a 6356
bc7bff74 6357 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
6358 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, body_bb);
773c5ba7 6359
bc7bff74 6360 /* Trip update code goes into TRIP_UPDATE_BB. */
6361 si = gsi_start_bb (trip_update_bb);
1e8e9920 6362
bc7bff74 6363 t = build_int_cst (itype, 1);
6364 t = build2 (PLUS_EXPR, itype, trip_main, t);
6365 stmt = gimple_build_assign (trip_back, t);
6366 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
6367 }
1e8e9920 6368
75a70cf9 6369 /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing. */
6370 si = gsi_last_bb (exit_bb);
6371 if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
bc7bff74 6372 {
6373 t = gimple_omp_return_lhs (gsi_stmt (si));
6374 gsi_insert_after (&si, build_omp_barrier (t), GSI_SAME_STMT);
6375 }
75a70cf9 6376 gsi_remove (&si, true);
1e8e9920 6377
773c5ba7 6378 /* Connect the new blocks. */
ac6e3339 6379 find_edge (iter_part_bb, seq_start_bb)->flags = EDGE_TRUE_VALUE;
6380 find_edge (iter_part_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
79acaae1 6381
bc7bff74 6382 if (!broken_loop)
6383 {
6384 se = find_edge (cont_bb, body_bb);
6385 if (gimple_omp_for_combined_p (fd->for_stmt))
6386 {
6387 remove_edge (se);
6388 se = NULL;
6389 }
6390 else if (fd->collapse > 1)
6391 {
6392 remove_edge (se);
6393 se = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
6394 }
6395 else
6396 se->flags = EDGE_TRUE_VALUE;
6397 find_edge (cont_bb, trip_update_bb)->flags
6398 = se ? EDGE_FALSE_VALUE : EDGE_FALLTHRU;
79acaae1 6399
bc7bff74 6400 redirect_edge_and_branch (single_succ_edge (trip_update_bb), iter_part_bb);
6401 }
79acaae1 6402
6403 if (gimple_in_ssa_p (cfun))
6404 {
75a70cf9 6405 gimple_stmt_iterator psi;
6406 gimple phi;
6407 edge re, ene;
f1f41a6c 6408 edge_var_map_vector *head;
75a70cf9 6409 edge_var_map *vm;
6410 size_t i;
6411
bc7bff74 6412 gcc_assert (fd->collapse == 1 && !broken_loop);
6413
79acaae1 6414 /* When we redirect the edge from trip_update_bb to iter_part_bb, we
6415 remove arguments of the phi nodes in fin_bb. We need to create
6416 appropriate phi nodes in iter_part_bb instead. */
6417 se = single_pred_edge (fin_bb);
6418 re = single_succ_edge (trip_update_bb);
75a70cf9 6419 head = redirect_edge_var_map_vector (re);
79acaae1 6420 ene = single_succ_edge (entry_bb);
6421
75a70cf9 6422 psi = gsi_start_phis (fin_bb);
f1f41a6c 6423 for (i = 0; !gsi_end_p (psi) && head->iterate (i, &vm);
75a70cf9 6424 gsi_next (&psi), ++i)
79acaae1 6425 {
75a70cf9 6426 gimple nphi;
efbcb6de 6427 source_location locus;
75a70cf9 6428
6429 phi = gsi_stmt (psi);
6430 t = gimple_phi_result (phi);
6431 gcc_assert (t == redirect_edge_var_map_result (vm));
79acaae1 6432 nphi = create_phi_node (t, iter_part_bb);
79acaae1 6433
6434 t = PHI_ARG_DEF_FROM_EDGE (phi, se);
efbcb6de 6435 locus = gimple_phi_arg_location_from_edge (phi, se);
6436
fd6481cf 6437 /* A special case -- fd->loop.v is not yet computed in
6438 iter_part_bb, we need to use v_extra instead. */
6439 if (t == fd->loop.v)
79acaae1 6440 t = v_extra;
60d535d2 6441 add_phi_arg (nphi, t, ene, locus);
efbcb6de 6442 locus = redirect_edge_var_map_location (vm);
60d535d2 6443 add_phi_arg (nphi, redirect_edge_var_map_def (vm), re, locus);
75a70cf9 6444 }
f1f41a6c 6445 gcc_assert (!gsi_end_p (psi) && i == head->length ());
75a70cf9 6446 redirect_edge_var_map_clear (re);
6447 while (1)
6448 {
6449 psi = gsi_start_phis (fin_bb);
6450 if (gsi_end_p (psi))
6451 break;
6452 remove_phi_node (&psi, false);
79acaae1 6453 }
79acaae1 6454
6455 /* Make phi node for trip. */
6456 phi = create_phi_node (trip_main, iter_part_bb);
efbcb6de 6457 add_phi_arg (phi, trip_back, single_succ_edge (trip_update_bb),
60d535d2 6458 UNKNOWN_LOCATION);
efbcb6de 6459 add_phi_arg (phi, trip_init, single_succ_edge (entry_bb),
60d535d2 6460 UNKNOWN_LOCATION);
79acaae1 6461 }
6462
bc7bff74 6463 if (!broken_loop)
6464 set_immediate_dominator (CDI_DOMINATORS, trip_update_bb, cont_bb);
79acaae1 6465 set_immediate_dominator (CDI_DOMINATORS, iter_part_bb,
6466 recompute_dominator (CDI_DOMINATORS, iter_part_bb));
6467 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
6468 recompute_dominator (CDI_DOMINATORS, fin_bb));
6469 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb,
6470 recompute_dominator (CDI_DOMINATORS, seq_start_bb));
6471 set_immediate_dominator (CDI_DOMINATORS, body_bb,
6472 recompute_dominator (CDI_DOMINATORS, body_bb));
04c2922b 6473
bc7bff74 6474 if (!broken_loop)
6475 {
6476 struct loop *trip_loop = alloc_loop ();
6477 trip_loop->header = iter_part_bb;
6478 trip_loop->latch = trip_update_bb;
6479 add_loop (trip_loop, iter_part_bb->loop_father);
04c2922b 6480
bc7bff74 6481 if (!gimple_omp_for_combined_p (fd->for_stmt))
6482 {
6483 struct loop *loop = alloc_loop ();
6484 loop->header = body_bb;
6485 loop->latch = cont_bb;
6486 add_loop (loop, trip_loop);
6487 }
6488 }
1e8e9920 6489}
6490
bc7bff74 6491
3d483a94 6492/* A subroutine of expand_omp_for. Generate code for a simd non-worksharing
6493 loop. Given parameters:
6494
6495 for (V = N1; V cond N2; V += STEP) BODY;
6496
6497 where COND is "<" or ">", we generate pseudocode
6498
6499 V = N1;
6500 goto L1;
6501 L0:
6502 BODY;
6503 V += STEP;
6504 L1:
6505 if (V cond N2) goto L0; else goto L2;
6506 L2:
6507
6508 For collapsed loops, given parameters:
6509 collapse(3)
6510 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
6511 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
6512 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
6513 BODY;
6514
6515 we generate pseudocode
6516
6517 if (cond3 is <)
6518 adj = STEP3 - 1;
6519 else
6520 adj = STEP3 + 1;
6521 count3 = (adj + N32 - N31) / STEP3;
6522 if (cond2 is <)
6523 adj = STEP2 - 1;
6524 else
6525 adj = STEP2 + 1;
6526 count2 = (adj + N22 - N21) / STEP2;
6527 if (cond1 is <)
6528 adj = STEP1 - 1;
6529 else
6530 adj = STEP1 + 1;
6531 count1 = (adj + N12 - N11) / STEP1;
6532 count = count1 * count2 * count3;
6533 V = 0;
6534 V1 = N11;
6535 V2 = N21;
6536 V3 = N31;
6537 goto L1;
6538 L0:
6539 BODY;
6540 V += 1;
6541 V3 += STEP3;
6542 V2 += (V3 cond3 N32) ? 0 : STEP2;
6543 V3 = (V3 cond3 N32) ? V3 : N31;
6544 V1 += (V2 cond2 N22) ? 0 : STEP1;
6545 V2 = (V2 cond2 N22) ? V2 : N21;
6546 L1:
6547 if (V < count) goto L0; else goto L2;
6548 L2:
6549
6550 */
6551
6552static void
6553expand_omp_simd (struct omp_region *region, struct omp_for_data *fd)
6554{
6555 tree type, t;
6556 basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, l2_bb, l2_dom_bb;
6557 gimple_stmt_iterator gsi;
6558 gimple stmt;
6559 bool broken_loop = region->cont == NULL;
6560 edge e, ne;
6561 tree *counts = NULL;
6562 int i;
6563 tree safelen = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6564 OMP_CLAUSE_SAFELEN);
6565 tree simduid = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6566 OMP_CLAUSE__SIMDUID_);
bc7bff74 6567 tree n1, n2;
3d483a94 6568
6569 type = TREE_TYPE (fd->loop.v);
6570 entry_bb = region->entry;
6571 cont_bb = region->cont;
6572 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
6573 gcc_assert (broken_loop
6574 || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
6575 l0_bb = FALLTHRU_EDGE (entry_bb)->dest;
6576 if (!broken_loop)
6577 {
6578 gcc_assert (BRANCH_EDGE (cont_bb)->dest == l0_bb);
6579 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
6580 l1_bb = split_block (cont_bb, last_stmt (cont_bb))->dest;
6581 l2_bb = BRANCH_EDGE (entry_bb)->dest;
6582 }
6583 else
6584 {
6585 BRANCH_EDGE (entry_bb)->flags &= ~EDGE_ABNORMAL;
6586 l1_bb = split_edge (BRANCH_EDGE (entry_bb));
6587 l2_bb = single_succ (l1_bb);
6588 }
6589 exit_bb = region->exit;
6590 l2_dom_bb = NULL;
6591
6592 gsi = gsi_last_bb (entry_bb);
6593
6594 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
6595 /* Not needed in SSA form right now. */
6596 gcc_assert (!gimple_in_ssa_p (cfun));
6597 if (fd->collapse > 1)
6598 {
6599 int first_zero_iter = -1;
6600 basic_block zero_iter_bb = l2_bb;
6601
6602 counts = XALLOCAVEC (tree, fd->collapse);
6603 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
6604 zero_iter_bb, first_zero_iter,
6605 l2_dom_bb);
6606 }
6607 if (l2_dom_bb == NULL)
6608 l2_dom_bb = l1_bb;
6609
bc7bff74 6610 n1 = fd->loop.n1;
3d483a94 6611 n2 = fd->loop.n2;
bc7bff74 6612 if (gimple_omp_for_combined_into_p (fd->for_stmt))
6613 {
6614 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6615 OMP_CLAUSE__LOOPTEMP_);
6616 gcc_assert (innerc);
6617 n1 = OMP_CLAUSE_DECL (innerc);
6618 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6619 OMP_CLAUSE__LOOPTEMP_);
6620 gcc_assert (innerc);
6621 n2 = OMP_CLAUSE_DECL (innerc);
6622 expand_omp_build_assign (&gsi, fd->loop.v,
6623 fold_convert (type, n1));
6624 if (fd->collapse > 1)
6625 {
6626 gsi_prev (&gsi);
6627 expand_omp_for_init_vars (fd, &gsi, counts, NULL, n1);
6628 gsi_next (&gsi);
6629 }
6630 }
3d483a94 6631 else
6632 {
6633 expand_omp_build_assign (&gsi, fd->loop.v,
6634 fold_convert (type, fd->loop.n1));
6635 if (fd->collapse > 1)
6636 for (i = 0; i < fd->collapse; i++)
6637 {
6638 tree itype = TREE_TYPE (fd->loops[i].v);
6639 if (POINTER_TYPE_P (itype))
6640 itype = signed_type_for (itype);
6641 t = fold_convert (TREE_TYPE (fd->loops[i].v), fd->loops[i].n1);
6642 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
6643 }
6644 }
6645
6646 /* Remove the GIMPLE_OMP_FOR statement. */
6647 gsi_remove (&gsi, true);
6648
6649 if (!broken_loop)
6650 {
6651 /* Code to control the increment goes in the CONT_BB. */
6652 gsi = gsi_last_bb (cont_bb);
6653 stmt = gsi_stmt (gsi);
6654 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
6655
6656 if (POINTER_TYPE_P (type))
6657 t = fold_build_pointer_plus (fd->loop.v, fd->loop.step);
6658 else
6659 t = fold_build2 (PLUS_EXPR, type, fd->loop.v, fd->loop.step);
6660 expand_omp_build_assign (&gsi, fd->loop.v, t);
6661
6662 if (fd->collapse > 1)
6663 {
6664 i = fd->collapse - 1;
6665 if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i].v)))
6666 {
6667 t = fold_convert (sizetype, fd->loops[i].step);
6668 t = fold_build_pointer_plus (fd->loops[i].v, t);
6669 }
6670 else
6671 {
6672 t = fold_convert (TREE_TYPE (fd->loops[i].v),
6673 fd->loops[i].step);
6674 t = fold_build2 (PLUS_EXPR, TREE_TYPE (fd->loops[i].v),
6675 fd->loops[i].v, t);
6676 }
6677 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
6678
6679 for (i = fd->collapse - 1; i > 0; i--)
6680 {
6681 tree itype = TREE_TYPE (fd->loops[i].v);
6682 tree itype2 = TREE_TYPE (fd->loops[i - 1].v);
6683 if (POINTER_TYPE_P (itype2))
6684 itype2 = signed_type_for (itype2);
6685 t = build3 (COND_EXPR, itype2,
6686 build2 (fd->loops[i].cond_code, boolean_type_node,
6687 fd->loops[i].v,
6688 fold_convert (itype, fd->loops[i].n2)),
6689 build_int_cst (itype2, 0),
6690 fold_convert (itype2, fd->loops[i - 1].step));
6691 if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i - 1].v)))
6692 t = fold_build_pointer_plus (fd->loops[i - 1].v, t);
6693 else
6694 t = fold_build2 (PLUS_EXPR, itype2, fd->loops[i - 1].v, t);
6695 expand_omp_build_assign (&gsi, fd->loops[i - 1].v, t);
6696
6697 t = build3 (COND_EXPR, itype,
6698 build2 (fd->loops[i].cond_code, boolean_type_node,
6699 fd->loops[i].v,
6700 fold_convert (itype, fd->loops[i].n2)),
6701 fd->loops[i].v,
6702 fold_convert (itype, fd->loops[i].n1));
6703 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
6704 }
6705 }
6706
6707 /* Remove GIMPLE_OMP_CONTINUE. */
6708 gsi_remove (&gsi, true);
6709 }
6710
6711 /* Emit the condition in L1_BB. */
6712 gsi = gsi_start_bb (l1_bb);
6713
6714 t = fold_convert (type, n2);
6715 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
6716 false, GSI_CONTINUE_LINKING);
6717 t = build2 (fd->loop.cond_code, boolean_type_node, fd->loop.v, t);
6718 stmt = gimple_build_cond_empty (t);
6719 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
6720 if (walk_tree (gimple_cond_lhs_ptr (stmt), expand_omp_regimplify_p,
6721 NULL, NULL)
6722 || walk_tree (gimple_cond_rhs_ptr (stmt), expand_omp_regimplify_p,
6723 NULL, NULL))
6724 {
6725 gsi = gsi_for_stmt (stmt);
6726 gimple_regimplify_operands (stmt, &gsi);
6727 }
6728
6729 /* Remove GIMPLE_OMP_RETURN. */
6730 gsi = gsi_last_bb (exit_bb);
6731 gsi_remove (&gsi, true);
6732
6733 /* Connect the new blocks. */
6734 remove_edge (FALLTHRU_EDGE (entry_bb));
6735
6736 if (!broken_loop)
6737 {
6738 remove_edge (BRANCH_EDGE (entry_bb));
6739 make_edge (entry_bb, l1_bb, EDGE_FALLTHRU);
6740
6741 e = BRANCH_EDGE (l1_bb);
6742 ne = FALLTHRU_EDGE (l1_bb);
6743 e->flags = EDGE_TRUE_VALUE;
6744 }
6745 else
6746 {
6747 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
6748
6749 ne = single_succ_edge (l1_bb);
6750 e = make_edge (l1_bb, l0_bb, EDGE_TRUE_VALUE);
6751
6752 }
6753 ne->flags = EDGE_FALSE_VALUE;
6754 e->probability = REG_BR_PROB_BASE * 7 / 8;
6755 ne->probability = REG_BR_PROB_BASE / 8;
6756
6757 set_immediate_dominator (CDI_DOMINATORS, l1_bb, entry_bb);
6758 set_immediate_dominator (CDI_DOMINATORS, l2_bb, l2_dom_bb);
6759 set_immediate_dominator (CDI_DOMINATORS, l0_bb, l1_bb);
6760
6761 if (!broken_loop)
6762 {
6763 struct loop *loop = alloc_loop ();
6764 loop->header = l1_bb;
6765 loop->latch = e->dest;
6766 add_loop (loop, l1_bb->loop_father);
6767 if (safelen == NULL_TREE)
6768 loop->safelen = INT_MAX;
6769 else
6770 {
6771 safelen = OMP_CLAUSE_SAFELEN_EXPR (safelen);
cd4547bf 6772 if (!tree_fits_uhwi_p (safelen)
aa59f000 6773 || tree_to_uhwi (safelen) > INT_MAX)
3d483a94 6774 loop->safelen = INT_MAX;
6775 else
6a0712d4 6776 loop->safelen = tree_to_uhwi (safelen);
3d483a94 6777 if (loop->safelen == 1)
6778 loop->safelen = 0;
6779 }
6780 if (simduid)
6781 {
6782 loop->simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6783 cfun->has_simduid_loops = true;
6784 }
043115ec 6785 /* If not -fno-tree-loop-vectorize, hint that we want to vectorize
3d483a94 6786 the loop. */
043115ec 6787 if ((flag_tree_loop_vectorize
6788 || (!global_options_set.x_flag_tree_loop_vectorize
6789 && !global_options_set.x_flag_tree_vectorize))
3d483a94 6790 && loop->safelen > 1)
6791 {
6792 loop->force_vect = true;
6793 cfun->has_force_vect_loops = true;
6794 }
6795 }
6796}
6797
1e8e9920 6798
773c5ba7 6799/* Expand the OpenMP loop defined by REGION. */
1e8e9920 6800
773c5ba7 6801static void
bc7bff74 6802expand_omp_for (struct omp_region *region, gimple inner_stmt)
773c5ba7 6803{
6804 struct omp_for_data fd;
fd6481cf 6805 struct omp_for_data_loop *loops;
1e8e9920 6806
fd6481cf 6807 loops
6808 = (struct omp_for_data_loop *)
75a70cf9 6809 alloca (gimple_omp_for_collapse (last_stmt (region->entry))
fd6481cf 6810 * sizeof (struct omp_for_data_loop));
fd6481cf 6811 extract_omp_for_data (last_stmt (region->entry), &fd, loops);
f77459c5 6812 region->sched_kind = fd.sched_kind;
1e8e9920 6813
b3a3ddec 6814 gcc_assert (EDGE_COUNT (region->entry->succs) == 2);
6815 BRANCH_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
6816 FALLTHRU_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
6817 if (region->cont)
6818 {
6819 gcc_assert (EDGE_COUNT (region->cont->succs) == 2);
6820 BRANCH_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
6821 FALLTHRU_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
6822 }
04c2922b 6823 else
75de4aa2 6824 /* If there isn't a continue then this is a degerate case where
04c2922b 6825 the introduction of abnormal edges during lowering will prevent
6826 original loops from being detected. Fix that up. */
6827 loops_state_set (LOOPS_NEED_FIXUP);
b3a3ddec 6828
f2697631 6829 if (gimple_omp_for_kind (fd.for_stmt) & GF_OMP_FOR_KIND_SIMD)
3d483a94 6830 expand_omp_simd (region, &fd);
6831 else if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
bc7bff74 6832 && !fd.have_ordered)
1e8e9920 6833 {
6834 if (fd.chunk_size == NULL)
bc7bff74 6835 expand_omp_for_static_nochunk (region, &fd, inner_stmt);
1e8e9920 6836 else
bc7bff74 6837 expand_omp_for_static_chunk (region, &fd, inner_stmt);
1e8e9920 6838 }
6839 else
6840 {
fd6481cf 6841 int fn_index, start_ix, next_ix;
6842
3d483a94 6843 gcc_assert (gimple_omp_for_kind (fd.for_stmt)
6844 == GF_OMP_FOR_KIND_FOR);
0416ca72 6845 if (fd.chunk_size == NULL
6846 && fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
6847 fd.chunk_size = integer_zero_node;
fd6481cf 6848 gcc_assert (fd.sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
6849 fn_index = (fd.sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
75a70cf9 6850 ? 3 : fd.sched_kind;
fd6481cf 6851 fn_index += fd.have_ordered * 4;
b9a16870 6852 start_ix = ((int)BUILT_IN_GOMP_LOOP_STATIC_START) + fn_index;
6853 next_ix = ((int)BUILT_IN_GOMP_LOOP_STATIC_NEXT) + fn_index;
fd6481cf 6854 if (fd.iter_type == long_long_unsigned_type_node)
6855 {
b9a16870 6856 start_ix += ((int)BUILT_IN_GOMP_LOOP_ULL_STATIC_START
6857 - (int)BUILT_IN_GOMP_LOOP_STATIC_START);
6858 next_ix += ((int)BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT
6859 - (int)BUILT_IN_GOMP_LOOP_STATIC_NEXT);
fd6481cf 6860 }
b9c74b4d 6861 expand_omp_for_generic (region, &fd, (enum built_in_function) start_ix,
bc7bff74 6862 (enum built_in_function) next_ix, inner_stmt);
1e8e9920 6863 }
28c92cbb 6864
083152fb 6865 if (gimple_in_ssa_p (cfun))
6866 update_ssa (TODO_update_ssa_only_virtuals);
1e8e9920 6867}
6868
1e8e9920 6869
6870/* Expand code for an OpenMP sections directive. In pseudo code, we generate
6871
1e8e9920 6872 v = GOMP_sections_start (n);
6873 L0:
6874 switch (v)
6875 {
6876 case 0:
6877 goto L2;
6878 case 1:
6879 section 1;
6880 goto L1;
6881 case 2:
6882 ...
6883 case n:
6884 ...
1e8e9920 6885 default:
6886 abort ();
6887 }
6888 L1:
6889 v = GOMP_sections_next ();
6890 goto L0;
6891 L2:
6892 reduction;
6893
773c5ba7 6894 If this is a combined parallel sections, replace the call to
79acaae1 6895 GOMP_sections_start with call to GOMP_sections_next. */
1e8e9920 6896
6897static void
773c5ba7 6898expand_omp_sections (struct omp_region *region)
1e8e9920 6899{
f018d957 6900 tree t, u, vin = NULL, vmain, vnext, l2;
f1f41a6c 6901 vec<tree> label_vec;
75a70cf9 6902 unsigned len;
ac6e3339 6903 basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
75a70cf9 6904 gimple_stmt_iterator si, switch_si;
6905 gimple sections_stmt, stmt, cont;
9884aaf8 6906 edge_iterator ei;
6907 edge e;
61e47ac8 6908 struct omp_region *inner;
75a70cf9 6909 unsigned i, casei;
ac6e3339 6910 bool exit_reachable = region->cont != NULL;
1e8e9920 6911
d244d9de 6912 gcc_assert (region->exit != NULL);
61e47ac8 6913 entry_bb = region->entry;
ac6e3339 6914 l0_bb = single_succ (entry_bb);
61e47ac8 6915 l1_bb = region->cont;
ac6e3339 6916 l2_bb = region->exit;
d244d9de 6917 if (single_pred_p (l2_bb) && single_pred (l2_bb) == l0_bb)
6918 l2 = gimple_block_label (l2_bb);
6919 else
03ed154b 6920 {
d244d9de 6921 /* This can happen if there are reductions. */
6922 len = EDGE_COUNT (l0_bb->succs);
6923 gcc_assert (len > 0);
6924 e = EDGE_SUCC (l0_bb, len - 1);
6925 si = gsi_last_bb (e->dest);
6926 l2 = NULL_TREE;
6927 if (gsi_end_p (si)
6928 || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
6929 l2 = gimple_block_label (e->dest);
9884aaf8 6930 else
d244d9de 6931 FOR_EACH_EDGE (e, ei, l0_bb->succs)
6932 {
6933 si = gsi_last_bb (e->dest);
6934 if (gsi_end_p (si)
6935 || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
9884aaf8 6936 {
d244d9de 6937 l2 = gimple_block_label (e->dest);
6938 break;
9884aaf8 6939 }
d244d9de 6940 }
03ed154b 6941 }
d244d9de 6942 if (exit_reachable)
6943 default_bb = create_empty_bb (l1_bb->prev_bb);
03ed154b 6944 else
d244d9de 6945 default_bb = create_empty_bb (l0_bb);
773c5ba7 6946
6947 /* We will build a switch() with enough cases for all the
75a70cf9 6948 GIMPLE_OMP_SECTION regions, a '0' case to handle the end of more work
773c5ba7 6949 and a default case to abort if something goes wrong. */
ac6e3339 6950 len = EDGE_COUNT (l0_bb->succs);
75a70cf9 6951
f1f41a6c 6952 /* Use vec::quick_push on label_vec throughout, since we know the size
75a70cf9 6953 in advance. */
f1f41a6c 6954 label_vec.create (len);
1e8e9920 6955
61e47ac8 6956 /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
75a70cf9 6957 GIMPLE_OMP_SECTIONS statement. */
6958 si = gsi_last_bb (entry_bb);
6959 sections_stmt = gsi_stmt (si);
6960 gcc_assert (gimple_code (sections_stmt) == GIMPLE_OMP_SECTIONS);
6961 vin = gimple_omp_sections_control (sections_stmt);
773c5ba7 6962 if (!is_combined_parallel (region))
1e8e9920 6963 {
773c5ba7 6964 /* If we are not inside a combined parallel+sections region,
6965 call GOMP_sections_start. */
39cb6d68 6966 t = build_int_cst (unsigned_type_node, len - 1);
b9a16870 6967 u = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_START);
75a70cf9 6968 stmt = gimple_build_call (u, 1, t);
1e8e9920 6969 }
79acaae1 6970 else
6971 {
6972 /* Otherwise, call GOMP_sections_next. */
b9a16870 6973 u = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_NEXT);
75a70cf9 6974 stmt = gimple_build_call (u, 0);
79acaae1 6975 }
75a70cf9 6976 gimple_call_set_lhs (stmt, vin);
6977 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
6978 gsi_remove (&si, true);
6979
6980 /* The switch() statement replacing GIMPLE_OMP_SECTIONS_SWITCH goes in
6981 L0_BB. */
6982 switch_si = gsi_last_bb (l0_bb);
6983 gcc_assert (gimple_code (gsi_stmt (switch_si)) == GIMPLE_OMP_SECTIONS_SWITCH);
79acaae1 6984 if (exit_reachable)
6985 {
6986 cont = last_stmt (l1_bb);
75a70cf9 6987 gcc_assert (gimple_code (cont) == GIMPLE_OMP_CONTINUE);
6988 vmain = gimple_omp_continue_control_use (cont);
6989 vnext = gimple_omp_continue_control_def (cont);
79acaae1 6990 }
6991 else
6992 {
6993 vmain = vin;
6994 vnext = NULL_TREE;
6995 }
1e8e9920 6996
d244d9de 6997 t = build_case_label (build_int_cst (unsigned_type_node, 0), NULL, l2);
f1f41a6c 6998 label_vec.quick_push (t);
d244d9de 6999 i = 1;
03ed154b 7000
75a70cf9 7001 /* Convert each GIMPLE_OMP_SECTION into a CASE_LABEL_EXPR. */
ac6e3339 7002 for (inner = region->inner, casei = 1;
7003 inner;
7004 inner = inner->next, i++, casei++)
1e8e9920 7005 {
773c5ba7 7006 basic_block s_entry_bb, s_exit_bb;
7007
9884aaf8 7008 /* Skip optional reduction region. */
75a70cf9 7009 if (inner->type == GIMPLE_OMP_ATOMIC_LOAD)
9884aaf8 7010 {
7011 --i;
7012 --casei;
7013 continue;
7014 }
7015
61e47ac8 7016 s_entry_bb = inner->entry;
7017 s_exit_bb = inner->exit;
1e8e9920 7018
75a70cf9 7019 t = gimple_block_label (s_entry_bb);
ac6e3339 7020 u = build_int_cst (unsigned_type_node, casei);
b6e3dd65 7021 u = build_case_label (u, NULL, t);
f1f41a6c 7022 label_vec.quick_push (u);
61e47ac8 7023
75a70cf9 7024 si = gsi_last_bb (s_entry_bb);
7025 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SECTION);
7026 gcc_assert (i < len || gimple_omp_section_last_p (gsi_stmt (si)));
7027 gsi_remove (&si, true);
61e47ac8 7028 single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;
03ed154b 7029
7030 if (s_exit_bb == NULL)
7031 continue;
7032
75a70cf9 7033 si = gsi_last_bb (s_exit_bb);
7034 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
7035 gsi_remove (&si, true);
03ed154b 7036
773c5ba7 7037 single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
1e8e9920 7038 }
7039
773c5ba7 7040 /* Error handling code goes in DEFAULT_BB. */
75a70cf9 7041 t = gimple_block_label (default_bb);
b6e3dd65 7042 u = build_case_label (NULL, NULL, t);
61e47ac8 7043 make_edge (l0_bb, default_bb, 0);
f6568ea4 7044 if (current_loops)
04c2922b 7045 add_bb_to_loop (default_bb, current_loops->tree_root);
1e8e9920 7046
49a70175 7047 stmt = gimple_build_switch (vmain, u, label_vec);
75a70cf9 7048 gsi_insert_after (&switch_si, stmt, GSI_SAME_STMT);
7049 gsi_remove (&switch_si, true);
f1f41a6c 7050 label_vec.release ();
75a70cf9 7051
7052 si = gsi_start_bb (default_bb);
b9a16870 7053 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
75a70cf9 7054 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
773c5ba7 7055
ac6e3339 7056 if (exit_reachable)
03ed154b 7057 {
b9a16870 7058 tree bfn_decl;
7059
ac6e3339 7060 /* Code to get the next section goes in L1_BB. */
75a70cf9 7061 si = gsi_last_bb (l1_bb);
7062 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CONTINUE);
1e8e9920 7063
b9a16870 7064 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_NEXT);
7065 stmt = gimple_build_call (bfn_decl, 0);
75a70cf9 7066 gimple_call_set_lhs (stmt, vnext);
7067 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
7068 gsi_remove (&si, true);
773c5ba7 7069
ac6e3339 7070 single_succ_edge (l1_bb)->flags = EDGE_FALLTHRU;
03ed154b 7071 }
773c5ba7 7072
d244d9de 7073 /* Cleanup function replaces GIMPLE_OMP_RETURN in EXIT_BB. */
7074 si = gsi_last_bb (l2_bb);
7075 if (gimple_omp_return_nowait_p (gsi_stmt (si)))
7076 t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END_NOWAIT);
bc7bff74 7077 else if (gimple_omp_return_lhs (gsi_stmt (si)))
7078 t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END_CANCEL);
d244d9de 7079 else
7080 t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END);
7081 stmt = gimple_build_call (t, 0);
bc7bff74 7082 if (gimple_omp_return_lhs (gsi_stmt (si)))
7083 gimple_call_set_lhs (stmt, gimple_omp_return_lhs (gsi_stmt (si)));
d244d9de 7084 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
7085 gsi_remove (&si, true);
7086
79acaae1 7087 set_immediate_dominator (CDI_DOMINATORS, default_bb, l0_bb);
773c5ba7 7088}
1e8e9920 7089
1e8e9920 7090
61e47ac8 7091/* Expand code for an OpenMP single directive. We've already expanded
7092 much of the code, here we simply place the GOMP_barrier call. */
7093
7094static void
7095expand_omp_single (struct omp_region *region)
7096{
7097 basic_block entry_bb, exit_bb;
75a70cf9 7098 gimple_stmt_iterator si;
61e47ac8 7099
7100 entry_bb = region->entry;
7101 exit_bb = region->exit;
7102
75a70cf9 7103 si = gsi_last_bb (entry_bb);
75a70cf9 7104 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE);
7105 gsi_remove (&si, true);
61e47ac8 7106 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7107
75a70cf9 7108 si = gsi_last_bb (exit_bb);
bc7bff74 7109 if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
7110 {
7111 tree t = gimple_omp_return_lhs (gsi_stmt (si));
7112 gsi_insert_after (&si, build_omp_barrier (t), GSI_SAME_STMT);
7113 }
75a70cf9 7114 gsi_remove (&si, true);
61e47ac8 7115 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
7116}
7117
7118
7119/* Generic expansion for OpenMP synchronization directives: master,
7120 ordered and critical. All we need to do here is remove the entry
7121 and exit markers for REGION. */
773c5ba7 7122
7123static void
7124expand_omp_synch (struct omp_region *region)
7125{
7126 basic_block entry_bb, exit_bb;
75a70cf9 7127 gimple_stmt_iterator si;
773c5ba7 7128
61e47ac8 7129 entry_bb = region->entry;
7130 exit_bb = region->exit;
773c5ba7 7131
75a70cf9 7132 si = gsi_last_bb (entry_bb);
7133 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE
7134 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_MASTER
bc7bff74 7135 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_TASKGROUP
75a70cf9 7136 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ORDERED
bc7bff74 7137 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CRITICAL
7138 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_TEAMS);
75a70cf9 7139 gsi_remove (&si, true);
773c5ba7 7140 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7141
03ed154b 7142 if (exit_bb)
7143 {
75a70cf9 7144 si = gsi_last_bb (exit_bb);
7145 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
7146 gsi_remove (&si, true);
03ed154b 7147 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
7148 }
773c5ba7 7149}
1e8e9920 7150
2169f33b 7151/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
7152 operation as a normal volatile load. */
7153
7154static bool
3ec11c49 7155expand_omp_atomic_load (basic_block load_bb, tree addr,
7156 tree loaded_val, int index)
2169f33b 7157{
3ec11c49 7158 enum built_in_function tmpbase;
7159 gimple_stmt_iterator gsi;
7160 basic_block store_bb;
7161 location_t loc;
7162 gimple stmt;
7163 tree decl, call, type, itype;
7164
7165 gsi = gsi_last_bb (load_bb);
7166 stmt = gsi_stmt (gsi);
7167 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD);
7168 loc = gimple_location (stmt);
7169
7170 /* ??? If the target does not implement atomic_load_optab[mode], and mode
7171 is smaller than word size, then expand_atomic_load assumes that the load
7172 is atomic. We could avoid the builtin entirely in this case. */
7173
7174 tmpbase = (enum built_in_function) (BUILT_IN_ATOMIC_LOAD_N + index + 1);
7175 decl = builtin_decl_explicit (tmpbase);
7176 if (decl == NULL_TREE)
7177 return false;
7178
7179 type = TREE_TYPE (loaded_val);
7180 itype = TREE_TYPE (TREE_TYPE (decl));
7181
7182 call = build_call_expr_loc (loc, decl, 2, addr,
bc7bff74 7183 build_int_cst (NULL,
7184 gimple_omp_atomic_seq_cst_p (stmt)
7185 ? MEMMODEL_SEQ_CST
7186 : MEMMODEL_RELAXED));
3ec11c49 7187 if (!useless_type_conversion_p (type, itype))
7188 call = fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, call);
7189 call = build2_loc (loc, MODIFY_EXPR, void_type_node, loaded_val, call);
7190
7191 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7192 gsi_remove (&gsi, true);
7193
7194 store_bb = single_succ (load_bb);
7195 gsi = gsi_last_bb (store_bb);
7196 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
7197 gsi_remove (&gsi, true);
7198
7199 if (gimple_in_ssa_p (cfun))
7200 update_ssa (TODO_update_ssa_no_phi);
7201
7202 return true;
2169f33b 7203}
7204
7205/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
7206 operation as a normal volatile store. */
7207
7208static bool
3ec11c49 7209expand_omp_atomic_store (basic_block load_bb, tree addr,
7210 tree loaded_val, tree stored_val, int index)
2169f33b 7211{
3ec11c49 7212 enum built_in_function tmpbase;
7213 gimple_stmt_iterator gsi;
7214 basic_block store_bb = single_succ (load_bb);
7215 location_t loc;
7216 gimple stmt;
7217 tree decl, call, type, itype;
7218 enum machine_mode imode;
7219 bool exchange;
7220
7221 gsi = gsi_last_bb (load_bb);
7222 stmt = gsi_stmt (gsi);
7223 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD);
7224
7225 /* If the load value is needed, then this isn't a store but an exchange. */
7226 exchange = gimple_omp_atomic_need_value_p (stmt);
7227
7228 gsi = gsi_last_bb (store_bb);
7229 stmt = gsi_stmt (gsi);
7230 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE);
7231 loc = gimple_location (stmt);
7232
7233 /* ??? If the target does not implement atomic_store_optab[mode], and mode
7234 is smaller than word size, then expand_atomic_store assumes that the store
7235 is atomic. We could avoid the builtin entirely in this case. */
7236
7237 tmpbase = (exchange ? BUILT_IN_ATOMIC_EXCHANGE_N : BUILT_IN_ATOMIC_STORE_N);
7238 tmpbase = (enum built_in_function) ((int) tmpbase + index + 1);
7239 decl = builtin_decl_explicit (tmpbase);
7240 if (decl == NULL_TREE)
7241 return false;
7242
7243 type = TREE_TYPE (stored_val);
7244
7245 /* Dig out the type of the function's second argument. */
7246 itype = TREE_TYPE (decl);
7247 itype = TYPE_ARG_TYPES (itype);
7248 itype = TREE_CHAIN (itype);
7249 itype = TREE_VALUE (itype);
7250 imode = TYPE_MODE (itype);
7251
7252 if (exchange && !can_atomic_exchange_p (imode, true))
7253 return false;
7254
7255 if (!useless_type_conversion_p (itype, type))
7256 stored_val = fold_build1_loc (loc, VIEW_CONVERT_EXPR, itype, stored_val);
7257 call = build_call_expr_loc (loc, decl, 3, addr, stored_val,
bc7bff74 7258 build_int_cst (NULL,
7259 gimple_omp_atomic_seq_cst_p (stmt)
7260 ? MEMMODEL_SEQ_CST
7261 : MEMMODEL_RELAXED));
3ec11c49 7262 if (exchange)
7263 {
7264 if (!useless_type_conversion_p (type, itype))
7265 call = build1_loc (loc, VIEW_CONVERT_EXPR, type, call);
7266 call = build2_loc (loc, MODIFY_EXPR, void_type_node, loaded_val, call);
7267 }
7268
7269 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7270 gsi_remove (&gsi, true);
7271
7272 /* Remove the GIMPLE_OMP_ATOMIC_LOAD that we verified above. */
7273 gsi = gsi_last_bb (load_bb);
7274 gsi_remove (&gsi, true);
7275
7276 if (gimple_in_ssa_p (cfun))
7277 update_ssa (TODO_update_ssa_no_phi);
7278
7279 return true;
2169f33b 7280}
7281
cb7f680b 7282/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
1cd6e20d 7283 operation as a __atomic_fetch_op builtin. INDEX is log2 of the
cb7f680b 7284 size of the data type, and thus usable to find the index of the builtin
7285 decl. Returns false if the expression is not of the proper form. */
7286
7287static bool
7288expand_omp_atomic_fetch_op (basic_block load_bb,
7289 tree addr, tree loaded_val,
7290 tree stored_val, int index)
7291{
b9a16870 7292 enum built_in_function oldbase, newbase, tmpbase;
cb7f680b 7293 tree decl, itype, call;
2169f33b 7294 tree lhs, rhs;
cb7f680b 7295 basic_block store_bb = single_succ (load_bb);
75a70cf9 7296 gimple_stmt_iterator gsi;
7297 gimple stmt;
389dd41b 7298 location_t loc;
1cd6e20d 7299 enum tree_code code;
2169f33b 7300 bool need_old, need_new;
1cd6e20d 7301 enum machine_mode imode;
bc7bff74 7302 bool seq_cst;
cb7f680b 7303
7304 /* We expect to find the following sequences:
48e1416a 7305
cb7f680b 7306 load_bb:
75a70cf9 7307 GIMPLE_OMP_ATOMIC_LOAD (tmp, mem)
cb7f680b 7308
7309 store_bb:
7310 val = tmp OP something; (or: something OP tmp)
48e1416a 7311 GIMPLE_OMP_STORE (val)
cb7f680b 7312
48e1416a 7313 ???FIXME: Allow a more flexible sequence.
cb7f680b 7314 Perhaps use data flow to pick the statements.
48e1416a 7315
cb7f680b 7316 */
7317
75a70cf9 7318 gsi = gsi_after_labels (store_bb);
7319 stmt = gsi_stmt (gsi);
389dd41b 7320 loc = gimple_location (stmt);
75a70cf9 7321 if (!is_gimple_assign (stmt))
cb7f680b 7322 return false;
75a70cf9 7323 gsi_next (&gsi);
7324 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_OMP_ATOMIC_STORE)
cb7f680b 7325 return false;
2169f33b 7326 need_new = gimple_omp_atomic_need_value_p (gsi_stmt (gsi));
7327 need_old = gimple_omp_atomic_need_value_p (last_stmt (load_bb));
bc7bff74 7328 seq_cst = gimple_omp_atomic_seq_cst_p (last_stmt (load_bb));
2169f33b 7329 gcc_checking_assert (!need_old || !need_new);
cb7f680b 7330
75a70cf9 7331 if (!operand_equal_p (gimple_assign_lhs (stmt), stored_val, 0))
cb7f680b 7332 return false;
7333
cb7f680b 7334 /* Check for one of the supported fetch-op operations. */
1cd6e20d 7335 code = gimple_assign_rhs_code (stmt);
7336 switch (code)
cb7f680b 7337 {
7338 case PLUS_EXPR:
7339 case POINTER_PLUS_EXPR:
1cd6e20d 7340 oldbase = BUILT_IN_ATOMIC_FETCH_ADD_N;
7341 newbase = BUILT_IN_ATOMIC_ADD_FETCH_N;
cb7f680b 7342 break;
7343 case MINUS_EXPR:
1cd6e20d 7344 oldbase = BUILT_IN_ATOMIC_FETCH_SUB_N;
7345 newbase = BUILT_IN_ATOMIC_SUB_FETCH_N;
cb7f680b 7346 break;
7347 case BIT_AND_EXPR:
1cd6e20d 7348 oldbase = BUILT_IN_ATOMIC_FETCH_AND_N;
7349 newbase = BUILT_IN_ATOMIC_AND_FETCH_N;
cb7f680b 7350 break;
7351 case BIT_IOR_EXPR:
1cd6e20d 7352 oldbase = BUILT_IN_ATOMIC_FETCH_OR_N;
7353 newbase = BUILT_IN_ATOMIC_OR_FETCH_N;
cb7f680b 7354 break;
7355 case BIT_XOR_EXPR:
1cd6e20d 7356 oldbase = BUILT_IN_ATOMIC_FETCH_XOR_N;
7357 newbase = BUILT_IN_ATOMIC_XOR_FETCH_N;
cb7f680b 7358 break;
7359 default:
7360 return false;
7361 }
1cd6e20d 7362
cb7f680b 7363 /* Make sure the expression is of the proper form. */
75a70cf9 7364 if (operand_equal_p (gimple_assign_rhs1 (stmt), loaded_val, 0))
7365 rhs = gimple_assign_rhs2 (stmt);
7366 else if (commutative_tree_code (gimple_assign_rhs_code (stmt))
7367 && operand_equal_p (gimple_assign_rhs2 (stmt), loaded_val, 0))
7368 rhs = gimple_assign_rhs1 (stmt);
cb7f680b 7369 else
7370 return false;
7371
b9a16870 7372 tmpbase = ((enum built_in_function)
7373 ((need_new ? newbase : oldbase) + index + 1));
7374 decl = builtin_decl_explicit (tmpbase);
0f94f46b 7375 if (decl == NULL_TREE)
7376 return false;
cb7f680b 7377 itype = TREE_TYPE (TREE_TYPE (decl));
1cd6e20d 7378 imode = TYPE_MODE (itype);
cb7f680b 7379
1cd6e20d 7380 /* We could test all of the various optabs involved, but the fact of the
7381 matter is that (with the exception of i486 vs i586 and xadd) all targets
7382 that support any atomic operaton optab also implements compare-and-swap.
7383 Let optabs.c take care of expanding any compare-and-swap loop. */
29139cdc 7384 if (!can_compare_and_swap_p (imode, true))
cb7f680b 7385 return false;
7386
75a70cf9 7387 gsi = gsi_last_bb (load_bb);
7388 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_LOAD);
1cd6e20d 7389
7390 /* OpenMP does not imply any barrier-like semantics on its atomic ops.
7391 It only requires that the operation happen atomically. Thus we can
7392 use the RELAXED memory model. */
7393 call = build_call_expr_loc (loc, decl, 3, addr,
7394 fold_convert_loc (loc, itype, rhs),
bc7bff74 7395 build_int_cst (NULL,
7396 seq_cst ? MEMMODEL_SEQ_CST
7397 : MEMMODEL_RELAXED));
1cd6e20d 7398
2169f33b 7399 if (need_old || need_new)
7400 {
7401 lhs = need_old ? loaded_val : stored_val;
7402 call = fold_convert_loc (loc, TREE_TYPE (lhs), call);
7403 call = build2_loc (loc, MODIFY_EXPR, void_type_node, lhs, call);
7404 }
7405 else
7406 call = fold_convert_loc (loc, void_type_node, call);
75a70cf9 7407 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7408 gsi_remove (&gsi, true);
cb7f680b 7409
75a70cf9 7410 gsi = gsi_last_bb (store_bb);
7411 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
7412 gsi_remove (&gsi, true);
7413 gsi = gsi_last_bb (store_bb);
7414 gsi_remove (&gsi, true);
cb7f680b 7415
7416 if (gimple_in_ssa_p (cfun))
7417 update_ssa (TODO_update_ssa_no_phi);
7418
7419 return true;
7420}
7421
7422/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
7423
7424 oldval = *addr;
7425 repeat:
7426 newval = rhs; // with oldval replacing *addr in rhs
7427 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
7428 if (oldval != newval)
7429 goto repeat;
7430
7431 INDEX is log2 of the size of the data type, and thus usable to find the
7432 index of the builtin decl. */
7433
7434static bool
7435expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
7436 tree addr, tree loaded_val, tree stored_val,
7437 int index)
7438{
790368c5 7439 tree loadedi, storedi, initial, new_storedi, old_vali;
cb7f680b 7440 tree type, itype, cmpxchg, iaddr;
75a70cf9 7441 gimple_stmt_iterator si;
cb7f680b 7442 basic_block loop_header = single_succ (load_bb);
75a70cf9 7443 gimple phi, stmt;
cb7f680b 7444 edge e;
b9a16870 7445 enum built_in_function fncode;
cb7f680b 7446
1cd6e20d 7447 /* ??? We need a non-pointer interface to __atomic_compare_exchange in
7448 order to use the RELAXED memory model effectively. */
b9a16870 7449 fncode = (enum built_in_function)((int)BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_N
7450 + index + 1);
7451 cmpxchg = builtin_decl_explicit (fncode);
0f94f46b 7452 if (cmpxchg == NULL_TREE)
7453 return false;
cb7f680b 7454 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7455 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
7456
29139cdc 7457 if (!can_compare_and_swap_p (TYPE_MODE (itype), true))
cb7f680b 7458 return false;
7459
75a70cf9 7460 /* Load the initial value, replacing the GIMPLE_OMP_ATOMIC_LOAD. */
7461 si = gsi_last_bb (load_bb);
7462 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
7463
790368c5 7464 /* For floating-point values, we'll need to view-convert them to integers
7465 so that we can perform the atomic compare and swap. Simplify the
7466 following code by always setting up the "i"ntegral variables. */
7467 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
7468 {
75a70cf9 7469 tree iaddr_val;
7470
072f7ab1 7471 iaddr = create_tmp_reg (build_pointer_type_for_mode (itype, ptr_mode,
7472 true), NULL);
75a70cf9 7473 iaddr_val
7474 = force_gimple_operand_gsi (&si,
7475 fold_convert (TREE_TYPE (iaddr), addr),
7476 false, NULL_TREE, true, GSI_SAME_STMT);
7477 stmt = gimple_build_assign (iaddr, iaddr_val);
7478 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
790368c5 7479 loadedi = create_tmp_var (itype, NULL);
7480 if (gimple_in_ssa_p (cfun))
b03e5397 7481 loadedi = make_ssa_name (loadedi, NULL);
790368c5 7482 }
7483 else
7484 {
7485 iaddr = addr;
7486 loadedi = loaded_val;
7487 }
75a70cf9 7488
182cf5a9 7489 initial
7490 = force_gimple_operand_gsi (&si,
7491 build2 (MEM_REF, TREE_TYPE (TREE_TYPE (iaddr)),
7492 iaddr,
7493 build_int_cst (TREE_TYPE (iaddr), 0)),
7494 true, NULL_TREE, true, GSI_SAME_STMT);
790368c5 7495
7496 /* Move the value to the LOADEDI temporary. */
cb7f680b 7497 if (gimple_in_ssa_p (cfun))
7498 {
75a70cf9 7499 gcc_assert (gimple_seq_empty_p (phi_nodes (loop_header)));
790368c5 7500 phi = create_phi_node (loadedi, loop_header);
cb7f680b 7501 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (load_bb)),
7502 initial);
7503 }
7504 else
75a70cf9 7505 gsi_insert_before (&si,
7506 gimple_build_assign (loadedi, initial),
7507 GSI_SAME_STMT);
790368c5 7508 if (loadedi != loaded_val)
7509 {
75a70cf9 7510 gimple_stmt_iterator gsi2;
7511 tree x;
790368c5 7512
7513 x = build1 (VIEW_CONVERT_EXPR, type, loadedi);
75a70cf9 7514 gsi2 = gsi_start_bb (loop_header);
790368c5 7515 if (gimple_in_ssa_p (cfun))
7516 {
75a70cf9 7517 gimple stmt;
7518 x = force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
7519 true, GSI_SAME_STMT);
7520 stmt = gimple_build_assign (loaded_val, x);
7521 gsi_insert_before (&gsi2, stmt, GSI_SAME_STMT);
790368c5 7522 }
7523 else
7524 {
75a70cf9 7525 x = build2 (MODIFY_EXPR, TREE_TYPE (loaded_val), loaded_val, x);
7526 force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
7527 true, GSI_SAME_STMT);
790368c5 7528 }
7529 }
75a70cf9 7530 gsi_remove (&si, true);
cb7f680b 7531
75a70cf9 7532 si = gsi_last_bb (store_bb);
7533 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
cb7f680b 7534
790368c5 7535 if (iaddr == addr)
7536 storedi = stored_val;
cb7f680b 7537 else
790368c5 7538 storedi =
75a70cf9 7539 force_gimple_operand_gsi (&si,
790368c5 7540 build1 (VIEW_CONVERT_EXPR, itype,
7541 stored_val), true, NULL_TREE, true,
75a70cf9 7542 GSI_SAME_STMT);
cb7f680b 7543
7544 /* Build the compare&swap statement. */
7545 new_storedi = build_call_expr (cmpxchg, 3, iaddr, loadedi, storedi);
75a70cf9 7546 new_storedi = force_gimple_operand_gsi (&si,
87f9ffa4 7547 fold_convert (TREE_TYPE (loadedi),
7548 new_storedi),
cb7f680b 7549 true, NULL_TREE,
75a70cf9 7550 true, GSI_SAME_STMT);
cb7f680b 7551
7552 if (gimple_in_ssa_p (cfun))
7553 old_vali = loadedi;
7554 else
7555 {
87f9ffa4 7556 old_vali = create_tmp_var (TREE_TYPE (loadedi), NULL);
75a70cf9 7557 stmt = gimple_build_assign (old_vali, loadedi);
7558 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
cb7f680b 7559
75a70cf9 7560 stmt = gimple_build_assign (loadedi, new_storedi);
7561 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
cb7f680b 7562 }
7563
7564 /* Note that we always perform the comparison as an integer, even for
48e1416a 7565 floating point. This allows the atomic operation to properly
cb7f680b 7566 succeed even with NaNs and -0.0. */
75a70cf9 7567 stmt = gimple_build_cond_empty
7568 (build2 (NE_EXPR, boolean_type_node,
7569 new_storedi, old_vali));
7570 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
cb7f680b 7571
7572 /* Update cfg. */
7573 e = single_succ_edge (store_bb);
7574 e->flags &= ~EDGE_FALLTHRU;
7575 e->flags |= EDGE_FALSE_VALUE;
7576
7577 e = make_edge (store_bb, loop_header, EDGE_TRUE_VALUE);
7578
790368c5 7579 /* Copy the new value to loadedi (we already did that before the condition
cb7f680b 7580 if we are not in SSA). */
7581 if (gimple_in_ssa_p (cfun))
7582 {
75a70cf9 7583 phi = gimple_seq_first_stmt (phi_nodes (loop_header));
790368c5 7584 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), new_storedi);
cb7f680b 7585 }
7586
75a70cf9 7587 /* Remove GIMPLE_OMP_ATOMIC_STORE. */
7588 gsi_remove (&si, true);
cb7f680b 7589
04c2922b 7590 struct loop *loop = alloc_loop ();
7591 loop->header = loop_header;
5f037457 7592 loop->latch = store_bb;
04c2922b 7593 add_loop (loop, loop_header->loop_father);
7594
cb7f680b 7595 if (gimple_in_ssa_p (cfun))
7596 update_ssa (TODO_update_ssa_no_phi);
7597
7598 return true;
7599}
7600
7601/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
7602
7603 GOMP_atomic_start ();
7604 *addr = rhs;
7605 GOMP_atomic_end ();
7606
7607 The result is not globally atomic, but works so long as all parallel
7608 references are within #pragma omp atomic directives. According to
7609 responses received from omp@openmp.org, appears to be within spec.
7610 Which makes sense, since that's how several other compilers handle
48e1416a 7611 this situation as well.
75a70cf9 7612 LOADED_VAL and ADDR are the operands of GIMPLE_OMP_ATOMIC_LOAD we're
7613 expanding. STORED_VAL is the operand of the matching
7614 GIMPLE_OMP_ATOMIC_STORE.
cb7f680b 7615
48e1416a 7616 We replace
7617 GIMPLE_OMP_ATOMIC_LOAD (loaded_val, addr) with
cb7f680b 7618 loaded_val = *addr;
7619
7620 and replace
3ec11c49 7621 GIMPLE_OMP_ATOMIC_STORE (stored_val) with
48e1416a 7622 *addr = stored_val;
cb7f680b 7623*/
7624
7625static bool
7626expand_omp_atomic_mutex (basic_block load_bb, basic_block store_bb,
7627 tree addr, tree loaded_val, tree stored_val)
7628{
75a70cf9 7629 gimple_stmt_iterator si;
7630 gimple stmt;
cb7f680b 7631 tree t;
7632
75a70cf9 7633 si = gsi_last_bb (load_bb);
7634 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
cb7f680b 7635
b9a16870 7636 t = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
414c3a2c 7637 t = build_call_expr (t, 0);
75a70cf9 7638 force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
cb7f680b 7639
182cf5a9 7640 stmt = gimple_build_assign (loaded_val, build_simple_mem_ref (addr));
75a70cf9 7641 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
7642 gsi_remove (&si, true);
cb7f680b 7643
75a70cf9 7644 si = gsi_last_bb (store_bb);
7645 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
cb7f680b 7646
182cf5a9 7647 stmt = gimple_build_assign (build_simple_mem_ref (unshare_expr (addr)),
7648 stored_val);
75a70cf9 7649 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
cb7f680b 7650
b9a16870 7651 t = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
414c3a2c 7652 t = build_call_expr (t, 0);
75a70cf9 7653 force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
7654 gsi_remove (&si, true);
cb7f680b 7655
7656 if (gimple_in_ssa_p (cfun))
7657 update_ssa (TODO_update_ssa_no_phi);
7658 return true;
7659}
7660
48e1416a 7661/* Expand an GIMPLE_OMP_ATOMIC statement. We try to expand
7662 using expand_omp_atomic_fetch_op. If it failed, we try to
cb7f680b 7663 call expand_omp_atomic_pipeline, and if it fails too, the
7664 ultimate fallback is wrapping the operation in a mutex
48e1416a 7665 (expand_omp_atomic_mutex). REGION is the atomic region built
7666 by build_omp_regions_1(). */
cb7f680b 7667
7668static void
7669expand_omp_atomic (struct omp_region *region)
7670{
7671 basic_block load_bb = region->entry, store_bb = region->exit;
75a70cf9 7672 gimple load = last_stmt (load_bb), store = last_stmt (store_bb);
7673 tree loaded_val = gimple_omp_atomic_load_lhs (load);
7674 tree addr = gimple_omp_atomic_load_rhs (load);
7675 tree stored_val = gimple_omp_atomic_store_val (store);
cb7f680b 7676 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7677 HOST_WIDE_INT index;
7678
7679 /* Make sure the type is one of the supported sizes. */
6a0712d4 7680 index = tree_to_uhwi (TYPE_SIZE_UNIT (type));
cb7f680b 7681 index = exact_log2 (index);
7682 if (index >= 0 && index <= 4)
7683 {
7684 unsigned int align = TYPE_ALIGN_UNIT (type);
7685
7686 /* __sync builtins require strict data alignment. */
dcf7024c 7687 if (exact_log2 (align) >= index)
cb7f680b 7688 {
3ec11c49 7689 /* Atomic load. */
2169f33b 7690 if (loaded_val == stored_val
7691 && (GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7692 || GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT)
7693 && GET_MODE_BITSIZE (TYPE_MODE (type)) <= BITS_PER_WORD
3ec11c49 7694 && expand_omp_atomic_load (load_bb, addr, loaded_val, index))
2169f33b 7695 return;
7696
3ec11c49 7697 /* Atomic store. */
2169f33b 7698 if ((GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7699 || GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT)
7700 && GET_MODE_BITSIZE (TYPE_MODE (type)) <= BITS_PER_WORD
7701 && store_bb == single_succ (load_bb)
7702 && first_stmt (store_bb) == store
3ec11c49 7703 && expand_omp_atomic_store (load_bb, addr, loaded_val,
7704 stored_val, index))
2169f33b 7705 return;
7706
cb7f680b 7707 /* When possible, use specialized atomic update functions. */
7708 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
3ec11c49 7709 && store_bb == single_succ (load_bb)
7710 && expand_omp_atomic_fetch_op (load_bb, addr,
7711 loaded_val, stored_val, index))
7712 return;
cb7f680b 7713
7714 /* If we don't have specialized __sync builtins, try and implement
7715 as a compare and swap loop. */
7716 if (expand_omp_atomic_pipeline (load_bb, store_bb, addr,
7717 loaded_val, stored_val, index))
7718 return;
7719 }
7720 }
7721
7722 /* The ultimate fallback is wrapping the operation in a mutex. */
7723 expand_omp_atomic_mutex (load_bb, store_bb, addr, loaded_val, stored_val);
7724}
7725
1e8e9920 7726
bc7bff74 7727/* Expand the OpenMP target{, data, update} directive starting at REGION. */
7728
7729static void
7730expand_omp_target (struct omp_region *region)
7731{
7732 basic_block entry_bb, exit_bb, new_bb;
7733 struct function *child_cfun = NULL;
7734 tree child_fn = NULL_TREE, block, t;
7735 gimple_stmt_iterator gsi;
7736 gimple entry_stmt, stmt;
7737 edge e;
7738
7739 entry_stmt = last_stmt (region->entry);
7740 new_bb = region->entry;
7741 int kind = gimple_omp_target_kind (entry_stmt);
7742 if (kind == GF_OMP_TARGET_KIND_REGION)
7743 {
7744 child_fn = gimple_omp_target_child_fn (entry_stmt);
7745 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7746 }
7747
7748 entry_bb = region->entry;
7749 exit_bb = region->exit;
7750
7751 if (kind == GF_OMP_TARGET_KIND_REGION)
7752 {
7753 unsigned srcidx, dstidx, num;
7754
7755 /* If the target region needs data sent from the parent
7756 function, then the very first statement (except possible
7757 tree profile counter updates) of the parallel body
7758 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O. Since
7759 &.OMP_DATA_O is passed as an argument to the child function,
7760 we need to replace it with the argument as seen by the child
7761 function.
7762
7763 In most cases, this will end up being the identity assignment
7764 .OMP_DATA_I = .OMP_DATA_I. However, if the parallel body had
7765 a function call that has been inlined, the original PARM_DECL
7766 .OMP_DATA_I may have been converted into a different local
7767 variable. In which case, we need to keep the assignment. */
7768 if (gimple_omp_target_data_arg (entry_stmt))
7769 {
7770 basic_block entry_succ_bb = single_succ (entry_bb);
7771 gimple_stmt_iterator gsi;
7772 tree arg;
7773 gimple tgtcopy_stmt = NULL;
7774 tree sender
7775 = TREE_VEC_ELT (gimple_omp_target_data_arg (entry_stmt), 0);
7776
7777 for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
7778 {
7779 gcc_assert (!gsi_end_p (gsi));
7780 stmt = gsi_stmt (gsi);
7781 if (gimple_code (stmt) != GIMPLE_ASSIGN)
7782 continue;
7783
7784 if (gimple_num_ops (stmt) == 2)
7785 {
7786 tree arg = gimple_assign_rhs1 (stmt);
7787
7788 /* We're ignoring the subcode because we're
7789 effectively doing a STRIP_NOPS. */
7790
7791 if (TREE_CODE (arg) == ADDR_EXPR
7792 && TREE_OPERAND (arg, 0) == sender)
7793 {
7794 tgtcopy_stmt = stmt;
7795 break;
7796 }
7797 }
7798 }
7799
7800 gcc_assert (tgtcopy_stmt != NULL);
7801 arg = DECL_ARGUMENTS (child_fn);
7802
7803 gcc_assert (gimple_assign_lhs (tgtcopy_stmt) == arg);
7804 gsi_remove (&gsi, true);
7805 }
7806
7807 /* Declare local variables needed in CHILD_CFUN. */
7808 block = DECL_INITIAL (child_fn);
7809 BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
7810 /* The gimplifier could record temporaries in target block
7811 rather than in containing function's local_decls chain,
7812 which would mean cgraph missed finalizing them. Do it now. */
7813 for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
7814 if (TREE_CODE (t) == VAR_DECL
7815 && TREE_STATIC (t)
7816 && !DECL_EXTERNAL (t))
7817 varpool_finalize_decl (t);
7818 DECL_SAVED_TREE (child_fn) = NULL;
7819 /* We'll create a CFG for child_fn, so no gimple body is needed. */
7820 gimple_set_body (child_fn, NULL);
7821 TREE_USED (block) = 1;
7822
7823 /* Reset DECL_CONTEXT on function arguments. */
7824 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7825 DECL_CONTEXT (t) = child_fn;
7826
7827 /* Split ENTRY_BB at GIMPLE_OMP_TARGET,
7828 so that it can be moved to the child function. */
7829 gsi = gsi_last_bb (entry_bb);
7830 stmt = gsi_stmt (gsi);
7831 gcc_assert (stmt && gimple_code (stmt) == GIMPLE_OMP_TARGET
7832 && gimple_omp_target_kind (stmt)
7833 == GF_OMP_TARGET_KIND_REGION);
7834 gsi_remove (&gsi, true);
7835 e = split_block (entry_bb, stmt);
7836 entry_bb = e->dest;
7837 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7838
7839 /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR. */
7840 if (exit_bb)
7841 {
7842 gsi = gsi_last_bb (exit_bb);
7843 gcc_assert (!gsi_end_p (gsi)
7844 && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
7845 stmt = gimple_build_return (NULL);
7846 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
7847 gsi_remove (&gsi, true);
7848 }
7849
7850 /* Move the target region into CHILD_CFUN. */
7851
7852 block = gimple_block (entry_stmt);
7853
7854 new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
7855 if (exit_bb)
7856 single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
7857 /* When the OMP expansion process cannot guarantee an up-to-date
7858 loop tree arrange for the child function to fixup loops. */
7859 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
7860 child_cfun->x_current_loops->state |= LOOPS_NEED_FIXUP;
7861
7862 /* Remove non-local VAR_DECLs from child_cfun->local_decls list. */
7863 num = vec_safe_length (child_cfun->local_decls);
7864 for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
7865 {
7866 t = (*child_cfun->local_decls)[srcidx];
7867 if (DECL_CONTEXT (t) == cfun->decl)
7868 continue;
7869 if (srcidx != dstidx)
7870 (*child_cfun->local_decls)[dstidx] = t;
7871 dstidx++;
7872 }
7873 if (dstidx != num)
7874 vec_safe_truncate (child_cfun->local_decls, dstidx);
7875
7876 /* Inform the callgraph about the new function. */
7877 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
7878 cgraph_add_new_function (child_fn, true);
7879
7880 /* Fix the callgraph edges for child_cfun. Those for cfun will be
7881 fixed in a following pass. */
7882 push_cfun (child_cfun);
7883 rebuild_cgraph_edges ();
7884
7885 /* Some EH regions might become dead, see PR34608. If
7886 pass_cleanup_cfg isn't the first pass to happen with the
7887 new child, these dead EH edges might cause problems.
7888 Clean them up now. */
7889 if (flag_exceptions)
7890 {
7891 basic_block bb;
7892 bool changed = false;
7893
7894 FOR_EACH_BB (bb)
7895 changed |= gimple_purge_dead_eh_edges (bb);
7896 if (changed)
7897 cleanup_tree_cfg ();
7898 }
7899 pop_cfun ();
7900 }
7901
7902 /* Emit a library call to launch the target region, or do data
7903 transfers. */
7904 tree t1, t2, t3, t4, device, cond, c, clauses;
7905 enum built_in_function start_ix;
7906 location_t clause_loc;
7907
7908 clauses = gimple_omp_target_clauses (entry_stmt);
7909
7910 if (kind == GF_OMP_TARGET_KIND_REGION)
7911 start_ix = BUILT_IN_GOMP_TARGET;
7912 else if (kind == GF_OMP_TARGET_KIND_DATA)
7913 start_ix = BUILT_IN_GOMP_TARGET_DATA;
7914 else
7915 start_ix = BUILT_IN_GOMP_TARGET_UPDATE;
7916
7917 /* By default, the value of DEVICE is -1 (let runtime library choose)
7918 and there is no conditional. */
7919 cond = NULL_TREE;
7920 device = build_int_cst (integer_type_node, -1);
7921
7922 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
7923 if (c)
7924 cond = OMP_CLAUSE_IF_EXPR (c);
7925
7926 c = find_omp_clause (clauses, OMP_CLAUSE_DEVICE);
7927 if (c)
7928 {
7929 device = OMP_CLAUSE_DEVICE_ID (c);
7930 clause_loc = OMP_CLAUSE_LOCATION (c);
7931 }
7932 else
7933 clause_loc = gimple_location (entry_stmt);
7934
7935 /* Ensure 'device' is of the correct type. */
7936 device = fold_convert_loc (clause_loc, integer_type_node, device);
7937
7938 /* If we found the clause 'if (cond)', build
7939 (cond ? device : -2). */
7940 if (cond)
7941 {
7942 cond = gimple_boolify (cond);
7943
7944 basic_block cond_bb, then_bb, else_bb;
7945 edge e;
7946 tree tmp_var;
7947
7948 tmp_var = create_tmp_var (TREE_TYPE (device), NULL);
7949 if (kind != GF_OMP_TARGET_KIND_REGION)
7950 {
7951 gsi = gsi_last_bb (new_bb);
7952 gsi_prev (&gsi);
7953 e = split_block (new_bb, gsi_stmt (gsi));
7954 }
7955 else
7956 e = split_block (new_bb, NULL);
7957 cond_bb = e->src;
7958 new_bb = e->dest;
7959 remove_edge (e);
7960
7961 then_bb = create_empty_bb (cond_bb);
7962 else_bb = create_empty_bb (then_bb);
7963 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
7964 set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);
7965
7966 stmt = gimple_build_cond_empty (cond);
7967 gsi = gsi_last_bb (cond_bb);
7968 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
7969
7970 gsi = gsi_start_bb (then_bb);
7971 stmt = gimple_build_assign (tmp_var, device);
7972 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
7973
7974 gsi = gsi_start_bb (else_bb);
7975 stmt = gimple_build_assign (tmp_var,
7976 build_int_cst (integer_type_node, -2));
7977 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
7978
7979 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
7980 make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
7981 if (current_loops)
7982 {
7983 add_bb_to_loop (then_bb, cond_bb->loop_father);
7984 add_bb_to_loop (else_bb, cond_bb->loop_father);
7985 }
7986 make_edge (then_bb, new_bb, EDGE_FALLTHRU);
7987 make_edge (else_bb, new_bb, EDGE_FALLTHRU);
7988
7989 device = tmp_var;
7990 }
7991
7992 gsi = gsi_last_bb (new_bb);
7993 t = gimple_omp_target_data_arg (entry_stmt);
7994 if (t == NULL)
7995 {
7996 t1 = size_zero_node;
7997 t2 = build_zero_cst (ptr_type_node);
7998 t3 = t2;
7999 t4 = t2;
8000 }
8001 else
8002 {
8003 t1 = TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (TREE_VEC_ELT (t, 1))));
8004 t1 = size_binop (PLUS_EXPR, t1, size_int (1));
8005 t2 = build_fold_addr_expr (TREE_VEC_ELT (t, 0));
8006 t3 = build_fold_addr_expr (TREE_VEC_ELT (t, 1));
8007 t4 = build_fold_addr_expr (TREE_VEC_ELT (t, 2));
8008 }
8009
8010 gimple g;
8011 /* FIXME: This will be address of
8012 extern char __OPENMP_TARGET__[] __attribute__((visibility ("hidden")))
8013 symbol, as soon as the linker plugin is able to create it for us. */
8014 tree openmp_target = build_zero_cst (ptr_type_node);
8015 if (kind == GF_OMP_TARGET_KIND_REGION)
8016 {
8017 tree fnaddr = build_fold_addr_expr (child_fn);
8018 g = gimple_build_call (builtin_decl_explicit (start_ix), 7,
8019 device, fnaddr, openmp_target, t1, t2, t3, t4);
8020 }
8021 else
8022 g = gimple_build_call (builtin_decl_explicit (start_ix), 6,
8023 device, openmp_target, t1, t2, t3, t4);
8024 gimple_set_location (g, gimple_location (entry_stmt));
8025 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
8026 if (kind != GF_OMP_TARGET_KIND_REGION)
8027 {
8028 g = gsi_stmt (gsi);
8029 gcc_assert (g && gimple_code (g) == GIMPLE_OMP_TARGET);
8030 gsi_remove (&gsi, true);
8031 }
8032 if (kind == GF_OMP_TARGET_KIND_DATA && region->exit)
8033 {
8034 gsi = gsi_last_bb (region->exit);
8035 g = gsi_stmt (gsi);
8036 gcc_assert (g && gimple_code (g) == GIMPLE_OMP_RETURN);
8037 gsi_remove (&gsi, true);
8038 }
8039}
8040
8041
8042/* Expand the parallel region tree rooted at REGION. Expansion
8043 proceeds in depth-first order. Innermost regions are expanded
8044 first. This way, parallel regions that require a new function to
75a70cf9 8045 be created (e.g., GIMPLE_OMP_PARALLEL) can be expanded without having any
773c5ba7 8046 internal dependencies in their body. */
8047
8048static void
8049expand_omp (struct omp_region *region)
8050{
8051 while (region)
8052 {
1d22f541 8053 location_t saved_location;
bc7bff74 8054 gimple inner_stmt = NULL;
1d22f541 8055
d1d5b012 8056 /* First, determine whether this is a combined parallel+workshare
8057 region. */
75a70cf9 8058 if (region->type == GIMPLE_OMP_PARALLEL)
d1d5b012 8059 determine_parallel_type (region);
8060
bc7bff74 8061 if (region->type == GIMPLE_OMP_FOR
8062 && gimple_omp_for_combined_p (last_stmt (region->entry)))
8063 inner_stmt = last_stmt (region->inner->entry);
8064
773c5ba7 8065 if (region->inner)
8066 expand_omp (region->inner);
8067
1d22f541 8068 saved_location = input_location;
75a70cf9 8069 if (gimple_has_location (last_stmt (region->entry)))
8070 input_location = gimple_location (last_stmt (region->entry));
1d22f541 8071
61e47ac8 8072 switch (region->type)
773c5ba7 8073 {
75a70cf9 8074 case GIMPLE_OMP_PARALLEL:
8075 case GIMPLE_OMP_TASK:
fd6481cf 8076 expand_omp_taskreg (region);
8077 break;
8078
75a70cf9 8079 case GIMPLE_OMP_FOR:
bc7bff74 8080 expand_omp_for (region, inner_stmt);
61e47ac8 8081 break;
773c5ba7 8082
75a70cf9 8083 case GIMPLE_OMP_SECTIONS:
61e47ac8 8084 expand_omp_sections (region);
8085 break;
773c5ba7 8086
75a70cf9 8087 case GIMPLE_OMP_SECTION:
61e47ac8 8088 /* Individual omp sections are handled together with their
75a70cf9 8089 parent GIMPLE_OMP_SECTIONS region. */
61e47ac8 8090 break;
773c5ba7 8091
75a70cf9 8092 case GIMPLE_OMP_SINGLE:
61e47ac8 8093 expand_omp_single (region);
8094 break;
773c5ba7 8095
75a70cf9 8096 case GIMPLE_OMP_MASTER:
bc7bff74 8097 case GIMPLE_OMP_TASKGROUP:
75a70cf9 8098 case GIMPLE_OMP_ORDERED:
8099 case GIMPLE_OMP_CRITICAL:
bc7bff74 8100 case GIMPLE_OMP_TEAMS:
61e47ac8 8101 expand_omp_synch (region);
8102 break;
773c5ba7 8103
75a70cf9 8104 case GIMPLE_OMP_ATOMIC_LOAD:
cb7f680b 8105 expand_omp_atomic (region);
8106 break;
8107
bc7bff74 8108 case GIMPLE_OMP_TARGET:
8109 expand_omp_target (region);
8110 break;
8111
61e47ac8 8112 default:
8113 gcc_unreachable ();
8114 }
cc5982dc 8115
1d22f541 8116 input_location = saved_location;
773c5ba7 8117 region = region->next;
8118 }
8119}
8120
8121
8122/* Helper for build_omp_regions. Scan the dominator tree starting at
28c92cbb 8123 block BB. PARENT is the region that contains BB. If SINGLE_TREE is
8124 true, the function ends once a single tree is built (otherwise, whole
8125 forest of OMP constructs may be built). */
773c5ba7 8126
8127static void
28c92cbb 8128build_omp_regions_1 (basic_block bb, struct omp_region *parent,
8129 bool single_tree)
773c5ba7 8130{
75a70cf9 8131 gimple_stmt_iterator gsi;
8132 gimple stmt;
773c5ba7 8133 basic_block son;
8134
75a70cf9 8135 gsi = gsi_last_bb (bb);
8136 if (!gsi_end_p (gsi) && is_gimple_omp (gsi_stmt (gsi)))
773c5ba7 8137 {
8138 struct omp_region *region;
75a70cf9 8139 enum gimple_code code;
773c5ba7 8140
75a70cf9 8141 stmt = gsi_stmt (gsi);
8142 code = gimple_code (stmt);
8143 if (code == GIMPLE_OMP_RETURN)
773c5ba7 8144 {
8145 /* STMT is the return point out of region PARENT. Mark it
8146 as the exit point and make PARENT the immediately
8147 enclosing region. */
8148 gcc_assert (parent);
8149 region = parent;
61e47ac8 8150 region->exit = bb;
773c5ba7 8151 parent = parent->outer;
773c5ba7 8152 }
75a70cf9 8153 else if (code == GIMPLE_OMP_ATOMIC_STORE)
cb7f680b 8154 {
75a70cf9 8155 /* GIMPLE_OMP_ATOMIC_STORE is analoguous to
8156 GIMPLE_OMP_RETURN, but matches with
8157 GIMPLE_OMP_ATOMIC_LOAD. */
cb7f680b 8158 gcc_assert (parent);
75a70cf9 8159 gcc_assert (parent->type == GIMPLE_OMP_ATOMIC_LOAD);
cb7f680b 8160 region = parent;
8161 region->exit = bb;
8162 parent = parent->outer;
8163 }
8164
75a70cf9 8165 else if (code == GIMPLE_OMP_CONTINUE)
61e47ac8 8166 {
8167 gcc_assert (parent);
8168 parent->cont = bb;
8169 }
75a70cf9 8170 else if (code == GIMPLE_OMP_SECTIONS_SWITCH)
ac6e3339 8171 {
75a70cf9 8172 /* GIMPLE_OMP_SECTIONS_SWITCH is part of
8173 GIMPLE_OMP_SECTIONS, and we do nothing for it. */
8174 ;
ac6e3339 8175 }
bc7bff74 8176 else if (code == GIMPLE_OMP_TARGET
8177 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_UPDATE)
8178 new_omp_region (bb, code, parent);
773c5ba7 8179 else
8180 {
8181 /* Otherwise, this directive becomes the parent for a new
8182 region. */
61e47ac8 8183 region = new_omp_region (bb, code, parent);
773c5ba7 8184 parent = region;
8185 }
773c5ba7 8186 }
8187
28c92cbb 8188 if (single_tree && !parent)
8189 return;
8190
773c5ba7 8191 for (son = first_dom_son (CDI_DOMINATORS, bb);
8192 son;
8193 son = next_dom_son (CDI_DOMINATORS, son))
28c92cbb 8194 build_omp_regions_1 (son, parent, single_tree);
8195}
8196
8197/* Builds the tree of OMP regions rooted at ROOT, storing it to
8198 root_omp_region. */
8199
8200static void
8201build_omp_regions_root (basic_block root)
8202{
8203 gcc_assert (root_omp_region == NULL);
8204 build_omp_regions_1 (root, NULL, true);
8205 gcc_assert (root_omp_region != NULL);
773c5ba7 8206}
8207
28c92cbb 8208/* Expands omp construct (and its subconstructs) starting in HEAD. */
8209
8210void
8211omp_expand_local (basic_block head)
8212{
8213 build_omp_regions_root (head);
8214 if (dump_file && (dump_flags & TDF_DETAILS))
8215 {
8216 fprintf (dump_file, "\nOMP region tree\n\n");
8217 dump_omp_region (dump_file, root_omp_region, 0);
8218 fprintf (dump_file, "\n");
8219 }
8220
8221 remove_exit_barriers (root_omp_region);
8222 expand_omp (root_omp_region);
8223
8224 free_omp_regions ();
8225}
773c5ba7 8226
8227/* Scan the CFG and build a tree of OMP regions. Return the root of
8228 the OMP region tree. */
8229
8230static void
8231build_omp_regions (void)
8232{
61e47ac8 8233 gcc_assert (root_omp_region == NULL);
773c5ba7 8234 calculate_dominance_info (CDI_DOMINATORS);
28c92cbb 8235 build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL, false);
773c5ba7 8236}
8237
773c5ba7 8238/* Main entry point for expanding OMP-GIMPLE into runtime calls. */
8239
2a1990e9 8240static unsigned int
773c5ba7 8241execute_expand_omp (void)
8242{
8243 build_omp_regions ();
8244
61e47ac8 8245 if (!root_omp_region)
8246 return 0;
773c5ba7 8247
61e47ac8 8248 if (dump_file)
8249 {
8250 fprintf (dump_file, "\nOMP region tree\n\n");
8251 dump_omp_region (dump_file, root_omp_region, 0);
8252 fprintf (dump_file, "\n");
773c5ba7 8253 }
61e47ac8 8254
8255 remove_exit_barriers (root_omp_region);
8256
8257 expand_omp (root_omp_region);
8258
61e47ac8 8259 cleanup_tree_cfg ();
8260
8261 free_omp_regions ();
8262
2a1990e9 8263 return 0;
773c5ba7 8264}
8265
79acaae1 8266/* OMP expansion -- the default pass, run before creation of SSA form. */
8267
773c5ba7 8268static bool
8269gate_expand_omp (void)
8270{
f2697631 8271 return ((flag_openmp != 0 || flag_openmp_simd != 0
8272 || flag_enable_cilkplus != 0) && !seen_error ());
773c5ba7 8273}
8274
cbe8bda8 8275namespace {
8276
8277const pass_data pass_data_expand_omp =
8278{
8279 GIMPLE_PASS, /* type */
8280 "ompexp", /* name */
8281 OPTGROUP_NONE, /* optinfo_flags */
8282 true, /* has_gate */
8283 true, /* has_execute */
8284 TV_NONE, /* tv_id */
8285 PROP_gimple_any, /* properties_required */
8286 0, /* properties_provided */
8287 0, /* properties_destroyed */
8288 0, /* todo_flags_start */
8289 0, /* todo_flags_finish */
773c5ba7 8290};
cbe8bda8 8291
8292class pass_expand_omp : public gimple_opt_pass
8293{
8294public:
9af5ce0c 8295 pass_expand_omp (gcc::context *ctxt)
8296 : gimple_opt_pass (pass_data_expand_omp, ctxt)
cbe8bda8 8297 {}
8298
8299 /* opt_pass methods: */
8300 bool gate () { return gate_expand_omp (); }
8301 unsigned int execute () { return execute_expand_omp (); }
8302
8303}; // class pass_expand_omp
8304
8305} // anon namespace
8306
8307gimple_opt_pass *
8308make_pass_expand_omp (gcc::context *ctxt)
8309{
8310 return new pass_expand_omp (ctxt);
8311}
773c5ba7 8312\f
8313/* Routines to lower OpenMP directives into OMP-GIMPLE. */
8314
bc7bff74 8315/* If ctx is a worksharing context inside of a cancellable parallel
8316 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8317 and conditional branch to parallel's cancel_label to handle
8318 cancellation in the implicit barrier. */
8319
8320static void
8321maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
8322{
8323 gimple omp_return = gimple_seq_last_stmt (*body);
8324 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8325 if (gimple_omp_return_nowait_p (omp_return))
8326 return;
8327 if (ctx->outer
8328 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
8329 && ctx->outer->cancellable)
8330 {
8331 tree lhs = create_tmp_var (boolean_type_node, NULL);
8332 gimple_omp_return_set_lhs (omp_return, lhs);
8333 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8334 gimple g = gimple_build_cond (NE_EXPR, lhs, boolean_false_node,
8335 ctx->outer->cancel_label, fallthru_label);
8336 gimple_seq_add_stmt (body, g);
8337 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8338 }
8339}
8340
75a70cf9 8341/* Lower the OpenMP sections directive in the current statement in GSI_P.
8342 CTX is the enclosing OMP context for the current statement. */
773c5ba7 8343
8344static void
75a70cf9 8345lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
773c5ba7 8346{
75a70cf9 8347 tree block, control;
8348 gimple_stmt_iterator tgsi;
75a70cf9 8349 gimple stmt, new_stmt, bind, t;
e3a19533 8350 gimple_seq ilist, dlist, olist, new_body;
dac18d1a 8351 struct gimplify_ctx gctx;
773c5ba7 8352
75a70cf9 8353 stmt = gsi_stmt (*gsi_p);
773c5ba7 8354
dac18d1a 8355 push_gimplify_context (&gctx);
773c5ba7 8356
8357 dlist = NULL;
8358 ilist = NULL;
75a70cf9 8359 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
bc7bff74 8360 &ilist, &dlist, ctx, NULL);
773c5ba7 8361
e3a19533 8362 new_body = gimple_omp_body (stmt);
8363 gimple_omp_set_body (stmt, NULL);
8364 tgsi = gsi_start (new_body);
8365 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
773c5ba7 8366 {
8367 omp_context *sctx;
75a70cf9 8368 gimple sec_start;
773c5ba7 8369
75a70cf9 8370 sec_start = gsi_stmt (tgsi);
773c5ba7 8371 sctx = maybe_lookup_ctx (sec_start);
8372 gcc_assert (sctx);
8373
e3a19533 8374 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8375 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8376 GSI_CONTINUE_LINKING);
75a70cf9 8377 gimple_omp_set_body (sec_start, NULL);
773c5ba7 8378
e3a19533 8379 if (gsi_one_before_end_p (tgsi))
773c5ba7 8380 {
75a70cf9 8381 gimple_seq l = NULL;
8382 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
773c5ba7 8383 &l, ctx);
e3a19533 8384 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
75a70cf9 8385 gimple_omp_section_set_last (sec_start);
773c5ba7 8386 }
48e1416a 8387
e3a19533 8388 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8389 GSI_CONTINUE_LINKING);
773c5ba7 8390 }
1e8e9920 8391
8392 block = make_node (BLOCK);
e3a19533 8393 bind = gimple_build_bind (NULL, new_body, block);
1e8e9920 8394
75a70cf9 8395 olist = NULL;
8396 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
773c5ba7 8397
1d22f541 8398 block = make_node (BLOCK);
75a70cf9 8399 new_stmt = gimple_build_bind (NULL, NULL, block);
e3a19533 8400 gsi_replace (gsi_p, new_stmt, true);
773c5ba7 8401
1d22f541 8402 pop_gimplify_context (new_stmt);
75a70cf9 8403 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8404 BLOCK_VARS (block) = gimple_bind_vars (bind);
1d22f541 8405 if (BLOCK_VARS (block))
8406 TREE_USED (block) = 1;
8407
75a70cf9 8408 new_body = NULL;
8409 gimple_seq_add_seq (&new_body, ilist);
8410 gimple_seq_add_stmt (&new_body, stmt);
8411 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8412 gimple_seq_add_stmt (&new_body, bind);
61e47ac8 8413
ac6e3339 8414 control = create_tmp_var (unsigned_type_node, ".section");
75a70cf9 8415 t = gimple_build_omp_continue (control, control);
8416 gimple_omp_sections_set_control (stmt, control);
8417 gimple_seq_add_stmt (&new_body, t);
61e47ac8 8418
75a70cf9 8419 gimple_seq_add_seq (&new_body, olist);
bc7bff74 8420 if (ctx->cancellable)
8421 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
75a70cf9 8422 gimple_seq_add_seq (&new_body, dlist);
773c5ba7 8423
75a70cf9 8424 new_body = maybe_catch_exception (new_body);
aade31a0 8425
75a70cf9 8426 t = gimple_build_omp_return
8427 (!!find_omp_clause (gimple_omp_sections_clauses (stmt),
8428 OMP_CLAUSE_NOWAIT));
8429 gimple_seq_add_stmt (&new_body, t);
bc7bff74 8430 maybe_add_implicit_barrier_cancel (ctx, &new_body);
61e47ac8 8431
75a70cf9 8432 gimple_bind_set_body (new_stmt, new_body);
1e8e9920 8433}
8434
8435
773c5ba7 8436/* A subroutine of lower_omp_single. Expand the simple form of
75a70cf9 8437 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
1e8e9920 8438
8439 if (GOMP_single_start ())
8440 BODY;
8441 [ GOMP_barrier (); ] -> unless 'nowait' is present.
773c5ba7 8442
8443 FIXME. It may be better to delay expanding the logic of this until
8444 pass_expand_omp. The expanded logic may make the job more difficult
8445 to a synchronization analysis pass. */
1e8e9920 8446
8447static void
75a70cf9 8448lower_omp_single_simple (gimple single_stmt, gimple_seq *pre_p)
1e8e9920 8449{
e60a6f7b 8450 location_t loc = gimple_location (single_stmt);
8451 tree tlabel = create_artificial_label (loc);
8452 tree flabel = create_artificial_label (loc);
75a70cf9 8453 gimple call, cond;
8454 tree lhs, decl;
8455
b9a16870 8456 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
75a70cf9 8457 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)), NULL);
8458 call = gimple_build_call (decl, 0);
8459 gimple_call_set_lhs (call, lhs);
8460 gimple_seq_add_stmt (pre_p, call);
8461
8462 cond = gimple_build_cond (EQ_EXPR, lhs,
389dd41b 8463 fold_convert_loc (loc, TREE_TYPE (lhs),
8464 boolean_true_node),
75a70cf9 8465 tlabel, flabel);
8466 gimple_seq_add_stmt (pre_p, cond);
8467 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8468 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8469 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
1e8e9920 8470}
8471
773c5ba7 8472
8473/* A subroutine of lower_omp_single. Expand the simple form of
75a70cf9 8474 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
1e8e9920 8475
8476 #pragma omp single copyprivate (a, b, c)
8477
8478 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8479
8480 {
8481 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8482 {
8483 BODY;
8484 copyout.a = a;
8485 copyout.b = b;
8486 copyout.c = c;
8487 GOMP_single_copy_end (&copyout);
8488 }
8489 else
8490 {
8491 a = copyout_p->a;
8492 b = copyout_p->b;
8493 c = copyout_p->c;
8494 }
8495 GOMP_barrier ();
8496 }
773c5ba7 8497
8498 FIXME. It may be better to delay expanding the logic of this until
8499 pass_expand_omp. The expanded logic may make the job more difficult
8500 to a synchronization analysis pass. */
1e8e9920 8501
8502static void
75a70cf9 8503lower_omp_single_copy (gimple single_stmt, gimple_seq *pre_p, omp_context *ctx)
1e8e9920 8504{
b9a16870 8505 tree ptr_type, t, l0, l1, l2, bfn_decl;
75a70cf9 8506 gimple_seq copyin_seq;
e60a6f7b 8507 location_t loc = gimple_location (single_stmt);
1e8e9920 8508
8509 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8510
8511 ptr_type = build_pointer_type (ctx->record_type);
8512 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8513
e60a6f7b 8514 l0 = create_artificial_label (loc);
8515 l1 = create_artificial_label (loc);
8516 l2 = create_artificial_label (loc);
1e8e9920 8517
b9a16870 8518 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8519 t = build_call_expr_loc (loc, bfn_decl, 0);
389dd41b 8520 t = fold_convert_loc (loc, ptr_type, t);
75a70cf9 8521 gimplify_assign (ctx->receiver_decl, t, pre_p);
1e8e9920 8522
8523 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8524 build_int_cst (ptr_type, 0));
8525 t = build3 (COND_EXPR, void_type_node, t,
8526 build_and_jump (&l0), build_and_jump (&l1));
8527 gimplify_and_add (t, pre_p);
8528
75a70cf9 8529 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
1e8e9920 8530
75a70cf9 8531 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
1e8e9920 8532
8533 copyin_seq = NULL;
75a70cf9 8534 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
1e8e9920 8535 &copyin_seq, ctx);
8536
389dd41b 8537 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
b9a16870 8538 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8539 t = build_call_expr_loc (loc, bfn_decl, 1, t);
1e8e9920 8540 gimplify_and_add (t, pre_p);
8541
8542 t = build_and_jump (&l2);
8543 gimplify_and_add (t, pre_p);
8544
75a70cf9 8545 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
1e8e9920 8546
75a70cf9 8547 gimple_seq_add_seq (pre_p, copyin_seq);
1e8e9920 8548
75a70cf9 8549 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
1e8e9920 8550}
8551
773c5ba7 8552
1e8e9920 8553/* Expand code for an OpenMP single directive. */
8554
8555static void
75a70cf9 8556lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 8557{
75a70cf9 8558 tree block;
8559 gimple t, bind, single_stmt = gsi_stmt (*gsi_p);
bc7bff74 8560 gimple_seq bind_body, bind_body_tail = NULL, dlist;
dac18d1a 8561 struct gimplify_ctx gctx;
1e8e9920 8562
dac18d1a 8563 push_gimplify_context (&gctx);
1e8e9920 8564
e3a19533 8565 block = make_node (BLOCK);
8566 bind = gimple_build_bind (NULL, NULL, block);
8567 gsi_replace (gsi_p, bind, true);
75a70cf9 8568 bind_body = NULL;
e3a19533 8569 dlist = NULL;
75a70cf9 8570 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
bc7bff74 8571 &bind_body, &dlist, ctx, NULL);
e3a19533 8572 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
1e8e9920 8573
75a70cf9 8574 gimple_seq_add_stmt (&bind_body, single_stmt);
1e8e9920 8575
8576 if (ctx->record_type)
75a70cf9 8577 lower_omp_single_copy (single_stmt, &bind_body, ctx);
1e8e9920 8578 else
75a70cf9 8579 lower_omp_single_simple (single_stmt, &bind_body);
8580
8581 gimple_omp_set_body (single_stmt, NULL);
1e8e9920 8582
75a70cf9 8583 gimple_seq_add_seq (&bind_body, dlist);
61e47ac8 8584
75a70cf9 8585 bind_body = maybe_catch_exception (bind_body);
61e47ac8 8586
48e1416a 8587 t = gimple_build_omp_return
75a70cf9 8588 (!!find_omp_clause (gimple_omp_single_clauses (single_stmt),
8589 OMP_CLAUSE_NOWAIT));
bc7bff74 8590 gimple_seq_add_stmt (&bind_body_tail, t);
8591 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
8592 if (ctx->record_type)
8593 {
8594 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8595 tree clobber = build_constructor (ctx->record_type, NULL);
8596 TREE_THIS_VOLATILE (clobber) = 1;
8597 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8598 clobber), GSI_SAME_STMT);
8599 }
8600 gimple_seq_add_seq (&bind_body, bind_body_tail);
e3a19533 8601 gimple_bind_set_body (bind, bind_body);
61e47ac8 8602
1e8e9920 8603 pop_gimplify_context (bind);
773c5ba7 8604
75a70cf9 8605 gimple_bind_append_vars (bind, ctx->block_vars);
8606 BLOCK_VARS (block) = ctx->block_vars;
1d22f541 8607 if (BLOCK_VARS (block))
8608 TREE_USED (block) = 1;
1e8e9920 8609}
8610
773c5ba7 8611
1e8e9920 8612/* Expand code for an OpenMP master directive. */
8613
8614static void
75a70cf9 8615lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 8616{
b9a16870 8617 tree block, lab = NULL, x, bfn_decl;
75a70cf9 8618 gimple stmt = gsi_stmt (*gsi_p), bind;
389dd41b 8619 location_t loc = gimple_location (stmt);
75a70cf9 8620 gimple_seq tseq;
dac18d1a 8621 struct gimplify_ctx gctx;
1e8e9920 8622
dac18d1a 8623 push_gimplify_context (&gctx);
1e8e9920 8624
8625 block = make_node (BLOCK);
e3a19533 8626 bind = gimple_build_bind (NULL, NULL, block);
8627 gsi_replace (gsi_p, bind, true);
8628 gimple_bind_add_stmt (bind, stmt);
61e47ac8 8629
b9a16870 8630 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8631 x = build_call_expr_loc (loc, bfn_decl, 0);
1e8e9920 8632 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8633 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
75a70cf9 8634 tseq = NULL;
8635 gimplify_and_add (x, &tseq);
8636 gimple_bind_add_seq (bind, tseq);
1e8e9920 8637
e3a19533 8638 lower_omp (gimple_omp_body_ptr (stmt), ctx);
75a70cf9 8639 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8640 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8641 gimple_omp_set_body (stmt, NULL);
1e8e9920 8642
75a70cf9 8643 gimple_bind_add_stmt (bind, gimple_build_label (lab));
61e47ac8 8644
75a70cf9 8645 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
61e47ac8 8646
1e8e9920 8647 pop_gimplify_context (bind);
773c5ba7 8648
75a70cf9 8649 gimple_bind_append_vars (bind, ctx->block_vars);
8650 BLOCK_VARS (block) = ctx->block_vars;
1e8e9920 8651}
8652
773c5ba7 8653
bc7bff74 8654/* Expand code for an OpenMP taskgroup directive. */
8655
8656static void
8657lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8658{
8659 gimple stmt = gsi_stmt (*gsi_p), bind, x;
8660 tree block = make_node (BLOCK);
8661
8662 bind = gimple_build_bind (NULL, NULL, block);
8663 gsi_replace (gsi_p, bind, true);
8664 gimple_bind_add_stmt (bind, stmt);
8665
8666 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8667 0);
8668 gimple_bind_add_stmt (bind, x);
8669
8670 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8671 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8672 gimple_omp_set_body (stmt, NULL);
8673
8674 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8675
8676 gimple_bind_append_vars (bind, ctx->block_vars);
8677 BLOCK_VARS (block) = ctx->block_vars;
8678}
8679
8680
1e8e9920 8681/* Expand code for an OpenMP ordered directive. */
8682
8683static void
75a70cf9 8684lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 8685{
75a70cf9 8686 tree block;
8687 gimple stmt = gsi_stmt (*gsi_p), bind, x;
dac18d1a 8688 struct gimplify_ctx gctx;
1e8e9920 8689
dac18d1a 8690 push_gimplify_context (&gctx);
1e8e9920 8691
8692 block = make_node (BLOCK);
e3a19533 8693 bind = gimple_build_bind (NULL, NULL, block);
8694 gsi_replace (gsi_p, bind, true);
8695 gimple_bind_add_stmt (bind, stmt);
61e47ac8 8696
b9a16870 8697 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8698 0);
75a70cf9 8699 gimple_bind_add_stmt (bind, x);
1e8e9920 8700
e3a19533 8701 lower_omp (gimple_omp_body_ptr (stmt), ctx);
75a70cf9 8702 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8703 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8704 gimple_omp_set_body (stmt, NULL);
1e8e9920 8705
b9a16870 8706 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END), 0);
75a70cf9 8707 gimple_bind_add_stmt (bind, x);
61e47ac8 8708
75a70cf9 8709 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
61e47ac8 8710
1e8e9920 8711 pop_gimplify_context (bind);
773c5ba7 8712
75a70cf9 8713 gimple_bind_append_vars (bind, ctx->block_vars);
8714 BLOCK_VARS (block) = gimple_bind_vars (bind);
1e8e9920 8715}
8716
1e8e9920 8717
75a70cf9 8718/* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
1e8e9920 8719 substitution of a couple of function calls. But in the NAMED case,
8720 requires that languages coordinate a symbol name. It is therefore
8721 best put here in common code. */
8722
8723static GTY((param1_is (tree), param2_is (tree)))
8724 splay_tree critical_name_mutexes;
8725
8726static void
75a70cf9 8727lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 8728{
75a70cf9 8729 tree block;
8730 tree name, lock, unlock;
8731 gimple stmt = gsi_stmt (*gsi_p), bind;
389dd41b 8732 location_t loc = gimple_location (stmt);
75a70cf9 8733 gimple_seq tbody;
dac18d1a 8734 struct gimplify_ctx gctx;
1e8e9920 8735
75a70cf9 8736 name = gimple_omp_critical_name (stmt);
1e8e9920 8737 if (name)
8738 {
c2f47e15 8739 tree decl;
1e8e9920 8740 splay_tree_node n;
8741
8742 if (!critical_name_mutexes)
8743 critical_name_mutexes
ba72912a 8744 = splay_tree_new_ggc (splay_tree_compare_pointers,
8745 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_s,
8746 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_node_s);
1e8e9920 8747
8748 n = splay_tree_lookup (critical_name_mutexes, (splay_tree_key) name);
8749 if (n == NULL)
8750 {
8751 char *new_str;
8752
8753 decl = create_tmp_var_raw (ptr_type_node, NULL);
8754
8755 new_str = ACONCAT ((".gomp_critical_user_",
8756 IDENTIFIER_POINTER (name), NULL));
8757 DECL_NAME (decl) = get_identifier (new_str);
8758 TREE_PUBLIC (decl) = 1;
8759 TREE_STATIC (decl) = 1;
8760 DECL_COMMON (decl) = 1;
8761 DECL_ARTIFICIAL (decl) = 1;
8762 DECL_IGNORED_P (decl) = 1;
1d416bd7 8763 varpool_finalize_decl (decl);
1e8e9920 8764
8765 splay_tree_insert (critical_name_mutexes, (splay_tree_key) name,
8766 (splay_tree_value) decl);
8767 }
8768 else
8769 decl = (tree) n->value;
8770
b9a16870 8771 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
389dd41b 8772 lock = build_call_expr_loc (loc, lock, 1, build_fold_addr_expr_loc (loc, decl));
1e8e9920 8773
b9a16870 8774 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
389dd41b 8775 unlock = build_call_expr_loc (loc, unlock, 1,
8776 build_fold_addr_expr_loc (loc, decl));
1e8e9920 8777 }
8778 else
8779 {
b9a16870 8780 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
389dd41b 8781 lock = build_call_expr_loc (loc, lock, 0);
1e8e9920 8782
b9a16870 8783 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
389dd41b 8784 unlock = build_call_expr_loc (loc, unlock, 0);
1e8e9920 8785 }
8786
dac18d1a 8787 push_gimplify_context (&gctx);
1e8e9920 8788
8789 block = make_node (BLOCK);
e3a19533 8790 bind = gimple_build_bind (NULL, NULL, block);
8791 gsi_replace (gsi_p, bind, true);
8792 gimple_bind_add_stmt (bind, stmt);
61e47ac8 8793
75a70cf9 8794 tbody = gimple_bind_body (bind);
8795 gimplify_and_add (lock, &tbody);
8796 gimple_bind_set_body (bind, tbody);
1e8e9920 8797
e3a19533 8798 lower_omp (gimple_omp_body_ptr (stmt), ctx);
75a70cf9 8799 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8800 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8801 gimple_omp_set_body (stmt, NULL);
1e8e9920 8802
75a70cf9 8803 tbody = gimple_bind_body (bind);
8804 gimplify_and_add (unlock, &tbody);
8805 gimple_bind_set_body (bind, tbody);
61e47ac8 8806
75a70cf9 8807 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
1e8e9920 8808
8809 pop_gimplify_context (bind);
75a70cf9 8810 gimple_bind_append_vars (bind, ctx->block_vars);
8811 BLOCK_VARS (block) = gimple_bind_vars (bind);
773c5ba7 8812}
8813
8814
8815/* A subroutine of lower_omp_for. Generate code to emit the predicate
8816 for a lastprivate clause. Given a loop control predicate of (V
8817 cond N2), we gate the clause on (!(V cond N2)). The lowered form
1e4afe3c 8818 is appended to *DLIST, iterator initialization is appended to
8819 *BODY_P. */
773c5ba7 8820
8821static void
75a70cf9 8822lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
8823 gimple_seq *dlist, struct omp_context *ctx)
773c5ba7 8824{
75a70cf9 8825 tree clauses, cond, vinit;
773c5ba7 8826 enum tree_code cond_code;
75a70cf9 8827 gimple_seq stmts;
48e1416a 8828
fd6481cf 8829 cond_code = fd->loop.cond_code;
773c5ba7 8830 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
8831
8832 /* When possible, use a strict equality expression. This can let VRP
8833 type optimizations deduce the value and remove a copy. */
35ec552a 8834 if (tree_fits_shwi_p (fd->loop.step))
773c5ba7 8835 {
fd6481cf 8836 HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->loop.step);
773c5ba7 8837 if (step == 1 || step == -1)
8838 cond_code = EQ_EXPR;
8839 }
8840
fd6481cf 8841 cond = build2 (cond_code, boolean_type_node, fd->loop.v, fd->loop.n2);
773c5ba7 8842
75a70cf9 8843 clauses = gimple_omp_for_clauses (fd->for_stmt);
1e4afe3c 8844 stmts = NULL;
8845 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
75a70cf9 8846 if (!gimple_seq_empty_p (stmts))
1e4afe3c 8847 {
75a70cf9 8848 gimple_seq_add_seq (&stmts, *dlist);
fd6481cf 8849 *dlist = stmts;
1e4afe3c 8850
8851 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
fd6481cf 8852 vinit = fd->loop.n1;
1e4afe3c 8853 if (cond_code == EQ_EXPR
35ec552a 8854 && tree_fits_shwi_p (fd->loop.n2)
fd6481cf 8855 && ! integer_zerop (fd->loop.n2))
8856 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
3d483a94 8857 else
8858 vinit = unshare_expr (vinit);
1e4afe3c 8859
8860 /* Initialize the iterator variable, so that threads that don't execute
8861 any iterations don't execute the lastprivate clauses by accident. */
75a70cf9 8862 gimplify_assign (fd->loop.v, vinit, body_p);
1e4afe3c 8863 }
773c5ba7 8864}
8865
8866
8867/* Lower code for an OpenMP loop directive. */
8868
8869static void
75a70cf9 8870lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
773c5ba7 8871{
75a70cf9 8872 tree *rhs_p, block;
bc7bff74 8873 struct omp_for_data fd, *fdp = NULL;
75a70cf9 8874 gimple stmt = gsi_stmt (*gsi_p), new_stmt;
f018d957 8875 gimple_seq omp_for_body, body, dlist;
75a70cf9 8876 size_t i;
dac18d1a 8877 struct gimplify_ctx gctx;
773c5ba7 8878
dac18d1a 8879 push_gimplify_context (&gctx);
773c5ba7 8880
e3a19533 8881 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
773c5ba7 8882
1d22f541 8883 block = make_node (BLOCK);
75a70cf9 8884 new_stmt = gimple_build_bind (NULL, NULL, block);
e3a19533 8885 /* Replace at gsi right away, so that 'stmt' is no member
8886 of a sequence anymore as we're going to add to to a different
8887 one below. */
8888 gsi_replace (gsi_p, new_stmt, true);
1d22f541 8889
773c5ba7 8890 /* Move declaration of temporaries in the loop body before we make
8891 it go away. */
75a70cf9 8892 omp_for_body = gimple_omp_body (stmt);
8893 if (!gimple_seq_empty_p (omp_for_body)
8894 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
8895 {
8896 tree vars = gimple_bind_vars (gimple_seq_first_stmt (omp_for_body));
8897 gimple_bind_append_vars (new_stmt, vars);
8898 }
773c5ba7 8899
bc7bff74 8900 if (gimple_omp_for_combined_into_p (stmt))
8901 {
8902 extract_omp_for_data (stmt, &fd, NULL);
8903 fdp = &fd;
8904
8905 /* We need two temporaries with fd.loop.v type (istart/iend)
8906 and then (fd.collapse - 1) temporaries with the same
8907 type for count2 ... countN-1 vars if not constant. */
8908 size_t count = 2;
8909 tree type = fd.iter_type;
8910 if (fd.collapse > 1
8911 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
8912 count += fd.collapse - 1;
8913 bool parallel_for = gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR;
8914 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
8915 tree clauses = *pc;
8916 if (parallel_for)
8917 outerc
8918 = find_omp_clause (gimple_omp_parallel_clauses (ctx->outer->stmt),
8919 OMP_CLAUSE__LOOPTEMP_);
8920 for (i = 0; i < count; i++)
8921 {
8922 tree temp;
8923 if (parallel_for)
8924 {
8925 gcc_assert (outerc);
8926 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
8927 outerc = find_omp_clause (OMP_CLAUSE_CHAIN (outerc),
8928 OMP_CLAUSE__LOOPTEMP_);
8929 }
8930 else
8931 temp = create_tmp_var (type, NULL);
8932 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
8933 OMP_CLAUSE_DECL (*pc) = temp;
8934 pc = &OMP_CLAUSE_CHAIN (*pc);
8935 }
8936 *pc = clauses;
8937 }
8938
75a70cf9 8939 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
773c5ba7 8940 dlist = NULL;
75a70cf9 8941 body = NULL;
bc7bff74 8942 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
8943 fdp);
75a70cf9 8944 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
773c5ba7 8945
3d483a94 8946 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8947
773c5ba7 8948 /* Lower the header expressions. At this point, we can assume that
8949 the header is of the form:
8950
8951 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
8952
8953 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
8954 using the .omp_data_s mapping, if needed. */
75a70cf9 8955 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
fd6481cf 8956 {
75a70cf9 8957 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
fd6481cf 8958 if (!is_gimple_min_invariant (*rhs_p))
75a70cf9 8959 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
fd6481cf 8960
75a70cf9 8961 rhs_p = gimple_omp_for_final_ptr (stmt, i);
fd6481cf 8962 if (!is_gimple_min_invariant (*rhs_p))
75a70cf9 8963 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
fd6481cf 8964
75a70cf9 8965 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
fd6481cf 8966 if (!is_gimple_min_invariant (*rhs_p))
75a70cf9 8967 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
fd6481cf 8968 }
773c5ba7 8969
8970 /* Once lowered, extract the bounds and clauses. */
fd6481cf 8971 extract_omp_for_data (stmt, &fd, NULL);
773c5ba7 8972
75a70cf9 8973 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
773c5ba7 8974
75a70cf9 8975 gimple_seq_add_stmt (&body, stmt);
8976 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
61e47ac8 8977
75a70cf9 8978 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
8979 fd.loop.v));
61e47ac8 8980
773c5ba7 8981 /* After the loop, add exit clauses. */
75a70cf9 8982 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
bc7bff74 8983
8984 if (ctx->cancellable)
8985 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
8986
75a70cf9 8987 gimple_seq_add_seq (&body, dlist);
773c5ba7 8988
75a70cf9 8989 body = maybe_catch_exception (body);
aade31a0 8990
61e47ac8 8991 /* Region exit marker goes at the end of the loop body. */
75a70cf9 8992 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
bc7bff74 8993 maybe_add_implicit_barrier_cancel (ctx, &body);
1d22f541 8994 pop_gimplify_context (new_stmt);
75a70cf9 8995
8996 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8997 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
1d22f541 8998 if (BLOCK_VARS (block))
8999 TREE_USED (block) = 1;
773c5ba7 9000
75a70cf9 9001 gimple_bind_set_body (new_stmt, body);
9002 gimple_omp_set_body (stmt, NULL);
9003 gimple_omp_for_set_pre_body (stmt, NULL);
1e8e9920 9004}
9005
48e1416a 9006/* Callback for walk_stmts. Check if the current statement only contains
75a70cf9 9007 GIMPLE_OMP_FOR or GIMPLE_OMP_PARALLEL. */
de7ef844 9008
9009static tree
75a70cf9 9010check_combined_parallel (gimple_stmt_iterator *gsi_p,
9011 bool *handled_ops_p,
9012 struct walk_stmt_info *wi)
de7ef844 9013{
4077bf7a 9014 int *info = (int *) wi->info;
75a70cf9 9015 gimple stmt = gsi_stmt (*gsi_p);
de7ef844 9016
75a70cf9 9017 *handled_ops_p = true;
9018 switch (gimple_code (stmt))
de7ef844 9019 {
75a70cf9 9020 WALK_SUBSTMTS;
9021
9022 case GIMPLE_OMP_FOR:
9023 case GIMPLE_OMP_SECTIONS:
de7ef844 9024 *info = *info == 0 ? 1 : -1;
9025 break;
9026 default:
9027 *info = -1;
9028 break;
9029 }
9030 return NULL;
9031}
773c5ba7 9032
fd6481cf 9033struct omp_taskcopy_context
9034{
9035 /* This field must be at the beginning, as we do "inheritance": Some
9036 callback functions for tree-inline.c (e.g., omp_copy_decl)
9037 receive a copy_body_data pointer that is up-casted to an
9038 omp_context pointer. */
9039 copy_body_data cb;
9040 omp_context *ctx;
9041};
9042
9043static tree
9044task_copyfn_copy_decl (tree var, copy_body_data *cb)
9045{
9046 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
9047
9048 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
9049 return create_tmp_var (TREE_TYPE (var), NULL);
9050
9051 return var;
9052}
9053
9054static tree
9055task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
9056{
9057 tree name, new_fields = NULL, type, f;
9058
9059 type = lang_hooks.types.make_type (RECORD_TYPE);
9060 name = DECL_NAME (TYPE_NAME (orig_type));
e60a6f7b 9061 name = build_decl (gimple_location (tcctx->ctx->stmt),
9062 TYPE_DECL, name, type);
fd6481cf 9063 TYPE_NAME (type) = name;
9064
9065 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
9066 {
9067 tree new_f = copy_node (f);
9068 DECL_CONTEXT (new_f) = type;
9069 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
9070 TREE_CHAIN (new_f) = new_fields;
75a70cf9 9071 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9072 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9073 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
9074 &tcctx->cb, NULL);
fd6481cf 9075 new_fields = new_f;
9076 *pointer_map_insert (tcctx->cb.decl_map, f) = new_f;
9077 }
9078 TYPE_FIELDS (type) = nreverse (new_fields);
9079 layout_type (type);
9080 return type;
9081}
9082
9083/* Create task copyfn. */
9084
9085static void
75a70cf9 9086create_task_copyfn (gimple task_stmt, omp_context *ctx)
fd6481cf 9087{
9088 struct function *child_cfun;
9089 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
9090 tree record_type, srecord_type, bind, list;
9091 bool record_needs_remap = false, srecord_needs_remap = false;
9092 splay_tree_node n;
9093 struct omp_taskcopy_context tcctx;
dac18d1a 9094 struct gimplify_ctx gctx;
389dd41b 9095 location_t loc = gimple_location (task_stmt);
fd6481cf 9096
75a70cf9 9097 child_fn = gimple_omp_task_copy_fn (task_stmt);
fd6481cf 9098 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
9099 gcc_assert (child_cfun->cfg == NULL);
fd6481cf 9100 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
9101
9102 /* Reset DECL_CONTEXT on function arguments. */
1767a056 9103 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
fd6481cf 9104 DECL_CONTEXT (t) = child_fn;
9105
9106 /* Populate the function. */
dac18d1a 9107 push_gimplify_context (&gctx);
9078126c 9108 push_cfun (child_cfun);
fd6481cf 9109
9110 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
9111 TREE_SIDE_EFFECTS (bind) = 1;
9112 list = NULL;
9113 DECL_SAVED_TREE (child_fn) = bind;
75a70cf9 9114 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
fd6481cf 9115
9116 /* Remap src and dst argument types if needed. */
9117 record_type = ctx->record_type;
9118 srecord_type = ctx->srecord_type;
1767a056 9119 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
fd6481cf 9120 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9121 {
9122 record_needs_remap = true;
9123 break;
9124 }
1767a056 9125 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
fd6481cf 9126 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9127 {
9128 srecord_needs_remap = true;
9129 break;
9130 }
9131
9132 if (record_needs_remap || srecord_needs_remap)
9133 {
9134 memset (&tcctx, '\0', sizeof (tcctx));
9135 tcctx.cb.src_fn = ctx->cb.src_fn;
9136 tcctx.cb.dst_fn = child_fn;
53f79206 9137 tcctx.cb.src_node = cgraph_get_node (tcctx.cb.src_fn);
9138 gcc_checking_assert (tcctx.cb.src_node);
fd6481cf 9139 tcctx.cb.dst_node = tcctx.cb.src_node;
9140 tcctx.cb.src_cfun = ctx->cb.src_cfun;
9141 tcctx.cb.copy_decl = task_copyfn_copy_decl;
e38def9c 9142 tcctx.cb.eh_lp_nr = 0;
fd6481cf 9143 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
9144 tcctx.cb.decl_map = pointer_map_create ();
9145 tcctx.ctx = ctx;
9146
9147 if (record_needs_remap)
9148 record_type = task_copyfn_remap_type (&tcctx, record_type);
9149 if (srecord_needs_remap)
9150 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
9151 }
9152 else
9153 tcctx.cb.decl_map = NULL;
9154
fd6481cf 9155 arg = DECL_ARGUMENTS (child_fn);
9156 TREE_TYPE (arg) = build_pointer_type (record_type);
1767a056 9157 sarg = DECL_CHAIN (arg);
fd6481cf 9158 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
9159
9160 /* First pass: initialize temporaries used in record_type and srecord_type
9161 sizes and field offsets. */
9162 if (tcctx.cb.decl_map)
75a70cf9 9163 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
fd6481cf 9164 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9165 {
9166 tree *p;
9167
9168 decl = OMP_CLAUSE_DECL (c);
9169 p = (tree *) pointer_map_contains (tcctx.cb.decl_map, decl);
9170 if (p == NULL)
9171 continue;
9172 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9173 sf = (tree) n->value;
9174 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
182cf5a9 9175 src = build_simple_mem_ref_loc (loc, sarg);
445d06b6 9176 src = omp_build_component_ref (src, sf);
75a70cf9 9177 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
fd6481cf 9178 append_to_statement_list (t, &list);
9179 }
9180
9181 /* Second pass: copy shared var pointers and copy construct non-VLA
9182 firstprivate vars. */
75a70cf9 9183 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
fd6481cf 9184 switch (OMP_CLAUSE_CODE (c))
9185 {
9186 case OMP_CLAUSE_SHARED:
9187 decl = OMP_CLAUSE_DECL (c);
9188 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9189 if (n == NULL)
9190 break;
9191 f = (tree) n->value;
9192 if (tcctx.cb.decl_map)
9193 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9194 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9195 sf = (tree) n->value;
9196 if (tcctx.cb.decl_map)
9197 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
182cf5a9 9198 src = build_simple_mem_ref_loc (loc, sarg);
445d06b6 9199 src = omp_build_component_ref (src, sf);
182cf5a9 9200 dst = build_simple_mem_ref_loc (loc, arg);
445d06b6 9201 dst = omp_build_component_ref (dst, f);
75a70cf9 9202 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
fd6481cf 9203 append_to_statement_list (t, &list);
9204 break;
9205 case OMP_CLAUSE_FIRSTPRIVATE:
9206 decl = OMP_CLAUSE_DECL (c);
9207 if (is_variable_sized (decl))
9208 break;
9209 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9210 if (n == NULL)
9211 break;
9212 f = (tree) n->value;
9213 if (tcctx.cb.decl_map)
9214 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9215 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9216 if (n != NULL)
9217 {
9218 sf = (tree) n->value;
9219 if (tcctx.cb.decl_map)
9220 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
182cf5a9 9221 src = build_simple_mem_ref_loc (loc, sarg);
445d06b6 9222 src = omp_build_component_ref (src, sf);
fd6481cf 9223 if (use_pointer_for_field (decl, NULL) || is_reference (decl))
182cf5a9 9224 src = build_simple_mem_ref_loc (loc, src);
fd6481cf 9225 }
9226 else
9227 src = decl;
182cf5a9 9228 dst = build_simple_mem_ref_loc (loc, arg);
445d06b6 9229 dst = omp_build_component_ref (dst, f);
fd6481cf 9230 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9231 append_to_statement_list (t, &list);
9232 break;
9233 case OMP_CLAUSE_PRIVATE:
9234 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
9235 break;
9236 decl = OMP_CLAUSE_DECL (c);
9237 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9238 f = (tree) n->value;
9239 if (tcctx.cb.decl_map)
9240 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9241 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9242 if (n != NULL)
9243 {
9244 sf = (tree) n->value;
9245 if (tcctx.cb.decl_map)
9246 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
182cf5a9 9247 src = build_simple_mem_ref_loc (loc, sarg);
445d06b6 9248 src = omp_build_component_ref (src, sf);
fd6481cf 9249 if (use_pointer_for_field (decl, NULL))
182cf5a9 9250 src = build_simple_mem_ref_loc (loc, src);
fd6481cf 9251 }
9252 else
9253 src = decl;
182cf5a9 9254 dst = build_simple_mem_ref_loc (loc, arg);
445d06b6 9255 dst = omp_build_component_ref (dst, f);
75a70cf9 9256 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
fd6481cf 9257 append_to_statement_list (t, &list);
9258 break;
9259 default:
9260 break;
9261 }
9262
9263 /* Last pass: handle VLA firstprivates. */
9264 if (tcctx.cb.decl_map)
75a70cf9 9265 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
fd6481cf 9266 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9267 {
9268 tree ind, ptr, df;
9269
9270 decl = OMP_CLAUSE_DECL (c);
9271 if (!is_variable_sized (decl))
9272 continue;
9273 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9274 if (n == NULL)
9275 continue;
9276 f = (tree) n->value;
9277 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9278 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
9279 ind = DECL_VALUE_EXPR (decl);
9280 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
9281 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
9282 n = splay_tree_lookup (ctx->sfield_map,
9283 (splay_tree_key) TREE_OPERAND (ind, 0));
9284 sf = (tree) n->value;
9285 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
182cf5a9 9286 src = build_simple_mem_ref_loc (loc, sarg);
445d06b6 9287 src = omp_build_component_ref (src, sf);
182cf5a9 9288 src = build_simple_mem_ref_loc (loc, src);
9289 dst = build_simple_mem_ref_loc (loc, arg);
445d06b6 9290 dst = omp_build_component_ref (dst, f);
fd6481cf 9291 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9292 append_to_statement_list (t, &list);
9293 n = splay_tree_lookup (ctx->field_map,
9294 (splay_tree_key) TREE_OPERAND (ind, 0));
9295 df = (tree) n->value;
9296 df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
182cf5a9 9297 ptr = build_simple_mem_ref_loc (loc, arg);
445d06b6 9298 ptr = omp_build_component_ref (ptr, df);
75a70cf9 9299 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
389dd41b 9300 build_fold_addr_expr_loc (loc, dst));
fd6481cf 9301 append_to_statement_list (t, &list);
9302 }
9303
9304 t = build1 (RETURN_EXPR, void_type_node, NULL);
9305 append_to_statement_list (t, &list);
9306
9307 if (tcctx.cb.decl_map)
9308 pointer_map_destroy (tcctx.cb.decl_map);
9309 pop_gimplify_context (NULL);
9310 BIND_EXPR_BODY (bind) = list;
9311 pop_cfun ();
fd6481cf 9312}
9313
bc7bff74 9314static void
9315lower_depend_clauses (gimple stmt, gimple_seq *iseq, gimple_seq *oseq)
9316{
9317 tree c, clauses;
9318 gimple g;
9319 size_t n_in = 0, n_out = 0, idx = 2, i;
9320
9321 clauses = find_omp_clause (gimple_omp_task_clauses (stmt),
9322 OMP_CLAUSE_DEPEND);
9323 gcc_assert (clauses);
9324 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9325 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
9326 switch (OMP_CLAUSE_DEPEND_KIND (c))
9327 {
9328 case OMP_CLAUSE_DEPEND_IN:
9329 n_in++;
9330 break;
9331 case OMP_CLAUSE_DEPEND_OUT:
9332 case OMP_CLAUSE_DEPEND_INOUT:
9333 n_out++;
9334 break;
9335 default:
9336 gcc_unreachable ();
9337 }
9338 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
9339 tree array = create_tmp_var (type, NULL);
9340 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
9341 NULL_TREE);
9342 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
9343 gimple_seq_add_stmt (iseq, g);
9344 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
9345 NULL_TREE);
9346 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
9347 gimple_seq_add_stmt (iseq, g);
9348 for (i = 0; i < 2; i++)
9349 {
9350 if ((i ? n_in : n_out) == 0)
9351 continue;
9352 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9353 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9354 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
9355 {
9356 tree t = OMP_CLAUSE_DECL (c);
9357 t = fold_convert (ptr_type_node, t);
9358 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
9359 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
9360 NULL_TREE, NULL_TREE);
9361 g = gimple_build_assign (r, t);
9362 gimple_seq_add_stmt (iseq, g);
9363 }
9364 }
9365 tree *p = gimple_omp_task_clauses_ptr (stmt);
9366 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
9367 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9368 OMP_CLAUSE_CHAIN (c) = *p;
9369 *p = c;
9370 tree clobber = build_constructor (type, NULL);
9371 TREE_THIS_VOLATILE (clobber) = 1;
9372 g = gimple_build_assign (array, clobber);
9373 gimple_seq_add_stmt (oseq, g);
9374}
9375
75a70cf9 9376/* Lower the OpenMP parallel or task directive in the current statement
9377 in GSI_P. CTX holds context information for the directive. */
773c5ba7 9378
9379static void
75a70cf9 9380lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
773c5ba7 9381{
75a70cf9 9382 tree clauses;
9383 tree child_fn, t;
9384 gimple stmt = gsi_stmt (*gsi_p);
bc7bff74 9385 gimple par_bind, bind, dep_bind = NULL;
9386 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
9387 struct gimplify_ctx gctx, dep_gctx;
389dd41b 9388 location_t loc = gimple_location (stmt);
773c5ba7 9389
75a70cf9 9390 clauses = gimple_omp_taskreg_clauses (stmt);
9391 par_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
9392 par_body = gimple_bind_body (par_bind);
773c5ba7 9393 child_fn = ctx->cb.dst_fn;
75a70cf9 9394 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
9395 && !gimple_omp_parallel_combined_p (stmt))
de7ef844 9396 {
9397 struct walk_stmt_info wi;
9398 int ws_num = 0;
9399
9400 memset (&wi, 0, sizeof (wi));
de7ef844 9401 wi.info = &ws_num;
9402 wi.val_only = true;
75a70cf9 9403 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
de7ef844 9404 if (ws_num == 1)
75a70cf9 9405 gimple_omp_parallel_set_combined_p (stmt, true);
de7ef844 9406 }
bc7bff74 9407 gimple_seq dep_ilist = NULL;
9408 gimple_seq dep_olist = NULL;
9409 if (gimple_code (stmt) == GIMPLE_OMP_TASK
9410 && find_omp_clause (clauses, OMP_CLAUSE_DEPEND))
9411 {
9412 push_gimplify_context (&dep_gctx);
9413 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9414 lower_depend_clauses (stmt, &dep_ilist, &dep_olist);
9415 }
9416
fd6481cf 9417 if (ctx->srecord_type)
9418 create_task_copyfn (stmt, ctx);
773c5ba7 9419
dac18d1a 9420 push_gimplify_context (&gctx);
773c5ba7 9421
75a70cf9 9422 par_olist = NULL;
9423 par_ilist = NULL;
bc7bff74 9424 par_rlist = NULL;
9425 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
e3a19533 9426 lower_omp (&par_body, ctx);
75a70cf9 9427 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
bc7bff74 9428 lower_reduction_clauses (clauses, &par_rlist, ctx);
773c5ba7 9429
9430 /* Declare all the variables created by mapping and the variables
9431 declared in the scope of the parallel body. */
9432 record_vars_into (ctx->block_vars, child_fn);
75a70cf9 9433 record_vars_into (gimple_bind_vars (par_bind), child_fn);
773c5ba7 9434
9435 if (ctx->record_type)
9436 {
fd6481cf 9437 ctx->sender_decl
9438 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
9439 : ctx->record_type, ".omp_data_o");
84bfaaeb 9440 DECL_NAMELESS (ctx->sender_decl) = 1;
86f2ad37 9441 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
75a70cf9 9442 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
773c5ba7 9443 }
9444
75a70cf9 9445 olist = NULL;
9446 ilist = NULL;
773c5ba7 9447 lower_send_clauses (clauses, &ilist, &olist, ctx);
9448 lower_send_shared_vars (&ilist, &olist, ctx);
9449
bc7bff74 9450 if (ctx->record_type)
9451 {
9452 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
9453 TREE_THIS_VOLATILE (clobber) = 1;
9454 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9455 clobber));
9456 }
9457
773c5ba7 9458 /* Once all the expansions are done, sequence all the different
75a70cf9 9459 fragments inside gimple_omp_body. */
773c5ba7 9460
75a70cf9 9461 new_body = NULL;
773c5ba7 9462
9463 if (ctx->record_type)
9464 {
389dd41b 9465 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
cc6b725b 9466 /* fixup_child_record_type might have changed receiver_decl's type. */
389dd41b 9467 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
75a70cf9 9468 gimple_seq_add_stmt (&new_body,
9469 gimple_build_assign (ctx->receiver_decl, t));
773c5ba7 9470 }
9471
75a70cf9 9472 gimple_seq_add_seq (&new_body, par_ilist);
9473 gimple_seq_add_seq (&new_body, par_body);
bc7bff74 9474 gimple_seq_add_seq (&new_body, par_rlist);
9475 if (ctx->cancellable)
9476 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
75a70cf9 9477 gimple_seq_add_seq (&new_body, par_olist);
9478 new_body = maybe_catch_exception (new_body);
9479 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9480 gimple_omp_set_body (stmt, new_body);
773c5ba7 9481
75a70cf9 9482 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
bc7bff74 9483 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
9484 gimple_bind_add_seq (bind, ilist);
9485 gimple_bind_add_stmt (bind, stmt);
9486 gimple_bind_add_seq (bind, olist);
9487
9488 pop_gimplify_context (NULL);
9489
9490 if (dep_bind)
9491 {
9492 gimple_bind_add_seq (dep_bind, dep_ilist);
9493 gimple_bind_add_stmt (dep_bind, bind);
9494 gimple_bind_add_seq (dep_bind, dep_olist);
9495 pop_gimplify_context (dep_bind);
9496 }
9497}
9498
9499/* Lower the OpenMP target directive in the current statement
9500 in GSI_P. CTX holds context information for the directive. */
9501
9502static void
9503lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9504{
9505 tree clauses;
9506 tree child_fn, t, c;
9507 gimple stmt = gsi_stmt (*gsi_p);
9508 gimple tgt_bind = NULL, bind;
9509 gimple_seq tgt_body = NULL, olist, ilist, new_body;
9510 struct gimplify_ctx gctx;
9511 location_t loc = gimple_location (stmt);
9512 int kind = gimple_omp_target_kind (stmt);
9513 unsigned int map_cnt = 0;
9514
9515 clauses = gimple_omp_target_clauses (stmt);
9516 if (kind == GF_OMP_TARGET_KIND_REGION)
9517 {
9518 tgt_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
9519 tgt_body = gimple_bind_body (tgt_bind);
9520 }
9521 else if (kind == GF_OMP_TARGET_KIND_DATA)
9522 tgt_body = gimple_omp_body (stmt);
9523 child_fn = ctx->cb.dst_fn;
9524
9525 push_gimplify_context (&gctx);
9526
9527 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9528 switch (OMP_CLAUSE_CODE (c))
9529 {
9530 tree var, x;
9531
9532 default:
9533 break;
9534 case OMP_CLAUSE_MAP:
9535 case OMP_CLAUSE_TO:
9536 case OMP_CLAUSE_FROM:
9537 var = OMP_CLAUSE_DECL (c);
9538 if (!DECL_P (var))
9539 {
9540 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
9541 || !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9542 map_cnt++;
9543 continue;
9544 }
9545
9546 if (DECL_SIZE (var)
9547 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9548 {
9549 tree var2 = DECL_VALUE_EXPR (var);
9550 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9551 var2 = TREE_OPERAND (var2, 0);
9552 gcc_assert (DECL_P (var2));
9553 var = var2;
9554 }
9555
9556 if (!maybe_lookup_field (var, ctx))
9557 continue;
9558
9559 if (kind == GF_OMP_TARGET_KIND_REGION)
9560 {
9561 x = build_receiver_ref (var, true, ctx);
9562 tree new_var = lookup_decl (var, ctx);
9563 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9564 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
9565 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9566 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9567 x = build_simple_mem_ref (x);
9568 SET_DECL_VALUE_EXPR (new_var, x);
9569 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9570 }
9571 map_cnt++;
9572 }
9573
9574 if (kind == GF_OMP_TARGET_KIND_REGION)
9575 {
9576 target_nesting_level++;
9577 lower_omp (&tgt_body, ctx);
9578 target_nesting_level--;
9579 }
9580 else if (kind == GF_OMP_TARGET_KIND_DATA)
9581 lower_omp (&tgt_body, ctx);
9582
9583 if (kind == GF_OMP_TARGET_KIND_REGION)
9584 {
9585 /* Declare all the variables created by mapping and the variables
9586 declared in the scope of the target body. */
9587 record_vars_into (ctx->block_vars, child_fn);
9588 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
9589 }
9590
9591 olist = NULL;
9592 ilist = NULL;
9593 if (ctx->record_type)
9594 {
9595 ctx->sender_decl
9596 = create_tmp_var (ctx->record_type, ".omp_data_arr");
9597 DECL_NAMELESS (ctx->sender_decl) = 1;
9598 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9599 t = make_tree_vec (3);
9600 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
9601 TREE_VEC_ELT (t, 1)
9602 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
9603 ".omp_data_sizes");
9604 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
9605 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
9606 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
9607 TREE_VEC_ELT (t, 2)
9608 = create_tmp_var (build_array_type_nelts (unsigned_char_type_node,
9609 map_cnt),
9610 ".omp_data_kinds");
9611 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
9612 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
9613 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
9614 gimple_omp_target_set_data_arg (stmt, t);
9615
9616 vec<constructor_elt, va_gc> *vsize;
9617 vec<constructor_elt, va_gc> *vkind;
9618 vec_alloc (vsize, map_cnt);
9619 vec_alloc (vkind, map_cnt);
9620 unsigned int map_idx = 0;
9621
9622 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9623 switch (OMP_CLAUSE_CODE (c))
9624 {
9625 tree ovar, nc;
9626
9627 default:
9628 break;
9629 case OMP_CLAUSE_MAP:
9630 case OMP_CLAUSE_TO:
9631 case OMP_CLAUSE_FROM:
9632 nc = c;
9633 ovar = OMP_CLAUSE_DECL (c);
9634 if (!DECL_P (ovar))
9635 {
9636 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9637 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9638 {
9639 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
9640 == get_base_address (ovar));
9641 nc = OMP_CLAUSE_CHAIN (c);
9642 ovar = OMP_CLAUSE_DECL (nc);
9643 }
9644 else
9645 {
9646 tree x = build_sender_ref (ovar, ctx);
9647 tree v
9648 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
9649 gimplify_assign (x, v, &ilist);
9650 nc = NULL_TREE;
9651 }
9652 }
9653 else
9654 {
9655 if (DECL_SIZE (ovar)
9656 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
9657 {
9658 tree ovar2 = DECL_VALUE_EXPR (ovar);
9659 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
9660 ovar2 = TREE_OPERAND (ovar2, 0);
9661 gcc_assert (DECL_P (ovar2));
9662 ovar = ovar2;
9663 }
9664 if (!maybe_lookup_field (ovar, ctx))
9665 continue;
9666 }
9667
9668 if (nc)
9669 {
9670 tree var = lookup_decl_in_outer_ctx (ovar, ctx);
9671 tree x = build_sender_ref (ovar, ctx);
9672 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9673 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
9674 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9675 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
9676 {
9677 gcc_assert (kind == GF_OMP_TARGET_KIND_REGION);
9678 tree avar
9679 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)), NULL);
9680 mark_addressable (avar);
9681 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
9682 avar = build_fold_addr_expr (avar);
9683 gimplify_assign (x, avar, &ilist);
9684 }
9685 else if (is_gimple_reg (var))
9686 {
9687 gcc_assert (kind == GF_OMP_TARGET_KIND_REGION);
9688 tree avar = create_tmp_var (TREE_TYPE (var), NULL);
9689 mark_addressable (avar);
9690 if (OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_ALLOC
9691 && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_FROM)
9692 gimplify_assign (avar, var, &ilist);
9693 avar = build_fold_addr_expr (avar);
9694 gimplify_assign (x, avar, &ilist);
9695 if ((OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_FROM
9696 || OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_TOFROM)
9697 && !TYPE_READONLY (TREE_TYPE (var)))
9698 {
9699 x = build_sender_ref (ovar, ctx);
9700 x = build_simple_mem_ref (x);
9701 gimplify_assign (var, x, &olist);
9702 }
9703 }
9704 else
9705 {
9706 var = build_fold_addr_expr (var);
9707 gimplify_assign (x, var, &ilist);
9708 }
9709 }
9710 tree s = OMP_CLAUSE_SIZE (c);
9711 if (s == NULL_TREE)
9712 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9713 s = fold_convert (size_type_node, s);
9714 tree purpose = size_int (map_idx++);
9715 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9716 if (TREE_CODE (s) != INTEGER_CST)
9717 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9718
9719 unsigned char tkind = 0;
9720 switch (OMP_CLAUSE_CODE (c))
9721 {
9722 case OMP_CLAUSE_MAP:
9723 tkind = OMP_CLAUSE_MAP_KIND (c);
9724 break;
9725 case OMP_CLAUSE_TO:
9726 tkind = OMP_CLAUSE_MAP_TO;
9727 break;
9728 case OMP_CLAUSE_FROM:
9729 tkind = OMP_CLAUSE_MAP_FROM;
9730 break;
9731 default:
9732 gcc_unreachable ();
9733 }
9734 unsigned int talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
9735 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
9736 talign = DECL_ALIGN_UNIT (ovar);
9737 talign = ceil_log2 (talign);
9738 tkind |= talign << 3;
9739 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9740 build_int_cst (unsigned_char_type_node,
9741 tkind));
9742 if (nc && nc != c)
9743 c = nc;
9744 }
9745
9746 gcc_assert (map_idx == map_cnt);
9747
9748 DECL_INITIAL (TREE_VEC_ELT (t, 1))
9749 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
9750 DECL_INITIAL (TREE_VEC_ELT (t, 2))
9751 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
9752 if (!TREE_STATIC (TREE_VEC_ELT (t, 1)))
9753 {
9754 gimple_seq initlist = NULL;
9755 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
9756 TREE_VEC_ELT (t, 1)),
9757 &initlist, true, NULL_TREE);
9758 gimple_seq_add_seq (&ilist, initlist);
9759 }
9760
9761 tree clobber = build_constructor (ctx->record_type, NULL);
9762 TREE_THIS_VOLATILE (clobber) = 1;
9763 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9764 clobber));
9765 }
9766
9767 /* Once all the expansions are done, sequence all the different
9768 fragments inside gimple_omp_body. */
9769
9770 new_body = NULL;
9771
9772 if (ctx->record_type && kind == GF_OMP_TARGET_KIND_REGION)
9773 {
9774 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9775 /* fixup_child_record_type might have changed receiver_decl's type. */
9776 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9777 gimple_seq_add_stmt (&new_body,
9778 gimple_build_assign (ctx->receiver_decl, t));
9779 }
9780
9781 if (kind == GF_OMP_TARGET_KIND_REGION)
9782 {
9783 gimple_seq_add_seq (&new_body, tgt_body);
9784 new_body = maybe_catch_exception (new_body);
9785 }
9786 else if (kind == GF_OMP_TARGET_KIND_DATA)
9787 new_body = tgt_body;
9788 if (kind != GF_OMP_TARGET_KIND_UPDATE)
9789 {
9790 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9791 gimple_omp_set_body (stmt, new_body);
9792 }
9793
9794 bind = gimple_build_bind (NULL, NULL,
9795 tgt_bind ? gimple_bind_block (tgt_bind)
9796 : NULL_TREE);
75a70cf9 9797 gsi_replace (gsi_p, bind, true);
e3a19533 9798 gimple_bind_add_seq (bind, ilist);
9799 gimple_bind_add_stmt (bind, stmt);
9800 gimple_bind_add_seq (bind, olist);
773c5ba7 9801
75a70cf9 9802 pop_gimplify_context (NULL);
773c5ba7 9803}
9804
bc7bff74 9805/* Expand code for an OpenMP teams directive. */
9806
9807static void
9808lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9809{
9810 gimple teams_stmt = gsi_stmt (*gsi_p);
9811 struct gimplify_ctx gctx;
9812 push_gimplify_context (&gctx);
9813
9814 tree block = make_node (BLOCK);
9815 gimple bind = gimple_build_bind (NULL, NULL, block);
9816 gsi_replace (gsi_p, bind, true);
9817 gimple_seq bind_body = NULL;
9818 gimple_seq dlist = NULL;
9819 gimple_seq olist = NULL;
9820
9821 tree num_teams = find_omp_clause (gimple_omp_teams_clauses (teams_stmt),
9822 OMP_CLAUSE_NUM_TEAMS);
9823 if (num_teams == NULL_TREE)
9824 num_teams = build_int_cst (unsigned_type_node, 0);
9825 else
9826 {
9827 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
9828 num_teams = fold_convert (unsigned_type_node, num_teams);
9829 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
9830 }
9831 tree thread_limit = find_omp_clause (gimple_omp_teams_clauses (teams_stmt),
9832 OMP_CLAUSE_THREAD_LIMIT);
9833 if (thread_limit == NULL_TREE)
9834 thread_limit = build_int_cst (unsigned_type_node, 0);
9835 else
9836 {
9837 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
9838 thread_limit = fold_convert (unsigned_type_node, thread_limit);
9839 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
9840 fb_rvalue);
9841 }
9842
9843 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
9844 &bind_body, &dlist, ctx, NULL);
9845 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
9846 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
9847 gimple_seq_add_stmt (&bind_body, teams_stmt);
9848
9849 location_t loc = gimple_location (teams_stmt);
9850 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
9851 gimple call = gimple_build_call (decl, 2, num_teams, thread_limit);
9852 gimple_set_location (call, loc);
9853 gimple_seq_add_stmt (&bind_body, call);
9854
9855 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
9856 gimple_omp_set_body (teams_stmt, NULL);
9857 gimple_seq_add_seq (&bind_body, olist);
9858 gimple_seq_add_seq (&bind_body, dlist);
9859 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
9860 gimple_bind_set_body (bind, bind_body);
9861
9862 pop_gimplify_context (bind);
9863
9864 gimple_bind_append_vars (bind, ctx->block_vars);
9865 BLOCK_VARS (block) = ctx->block_vars;
9866 if (BLOCK_VARS (block))
9867 TREE_USED (block) = 1;
9868}
9869
9870
a4890dc9 9871/* Callback for lower_omp_1. Return non-NULL if *tp needs to be
75a70cf9 9872 regimplified. If DATA is non-NULL, lower_omp_1 is outside
9873 of OpenMP context, but with task_shared_vars set. */
46515c92 9874
9875static tree
75a70cf9 9876lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
9877 void *data)
46515c92 9878{
a4890dc9 9879 tree t = *tp;
46515c92 9880
a4890dc9 9881 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
75a70cf9 9882 if (TREE_CODE (t) == VAR_DECL && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
9f49e155 9883 return t;
9884
9885 if (task_shared_vars
9886 && DECL_P (t)
9887 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
a4890dc9 9888 return t;
46515c92 9889
a4890dc9 9890 /* If a global variable has been privatized, TREE_CONSTANT on
9891 ADDR_EXPR might be wrong. */
75a70cf9 9892 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
a4890dc9 9893 recompute_tree_invariant_for_addr_expr (t);
46515c92 9894
a4890dc9 9895 *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
9896 return NULL_TREE;
46515c92 9897}
773c5ba7 9898
a4890dc9 9899static void
75a70cf9 9900lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 9901{
75a70cf9 9902 gimple stmt = gsi_stmt (*gsi_p);
9903 struct walk_stmt_info wi;
1e8e9920 9904
75a70cf9 9905 if (gimple_has_location (stmt))
9906 input_location = gimple_location (stmt);
a4890dc9 9907
75a70cf9 9908 if (task_shared_vars)
9909 memset (&wi, '\0', sizeof (wi));
a4890dc9 9910
773c5ba7 9911 /* If we have issued syntax errors, avoid doing any heavy lifting.
9912 Just replace the OpenMP directives with a NOP to avoid
9913 confusing RTL expansion. */
852f689e 9914 if (seen_error () && is_gimple_omp (stmt))
773c5ba7 9915 {
75a70cf9 9916 gsi_replace (gsi_p, gimple_build_nop (), true);
a4890dc9 9917 return;
773c5ba7 9918 }
9919
75a70cf9 9920 switch (gimple_code (stmt))
1e8e9920 9921 {
75a70cf9 9922 case GIMPLE_COND:
fd6481cf 9923 if ((ctx || task_shared_vars)
75a70cf9 9924 && (walk_tree (gimple_cond_lhs_ptr (stmt), lower_omp_regimplify_p,
9925 ctx ? NULL : &wi, NULL)
9926 || walk_tree (gimple_cond_rhs_ptr (stmt), lower_omp_regimplify_p,
9927 ctx ? NULL : &wi, NULL)))
9928 gimple_regimplify_operands (stmt, gsi_p);
a4890dc9 9929 break;
75a70cf9 9930 case GIMPLE_CATCH:
e3a19533 9931 lower_omp (gimple_catch_handler_ptr (stmt), ctx);
a4890dc9 9932 break;
75a70cf9 9933 case GIMPLE_EH_FILTER:
e3a19533 9934 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
a4890dc9 9935 break;
75a70cf9 9936 case GIMPLE_TRY:
e3a19533 9937 lower_omp (gimple_try_eval_ptr (stmt), ctx);
9938 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
a4890dc9 9939 break;
35215227 9940 case GIMPLE_TRANSACTION:
9941 lower_omp (gimple_transaction_body_ptr (stmt), ctx);
9942 break;
75a70cf9 9943 case GIMPLE_BIND:
e3a19533 9944 lower_omp (gimple_bind_body_ptr (stmt), ctx);
a4890dc9 9945 break;
75a70cf9 9946 case GIMPLE_OMP_PARALLEL:
9947 case GIMPLE_OMP_TASK:
9948 ctx = maybe_lookup_ctx (stmt);
bc7bff74 9949 gcc_assert (ctx);
9950 if (ctx->cancellable)
9951 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
75a70cf9 9952 lower_omp_taskreg (gsi_p, ctx);
a4890dc9 9953 break;
75a70cf9 9954 case GIMPLE_OMP_FOR:
9955 ctx = maybe_lookup_ctx (stmt);
1e8e9920 9956 gcc_assert (ctx);
bc7bff74 9957 if (ctx->cancellable)
9958 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
75a70cf9 9959 lower_omp_for (gsi_p, ctx);
1e8e9920 9960 break;
75a70cf9 9961 case GIMPLE_OMP_SECTIONS:
9962 ctx = maybe_lookup_ctx (stmt);
1e8e9920 9963 gcc_assert (ctx);
bc7bff74 9964 if (ctx->cancellable)
9965 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
75a70cf9 9966 lower_omp_sections (gsi_p, ctx);
1e8e9920 9967 break;
75a70cf9 9968 case GIMPLE_OMP_SINGLE:
9969 ctx = maybe_lookup_ctx (stmt);
1e8e9920 9970 gcc_assert (ctx);
75a70cf9 9971 lower_omp_single (gsi_p, ctx);
1e8e9920 9972 break;
75a70cf9 9973 case GIMPLE_OMP_MASTER:
9974 ctx = maybe_lookup_ctx (stmt);
1e8e9920 9975 gcc_assert (ctx);
75a70cf9 9976 lower_omp_master (gsi_p, ctx);
1e8e9920 9977 break;
bc7bff74 9978 case GIMPLE_OMP_TASKGROUP:
9979 ctx = maybe_lookup_ctx (stmt);
9980 gcc_assert (ctx);
9981 lower_omp_taskgroup (gsi_p, ctx);
9982 break;
75a70cf9 9983 case GIMPLE_OMP_ORDERED:
9984 ctx = maybe_lookup_ctx (stmt);
1e8e9920 9985 gcc_assert (ctx);
75a70cf9 9986 lower_omp_ordered (gsi_p, ctx);
1e8e9920 9987 break;
75a70cf9 9988 case GIMPLE_OMP_CRITICAL:
9989 ctx = maybe_lookup_ctx (stmt);
1e8e9920 9990 gcc_assert (ctx);
75a70cf9 9991 lower_omp_critical (gsi_p, ctx);
9992 break;
9993 case GIMPLE_OMP_ATOMIC_LOAD:
9994 if ((ctx || task_shared_vars)
9995 && walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt),
9996 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
9997 gimple_regimplify_operands (stmt, gsi_p);
1e8e9920 9998 break;
bc7bff74 9999 case GIMPLE_OMP_TARGET:
10000 ctx = maybe_lookup_ctx (stmt);
10001 gcc_assert (ctx);
10002 lower_omp_target (gsi_p, ctx);
10003 break;
10004 case GIMPLE_OMP_TEAMS:
10005 ctx = maybe_lookup_ctx (stmt);
10006 gcc_assert (ctx);
10007 lower_omp_teams (gsi_p, ctx);
10008 break;
10009 case GIMPLE_CALL:
10010 tree fndecl;
10011 fndecl = gimple_call_fndecl (stmt);
10012 if (fndecl
10013 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10014 switch (DECL_FUNCTION_CODE (fndecl))
10015 {
10016 case BUILT_IN_GOMP_BARRIER:
10017 if (ctx == NULL)
10018 break;
10019 /* FALLTHRU */
10020 case BUILT_IN_GOMP_CANCEL:
10021 case BUILT_IN_GOMP_CANCELLATION_POINT:
10022 omp_context *cctx;
10023 cctx = ctx;
10024 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
10025 cctx = cctx->outer;
10026 gcc_assert (gimple_call_lhs (stmt) == NULL_TREE);
10027 if (!cctx->cancellable)
10028 {
10029 if (DECL_FUNCTION_CODE (fndecl)
10030 == BUILT_IN_GOMP_CANCELLATION_POINT)
10031 {
10032 stmt = gimple_build_nop ();
10033 gsi_replace (gsi_p, stmt, false);
10034 }
10035 break;
10036 }
10037 tree lhs;
10038 lhs = create_tmp_var (boolean_type_node, NULL);
10039 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
10040 {
10041 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
10042 gimple_call_set_fndecl (stmt, fndecl);
10043 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
10044 }
10045 gimple_call_set_lhs (stmt, lhs);
10046 tree fallthru_label;
10047 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
10048 gimple g;
10049 g = gimple_build_label (fallthru_label);
10050 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10051 g = gimple_build_cond (NE_EXPR, lhs, boolean_false_node,
10052 cctx->cancel_label, fallthru_label);
10053 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10054 break;
10055 default:
10056 break;
10057 }
10058 /* FALLTHRU */
a4890dc9 10059 default:
fd6481cf 10060 if ((ctx || task_shared_vars)
75a70cf9 10061 && walk_gimple_op (stmt, lower_omp_regimplify_p,
10062 ctx ? NULL : &wi))
10063 gimple_regimplify_operands (stmt, gsi_p);
1e8e9920 10064 break;
1e8e9920 10065 }
1e8e9920 10066}
10067
10068static void
e3a19533 10069lower_omp (gimple_seq *body, omp_context *ctx)
1e8e9920 10070{
1d22f541 10071 location_t saved_location = input_location;
e3a19533 10072 gimple_stmt_iterator gsi;
10073 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
75a70cf9 10074 lower_omp_1 (&gsi, ctx);
bc7bff74 10075 /* Inside target region we haven't called fold_stmt during gimplification,
10076 because it can break code by adding decl references that weren't in the
10077 source. Call fold_stmt now. */
10078 if (target_nesting_level)
10079 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10080 fold_stmt (&gsi);
1d22f541 10081 input_location = saved_location;
1e8e9920 10082}
10083\f
10084/* Main entry point. */
10085
2a1990e9 10086static unsigned int
1e8e9920 10087execute_lower_omp (void)
10088{
75a70cf9 10089 gimple_seq body;
10090
41709826 10091 /* This pass always runs, to provide PROP_gimple_lomp.
10092 But there is nothing to do unless -fopenmp is given. */
f2697631 10093 if (flag_openmp == 0 && flag_openmp_simd == 0 && flag_enable_cilkplus == 0)
41709826 10094 return 0;
10095
1e8e9920 10096 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
10097 delete_omp_context);
10098
75a70cf9 10099 body = gimple_body (current_function_decl);
ab129075 10100 scan_omp (&body, NULL);
fd6481cf 10101 gcc_assert (taskreg_nesting_level == 0);
1e8e9920 10102
10103 if (all_contexts->root)
fd6481cf 10104 {
dac18d1a 10105 struct gimplify_ctx gctx;
10106
fd6481cf 10107 if (task_shared_vars)
dac18d1a 10108 push_gimplify_context (&gctx);
e3a19533 10109 lower_omp (&body, NULL);
fd6481cf 10110 if (task_shared_vars)
10111 pop_gimplify_context (NULL);
10112 }
1e8e9920 10113
773c5ba7 10114 if (all_contexts)
10115 {
10116 splay_tree_delete (all_contexts);
10117 all_contexts = NULL;
10118 }
fd6481cf 10119 BITMAP_FREE (task_shared_vars);
2a1990e9 10120 return 0;
1e8e9920 10121}
10122
cbe8bda8 10123namespace {
10124
10125const pass_data pass_data_lower_omp =
10126{
10127 GIMPLE_PASS, /* type */
10128 "omplower", /* name */
10129 OPTGROUP_NONE, /* optinfo_flags */
10130 false, /* has_gate */
10131 true, /* has_execute */
10132 TV_NONE, /* tv_id */
10133 PROP_gimple_any, /* properties_required */
10134 PROP_gimple_lomp, /* properties_provided */
10135 0, /* properties_destroyed */
10136 0, /* todo_flags_start */
10137 0, /* todo_flags_finish */
1e8e9920 10138};
cbe8bda8 10139
10140class pass_lower_omp : public gimple_opt_pass
10141{
10142public:
9af5ce0c 10143 pass_lower_omp (gcc::context *ctxt)
10144 : gimple_opt_pass (pass_data_lower_omp, ctxt)
cbe8bda8 10145 {}
10146
10147 /* opt_pass methods: */
10148 unsigned int execute () { return execute_lower_omp (); }
10149
10150}; // class pass_lower_omp
10151
10152} // anon namespace
10153
10154gimple_opt_pass *
10155make_pass_lower_omp (gcc::context *ctxt)
10156{
10157 return new pass_lower_omp (ctxt);
10158}
1e8e9920 10159\f
10160/* The following is a utility to diagnose OpenMP structured block violations.
61e47ac8 10161 It is not part of the "omplower" pass, as that's invoked too late. It
10162 should be invoked by the respective front ends after gimplification. */
1e8e9920 10163
10164static splay_tree all_labels;
10165
10166/* Check for mismatched contexts and generate an error if needed. Return
10167 true if an error is detected. */
10168
10169static bool
75a70cf9 10170diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
10171 gimple branch_ctx, gimple label_ctx)
1e8e9920 10172{
75a70cf9 10173 if (label_ctx == branch_ctx)
1e8e9920 10174 return false;
10175
48e1416a 10176
75a70cf9 10177 /*
10178 Previously we kept track of the label's entire context in diagnose_sb_[12]
10179 so we could traverse it and issue a correct "exit" or "enter" error
10180 message upon a structured block violation.
10181
10182 We built the context by building a list with tree_cons'ing, but there is
10183 no easy counterpart in gimple tuples. It seems like far too much work
10184 for issuing exit/enter error messages. If someone really misses the
10185 distinct error message... patches welcome.
10186 */
48e1416a 10187
75a70cf9 10188#if 0
1e8e9920 10189 /* Try to avoid confusing the user by producing and error message
f0b5f617 10190 with correct "exit" or "enter" verbiage. We prefer "exit"
1e8e9920 10191 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
10192 if (branch_ctx == NULL)
10193 exit_p = false;
10194 else
10195 {
10196 while (label_ctx)
10197 {
10198 if (TREE_VALUE (label_ctx) == branch_ctx)
10199 {
10200 exit_p = false;
10201 break;
10202 }
10203 label_ctx = TREE_CHAIN (label_ctx);
10204 }
10205 }
10206
10207 if (exit_p)
10208 error ("invalid exit from OpenMP structured block");
10209 else
10210 error ("invalid entry to OpenMP structured block");
75a70cf9 10211#endif
1e8e9920 10212
f2697631 10213 bool cilkplus_block = false;
10214 if (flag_enable_cilkplus)
10215 {
10216 if ((branch_ctx
10217 && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
10218 && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
10219 || (gimple_code (label_ctx) == GIMPLE_OMP_FOR
10220 && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
10221 cilkplus_block = true;
10222 }
10223
75a70cf9 10224 /* If it's obvious we have an invalid entry, be specific about the error. */
10225 if (branch_ctx == NULL)
f2697631 10226 {
10227 if (cilkplus_block)
10228 error ("invalid entry to Cilk Plus structured block");
10229 else
10230 error ("invalid entry to OpenMP structured block");
10231 }
75a70cf9 10232 else
f2697631 10233 {
10234 /* Otherwise, be vague and lazy, but efficient. */
10235 if (cilkplus_block)
10236 error ("invalid branch to/from a Cilk Plus structured block");
10237 else
10238 error ("invalid branch to/from an OpenMP structured block");
10239 }
75a70cf9 10240
10241 gsi_replace (gsi_p, gimple_build_nop (), false);
1e8e9920 10242 return true;
10243}
10244
10245/* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
75a70cf9 10246 where each label is found. */
1e8e9920 10247
10248static tree
75a70cf9 10249diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10250 struct walk_stmt_info *wi)
1e8e9920 10251{
75a70cf9 10252 gimple context = (gimple) wi->info;
10253 gimple inner_context;
10254 gimple stmt = gsi_stmt (*gsi_p);
1e8e9920 10255
75a70cf9 10256 *handled_ops_p = true;
10257
10258 switch (gimple_code (stmt))
1e8e9920 10259 {
75a70cf9 10260 WALK_SUBSTMTS;
48e1416a 10261
75a70cf9 10262 case GIMPLE_OMP_PARALLEL:
10263 case GIMPLE_OMP_TASK:
10264 case GIMPLE_OMP_SECTIONS:
10265 case GIMPLE_OMP_SINGLE:
10266 case GIMPLE_OMP_SECTION:
10267 case GIMPLE_OMP_MASTER:
10268 case GIMPLE_OMP_ORDERED:
10269 case GIMPLE_OMP_CRITICAL:
bc7bff74 10270 case GIMPLE_OMP_TARGET:
10271 case GIMPLE_OMP_TEAMS:
10272 case GIMPLE_OMP_TASKGROUP:
75a70cf9 10273 /* The minimal context here is just the current OMP construct. */
10274 inner_context = stmt;
1e8e9920 10275 wi->info = inner_context;
75a70cf9 10276 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
1e8e9920 10277 wi->info = context;
10278 break;
10279
75a70cf9 10280 case GIMPLE_OMP_FOR:
10281 inner_context = stmt;
1e8e9920 10282 wi->info = inner_context;
75a70cf9 10283 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10284 walk them. */
10285 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
10286 diagnose_sb_1, NULL, wi);
10287 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
1e8e9920 10288 wi->info = context;
10289 break;
10290
75a70cf9 10291 case GIMPLE_LABEL:
10292 splay_tree_insert (all_labels, (splay_tree_key) gimple_label_label (stmt),
1e8e9920 10293 (splay_tree_value) context);
10294 break;
10295
10296 default:
10297 break;
10298 }
10299
10300 return NULL_TREE;
10301}
10302
10303/* Pass 2: Check each branch and see if its context differs from that of
10304 the destination label's context. */
10305
10306static tree
75a70cf9 10307diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10308 struct walk_stmt_info *wi)
1e8e9920 10309{
75a70cf9 10310 gimple context = (gimple) wi->info;
1e8e9920 10311 splay_tree_node n;
75a70cf9 10312 gimple stmt = gsi_stmt (*gsi_p);
1e8e9920 10313
75a70cf9 10314 *handled_ops_p = true;
10315
10316 switch (gimple_code (stmt))
1e8e9920 10317 {
75a70cf9 10318 WALK_SUBSTMTS;
10319
10320 case GIMPLE_OMP_PARALLEL:
10321 case GIMPLE_OMP_TASK:
10322 case GIMPLE_OMP_SECTIONS:
10323 case GIMPLE_OMP_SINGLE:
10324 case GIMPLE_OMP_SECTION:
10325 case GIMPLE_OMP_MASTER:
10326 case GIMPLE_OMP_ORDERED:
10327 case GIMPLE_OMP_CRITICAL:
bc7bff74 10328 case GIMPLE_OMP_TARGET:
10329 case GIMPLE_OMP_TEAMS:
10330 case GIMPLE_OMP_TASKGROUP:
75a70cf9 10331 wi->info = stmt;
e3a19533 10332 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
1e8e9920 10333 wi->info = context;
10334 break;
10335
75a70cf9 10336 case GIMPLE_OMP_FOR:
10337 wi->info = stmt;
10338 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10339 walk them. */
e3a19533 10340 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
10341 diagnose_sb_2, NULL, wi);
10342 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
1e8e9920 10343 wi->info = context;
10344 break;
10345
0e1818e7 10346 case GIMPLE_COND:
10347 {
10348 tree lab = gimple_cond_true_label (stmt);
10349 if (lab)
10350 {
10351 n = splay_tree_lookup (all_labels,
10352 (splay_tree_key) lab);
10353 diagnose_sb_0 (gsi_p, context,
10354 n ? (gimple) n->value : NULL);
10355 }
10356 lab = gimple_cond_false_label (stmt);
10357 if (lab)
10358 {
10359 n = splay_tree_lookup (all_labels,
10360 (splay_tree_key) lab);
10361 diagnose_sb_0 (gsi_p, context,
10362 n ? (gimple) n->value : NULL);
10363 }
10364 }
10365 break;
10366
75a70cf9 10367 case GIMPLE_GOTO:
1e8e9920 10368 {
75a70cf9 10369 tree lab = gimple_goto_dest (stmt);
1e8e9920 10370 if (TREE_CODE (lab) != LABEL_DECL)
10371 break;
10372
10373 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
75a70cf9 10374 diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
1e8e9920 10375 }
10376 break;
10377
75a70cf9 10378 case GIMPLE_SWITCH:
1e8e9920 10379 {
75a70cf9 10380 unsigned int i;
10381 for (i = 0; i < gimple_switch_num_labels (stmt); ++i)
1e8e9920 10382 {
75a70cf9 10383 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
1e8e9920 10384 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
75a70cf9 10385 if (n && diagnose_sb_0 (gsi_p, context, (gimple) n->value))
1e8e9920 10386 break;
10387 }
10388 }
10389 break;
10390
75a70cf9 10391 case GIMPLE_RETURN:
10392 diagnose_sb_0 (gsi_p, context, NULL);
1e8e9920 10393 break;
10394
10395 default:
10396 break;
10397 }
10398
10399 return NULL_TREE;
10400}
10401
7740abd8 10402/* Called from tree-cfg.c::make_edges to create cfg edges for all GIMPLE_OMP
10403 codes. */
10404bool
10405make_gimple_omp_edges (basic_block bb, struct omp_region **region)
10406{
10407 gimple last = last_stmt (bb);
10408 enum gimple_code code = gimple_code (last);
10409 struct omp_region *cur_region = *region;
10410 bool fallthru = false;
10411
10412 switch (code)
10413 {
10414 case GIMPLE_OMP_PARALLEL:
10415 case GIMPLE_OMP_TASK:
10416 case GIMPLE_OMP_FOR:
10417 case GIMPLE_OMP_SINGLE:
10418 case GIMPLE_OMP_TEAMS:
10419 case GIMPLE_OMP_MASTER:
10420 case GIMPLE_OMP_TASKGROUP:
10421 case GIMPLE_OMP_ORDERED:
10422 case GIMPLE_OMP_CRITICAL:
10423 case GIMPLE_OMP_SECTION:
10424 cur_region = new_omp_region (bb, code, cur_region);
10425 fallthru = true;
10426 break;
10427
10428 case GIMPLE_OMP_TARGET:
10429 cur_region = new_omp_region (bb, code, cur_region);
10430 fallthru = true;
10431 if (gimple_omp_target_kind (last) == GF_OMP_TARGET_KIND_UPDATE)
10432 cur_region = cur_region->outer;
10433 break;
10434
10435 case GIMPLE_OMP_SECTIONS:
10436 cur_region = new_omp_region (bb, code, cur_region);
10437 fallthru = true;
10438 break;
10439
10440 case GIMPLE_OMP_SECTIONS_SWITCH:
10441 fallthru = false;
10442 break;
10443
10444 case GIMPLE_OMP_ATOMIC_LOAD:
10445 case GIMPLE_OMP_ATOMIC_STORE:
10446 fallthru = true;
10447 break;
10448
10449 case GIMPLE_OMP_RETURN:
10450 /* In the case of a GIMPLE_OMP_SECTION, the edge will go
10451 somewhere other than the next block. This will be
10452 created later. */
10453 cur_region->exit = bb;
10454 fallthru = cur_region->type != GIMPLE_OMP_SECTION;
10455 cur_region = cur_region->outer;
10456 break;
10457
10458 case GIMPLE_OMP_CONTINUE:
10459 cur_region->cont = bb;
10460 switch (cur_region->type)
10461 {
10462 case GIMPLE_OMP_FOR:
10463 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
10464 succs edges as abnormal to prevent splitting
10465 them. */
10466 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
10467 /* Make the loopback edge. */
10468 make_edge (bb, single_succ (cur_region->entry),
10469 EDGE_ABNORMAL);
10470
10471 /* Create an edge from GIMPLE_OMP_FOR to exit, which
10472 corresponds to the case that the body of the loop
10473 is not executed at all. */
10474 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
10475 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
10476 fallthru = false;
10477 break;
10478
10479 case GIMPLE_OMP_SECTIONS:
10480 /* Wire up the edges into and out of the nested sections. */
10481 {
10482 basic_block switch_bb = single_succ (cur_region->entry);
10483
10484 struct omp_region *i;
10485 for (i = cur_region->inner; i ; i = i->next)
10486 {
10487 gcc_assert (i->type == GIMPLE_OMP_SECTION);
10488 make_edge (switch_bb, i->entry, 0);
10489 make_edge (i->exit, bb, EDGE_FALLTHRU);
10490 }
10491
10492 /* Make the loopback edge to the block with
10493 GIMPLE_OMP_SECTIONS_SWITCH. */
10494 make_edge (bb, switch_bb, 0);
10495
10496 /* Make the edge from the switch to exit. */
10497 make_edge (switch_bb, bb->next_bb, 0);
10498 fallthru = false;
10499 }
10500 break;
10501
10502 default:
10503 gcc_unreachable ();
10504 }
10505 break;
10506
10507 default:
10508 gcc_unreachable ();
10509 }
10510
10511 if (*region != cur_region)
10512 *region = cur_region;
10513
10514 return fallthru;
10515}
10516
bfec3452 10517static unsigned int
10518diagnose_omp_structured_block_errors (void)
1e8e9920 10519{
1e8e9920 10520 struct walk_stmt_info wi;
bfec3452 10521 gimple_seq body = gimple_body (current_function_decl);
1e8e9920 10522
10523 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
10524
10525 memset (&wi, 0, sizeof (wi));
75a70cf9 10526 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
1e8e9920 10527
10528 memset (&wi, 0, sizeof (wi));
1e8e9920 10529 wi.want_locations = true;
e3a19533 10530 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
10531
10532 gimple_set_body (current_function_decl, body);
1e8e9920 10533
10534 splay_tree_delete (all_labels);
10535 all_labels = NULL;
10536
bfec3452 10537 return 0;
1e8e9920 10538}
10539
bfec3452 10540static bool
10541gate_diagnose_omp_blocks (void)
10542{
f2697631 10543 return flag_openmp || flag_enable_cilkplus;
bfec3452 10544}
10545
cbe8bda8 10546namespace {
10547
10548const pass_data pass_data_diagnose_omp_blocks =
10549{
10550 GIMPLE_PASS, /* type */
10551 "*diagnose_omp_blocks", /* name */
10552 OPTGROUP_NONE, /* optinfo_flags */
10553 true, /* has_gate */
10554 true, /* has_execute */
10555 TV_NONE, /* tv_id */
10556 PROP_gimple_any, /* properties_required */
10557 0, /* properties_provided */
10558 0, /* properties_destroyed */
10559 0, /* todo_flags_start */
10560 0, /* todo_flags_finish */
bfec3452 10561};
10562
cbe8bda8 10563class pass_diagnose_omp_blocks : public gimple_opt_pass
10564{
10565public:
9af5ce0c 10566 pass_diagnose_omp_blocks (gcc::context *ctxt)
10567 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
cbe8bda8 10568 {}
10569
10570 /* opt_pass methods: */
10571 bool gate () { return gate_diagnose_omp_blocks (); }
10572 unsigned int execute () {
10573 return diagnose_omp_structured_block_errors ();
10574 }
10575
10576}; // class pass_diagnose_omp_blocks
10577
10578} // anon namespace
10579
10580gimple_opt_pass *
10581make_pass_diagnose_omp_blocks (gcc::context *ctxt)
10582{
10583 return new pass_diagnose_omp_blocks (ctxt);
10584}
10585
1e8e9920 10586#include "gt-omp-low.h"